language: c
+sudo: false
+
+cache:
+ directories:
+ - $HOME/travis-cache
+
os:
- linux
- osx
- P4_VERSION="15.2"
- GIT_LFS_VERSION="1.1.0"
- DEFAULT_TEST_TARGET=prove
- - GIT_PROVE_OPTS="--timer --jobs 3"
+ - GIT_PROVE_OPTS="--timer --jobs 3 --state=failed,slow,save"
- GIT_TEST_OPTS="--verbose --tee"
- CFLAGS="-g -O2 -Wall -Werror"
- GIT_TEST_CLONE_2GB=YesPlease
p4 -V | grep Rev.;
echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)";
git-lfs version;
+ mkdir -p $HOME/travis-cache;
+ ln -s $HOME/travis-cache/.prove t/.prove;
before_script: make --jobs=2
--- /dev/null
+Git 2.8 Release Notes
+=====================
+
+Updates since v2.7
+------------------
+
+UI, Workflows & Features
+
+ * "branch --delete" has "branch -d" but "push --delete" does not.
+
+ * "git blame" learned to produce the progress eye-candy when it takes
+ too much time before emitting the first line of the result.
+
+ * "git grep" can now be configured (or told from the command line)
+ how many threads to use when searching in the working tree files.
+
+ * Some "git notes" operations, e.g. "git log --notes=<note>", should
+ be able to read notes from any tree-ish that is shaped like a notes
+ tree, but the notes infrastructure required that the argument be
+ a ref under refs/notes/. This has been loosened to require a valid
+ ref only when the operation would update the notes (in which case
+ we must have a place to store the updated notes tree, in other
+ words, a ref).
+
+ * "git grep" by default does not fall back to its "--no-index"
+ behaviour outside a directory under Git's control (otherwise the
+ user may by mistake end up running a huge recursive search); with a
+ new configuration variable (set in $HOME/.gitconfig, which by
+ definition cannot be a per-project setting), this safety can be
+ disabled.
+
+ * "git pull --rebase" has been extended to allow invoking
+ "rebase -i".
+
+ * "git p4" learned to cope with the type of a file getting changed.
+
+ * "git format-patch" learned to notice format.outputDirectory
+ configuration variable. This allows "-o <dir>" option to be
+ omitted on the command line if you always use the same directory in
+ your workflow.
+
+ * "interpret-trailers" has been taught to optionally update a file in
+ place, instead of always writing the result to the standard output.
+
+ * Many commands that read files that are expected to contain text
+ that is generated (or can be edited) by the end user to control
+ their behaviour (e.g. "git grep -f <filename>") have been updated
+ to be more tolerant of lines that are terminated with CRLF (they
+ used to treat such a line as containing a payload that ends with
+ CR, which is usually not what the users expect).
+
+ * "git notes merge" used to limit the source of the merged notes tree
+ to somewhere under refs/notes/ hierarchy, which was too limiting
+ when inventing a workflow to exchange notes with remote
+ repositories using remote-tracking notes trees (located in e.g.
+ refs/remote-notes/ or somesuch).
+
+ * "git ls-files" learned a new "--eol" option to help diagnose
+ end-of-line problems.
+
+ * "ls-remote" learned an option to show which branch the remote
+ repository advertises as its primary by pointing its HEAD at.
+
+ * A new http.proxyAuthMethod configuration variable can be used to
+ specify which authentication method to use, as a way to work around
+ proxies that do not give the error response libcurl expects when
+ CURLAUTH_ANY is used. Also, the codepath for proxy authentication
+ has been taught to use the credential API to store the
+ authentication material in the user's keyring.
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ * Add a framework to spawn a group of processes in parallel, and use
+ it to run "git fetch --recurse-submodules" in parallel.
+
+ * A slight update to the Makefile to mark "phony" targets
+ as such correctly.
+
+ * In-core storage of the reverse index for .pack files (which lets
+ you go from a pack offset to an object name) has been streamlined.
+
+ * d95138e6 (setup: set env $GIT_WORK_TREE when work tree is set, like
+ $GIT_DIR, 2015-06-26) attempted to work around a glitch in alias
+ handling by overwriting GIT_WORK_TREE environment variable to
+ affect subprocesses when set_git_work_tree() gets called, which
+ resulted in a rather unpleasant regression to "clone" and "init".
+ Try to address the same issue by always restoring the environment
+ and respawning the real underlying command when handling alias.
+
+ * The low-level code that is used to create symbolic references has
+ been updated to share more code with the code that deals with
+ normal references.
+
+ * strbuf_getline() and friends have been redefined to make it easier
+ to identify which callsite of (new) strbuf_getline_lf() should
+ allow and silently ignore carriage-return at the end of the line to
+ help users on DOSsy systems.
+
+ * "git shortlog" used to accumulate various pieces of information
+ regardless of what was asked to be shown in the final output. It
+ has been optimized by noticing what need not be collected
+ (e.g. there is no need to collect the log messages when showing
+ only the number of changes).
+
+ * "git checkout $branch" (and other operations that share the same
+ underlying machinery) has been optimized.
+
+ * Automated tests in the Travis CI environment have been optimized
+ by persisting the runtime statistics of the previous "prove" run
+ and executing the tests that take longer before the other ones;
+ this reduces the total wallclock time.
+
+
+Also contains various documentation updates and code clean-ups.
+
+
+Fixes since v2.7
+----------------
+
+Unless otherwise noted, all the fixes since v2.7 in the maintenance
+track are contained in this release (see the maintenance releases'
+notes for details).
+
+ * An earlier change in 2.5.x-era broke users' hooks and aliases by
+ exporting GIT_WORK_TREE to point at the root of the working tree,
+ interfering when they tried to use a different working tree without
+ setting GIT_WORK_TREE environment themselves.
+ (merge df1e6ea nd/stop-setenv-work-tree later to maint).
+
+ * The "exclude_list" structure has the usual "alloc, nr" pair of
+ fields to be used by ALLOC_GROW(), but clear_exclude_list() forgot
+ to reset 'alloc' to 0 when it cleared 'nr' to discard the managed
+ array.
+ (merge 2653a8c nd/dir-exclude-cleanup later to maint).
+
+ * Paths that the index has been told about with "add -N" are not
+ quite in the index yet, but a few commands behaved as if they
+ already were, in a harmful way.
+ (merge 4d55200 nd/ita-cleanup later to maint).
+
+ * "git send-email" was confused by escaped quotes stored in the alias
+ files saved by "mutt", which has been corrected.
+ (merge 2c510f2 ew/send-email-mutt-alias-fix later to maint).
+
+ * A few unportable C constructs have been spotted by the clang
+ compiler and have been fixed.
+ (merge a0df2e5 jk/clang-pedantic later to maint).
+
+ * The documentation has been updated to hint at the connection
+ between the '--signoff' option and the DCO.
+ (merge b2c150d dw/signoff-doc later to maint).
+
+ * "git reflog" incorrectly assumed that all objects that used to be
+ at the tip of a ref must be commits, which caused it to segfault.
+ (merge aecad37 dk/reflog-walk-with-non-commit later to maint).
+
+ * The ignore mechanism saw a few regressions around untracked file
+ listing and sparse checkout selection areas in 2.7.0; the change
+ that is responsible for the regression has been reverted.
+ (merge 8c72236 nd/exclusion-regression-fix later to maint).
+
+ * Some codepaths used fopen(3) when opening a fixed path in $GIT_DIR
+ (e.g. COMMIT_EDITMSG) that is meant to be left after the command is
+ done. This however did not work well if the repository is set to
+ be shared with core.sharedRepository and the umask of the previous
+ user is tighter. They have been made to work better by calling
+ unlink(2) and retrying after fopen(3) fails with EPERM.
+ (merge ea56518 js/fopen-harder later to maint).
+
+ * Asking gitweb for a nonexistent commit left a warning in the server
+ log.
+
+ Somebody may want to follow this up with an additional test, perhaps?
+ IIRC, we do test that no Perl warnings are given to the server log,
+ so this should have been caught if our test coverage were good.
+ (merge a9eb90a ho/gitweb-squelch-undef-warning later to maint).
+
+ * "git rebase", unlike all other callers of "gc --auto", did not
+ ignore the exit code from "gc --auto".
+ (merge 8c24f5b jk/ok-to-fail-gc-auto-in-rebase later to maint).
+
+ * Many codepaths that run "gc --auto" before exiting kept packfiles
+ mapped and left the file descriptors to them open, which was not
+ friendly to systems that cannot remove files that are open. They
+ now close the packs before doing so.
+ (merge d562102 js/close-packs-before-gc later to maint).
+
+ * A recent optimization to filter-branch in v2.7.0 introduced a
+ regression when --prune-empty filter is used, which has been
+ corrected.
+ (merge 1dc413e jk/filter-branch-no-index later to maint).
+
+ * The description of the SANITY prerequisite the test suite uses has
+ been clarified both in the comment and in the implementation.
+ (merge 719c3da jk/sanity later to maint).
+
+ * "git tag" started listing a tag "foo" as "tags/foo" when a branch
+ named "foo" exists in the same repository; remove this unnecessary
+ disambiguation, which is a regression introduced in v2.7.0.
+ (merge 0571979 jk/list-tag-2.7-regression later to maint).
+
+ * The way "git svn" uses the auth parameter was broken by Subversion
+ 1.9.0 and later.
+ (merge 0b66415 ew/svn-1.9.0-auth later to maint).
+
+ * The "split" subcommand of "git subtree" (in contrib/) incorrectly
+ skipped merges when it shouldn't, which was corrected.
+ (merge 933cfeb dw/subtree-split-do-not-drop-merge later to maint).
+
+ * A few options of "git diff" did not work well when the command was
+ run from a subdirectory.
+ (merge a97262c nd/diff-with-path-params later to maint).
+
+ * The command line completion learned a handful of additional options
+ and command specific syntax.
+ (merge fa4b5e3 jk/completion-rebase later to maint).
+ (merge f7c2e1a pw/completion-show-branch later to maint).
+ (merge d7d4ca8 pw/completion-stash later to maint).
+ (merge e6414b4 tb/complete-word-diff-regex later to maint).
+
+ * dirname() emulation has been added, as Msys2 lacks it.
+ (merge e7d5ce8 js/dirname-basename later to maint).
+
+ * The underlying machinery used by "ls-files -o" and other commands
+ has been taught not to create an empty submodule ref cache for a
+ directory that is not a submodule. This removes a ton of wasted
+ CPU cycles.
+ (merge a2d5156 jk/ref-cache-non-repository-optim later to maint).
+
+ * Other minor clean-ups and documentation updates
+ (merge 99487cf ss/user-manual later to maint).
+ (merge e914ef0 ew/for-each-ref-doc later to maint).
+ (merge 36fc7d8 sg/t6050-failing-editor-test-fix later to maint).
+ (merge 60253a6 ss/clone-depth-single-doc later to maint).
+ (merge bd02e97 lv/add-doc-working-tree later to maint).
iso format is used. For supported values, see the discussion
of the --date option at linkgit:git-log[1].
+--[no-]progress::
+ Progress status is reported on the standard error stream
+ by default when it is attached to a terminal. This flag
+ enables progress reporting even if not attached to a
+ terminal. Can't use `--progress` together with `--porcelain`
+ or `--incremental`.
+
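A minimal illustration (the file name and line count are made up); the
progress line goes to the standard error stream:

------------
$ git blame --progress Makefile >annotated.txt
Blaming lines: 100% (2891/2891), done.
------------
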
-M|<num>|::
Detect moved or copied lines within a file. When a commit
moves or copies a block of lines (e.g. the original file
so that locally committed merge commits will not be flattened
by running 'git pull'.
+
+When the value is `interactive`, the rebase is run in interactive mode.
++
*NOTE*: this is a possibly dangerous operation; do *not* use
it unless you understand the implications (see linkgit:git-rebase[1]
for details).
format-patch is invoked, but in addition can be set to "auto", to
generate a cover-letter only when there's more than one patch.
+format.outputDirectory::
+ Set a custom directory to store the resulting files instead of the
+ current working directory.
+
filter.<driver>.clean::
The command which is used to convert the content of a worktree
file to a blob upon checkin. See linkgit:gitattributes[5] for
option is ignored when the 'grep.patternType' option is set to a value
other than 'default'.
+grep.threads::
+ Number of grep worker threads to use.
+ See `grep.threads` in linkgit:git-grep[1] for more information.
+
+grep.fallbackToNoIndex::
+ If set to true, fall back to git grep --no-index if git grep
+ is executed outside of a git repository. Defaults to false.
+
gpg.program::
Use this custom program instead of "gpg" found on $PATH when
making or verifying a PGP signature. The program must support the
http.proxy::
Override the HTTP proxy, normally configured using the 'http_proxy',
- 'https_proxy', and 'all_proxy' environment variables (see
- `curl(1)`). This can be overridden on a per-remote basis; see
- remote.<name>.proxy
+ 'https_proxy', and 'all_proxy' environment variables (see `curl(1)`). In
+ addition to the syntax understood by curl, it is possible to specify a
+ proxy string with a user name but no password, in which case git will
+ attempt to acquire one in the same way it does for other credentials. See
+ linkgit:gitcredentials[7] for more information. The syntax thus is
+ '[protocol://][user[:password]@]proxyhost[:port]'. This can be overridden
+ on a per-remote basis; see remote.<name>.proxy
+
+http.proxyAuthMethod::
+ Set the method with which to authenticate against the HTTP proxy. This
+ only takes effect if the configured proxy string contains a user name part
+ (i.e. is of the form 'user@host' or 'user@host:port'). This can be
+ overridden on a per-remote basis; see `remote.<name>.proxyAuthMethod`.
+ Both can be overridden by the 'GIT_HTTP_PROXY_AUTHMETHOD' environment
+ variable. Possible values are:
++
+--
+* `anyauth` - Automatically pick a suitable authentication method. It is
+ assumed that the proxy answers an unauthenticated request with a 407
+ status code and one or more Proxy-authenticate headers with supported
+ authentication methods. This is the default.
+* `basic` - HTTP Basic authentication
+* `digest` - HTTP Digest authentication; this prevents the password from being
+ transmitted to the proxy in clear text
+* `negotiate` - GSS-Negotiate authentication (compare the --negotiate option
+ of `curl(1)`)
+* `ntlm` - NTLM authentication (compare the --ntlm option of `curl(1)`)
+--
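For illustration only (the proxy host is a placeholder), a proxy string
with a user name but no password can be combined with a forced method:

------------
$ git config http.proxy http://proxyuser@proxy.example.com:8080
$ git config http.proxyAuthMethod basic
------------
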
http.cookieFile::
File containing previously stored cookie lines which should be used
so that locally committed merge commits will not be flattened
by running 'git pull'.
+
+When the value is `interactive`, the rebase is run in interactive mode.
++
*NOTE*: this is a possibly dangerous operation; do *not* use
it unless you understand the implications (see linkgit:git-rebase[1]
for details).
the proxy to use for that remote. Set to the empty string to
disable proxying for that remote.
+remote.<name>.proxyAuthMethod::
+ For remotes that require curl (http, https and ftp), the method to use for
+ authenticating against the proxy in use (probably set in
+ `remote.<name>.proxy`). See `http.proxyAuthMethod`.
+
remote.<name>.fetch::
The default set of "refspec" for linkgit:git-fetch[1]. See
linkgit:git-fetch[1].
reference to a commit that isn't already in the local submodule
clone.
+-j::
+--jobs=<n>::
+ Number of parallel children to be used for fetching submodules.
+ Each will fetch from different submodules, such that fetching many
+ submodules will be faster. By default submodules will be fetched
+ one at a time.
+
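For example (the job count is arbitrary), fetching up to four
submodules in parallel:

------------
$ git fetch --recurse-submodules --jobs=4
------------
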
--no-recurse-submodules::
Disable recursive fetching of submodules (this has the same effect as
using the '--recurse-submodules=no' option).
[verse]
'git blame' [-c] [-b] [-l] [--root] [-t] [-f] [-n] [-s] [-e] [-p] [-w] [--incremental]
[-L <range>] [-S <revs-file>] [-M] [-C] [-C] [-C] [--since=<date>]
- [--abbrev=<n>] [<rev> | --contents <file> | --reverse <rev>] [--] <file>
+ [--progress] [--abbrev=<n>] [<rev> | --contents <file> | --reverse <rev>]
+ [--] <file>
DESCRIPTION
-----------
output, unless the `--stdout` option is specified.
If `-o` is specified, output files are created in <dir>. Otherwise
-they are created in the current working directory.
+they are created in the current working directory. The default path
+can be set with the 'format.outputDirectory' configuration option.
+The `-o` option takes precedence over `format.outputDirectory`.
+To store patches in the current working directory even when
+`format.outputDirectory` points elsewhere, use `-o .`.
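A hypothetical illustration (the directory name is arbitrary):

------------
$ git config format.outputDirectory ~/patches
$ git format-patch -1        # patch is written to ~/patches
$ git format-patch -1 -o .   # -o wins; patch goes to the current directory
------------
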
By default, the subject of a single patch is "[PATCH] " followed by
the concatenation of lines from the commit message up to the first blank
[--break] [--heading] [-p | --show-function]
[-A <post-context>] [-B <pre-context>] [-C <context>]
[-W | --function-context]
+ [--threads <num>]
[-f <file>] [-e] <pattern>
[--and|--or|--not|(|)|-e <pattern>...]
[ [--[no-]exclude-standard] [--cached | --no-index | --untracked] | <tree>...]
option is ignored when the 'grep.patternType' option is set to a value
other than 'default'.
+grep.threads::
+ Number of grep worker threads to use. If unset (or set to 0),
+ 8 threads are used by default (for now).
+
grep.fullName::
If set to true, enable '--full-name' option by default.
+grep.fallbackToNoIndex::
+ If set to true, fall back to git grep --no-index if git grep
+ is executed outside of a git repository. Defaults to false.
+
OPTIONS
-------
effectively showing the whole function in which the match was
found.
+--threads <num>::
+ Number of grep worker threads to use.
+ See `grep.threads` in 'CONFIGURATION' for more information.
+
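As an illustration (the thread counts are arbitrary), the worker count
may come from configuration or be overridden per invocation:

------------
$ git config grep.threads 4
$ git grep --threads 2 strbuf_getline
------------
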
-f <file>::
Read patterns from <file>, one per line.
SYNOPSIS
--------
[verse]
-'git interpret-trailers' [--trim-empty] [(--trailer <token>[(=|:)<value>])...] [<file>...]
+'git interpret-trailers' [--in-place] [--trim-empty] [(--trailer <token>[(=|:)<value>])...] [<file>...]
DESCRIPTION
-----------
OPTIONS
-------
+--in-place::
+ Edit the files in place.
+
--trim-empty::
If the <value> part of any trailer contains only whitespace,
the whole trailer will be removed from the resulting message.
Signed-off-by: Bob <bob@example.com>
------------
+* Use the '--in-place' option to edit a message file in place:
++
+------------
+$ cat msg.txt
+subject
+
+message
+
+Signed-off-by: Bob <bob@example.com>
+$ git interpret-trailers --trailer 'Acked-by: Alice <alice@example.com>' --in-place msg.txt
+$ cat msg.txt
+subject
+
+message
+
+Signed-off-by: Bob <bob@example.com>
+Acked-by: Alice <alice@example.com>
+------------
+
* Extract the last commit as a patch, and add a 'Cc' and a
'Reviewed-by' trailer to it:
+
'git ls-files' [-z] [-t] [-v]
(--[cached|deleted|others|ignored|stage|unmerged|killed|modified])*
(-[c|d|o|i|s|u|k|m])*
+ [--eol]
[-x <pattern>|--exclude=<pattern>]
[-X <file>|--exclude-from=<file>]
[--exclude-per-directory=<file>]
possible for manual inspection; the exact format may change at
any time.
+--eol::
+ Show <eolinfo> and <eolattr> of files.
+ <eolinfo> is the file content identification used by Git when
+ the "text" attribute is "auto" (or not set and core.autocrlf is not false).
+ <eolinfo> is either "-text", "none", "lf", "crlf", "mixed" or "".
++
+"" means the file is not a regular file, it is not in the index or
+not accessable in the working tree.
++
+<eolattr> is the attribute that is used when checking out or committing;
+it is either "", "-text", "text", "text=auto", "text eol=lf" or "text eol=crlf".
+Note: currently Git does not support "text=auto eol=lf" or "text=auto eol=crlf";
+that may change in the future.
++
+Both the <eolinfo> in the index ("i/<eolinfo>")
+and in the working tree ("w/<eolinfo>") are shown for regular files,
+followed by the attribute ("attr/<eolattr>").
+
\--::
Do not interpret any more arguments as options.
[<tag> ]<mode> <object> <stage> <file>
+'git ls-files --eol' will show
+ i/<eolinfo><SPACES>w/<eolinfo><SPACES>attr/<eolattr><SPACE*><TAB><file>
+
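An illustrative listing (file names and classifications are made up)
in the format described above:

------------
$ git ls-files --eol
i/lf    w/lf    attr/                   Makefile
i/crlf  w/crlf  attr/text eol=crlf      hello.bat
i/-text w/-text attr/                   logo.png
------------
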
'git ls-files --unmerged' and 'git ls-files --stage' can be used to examine
detailed information on unmerged paths.
SYNOPSIS
--------
[verse]
-'git ls-remote' [--heads] [--tags] [--upload-pack=<exec>]
- [--exit-code] <repository> [<refs>...]
+'git ls-remote' [--heads] [--tags] [--refs] [--upload-pack=<exec>]
+ [-q | --quiet] [--exit-code] [--get-url]
+ [--symref] [<repository> [<refs>...]]
DESCRIPTION
-----------
both, references stored in refs/heads and refs/tags are
displayed.
+--refs::
+ Do not show peeled tags or pseudorefs like HEAD in the output.
+
+-q::
+--quiet::
+ Do not print remote URL to stderr.
+
--upload-pack=<exec>::
Specify the full path of 'git-upload-pack' on the remote
host. This allows listing references from repositories accessed via
"url.<base>.insteadOf" config setting (See linkgit:git-config[1]) and
exit without talking to the remote.
+--symref::
+ In addition to the object pointed to by it, show the underlying
+ ref pointed to by it when showing a symbolic ref. Currently,
+ upload-pack only shows the symref HEAD, so it will be the only
+ one shown by ls-remote.
+
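For example (the remote name, branch and object name shown are
placeholders):

------------
$ git ls-remote --symref origin HEAD
ref: refs/heads/master	HEAD
3f2b4e1c0d9a8b7c6e5f4a3b2c1d0e9f8a7b6c5d	HEAD
------------
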
<repository>::
The "remote" repository to query. This parameter can be
either a URL or the name of a remote (see the GIT URLS and
include::merge-options.txt[]
-r::
---rebase[=false|true|preserve]::
+--rebase[=false|true|preserve|interactive]::
When true, rebase the current branch on top of the upstream
branch after fetching. If there is a remote-tracking branch
corresponding to the upstream branch and the upstream branch
+
When false, merge the current branch into the upstream branch.
+
+When `interactive`, enable the interactive mode of rebase.
++
See `pull.rebase`, `branch.<name>.rebase` and `branch.autoSetupRebase` in
linkgit:git-config[1] if you want to make `git pull` always use
`--rebase` instead of merging.
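As a hypothetical example, either form enables an interactive rebase
on pull:

------------
$ git pull --rebase=interactive
$ git config pull.rebase interactive
------------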
--------
[verse]
'git push' [--all | --mirror | --tags] [--follow-tags] [--atomic] [-n | --dry-run] [--receive-pack=<git-receive-pack>]
- [--repo=<repository>] [-f | --force] [--prune] [-v | --verbose]
+ [--repo=<repository>] [-f | --force] [-d | --delete] [--prune] [-v | --verbose]
[-u | --set-upstream]
[--[no-]signed|--sign=(true|false|if-asked)]
[--force-with-lease[=<refname>[:<expect>]]]
commit may be copied to the output.
ifndef::git-rev-list[]
---notes[=<ref>]::
+--notes[=<treeish>]::
Show the notes (see linkgit:git-notes[1]) that annotate the
commit, when showing the commit log message. This is the default
for `git log`, `git show` and `git whatchanged` commands when
'core.notesRef' and 'notes.displayRef' variables (or corresponding
environment overrides). See linkgit:git-config[1] for more details.
+
-With an optional '<ref>' argument, show this notes ref instead of the
-default notes ref(s). The ref specifies the full refname when it begins
+With an optional '<treeish>' argument, use the treeish to find the notes
+to display. The treeish can specify the full refname when it begins
with `refs/notes/`; when it begins with `notes/`, `refs/` and otherwise
`refs/notes/` is prefixed to form a full name of the ref.
+
"--notes --notes=foo --no-notes --notes=bar" will only show notes
from "refs/notes/bar".
---show-notes[=<ref>]::
+--show-notes[=<treeish>]::
--[no-]standard-notes::
These options are deprecated. Use the above --notes/--no-notes
options instead.
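For illustration (the ref name is a placeholder for a remote-tracking
notes tree fetched into refs/remote-notes/), any tree-ish shaped like
a notes tree can now be given:

------------
$ git log --notes=refs/remote-notes/origin/commits -1
------------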
'@'::
'@' alone is a shortcut for 'HEAD'.
-'<refname>@\{<date>\}', e.g. 'master@\{yesterday\}', 'HEAD@\{5 minutes ago\}'::
+'<refname>@{<date>}', e.g. 'master@\{yesterday\}', 'HEAD@{5 minutes ago}'::
A ref followed by the suffix '@' with a date specification
enclosed in a brace
- pair (e.g. '\{yesterday\}', '\{1 month 2 weeks 3 days 1 hour 1
- second ago\}' or '\{1979-02-26 18:30:00\}') specifies the value
+ pair (e.g. '\{yesterday\}', '{1 month 2 weeks 3 days 1 hour 1
+ second ago}' or '{1979-02-26 18:30:00}') specifies the value
of the ref at a prior point in time. This suffix may only be
used immediately following a ref name and the ref must have an
existing log ('$GIT_DIR/logs/<ref>'). Note that this looks up the state
'master' branch last week. If you want to look at commits made during
certain times, see '--since' and '--until'.
-'<refname>@\{<n>\}', e.g. 'master@\{1\}'::
+'<refname>@{<n>}', e.g. 'master@\{1\}'::
A ref followed by the suffix '@' with an ordinal specification
enclosed in a brace pair (e.g. '\{1\}', '\{15\}') specifies
the n-th prior value of that ref. For example 'master@\{1\}'
immediately following a ref name and the ref must have an existing
log ('$GIT_DIR/logs/<refname>').
-'@\{<n>\}', e.g. '@\{1\}'::
+'@{<n>}', e.g. '@\{1\}'::
You can use the '@' construct with an empty ref part to get at a
reflog entry of the current branch. For example, if you are on
branch 'blabla' then '@\{1\}' means the same as 'blabla@\{1\}'.
-'@\{-<n>\}', e.g. '@\{-1\}'::
- The construct '@\{-<n>\}' means the <n>th branch/commit checked out
+'@{-<n>}', e.g. '@{-1}'::
+ The construct '@{-<n>}' means the <n>th branch/commit checked out
before the current one.
'<branchname>@\{upstream\}', e.g. 'master@\{upstream\}', '@\{u\}'::
'<rev>{caret}1{caret}1{caret}1'. See below for an illustration of
the usage of this form.
-'<rev>{caret}\{<type>\}', e.g. 'v0.99.8{caret}\{commit\}'::
+'<rev>{caret}{<type>}', e.g. 'v0.99.8{caret}\{commit\}'::
A suffix '{caret}' followed by an object type name enclosed in
brace pair means dereference the object at '<rev>' recursively until
an object of type '<type>' is found or the object cannot be
'rev{caret}\{tag\}' can be used to ensure that 'rev' identifies an
existing tag object.
-'<rev>{caret}\{\}', e.g. 'v0.99.8{caret}\{\}'::
+'<rev>{caret}{}', e.g. 'v0.99.8{caret}{}'::
A suffix '{caret}' followed by an empty brace pair
means the object could be a tag,
and dereference the tag recursively until a non-tag object is
found.
-'<rev>{caret}\{/<text>\}', e.g. 'HEAD^{/fix nasty bug}'::
+'<rev>{caret}{/<text>}', e.g. 'HEAD^{/fix nasty bug}'::
A suffix '{caret}' to a revision parameter, followed by a brace
pair that contains a text led by a slash,
is the same as the ':/fix nasty bug' syntax below except that
The proxy to use for curl (http, https, ftp, etc.) URLs.
+`http_proxy_authmethod`::
+
+ The method used for authenticating against `http_proxy`.
+
struct remotes can be found by name with remote_get(), and iterated
through with for_each_remote(). remote_get(NULL) will return the
default remote, given the current branch and configuration.
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v2.7.1
+DEF_VER=v2.7.0.GIT
LF='
'
export DEFAULT_EDITOR DEFAULT_PAGER
+.PHONY: doc man html info pdf
doc:
$(MAKE) -C Documentation all
$(LOCALIZED_PERL)
mv $@+ $@
+.PHONY: pot
pot: po/git.pot
POFILES := $(wildcard po/*.po)
install_bindir_programs := $(patsubst %,%$X,$(BINDIR_PROGRAMS_NEED_X)) $(BINDIR_PROGRAMS_NO_X)
+.PHONY: profile-install profile-fast-install
profile-install: profile
$(MAKE) install
done && \
./check_bindir "z$$bindir" "z$$execdir" "$$bindir/git-add$X"
+.PHONY: install-gitweb install-doc install-man install-html install-info install-pdf
+.PHONY: quick-install-doc quick-install-man quick-install-html
install-gitweb:
$(MAKE) -C gitweb install
htmldocs = git-htmldocs-$(GIT_VERSION)
manpages = git-manpages-$(GIT_VERSION)
+.PHONY: dist-doc distclean
dist-doc:
$(RM) -r .doc-tmp-dir
mkdir .doc-tmp-dir
ALL_COMMANDS += gitk
ALL_COMMANDS += gitweb
ALL_COMMANDS += git-gui git-citool
+
+.PHONY: check-docs
check-docs::
@(for v in $(ALL_COMMANDS); \
do \
### Test suite coverage testing
#
.PHONY: coverage coverage-clean coverage-compile coverage-test coverage-report
+.PHONY: coverage-untested-functions cover_db cover_db_html
.PHONY: coverage-clean-results
coverage:
-Documentation/RelNotes/2.7.1.txt
\ No newline at end of file
+Documentation/RelNotes/2.8.0.txt
\ No newline at end of file
if (!fp)
die_errno("Could not open file '%s'", filename);
- while (strbuf_getline(&str, fp, '\n') != EOF) {
+ while (strbuf_getline_lf(&str, fp) != EOF) {
strbuf_trim(&str);
if (sq_dequote_to_argv_array(str.buf, array))
die("Badly quoted content in file '%s': %s",
if (!fp)
return 0;
- if (strbuf_getline(&str, fp, '\n') != EOF)
+ if (strbuf_getline_lf(&str, fp) != EOF)
res = !strcmp(str.buf, oid_to_hex(oid));
strbuf_release(&str);
strerror(errno));
}
} else {
- strbuf_getline(&str, fp, '\n');
+ strbuf_getline_lf(&str, fp);
*read_bad = strbuf_detach(&str, NULL);
- strbuf_getline(&str, fp, '\n');
+ strbuf_getline_lf(&str, fp);
*read_good = strbuf_detach(&str, NULL);
}
strbuf_release(&str);
return !st.st_size;
}
-/**
- * Like strbuf_getline(), but treats both '\n' and "\r\n" as line terminators.
- */
-static int strbuf_getline_crlf(struct strbuf *sb, FILE *fp)
-{
- if (strbuf_getwholeline(sb, fp, '\n'))
- return EOF;
- if (sb->buf[sb->len - 1] == '\n') {
- strbuf_setlen(sb, sb->len - 1);
- if (sb->len > 0 && sb->buf[sb->len - 1] == '\r')
- strbuf_setlen(sb, sb->len - 1);
- }
- return 0;
-}
-
/**
* Returns the length of the first line of msg.
*/
struct strbuf sb = STRBUF_INIT;
const char *str;
- if (strbuf_getline(&sb, fp, '\n'))
+ if (strbuf_getline_lf(&sb, fp))
goto fail;
if (!skip_prefix(sb.buf, key, &str))
fp = xfopen(am_path(state, "rewritten"), "r");
- while (!strbuf_getline(&sb, fp, '\n')) {
+ while (!strbuf_getline_lf(&sb, fp)) {
unsigned char from_obj[GIT_SHA1_RAWSZ], to_obj[GIT_SHA1_RAWSZ];
if (sb.len != GIT_SHA1_HEXSZ * 2 + 1) {
if (regcomp(®ex, header_regex, REG_NOSUB | REG_EXTENDED))
die("invalid pattern: %s", header_regex);
- while (!strbuf_getline_crlf(&sb, fp)) {
+ while (!strbuf_getline(&sb, fp)) {
if (!sb.len)
break; /* End of header */
fp = xfopen(*paths, "r");
- while (!strbuf_getline_crlf(&l1, fp)) {
+ while (!strbuf_getline(&l1, fp)) {
if (l1.len)
break;
}
}
strbuf_reset(&l2);
- strbuf_getline_crlf(&l2, fp);
+ strbuf_getline(&l2, fp);
strbuf_reset(&l3);
- strbuf_getline_crlf(&l3, fp);
+ strbuf_getline(&l3, fp);
/*
* If the second line is empty and the third is a From, Author or Date
struct strbuf sb = STRBUF_INIT;
int subject_printed = 0;
- while (!strbuf_getline(&sb, in, '\n')) {
+ while (!strbuf_getline_lf(&sb, in)) {
const char *str;
if (str_isspace(sb.buf))
return error(_("could not open '%s' for reading: %s"), *paths,
strerror(errno));
- while (!strbuf_getline(&sb, fp, '\n')) {
+ while (!strbuf_getline_lf(&sb, fp)) {
if (*sb.buf == '#')
continue; /* skip comment lines */
{
struct strbuf sb = STRBUF_INIT;
- while (!strbuf_getline(&sb, in, '\n')) {
+ while (!strbuf_getline_lf(&sb, in)) {
const char *str;
if (skip_prefix(sb.buf, "# User ", &str))
/* Extract message and author information */
fp = xfopen(am_path(state, "info"), "r");
- while (!strbuf_getline(&sb, fp, '\n')) {
+ while (!strbuf_getline_lf(&sb, fp)) {
const char *x;
if (skip_prefix(sb.buf, "Subject: ", &x)) {
FILE *fp = xfopen(mail, "r");
const char *x;
- if (strbuf_getline(&sb, fp, '\n'))
+ if (strbuf_getline_lf(&sb, fp))
return -1;
if (!skip_prefix(sb.buf, "From ", &x))
#include "line-range.h"
#include "line-log.h"
#include "dir.h"
+#include "progress.h"
static char blame_usage[] = N_("git blame [<options>] [<rev-opts>] [<rev>] [--] <file>");
static int xdl_opts;
static int abbrev = -1;
static int no_whole_file_rename;
+static int show_progress;
static struct date_mode blame_date_mode = { DATE_ISO8601 };
static size_t blame_date_width;
char path[FLEX_ARRAY];
};
+struct progress_info {
+ struct progress *progress;
+ int blamed_lines;
+};
+
static int diff_hunks(mmfile_t *file_a, mmfile_t *file_b, long ctxlen,
xdl_emit_hunk_consume_func_t hunk_func, void *cb_data)
{
* The blame_entry is found to be guilty for the range.
* Show it in incremental output.
*/
-static void found_guilty_entry(struct blame_entry *ent)
+static void found_guilty_entry(struct blame_entry *ent,
+ struct progress_info *pi)
{
if (incremental) {
struct origin *suspect = ent->suspect;
write_filename_info(suspect->path);
maybe_flush_or_die(stdout, "stdout");
}
+ pi->blamed_lines += ent->num_lines;
+ display_progress(pi->progress, pi->blamed_lines);
}
/*
{
struct rev_info *revs = sb->revs;
struct commit *commit = prio_queue_get(&sb->commits);
+ struct progress_info pi = { NULL, 0 };
+
+ if (show_progress)
+ pi.progress = start_progress_delay(_("Blaming lines"),
+ sb->num_lines, 50, 1);
while (commit) {
struct blame_entry *ent;
suspect->guilty = 1;
for (;;) {
struct blame_entry *next = ent->next;
- found_guilty_entry(ent);
+ found_guilty_entry(ent, &pi);
if (next) {
ent = next;
continue;
if (DEBUG) /* sanity */
sanity_check_refcnt(sb);
}
+
+ stop_progress(&pi.progress);
}
static const char *format_time(unsigned long time, const char *tz_str,
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
- /*
- * We are not going to write this out, so this does not matter
- * right now, but someday we might optimize diff-index --cached
- * with cache-tree information.
- */
cache_tree_invalidate_path(&the_index, path);
return commit;
OPT_BOOL('b', NULL, &blank_boundary, N_("Show blank SHA-1 for boundary commits (Default: off)")),
OPT_BOOL(0, "root", &show_root, N_("Do not treat root commits as boundaries (Default: off)")),
OPT_BOOL(0, "show-stats", &show_stats, N_("Show work cost statistics")),
+ OPT_BOOL(0, "progress", &show_progress, N_("Force progress reporting")),
OPT_BIT(0, "score-debug", &output_option, N_("Show output score for blame entries"), OUTPUT_SHOW_SCORE),
OPT_BIT('f', "show-name", &output_option, N_("Show original filename (Default: auto)"), OUTPUT_SHOW_NAME),
OPT_BIT('n', "show-number", &output_option, N_("Show original linenumber (Default: off)"), OUTPUT_SHOW_NUMBER),
save_commit_buffer = 0;
dashdash_pos = 0;
+ show_progress = -1;
parse_options_start(&ctx, argc, argv, prefix, options,
PARSE_OPT_KEEP_DASHDASH | PARSE_OPT_KEEP_ARGV0);
DIFF_OPT_CLR(&revs.diffopt, FOLLOW_RENAMES);
argc = parse_options_end(&ctx);
+ if (incremental || (output_option & OUTPUT_PORCELAIN)) {
+ if (show_progress > 0)
+ die("--progress can't be used with --incremental or porcelain formats");
+ show_progress = 0;
+ } else if (show_progress < 0)
+ show_progress = isatty(2);
+
if (0 < abbrev)
/* one more abbrev length is needed for the boundary commit */
abbrev++;
read_mailmap(&mailmap, NULL);
+ assign_blame(&sb, opt);
+
if (!incremental)
setup_pager();
- assign_blame(&sb, opt);
-
free(final_commit_name);
if (incremental)
save_warning = warn_on_object_refname_ambiguity;
warn_on_object_refname_ambiguity = 0;
- while (strbuf_getline(&buf, stdin, '\n') != EOF) {
+ while (strbuf_getline(&buf, stdin) != EOF) {
if (data.split_on_whitespace) {
/*
* Split at first whitespace, tying off the beginning
struct git_attr_check *check)
{
struct strbuf buf, nbuf;
- int line_termination = nul_term_line ? 0 : '\n';
+ strbuf_getline_fn getline_fn;
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
strbuf_init(&buf, 0);
strbuf_init(&nbuf, 0);
- while (strbuf_getline(&buf, stdin, line_termination) != EOF) {
- if (line_termination && buf.buf[0] == '"') {
+ while (getline_fn(&buf, stdin) != EOF) {
+ if (!nul_term_line && buf.buf[0] == '"') {
strbuf_reset(&nbuf);
if (unquote_c_style(&nbuf, buf.buf, NULL))
die("line is badly quoted");
{
struct strbuf buf, nbuf;
char *pathspec[2] = { NULL, NULL };
- int line_termination = nul_term_line ? 0 : '\n';
+ strbuf_getline_fn getline_fn;
int num_ignored = 0;
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
strbuf_init(&buf, 0);
strbuf_init(&nbuf, 0);
- while (strbuf_getline(&buf, stdin, line_termination) != EOF) {
- if (line_termination && buf.buf[0] == '"') {
+ while (getline_fn(&buf, stdin) != EOF) {
+ if (!nul_term_line && buf.buf[0] == '"') {
strbuf_reset(&nbuf);
if (unquote_c_style(&nbuf, buf.buf, NULL))
die("line is badly quoted");
if (use_stdin) {
struct strbuf buf = STRBUF_INIT;
- while (strbuf_getline(&buf, stdin, '\n') != EOF) {
+ while (strbuf_getline_lf(&buf, stdin) != EOF) {
check_mailmap(&mailmap, buf.buf);
maybe_flush_or_die(stdout, "stdout");
}
#include "parse-options.h"
#define CHECKOUT_ALL 4
-static int line_termination = '\n';
+static int nul_term_line;
static int checkout_stage; /* default to checkout stage0 */
static int to_tempfile;
static char topath[4][TEMPORARY_FILENAME_LENGTH + 1];
fputs(topath[checkout_stage], stdout);
putchar('\t');
- write_name_quoted_relative(name, prefix, stdout, line_termination);
+ write_name_quoted_relative(name, prefix, stdout,
+ nul_term_line ? '\0' : '\n');
for (i = 0; i < 4; i++) {
topath[i][0] = 0;
static int option_parse_z(const struct option *opt,
const char *arg, int unset)
{
- if (unset)
- line_termination = '\n';
- else
- line_termination = 0;
+ nul_term_line = !unset;
return 0;
}
if (read_from_stdin) {
struct strbuf buf = STRBUF_INIT, nbuf = STRBUF_INIT;
+ strbuf_getline_fn getline_fn;
if (all)
die("git checkout-index: don't mix '--all' and '--stdin'");
- while (strbuf_getline(&buf, stdin, line_termination) != EOF) {
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
+ while (getline_fn(&buf, stdin) != EOF) {
char *p;
- if (line_termination && buf.buf[0] == '"') {
+ if (!nul_term_line && buf.buf[0] == '"') {
strbuf_reset(&nbuf);
if (unquote_c_style(&nbuf, buf.buf, NULL))
die("line is badly quoted");
describe_detached_head(_("HEAD is now at"), new->commit);
}
} else if (new->path) { /* Switch branches. */
- create_symref("HEAD", new->path, msg.buf);
+ if (create_symref("HEAD", new->path, msg.buf) < 0)
+ die("unable to update HEAD");
if (!opts->quiet) {
if (old->path && !strcmp(new->path, old->path)) {
if (opts->new_branch_force)
clean_get_color(CLEAN_COLOR_RESET));
}
- if (strbuf_getline(&choice, stdin, '\n') != EOF) {
+ if (strbuf_getline_lf(&choice, stdin) != EOF) {
strbuf_trim(&choice);
} else {
eof = 1;
clean_print_color(CLEAN_COLOR_PROMPT);
printf(_("Input ignore patterns>> "));
clean_print_color(CLEAN_COLOR_RESET);
- if (strbuf_getline(&confirm, stdin, '\n') != EOF)
+ if (strbuf_getline_lf(&confirm, stdin) != EOF)
strbuf_trim(&confirm);
else
putchar('\n');
qname = quote_path_relative(item->string, NULL, &buf);
/* TRANSLATORS: Make sure to keep [y/N] as is */
printf(_("Remove %s [y/N]? "), qname);
- if (strbuf_getline(&confirm, stdin, '\n') != EOF) {
+ if (strbuf_getline_lf(&confirm, stdin) != EOF) {
strbuf_trim(&confirm);
} else {
putchar('\n');
FILE *in = fopen(src->buf, "r");
struct strbuf line = STRBUF_INIT;
- while (strbuf_getline(&line, in, '\n') != EOF) {
+ while (strbuf_getline(&line, in) != EOF) {
char *abs_path;
if (!line.len || line.buf[0] == '#')
continue;
struct strbuf head_ref = STRBUF_INIT;
strbuf_addstr(&head_ref, branch_top);
strbuf_addstr(&head_ref, "HEAD");
- create_symref(head_ref.buf,
- remote_head_points_at->peer_ref->name,
- msg);
+ if (create_symref(head_ref.buf,
+ remote_head_points_at->peer_ref->name,
+ msg) < 0)
+ die("unable to update %s", head_ref.buf);
+ strbuf_release(&head_ref);
}
}
const char *head;
if (our && skip_prefix(our->name, "refs/heads/", &head)) {
/* Local default branch link */
- create_symref("HEAD", our->name, NULL);
+ if (create_symref("HEAD", our->name, NULL) < 0)
+ die("unable to update HEAD");
if (!option_bare) {
update_ref(msg, "HEAD", our->old_oid.hash, NULL, 0,
UPDATE_REFS_DIE_ON_ERR);
die(_("--command must be the first argument"));
}
finalize_colopts(&colopts, -1);
- while (!strbuf_getline(&sb, stdin, '\n'))
+ while (!strbuf_getline(&sb, stdin))
string_list_append(&list, sb.buf);
print_columns(&list, colopts, &copts);
if (fp == NULL)
die_errno(_("could not open '%s' for reading"),
git_path_merge_head());
- while (strbuf_getline(&m, fp, '\n') != EOF) {
+ while (strbuf_getline_lf(&m, fp) != EOF) {
struct commit *parent;
parent = get_merge_parent(m.buf);
else {
/* read from stdin one ref per line, until EOF */
struct strbuf line = STRBUF_INIT;
- while (strbuf_getline(&line, stdin, '\n') != EOF)
+ while (strbuf_getline_lf(&line, stdin) != EOF)
add_sought_entry(&sought, &nr_sought, &alloc_sought, line.buf);
strbuf_release(&line);
}
static int all, append, dry_run, force, keep, multiple, update_head_ok, verbosity;
static int progress = -1, recurse_submodules = RECURSE_SUBMODULES_DEFAULT;
static int tags = TAGS_DEFAULT, unshallow, update_shallow;
+static int max_children = 1;
static const char *depth;
static const char *upload_pack;
static struct strbuf default_rla = STRBUF_INIT;
N_("fetch all tags and associated objects"), TAGS_SET),
OPT_SET_INT('n', NULL, &tags,
N_("do not fetch all tags (--no-tags)"), TAGS_UNSET),
+ OPT_INTEGER('j', "jobs", &max_children,
+ N_("number of submodules fetched in parallel")),
OPT_BOOL('p', "prune", &prune,
N_("prune remote-tracking branches no longer on remote")),
{ OPTION_CALLBACK, 0, "recurse-submodules", NULL, N_("on-demand"),
result = fetch_populated_submodules(&options,
submodule_prefix,
recurse_submodules,
- verbosity < 0);
+ verbosity < 0,
+ max_children);
argv_array_clear(&options);
}
NULL
};
-static int use_threads = 1;
+#define GREP_NUM_THREADS_DEFAULT 8
+static int num_threads;
#ifndef NO_PTHREADS
-#define THREADS 8
-static pthread_t threads[THREADS];
+static pthread_t *threads;
/* We use one producer thread and THREADS consumer
* threads. The producer adds struct work_items to 'todo' and the
static inline void grep_lock(void)
{
- if (use_threads)
+ if (num_threads)
pthread_mutex_lock(&grep_mutex);
}
static inline void grep_unlock(void)
{
- if (use_threads)
+ if (num_threads)
pthread_mutex_unlock(&grep_mutex);
}
strbuf_init(&todo[i].out, 0);
}
- for (i = 0; i < ARRAY_SIZE(threads); i++) {
+ threads = xcalloc(num_threads, sizeof(*threads));
+ for (i = 0; i < num_threads; i++) {
int err;
struct grep_opt *o = grep_opt_dup(opt);
o->output = strbuf_out;
pthread_cond_broadcast(&cond_add);
grep_unlock();
- for (i = 0; i < ARRAY_SIZE(threads); i++) {
+ for (i = 0; i < num_threads; i++) {
void *h;
pthread_join(threads[i], &h);
hit |= (int) (intptr_t) h;
}
+ free(threads);
+
pthread_mutex_destroy(&grep_mutex);
pthread_mutex_destroy(&grep_read_mutex);
pthread_mutex_destroy(&grep_attr_mutex);
int st = grep_config(var, value, cb);
if (git_color_default_config(var, value, cb) < 0)
st = -1;
+
+ if (!strcmp(var, "grep.threads")) {
+ num_threads = git_config_int(var, value);
+ if (num_threads < 0)
+ die(_("invalid number of threads specified (%d) for %s"),
+ num_threads, var);
+ }
+
return st;
}
}
#ifndef NO_PTHREADS
- if (use_threads) {
+ if (num_threads) {
add_work(opt, GREP_SOURCE_SHA1, pathbuf.buf, path, sha1);
strbuf_release(&pathbuf);
return 0;
strbuf_addstr(&buf, filename);
#ifndef NO_PTHREADS
- if (use_threads) {
+ if (num_threads) {
add_work(opt, GREP_SOURCE_FILE, buf.buf, filename, filename);
strbuf_release(&buf);
return 0;
for (nr = 0; nr < active_nr; nr++) {
const struct cache_entry *ce = active_cache[nr];
- if (!S_ISREG(ce->ce_mode))
+ if (!S_ISREG(ce->ce_mode) || ce_intent_to_add(ce))
continue;
if (!ce_path_match(ce, pathspec, NULL))
continue;
patterns = from_stdin ? stdin : fopen(arg, "r");
if (!patterns)
die_errno(_("cannot open '%s'"), arg);
- while (strbuf_getline(&sb, patterns, '\n') == 0) {
+ while (strbuf_getline(&sb, patterns) == 0) {
/* ignore empty line like grep does */
if (sb.len == 0)
continue;
N_("show <n> context lines before matches")),
OPT_INTEGER('A', "after-context", &opt.post_context,
N_("show <n> context lines after matches")),
+ OPT_INTEGER(0, "threads", &num_threads,
+ N_("use <n> worker threads")),
OPT_NUMBER_CALLBACK(&opt, N_("shortcut for -C NUM"),
context_callback),
OPT_BOOL('p', "show-function", &opt.funcname,
PARSE_OPT_STOP_AT_NON_OPTION);
grep_commit_pattern_type(pattern_type_arg, &opt);
- if (use_index && !startup_info->have_repository)
- /* die the same way as if we did it at the beginning */
- setup_git_directory();
+ if (use_index && !startup_info->have_repository) {
+ int fallback = 0;
+ git_config_get_bool("grep.fallbacktonoindex", &fallback);
+ if (fallback)
+ use_index = 0;
+ else
+ /* die the same way as if we did it at the beginning */
+ setup_git_directory();
+ }
/*
* skip a -- separator; we know it cannot be
opt.output_priv = &path_list;
opt.output = append_path;
string_list_append(&path_list, show_in_pager);
- use_threads = 0;
}
if (!opt.pattern_list)
}
#ifndef NO_PTHREADS
- if (list.nr || cached || online_cpus() == 1)
- use_threads = 0;
+ if (list.nr || cached || show_in_pager)
+ num_threads = 0;
+ else if (num_threads == 0)
+ num_threads = GREP_NUM_THREADS_DEFAULT;
+ else if (num_threads < 0)
+ die(_("invalid number of threads specified (%d)"), num_threads);
#else
- use_threads = 0;
+ num_threads = 0;
#endif
#ifndef NO_PTHREADS
- if (use_threads) {
+ if (num_threads) {
if (!(opt.name_only || opt.unmatch_name_only || opt.count)
&& (opt.pre_context || opt.post_context ||
opt.file_break || opt.funcbody))
hit = grep_objects(&opt, &pathspec, &list);
}
- if (use_threads)
+ if (num_threads)
hit |= wait_all();
if (hit && show_in_pager)
run_pager(&opt, prefix);
{
struct strbuf buf = STRBUF_INIT, nbuf = STRBUF_INIT;
- while (strbuf_getline(&buf, stdin, '\n') != EOF) {
+ while (strbuf_getline(&buf, stdin) != EOF) {
if (buf.buf[0] == '"') {
strbuf_reset(&nbuf);
if (unquote_c_style(&nbuf, buf.buf, NULL))
#include "trailer.h"
static const char * const git_interpret_trailers_usage[] = {
- N_("git interpret-trailers [--trim-empty] [(--trailer <token>[(=|:)<value>])...] [<file>...]"),
+ N_("git interpret-trailers [--in-place] [--trim-empty] [(--trailer <token>[(=|:)<value>])...] [<file>...]"),
NULL
};
int cmd_interpret_trailers(int argc, const char **argv, const char *prefix)
{
+ int in_place = 0;
int trim_empty = 0;
struct string_list trailers = STRING_LIST_INIT_DUP;
struct option options[] = {
+ OPT_BOOL(0, "in-place", &in_place, N_("edit files in place")),
OPT_BOOL(0, "trim-empty", &trim_empty, N_("trim empty trailers")),
OPT_STRING_LIST(0, "trailer", &trailers, N_("trailer"),
N_("trailer(s) to add")),
if (argc) {
int i;
for (i = 0; i < argc; i++)
- process_trailers(argv[i], trim_empty, &trailers);
- } else
- process_trailers(NULL, trim_empty, &trailers);
+ process_trailers(argv[i], in_place, trim_empty, &trailers);
+ } else {
+ if (in_place)
+ die(_("no input file given for in-place editing"));
+ process_trailers(NULL, in_place, trim_empty, &trailers);
+ }
string_list_clear(&trailers, 0);
static const char *signature = git_version_string;
static const char *signature_file;
static int config_cover_letter;
+static const char *config_output_directory;
enum {
COVER_UNSET,
config_cover_letter = git_config_bool(var, value) ? COVER_ON : COVER_OFF;
return 0;
}
+ if (!strcmp(var, "format.outputdirectory"))
+ return git_config_string(&config_output_directory, var, value);
return git_log_config(var, value, cb);
}
if (rev.show_notes)
init_display_notes(&rev.notes_opt);
+ if (!output_directory && !use_stdout)
+ output_directory = config_output_directory;
+
if (!use_stdout)
output_directory = set_outdir(prefix, output_directory);
else
static int show_valid_bit;
static int line_terminator = '\n';
static int debug_mode;
+static int show_eol;
static const char *prefix;
static int max_prefix_len;
static const char *tag_skip_worktree = "";
static const char *tag_resolve_undo = "";
+static void write_eolinfo(const struct cache_entry *ce, const char *path)
+{
+ if (!show_eol)
+ return;
+ else {
+ struct stat st;
+ const char *i_txt = "";
+ const char *w_txt = "";
+ const char *a_txt = get_convert_attr_ascii(path);
+ if (ce && S_ISREG(ce->ce_mode))
+ i_txt = get_cached_convert_stats_ascii(ce->name);
+ if (!lstat(path, &st) && S_ISREG(st.st_mode))
+ w_txt = get_wt_convert_stats_ascii(path);
+ printf("i/%-5s w/%-5s attr/%-17s\t", i_txt, w_txt, a_txt);
+ }
+}
+
static void write_name(const char *name)
{
/*
return;
fputs(tag, stdout);
+ write_eolinfo(NULL, ent->name);
write_name(ent->name);
}
find_unique_abbrev(ce->sha1,abbrev),
ce_stage(ce));
}
+ write_eolinfo(ce, ce->name);
write_name(ce->name);
if (debug_mode) {
const struct stat_data *sd = &ce->ce_stat_data;
OPT_BIT(0, "directory", &dir.flags,
N_("show 'other' directories' names only"),
DIR_SHOW_OTHER_DIRECTORIES),
+ OPT_BOOL(0, "eol", &show_eol, N_("show line endings of files")),
OPT_NEGBIT(0, "empty-directory", &dir.flags,
N_("don't show empty directories"),
DIR_HIDE_EMPTY_DIRECTORIES),
#include "transport.h"
#include "remote.h"
-static const char ls_remote_usage[] =
-"git ls-remote [--heads] [--tags] [--upload-pack=<exec>]\n"
-" [-q | --quiet] [--exit-code] [--get-url] [<repository> [<refs>...]]";
+static const char * const ls_remote_usage[] = {
+ N_("git ls-remote [--heads] [--tags] [--refs] [--upload-pack=<exec>]\n"
+ " [-q | --quiet] [--exit-code] [--get-url]\n"
+ " [--symref] [<repository> [<refs>...]]"),
+ NULL
+};
/*
* Is there one among the list of patterns that match the tail part
int cmd_ls_remote(int argc, const char **argv, const char *prefix)
{
- int i;
const char *dest = NULL;
unsigned flags = 0;
int get_url = 0;
int quiet = 0;
int status = 0;
+ int show_symref_target = 0;
const char *uploadpack = NULL;
const char **pattern = NULL;
struct transport *transport;
const struct ref *ref;
- if (argc == 2 && !strcmp("-h", argv[1]))
- usage(ls_remote_usage);
+ struct option options[] = {
+ OPT__QUIET(&quiet, N_("do not print remote URL")),
+ OPT_STRING(0, "upload-pack", &uploadpack, N_("exec"),
+ N_("path of git-upload-pack on the remote host")),
+ { OPTION_STRING, 0, "exec", &uploadpack, N_("exec"),
+ N_("path of git-upload-pack on the remote host"),
+ PARSE_OPT_HIDDEN },
+ OPT_BIT('t', "tags", &flags, N_("limit to tags"), REF_TAGS),
+ OPT_BIT('h', "heads", &flags, N_("limit to heads"), REF_HEADS),
+ OPT_BIT(0, "refs", &flags, N_("do not show peeled tags"), REF_NORMAL),
+ OPT_BOOL(0, "get-url", &get_url,
+ N_("take url.<base>.insteadOf into account")),
+ OPT_SET_INT(0, "exit-code", &status,
+ N_("exit with exit code 2 if no matching refs are found"), 2),
+ OPT_BOOL(0, "symref", &show_symref_target,
+ N_("show underlying ref in addition to the object pointed by it")),
+ OPT_END()
+ };
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
+ argc = parse_options(argc, argv, prefix, options, ls_remote_usage,
+ PARSE_OPT_STOP_AT_NON_OPTION);
+ dest = argv[0];
- if (*arg == '-') {
- if (starts_with(arg, "--upload-pack=")) {
- uploadpack = arg + 14;
- continue;
- }
- if (starts_with(arg, "--exec=")) {
- uploadpack = arg + 7;
- continue;
- }
- if (!strcmp("--tags", arg) || !strcmp("-t", arg)) {
- flags |= REF_TAGS;
- continue;
- }
- if (!strcmp("--heads", arg) || !strcmp("-h", arg)) {
- flags |= REF_HEADS;
- continue;
- }
- if (!strcmp("--refs", arg)) {
- flags |= REF_NORMAL;
- continue;
- }
- if (!strcmp("--quiet", arg) || !strcmp("-q", arg)) {
- quiet = 1;
- continue;
- }
- if (!strcmp("--get-url", arg)) {
- get_url = 1;
- continue;
- }
- if (!strcmp("--exit-code", arg)) {
- /* return this code if no refs are reported */
- status = 2;
- continue;
- }
- usage(ls_remote_usage);
- }
- dest = arg;
- i++;
- break;
+ if (argc > 1) {
+ int i;
+ pattern = xcalloc(argc, sizeof(const char *));
+ for (i = 1; i < argc; i++)
+ pattern[i - 1] = xstrfmt("*/%s", argv[i]);
}
- if (argv[i]) {
- int j;
- pattern = xcalloc(argc - i + 1, sizeof(const char *));
- for (j = i; j < argc; j++)
- pattern[j - i] = xstrfmt("*/%s", argv[j]);
- }
remote = remote_get(dest);
if (!remote) {
if (dest)
continue;
if (!tail_match(pattern, ref->name))
continue;
- printf("%s %s\n", oid_to_hex(&ref->old_oid), ref->name);
+ if (show_symref_target && ref->symref)
+ printf("ref: %s\t%s\n", ref->symref, ref->name);
+ printf("%s\t%s\n", oid_to_hex(&ref->old_oid), ref->name);
status = 0; /* we found something */
}
return status;
NULL
};
-static void mktree_line(char *buf, size_t len, int line_termination, int allow_missing)
+static void mktree_line(char *buf, size_t len, int nul_term_line, int allow_missing)
{
char *ptr, *ntr;
unsigned mode;
*ntr++ = 0; /* now at the beginning of SHA1 */
path = ntr + 41; /* at the beginning of name */
- if (line_termination && path[0] == '"') {
+ if (!nul_term_line && path[0] == '"') {
struct strbuf p_uq = STRBUF_INIT;
if (unquote_c_style(&p_uq, path, NULL))
die("invalid quoting");
{
struct strbuf sb = STRBUF_INIT;
unsigned char sha1[20];
- int line_termination = '\n';
+ int nul_term_line = 0;
int allow_missing = 0;
int is_batch_mode = 0;
int got_eof = 0;
+ strbuf_getline_fn getline_fn;
const struct option option[] = {
- OPT_SET_INT('z', NULL, &line_termination, N_("input is NUL terminated"), '\0'),
+ OPT_BOOL('z', NULL, &nul_term_line, N_("input is NUL terminated")),
OPT_SET_INT( 0 , "missing", &allow_missing, N_("allow missing objects"), 1),
OPT_SET_INT( 0 , "batch", &is_batch_mode, N_("allow creation of more than one tree"), 1),
OPT_END()
};
ac = parse_options(ac, av, prefix, option, mktree_usage, 0);
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
while (!got_eof) {
while (1) {
- if (strbuf_getline(&sb, stdin, line_termination) == EOF) {
+ if (getline_fn(&sb, stdin) == EOF) {
got_eof = 1;
break;
}
break;
die("input format error: (blank line only valid in batch mode)");
}
- mktree_line(sb.buf, sb.len, line_termination, allow_missing);
+ mktree_line(sb.buf, sb.len, nul_term_line, allow_missing);
}
if (is_batch_mode && got_eof && used < 1) {
/*
if (!c)
return 0;
} else {
- init_notes(NULL, NULL, NULL, 0);
+ init_notes(NULL, NULL, NULL, NOTES_INIT_WRITABLE);
t = &default_notes_tree;
}
- while (strbuf_getline(&buf, stdin, '\n') != EOF) {
+ while (strbuf_getline_lf(&buf, stdin) != EOF) {
unsigned char from_obj[20], to_obj[20];
struct strbuf **split;
int err;
return ret;
}
-static struct notes_tree *init_notes_check(const char *subcommand)
+static struct notes_tree *init_notes_check(const char *subcommand,
+ int flags)
{
struct notes_tree *t;
- init_notes(NULL, NULL, NULL, 0);
+ const char *ref;
+ init_notes(NULL, NULL, NULL, flags);
t = &default_notes_tree;
- if (!starts_with(t->ref, "refs/notes/"))
+ ref = (flags & NOTES_INIT_WRITABLE) ? t->update_ref : t->ref;
+ if (!starts_with(ref, "refs/notes/"))
die("Refusing to %s notes in %s (outside of refs/notes/)",
- subcommand, t->ref);
+ subcommand, ref);
return t;
}
usage_with_options(git_notes_list_usage, options);
}
- t = init_notes_check("list");
+ t = init_notes_check("list", 0);
if (argc) {
if (get_sha1(argv[0], object))
die(_("Failed to resolve '%s' as a valid ref."), argv[0]);
if (get_sha1(object_ref, object))
die(_("Failed to resolve '%s' as a valid ref."), object_ref);
- t = init_notes_check("add");
+ t = init_notes_check("add", NOTES_INIT_WRITABLE);
note = get_note(t, object);
if (note) {
if (get_sha1(object_ref, object))
die(_("Failed to resolve '%s' as a valid ref."), object_ref);
- t = init_notes_check("copy");
+ t = init_notes_check("copy", NOTES_INIT_WRITABLE);
note = get_note(t, object);
if (note) {
if (get_sha1(object_ref, object))
die(_("Failed to resolve '%s' as a valid ref."), object_ref);
- t = init_notes_check(argv[0]);
+ t = init_notes_check(argv[0], NOTES_INIT_WRITABLE);
note = get_note(t, object);
prepare_note_data(object, &d, edit ? note : NULL);
if (get_sha1(object_ref, object))
die(_("Failed to resolve '%s' as a valid ref."), object_ref);
- t = init_notes_check("show");
+ t = init_notes_check("show", 0);
note = get_note(t, object);
if (!note)
o.local_ref = default_notes_ref();
strbuf_addstr(&remote_ref, argv[0]);
- expand_notes_ref(&remote_ref);
+ expand_loose_notes_ref(&remote_ref);
o.remote_ref = remote_ref.buf;
- t = init_notes_check("merge");
+ t = init_notes_check("merge", NOTES_INIT_WRITABLE);
if (strategy) {
if (parse_notes_merge_strategy(strategy, &o.strategy)) {
argc = parse_options(argc, argv, prefix, options,
git_notes_remove_usage, 0);
- t = init_notes_check("remove");
+ t = init_notes_check("remove", NOTES_INIT_WRITABLE);
if (!argc && !from_stdin) {
retval = remove_one_note(t, "HEAD", flag);
usage_with_options(git_notes_prune_usage, options);
}
- t = init_notes_check("prune");
+ t = init_notes_check("prune", NOTES_INIT_WRITABLE);
prune_notes(t, (verbose ? NOTES_PRUNE_VERBOSE : 0) |
(show_only ? NOTES_PRUNE_VERBOSE|NOTES_PRUNE_DRYRUN : 0) );
REBASE_INVALID = -1,
REBASE_FALSE = 0,
REBASE_TRUE,
- REBASE_PRESERVE
+ REBASE_PRESERVE,
+ REBASE_INTERACTIVE
};
/**
return REBASE_TRUE;
else if (!strcmp(value, "preserve"))
return REBASE_PRESERVE;
+ else if (!strcmp(value, "interactive"))
+ return REBASE_INTERACTIVE;
if (fatal)
die(_("Invalid value for %s: %s"), key, value);
static char *opt_tags;
static char *opt_prune;
static char *opt_recurse_submodules;
+static char *max_children;
static int opt_dry_run;
static char *opt_keep;
static char *opt_depth;
/* Options passed to git-merge or git-rebase */
OPT_GROUP(N_("Options related to merging")),
{ OPTION_CALLBACK, 'r', "rebase", &opt_rebase,
- "false|true|preserve",
+ "false|true|preserve|interactive",
N_("incorporate changes by rebasing rather than merging"),
PARSE_OPT_OPTARG, parse_opt_rebase },
OPT_PASSTHRU('n', NULL, &opt_diffstat, NULL,
N_("on-demand"),
N_("control recursive fetching of submodules"),
PARSE_OPT_OPTARG),
+ OPT_PASSTHRU('j', "jobs", &max_children, N_("n"),
+ N_("number of submodules pulled in parallel"),
+ PARSE_OPT_OPTARG),
OPT_BOOL(0, "dry-run", &opt_dry_run,
N_("dry run")),
OPT_PASSTHRU('k', "keep", &opt_keep, NULL,
if (!(fp = fopen(filename, "r")))
die_errno(_("could not open '%s' for reading"), filename);
- while (strbuf_getline(&sb, fp, '\n') != EOF) {
+ while (strbuf_getline_lf(&sb, fp) != EOF) {
if (get_sha1_hex(sb.buf, sha1))
continue; /* invalid line: does not start with SHA1 */
if (starts_with(sb.buf + GIT_SHA1_HEXSZ, "\tnot-for-merge\t"))
argv_array_push(&args, opt_prune);
if (opt_recurse_submodules)
argv_array_push(&args, opt_recurse_submodules);
+ if (max_children)
+ argv_array_push(&args, max_children);
if (opt_dry_run)
argv_array_push(&args, "--dry-run");
if (opt_keep)
/* Options passed to git-rebase */
if (opt_rebase == REBASE_PRESERVE)
argv_array_push(&args, "--preserve-merges");
+ else if (opt_rebase == REBASE_INTERACTIVE)
+ argv_array_push(&args, "--interactive");
if (opt_diffstat)
argv_array_push(&args, opt_diffstat);
argv_array_pushv(&args, opt_strategies.argv);
OPT_BIT( 0 , "all", &flags, N_("push all refs"), TRANSPORT_PUSH_ALL),
OPT_BIT( 0 , "mirror", &flags, N_("mirror all refs"),
(TRANSPORT_PUSH_MIRROR|TRANSPORT_PUSH_FORCE)),
- OPT_BOOL( 0, "delete", &deleterefs, N_("delete refs")),
+ OPT_BOOL('d', "delete", &deleterefs, N_("delete refs")),
OPT_BOOL( 0 , "tags", &tags, N_("push tags (can't be used with --all or --mirror)")),
OPT_BIT('n' , "dry-run", &flags, N_("dry run"), TRANSPORT_PUSH_DRY_RUN),
OPT_BIT( 0, "porcelain", &flags, N_("machine-readable output"), TRANSPORT_PUSH_PORCELAIN),
struct branch_info {
char *remote_name;
struct string_list merge;
- int rebase;
+ enum { NO_REBASE, NORMAL_REBASE, INTERACTIVE_REBASE } rebase;
};
static struct string_list branch_list;
if (v >= 0)
info->rebase = v;
else if (!strcmp(value, "preserve"))
- info->rebase = 1;
+ info->rebase = NORMAL_REBASE;
+ else if (!strcmp(value, "interactive"))
+ info->rebase = INTERACTIVE_REBASE;
}
}
return 0;
printf(" %-*s ", show_info->width, item->string);
if (branch_info->rebase) {
- printf_ln(_("rebases onto remote %s"), merge->items[0].string);
+ printf_ln(branch_info->rebase == INTERACTIVE_REBASE
+ ? _("rebases interactively onto remote %s")
+ : _("rebases onto remote %s"),
+ merge->items[0].string);
return 0;
} else if (show_info->any_rebase) {
printf_ln(_(" merges with remote %s"), merge->items[0].string);
return ret;
out = xfdopen(cmd.out, "r");
- while (strbuf_getline(&line, out, '\n') != EOF) {
+ while (strbuf_getline_lf(&line, out) != EOF) {
if (line.len != 40)
die("repack: Expecting 40 character sha1 lines only from pack-objects.");
string_list_append(&names, line.buf);
/* get the usage up to the first line with a -- on it */
for (;;) {
- if (strbuf_getline(&sb, stdin, '\n') == EOF)
+ if (strbuf_getline(&sb, stdin) == EOF)
die("premature end of input");
ALLOC_GROW(usage, unb + 1, usz);
if (!strcmp("--", sb.buf)) {
}
/* parse: (<short>|<short>,<long>|<long>)[*=?!]*<arghint>? SP+ <help> */
- while (strbuf_getline(&sb, stdin, '\n') != EOF) {
+ while (strbuf_getline(&sb, stdin) != EOF) {
const char *s;
const char *help;
struct option *o;
* "intent to add" entry.
*/
if (local_changes && staged_changes) {
- if (!index_only || !(ce->ce_flags & CE_INTENT_TO_ADD))
+ if (!index_only || !ce_intent_to_add(ce))
string_list_append(&files_staged, name);
}
else if (!index_only) {
argv_array_push(&all_refspecs, buf);
} else {
struct strbuf line = STRBUF_INIT;
- while (strbuf_getline(&line, stdin, '\n') != EOF)
+ while (strbuf_getline(&line, stdin) != EOF)
argv_array_push(&all_refspecs, line.buf);
strbuf_release(&line);
}
NULL
};
-static int compare_by_number(const void *a1, const void *a2)
+/*
+ * The util field of our string_list_items will contain one of two things:
+ *
+ * - if --summary is not in use, it will point to a string list of the
+ * oneline subjects assigned to this author
+ *
+ * - if --summary is in use, we don't need that list; we only need to know
+ * its size. So we abuse the pointer slot to store our integer counter.
+ *
+ * This macro accesses the latter.
+ */
+#define UTIL_TO_INT(x) ((intptr_t)(x)->util)
+
+static int compare_by_counter(const void *a1, const void *a2)
+{
+ const struct string_list_item *i1 = a1, *i2 = a2;
+ return UTIL_TO_INT(i2) - UTIL_TO_INT(i1);
+}
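+
As an aside, the pointer-slot-as-counter idiom described in the comment above can be shown with a minimal standalone sketch; the struct below is a hypothetical stand-in, not Git's string_list_item:

	#include <stdint.h>
	#include <stdio.h>

	struct item { void *util; };	/* stand-in for the real item type */

	int main(void)
	{
		struct item it = { NULL };
		/* bump the counter stored directly in the pointer slot */
		it.util = (void *)((intptr_t)it.util + 1);
		it.util = (void *)((intptr_t)it.util + 1);
		printf("%d\n", (int)(intptr_t)it.util);	/* prints 2 */
		return 0;
	}
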
+
+static int compare_by_list(const void *a1, const void *a2)
{
const struct string_list_item *i1 = a1, *i2 = a2;
const struct string_list *l1 = i1->util, *l2 = i2->util;
const char *author,
const char *oneline)
{
- const char *dot3 = log->common_repo_prefix;
- char *buffer, *p;
struct string_list_item *item;
const char *mailbuf, *namebuf;
size_t namelen, maillen;
- const char *eol;
- struct strbuf subject = STRBUF_INIT;
struct strbuf namemailbuf = STRBUF_INIT;
struct ident_split ident;
strbuf_addf(&namemailbuf, " <%.*s>", (int)maillen, mailbuf);
item = string_list_insert(&log->list, namemailbuf.buf);
- if (item->util == NULL)
- item->util = xcalloc(1, sizeof(struct string_list));
-
- /* Skip any leading whitespace, including any blank lines. */
- while (*oneline && isspace(*oneline))
- oneline++;
- eol = strchr(oneline, '\n');
- if (!eol)
- eol = oneline + strlen(oneline);
- if (starts_with(oneline, "[PATCH")) {
- char *eob = strchr(oneline, ']');
- if (eob && (!eol || eob < eol))
- oneline = eob + 1;
- }
- while (*oneline && isspace(*oneline) && *oneline != '\n')
- oneline++;
- format_subject(&subject, oneline, " ");
- buffer = strbuf_detach(&subject, NULL);
-
- if (dot3) {
- int dot3len = strlen(dot3);
- if (dot3len > 5) {
- while ((p = strstr(buffer, dot3)) != NULL) {
- int taillen = strlen(p) - dot3len;
- memcpy(p, "/.../", 5);
- memmove(p + 5, p + dot3len, taillen + 1);
+
+ if (log->summary)
+ item->util = (void *)(UTIL_TO_INT(item) + 1);
+ else {
+ const char *dot3 = log->common_repo_prefix;
+ char *buffer, *p;
+ struct strbuf subject = STRBUF_INIT;
+ const char *eol;
+
+ /* Skip any leading whitespace, including any blank lines. */
+ while (*oneline && isspace(*oneline))
+ oneline++;
+ eol = strchr(oneline, '\n');
+ if (!eol)
+ eol = oneline + strlen(oneline);
+ if (starts_with(oneline, "[PATCH")) {
+ char *eob = strchr(oneline, ']');
+ if (eob && (!eol || eob < eol))
+ oneline = eob + 1;
+ }
+ while (*oneline && isspace(*oneline) && *oneline != '\n')
+ oneline++;
+ format_subject(&subject, oneline, " ");
+ buffer = strbuf_detach(&subject, NULL);
+
+ if (dot3) {
+ int dot3len = strlen(dot3);
+ if (dot3len > 5) {
+ while ((p = strstr(buffer, dot3)) != NULL) {
+ int taillen = strlen(p) - dot3len;
+ memcpy(p, "/.../", 5);
+ memmove(p + 5, p + dot3len, taillen + 1);
+ }
}
}
- }
- string_list_append(item->util, buffer);
+ if (item->util == NULL)
+ item->util = xcalloc(1, sizeof(struct string_list));
+ string_list_append(item->util, buffer);
+ }
}
static void read_from_stdin(struct shortlog *log)
{
- char author[1024], oneline[1024];
+ struct strbuf author = STRBUF_INIT;
+ struct strbuf oneline = STRBUF_INIT;
- while (fgets(author, sizeof(author), stdin) != NULL) {
- if (!(author[0] == 'A' || author[0] == 'a') ||
- !starts_with(author + 1, "uthor: "))
+ while (strbuf_getline_lf(&author, stdin) != EOF) {
+ const char *v;
+ if (!skip_prefix(author.buf, "Author: ", &v) &&
+ !skip_prefix(author.buf, "author ", &v))
continue;
- while (fgets(oneline, sizeof(oneline), stdin) &&
- oneline[0] != '\n')
+ while (strbuf_getline_lf(&oneline, stdin) != EOF &&
+ oneline.len)
; /* discard headers */
- while (fgets(oneline, sizeof(oneline), stdin) &&
- oneline[0] == '\n')
+ while (strbuf_getline_lf(&oneline, stdin) != EOF &&
+ !oneline.len)
; /* discard blanks */
- insert_one_record(log, author + 8, oneline);
+ insert_one_record(log, v, oneline.buf);
}
+ strbuf_release(&author);
+ strbuf_release(&oneline);
}
void shortlog_add_commit(struct shortlog *log, struct commit *commit)
{
- const char *author = NULL, *buffer;
- struct strbuf buf = STRBUF_INIT;
- struct strbuf ufbuf = STRBUF_INIT;
-
- pp_commit_easy(CMIT_FMT_RAW, commit, &buf);
- buffer = buf.buf;
- while (*buffer && *buffer != '\n') {
- const char *eol = strchr(buffer, '\n');
-
- if (eol == NULL)
- eol = buffer + strlen(buffer);
+ struct strbuf author = STRBUF_INIT;
+ struct strbuf oneline = STRBUF_INIT;
+ struct pretty_print_context ctx = {0};
+
+ ctx.fmt = CMIT_FMT_USERFORMAT;
+ ctx.abbrev = log->abbrev;
+ ctx.subject = "";
+ ctx.after_subject = "";
+ ctx.date_mode.type = DATE_NORMAL;
+ ctx.output_encoding = get_log_output_encoding();
+
+ format_commit_message(commit, "%an <%ae>", &author, &ctx);
+ if (!log->summary) {
+ if (log->user_format)
+ pretty_print_commit(&ctx, commit, &oneline);
else
- eol++;
-
- if (starts_with(buffer, "author "))
- author = buffer + 7;
- buffer = eol;
- }
- if (!author) {
- warning(_("Missing author: %s"),
- oid_to_hex(&commit->object.oid));
- return;
+ format_commit_message(commit, "%s", &oneline, &ctx);
}
- if (log->user_format) {
- struct pretty_print_context ctx = {0};
- ctx.fmt = CMIT_FMT_USERFORMAT;
- ctx.abbrev = log->abbrev;
- ctx.subject = "";
- ctx.after_subject = "";
- ctx.date_mode.type = DATE_NORMAL;
- ctx.output_encoding = get_log_output_encoding();
- pretty_print_commit(&ctx, commit, &ufbuf);
- buffer = ufbuf.buf;
- } else if (*buffer) {
- buffer++;
- }
- insert_one_record(log, author, !*buffer ? "<none>" : buffer);
- strbuf_release(&ufbuf);
- strbuf_release(&buf);
+
+ insert_one_record(log, author.buf, oneline.len ? oneline.buf : "<none>");
+
+ strbuf_release(&author);
+ strbuf_release(&oneline);
}
static void get_from_rev(struct rev_info *rev, struct shortlog *log)
if (log->sort_by_number)
qsort(log->list.items, log->list.nr, sizeof(struct string_list_item),
- compare_by_number);
+ log->summary ? compare_by_counter : compare_by_list);
for (i = 0; i < log->list.nr; i++) {
- struct string_list *onelines = log->list.items[i].util;
-
+ const struct string_list_item *item = &log->list.items[i];
if (log->summary) {
- printf("%6d\t%s\n", onelines->nr, log->list.items[i].string);
+ printf("%6d\t%s\n", (int)UTIL_TO_INT(item), item->string);
} else {
- printf("%s (%d):\n", log->list.items[i].string, onelines->nr);
+ struct string_list *onelines = item->util;
+ printf("%s (%d):\n", item->string, onelines->nr);
for (j = onelines->nr - 1; j >= 0; j--) {
const char *msg = onelines->items[j].string;
printf(" %s\n", msg);
}
putchar('\n');
+ onelines->strdup_strings = 1;
+ string_list_clear(onelines, 0);
+ free(onelines);
}
- onelines->strdup_strings = 1;
- string_list_clear(onelines, 0);
- free(onelines);
log->list.items[i].util = NULL;
}
report("add '%s'", path);
}
-static void read_index_info(int line_termination)
+static void read_index_info(int nul_term_line)
{
struct strbuf buf = STRBUF_INIT;
struct strbuf uq = STRBUF_INIT;
+ strbuf_getline_fn getline_fn;
- while (strbuf_getline(&buf, stdin, line_termination) != EOF) {
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
+ while (getline_fn(&buf, stdin) != EOF) {
char *ptr, *tab;
char *path_name;
unsigned char sha1[20];
goto bad_line;
path_name = ptr;
- if (line_termination && path_name[0] == '"') {
+ if (!nul_term_line && path_name[0] == '"') {
strbuf_reset(&uq);
if (unquote_c_style(&uq, path_name, NULL)) {
die("git update-index: bad quoting of path name");
static int stdin_cacheinfo_callback(struct parse_opt_ctx_t *ctx,
const struct option *opt, int unset)
{
- int *line_termination = opt->value;
+ int *nul_term_line = opt->value;
if (ctx->argc != 1)
return error("option '%s' must be the last argument", opt->long_name);
allow_add = allow_replace = allow_remove = 1;
- read_index_info(*line_termination);
+ read_index_info(*nul_term_line);
return 0;
}
int cmd_update_index(int argc, const char **argv, const char *prefix)
{
- int newfd, entries, has_errors = 0, line_termination = '\n';
+ int newfd, entries, has_errors = 0, nul_term_line = 0;
int untracked_cache = -1;
int read_from_stdin = 0;
int prefix_length = prefix ? strlen(prefix) : 0;
int split_index = -1;
struct lock_file *lock_file;
struct parse_opt_ctx_t ctx;
+ strbuf_getline_fn getline_fn;
int parseopt_state = PARSE_OPT_UNKNOWN;
struct option options[] = {
OPT_BIT('q', NULL, &refresh_args.flags,
N_("add to index only; do not add content to object database"), 1),
OPT_SET_INT(0, "force-remove", &force_remove,
N_("remove named paths even if present in worktree"), 1),
- OPT_SET_INT('z', NULL, &line_termination,
- N_("with --stdin: input lines are terminated by null bytes"), '\0'),
+ OPT_BOOL('z', NULL, &nul_term_line,
+ N_("with --stdin: input lines are terminated by null bytes")),
{OPTION_LOWLEVEL_CALLBACK, 0, "stdin", &read_from_stdin, NULL,
N_("read list of paths to be updated from standard input"),
PARSE_OPT_NONEG | PARSE_OPT_NOARG,
(parse_opt_cb *) stdin_callback},
- {OPTION_LOWLEVEL_CALLBACK, 0, "index-info", &line_termination, NULL,
+ {OPTION_LOWLEVEL_CALLBACK, 0, "index-info", &nul_term_line, NULL,
N_("add entries from standard input to the index"),
PARSE_OPT_NONEG | PARSE_OPT_NOARG,
(parse_opt_cb *) stdin_cacheinfo_callback},
}
}
argc = parse_options_end(&ctx);
+
+ getline_fn = nul_term_line ? strbuf_getline_nul : strbuf_getline_lf;
if (preferred_index_format) {
if (preferred_index_format < INDEX_FORMAT_LB ||
INDEX_FORMAT_UB < preferred_index_format)
struct strbuf buf = STRBUF_INIT, nbuf = STRBUF_INIT;
setup_work_tree();
- while (strbuf_getline(&buf, stdin, line_termination) != EOF) {
+ while (getline_fn(&buf, stdin) != EOF) {
char *p;
- if (line_termination && buf.buf[0] == '"') {
+ if (!nul_term_line && buf.buf[0] == '"') {
strbuf_reset(&nbuf);
if (unquote_c_style(&nbuf, buf.buf, NULL))
die("line is badly quoted");
* they are not part of generated trees. Invalidate up
* to root to force cache-tree users to read elsewhere.
*/
- if (ce->ce_flags & CE_INTENT_TO_ADD) {
+ if (ce_intent_to_add(ce)) {
to_invalidate = 1;
continue;
}
#include "convert.h"
#include "trace.h"
#include "string-list.h"
+#include "pack-revindex.h"
#include SHA1_HEADER
#ifndef platform_SHA_CTX
#define ce_uptodate(ce) ((ce)->ce_flags & CE_UPTODATE)
#define ce_skip_worktree(ce) ((ce)->ce_flags & CE_SKIP_WORKTREE)
#define ce_mark_uptodate(ce) ((ce)->ce_flags |= CE_UPTODATE)
+#define ce_intent_to_add(ce) ((ce)->ce_flags & CE_INTENT_TO_ADD)
#define ce_permissions(mode) (((mode) & 0100) ? 0755 : 0644)
static inline unsigned int create_ce_mode(unsigned int mode)
freshened:1,
do_not_close:1;
unsigned char sha1[20];
+ struct revindex_entry *revindex;
/* something like ".git/objects/pack/xxxxx.pack" */
char pack_name[FLEX_ARRAY]; /* more */
} *packed_git;
#include "../run-command.h"
#include "../cache.h"
+#define HCAST(type, handle) ((type)(intptr_t)handle)
+
static const int delay[] = { 0, 1, 10, 20, 40 };
int err_win_to_posix(DWORD winerr)
errno = err_win_to_posix(GetLastError());
return -1;
}
- filedes[0] = _open_osfhandle((int)h[0], O_NOINHERIT);
+ filedes[0] = _open_osfhandle(HCAST(int, h[0]), O_NOINHERIT);
if (filedes[0] < 0) {
CloseHandle(h[0]);
CloseHandle(h[1]);
return -1;
}
- filedes[1] = _open_osfhandle((int)h[1], O_NOINHERIT);
+ filedes[1] = _open_osfhandle(HCAST(int, h[1]), O_NOINHERIT);
if (filedes[1] < 0) {
close(filedes[0]);
CloseHandle(h[1]);
die("cannot run browser");
printf("Launching default browser to display HTML ...\n");
- r = (int)ShellExecute(NULL, "open", htmlpath, NULL, "\\", SW_SHOWNORMAL);
+ r = HCAST(int, ShellExecute(NULL, "open", htmlpath,
+ NULL, "\\", SW_SHOWNORMAL));
FreeLibrary(shell32);
/* see the MSDN documentation referring to the result codes here */
if (r <= 32) {
+#ifdef __MINGW64_VERSION_MAJOR
+#include <stdint.h>
+#include <wchar.h>
+typedef _sigset_t sigset_t;
+#endif
#include <winsock2.h>
#include <ws2tcpip.h>
+/* MinGW-w64 reports to have flockfile, but it does not actually have it. */
+#ifdef __MINGW64_VERSION_MAJOR
+#undef _POSIX_THREAD_SAFE_FUNCTIONS
+#endif
+
/*
* things that are not available in header files
*/
-typedef int pid_t;
typedef int uid_t;
typedef int socklen_t;
+#ifndef __MINGW64_VERSION_MAJOR
+typedef int pid_t;
#define hstrerror strerror
+#endif
#define S_IFLNK 0120000 /* Symbolic link */
#define S_ISLNK(x) (((x) & S_IFMT) == S_IFLNK)
#define S_ISSOCK(x) 0
+#ifndef S_IRWXG
#define S_IRGRP 0
#define S_IWGRP 0
#define S_IXGRP 0
#define S_IRWXG (S_IRGRP | S_IWGRP | S_IXGRP)
+#endif
+#ifndef S_IRWXO
#define S_IROTH 0
#define S_IWOTH 0
#define S_IXOTH 0
#define S_IRWXO (S_IROTH | S_IWOTH | S_IXOTH)
+#endif
#define S_ISUID 0004000
#define S_ISGID 0002000
{ errno = ENOSYS; return -1; }
static inline int fchmod(int fildes, mode_t mode)
{ errno = ENOSYS; return -1; }
+#ifndef __MINGW64_VERSION_MAJOR
static inline pid_t fork(void)
{ errno = ENOSYS; return -1; }
+#endif
static inline unsigned int alarm(unsigned int seconds)
{ return 0; }
static inline int fsync(int fd)
unsigned int sleep (unsigned int seconds);
int mkstemp(char *template);
int gettimeofday(struct timeval *tv, void *tz);
+#ifndef __MINGW64_VERSION_MAJOR
struct tm *gmtime_r(const time_t *timep, struct tm *result);
struct tm *localtime_r(const time_t *timep, struct tm *result);
+#endif
int getpagesize(void); /* defined in MinGW's libgcc.a */
struct passwd *getpwuid(uid_t uid);
int setitimer(int type, struct itimerval *in, struct itimerval *out);
/*
* Use mingw specific stat()/lstat()/fstat() implementations on Windows.
*/
+#ifndef __MINGW64_VERSION_MAJOR
#define off_t off64_t
#define lseek _lseeki64
+#endif
/* use struct stat with 64 bit st_size */
#ifdef stat
int mingw_offset_1st_component(const char *path);
#define offset_1st_component mingw_offset_1st_component
#define PATH_SEP ';'
+#ifndef __MINGW64_VERSION_MAJOR
#define PRIuMAX "I64u"
#define PRId64 "I64d"
+#else
+#include <inttypes.h>
+#endif
void mingw_open_html(const char *path);
#define open_html mingw_open_html
inlining are defined as macros, so these aren't used for them.
*/
+#ifdef __MINGW64_VERSION_MAJOR
+#undef FORCEINLINE
+#endif
#ifndef FORCEINLINE
#if defined(__GNUC__)
#define FORCEINLINE __inline __attribute__ ((always_inline))
/*** Atomic operations ***/
#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100
+ #undef _ReadWriteBarrier
#define _ReadWriteBarrier() __sync_synchronize()
#else
static __inline__ __attribute__((always_inline)) long __sync_lock_test_and_set(volatile long * const Target, const long Value)
volatile long threadid;
};
+static inline int return_0(int i) { return 0; }
#define MLOCK_T struct win32_mlock_t
#define CURRENT_THREAD win32_getcurrentthreadid()
-#define INITIAL_LOCK(sl) (memset(sl, 0, sizeof(MLOCK_T)), 0)
+#define INITIAL_LOCK(sl) (memset(sl, 0, sizeof(MLOCK_T)), return_0(0))
#define ACQUIRE_LOCK(sl) win32_acquire_lock(sl)
#define RELEASE_LOCK(sl) win32_release_lock(sl)
#define TRY_LOCK(sl) win32_try_lock(sl)
#ifdef WIN32_NATIVE
-#define IsConsoleHandle(h) (((long) (h) & 3) == 3)
+#define IsConsoleHandle(h) (((long) (intptr_t) (h) & 3) == 3)
static BOOL
IsSocketHandle (HANDLE h)
fputs(prompt, output_fh);
fflush(output_fh);
- r = strbuf_getline(&buf, input_fh, '\n');
+ r = strbuf_getline_lf(&buf, input_fh);
if (!echo) {
putc('\n', output_fh);
fflush(output_fh);
*/
#define pthread_mutex_t CRITICAL_SECTION
-#define pthread_mutex_init(a,b) (InitializeCriticalSection((a)), 0)
+static inline int return_0(int i) {
+ return 0;
+}
+#define pthread_mutex_init(a,b) return_0((InitializeCriticalSection((a)), 0))
#define pthread_mutex_destroy(a) DeleteCriticalSection((a))
#define pthread_mutex_lock EnterCriticalSection
#define pthread_mutex_unlock LeaveCriticalSection
static inline int pthread_exit(void *ret)
{
- ExitThread((DWORD)ret);
+ ExitThread((DWORD)(intptr_t)ret);
}
typedef DWORD pthread_key_t;
static HANDLE hconsole1, hconsole2;
#ifdef __MINGW32__
+#if !defined(__MINGW64_VERSION_MAJOR) || __MINGW64_VERSION_MAJOR < 5
typedef struct _CONSOLE_FONT_INFOEX {
ULONG cbSize;
DWORD nFont;
WCHAR FaceName[LF_FACESIZE];
} CONSOLE_FONT_INFOEX, *PCONSOLE_FONT_INFOEX;
#endif
+#endif
typedef BOOL (WINAPI *PGETCURRENTCONSOLEFONTEX)(HANDLE, BOOL,
PCONSOLE_FONT_INFOEX);
HANDLE hresult, hproc = GetCurrentProcess();
if (!DuplicateHandle(hproc, hnd, hproc, &hresult, 0, TRUE,
DUPLICATE_SAME_ACCESS))
- die_lasterr("DuplicateHandle(%li) failed", (long) hnd);
+ die_lasterr("DuplicateHandle(%li) failed",
+ (long) (intptr_t) hnd);
return hresult;
}
NO_INET_NTOP = YesPlease
NO_POSIX_GOODIES = UnfortunatelyYes
DEFAULT_HELP_FORMAT = html
- COMPAT_CFLAGS += -D__USE_MINGW_ACCESS -D_USE_32BIT_TIME_T -DNOGDI -Icompat -Icompat/win32
+ COMPAT_CFLAGS += -DNOGDI -Icompat -Icompat/win32
COMPAT_CFLAGS += -DSTRIP_EXTENSION=\".exe\"
COMPAT_OBJS += compat/mingw.o compat/winansi.o \
compat/win32/pthread.o compat/win32/syslog.o \
compat/win32/dirent.o
BASIC_CFLAGS += -DPROTECT_NTFS_DEFAULT=1
- BASIC_LDFLAGS += -Wl,--large-address-aware
EXTLIBS += -lws2_32
GITLIBS += git.res
PTHREAD_LIBS =
INTERNAL_QSORT = YesPlease
HAVE_LIBCHARSET_H = YesPlease
NO_GETTEXT = YesPlease
+ COMPAT_CFLAGS += -D__USE_MINGW_ACCESS
else
- NO_CURL = YesPlease
+ ifeq ($(shell expr "$(uname_R)" : '2\.'),2)
+ # MSys2
+ prefix = /usr/
+ ifeq (MINGW32,$(MSYSTEM))
+ prefix = /mingw32
+ endif
+ ifeq (MINGW64,$(MSYSTEM))
+ prefix = /mingw64
+ else
+ COMPAT_CFLAGS += -D_USE_32BIT_TIME_T
+ BASIC_LDFLAGS += -Wl,--large-address-aware
+ endif
+ CC = gcc
+ COMPAT_CFLAGS += -D__USE_MINGW_ANSI_STDIO=0
+ INSTALL = /bin/install
+ NO_R_TO_GCC_LINKER = YesPlease
+ INTERNAL_QSORT = YesPlease
+ HAVE_LIBCHARSET_H = YesPlease
+ NO_GETTEXT = YesPlease
+ USE_LIBPCRE= YesPlease
+ NO_CURL =
+ USE_NED_ALLOCATOR = YesPlease
+ else
+ COMPAT_CFLAGS += -D__USE_MINGW_ANSI_STDIO
+ NO_CURL = YesPlease
+ endif
endif
endif
ifeq ($(uname_S),QNX)
--full-name --line-number
--extended-regexp --basic-regexp --fixed-strings
--perl-regexp
+ --threads
--files-with-matches --name-only
--files-without-match
--max-depth
return
;;
branch.*.rebase)
- __gitcomp "false true"
+ __gitcomp "false true preserve interactive"
return
;;
remote.pushdefault)
if test "$templatefile" != ""
then
# Test whether this is just the unaltered template.
- if cnt=`sed -e '/^#/d' < "$templatefile" |
+ if cnt=$(sed -e '/^#/d' < "$templatefile" |
git stripspace |
diff "$GIT_DIR"/COMMIT_BAREMSG - |
- wc -l` &&
+ wc -l) &&
test 0 -lt $cnt
then
have_commitmsg=t
fi
if test -z "$quiet"
then
- commit=`git diff-tree --always --shortstat --pretty="format:%h: %s"\
- --abbrev --summary --root HEAD --`
+ commit=$(git diff-tree --always --shortstat --pretty="format:%h: %s"\
+ --abbrev --summary --root HEAD --)
echo "Created${initial_commit:+ initial} commit $commit"
fi
fi
reflist=$(get_remote_refs_for_fetch "$@")
if test "$tags"
then
- taglist=`IFS=' ' &&
+ taglist=$(IFS=' ' &&
echo "$ls_remote_result" |
git show-ref --exclude-existing=refs/tags/ |
while read sha1 name
do
echo ".${name}:${name}"
- done` || exit
+ done) || exit
if test "$#" -gt 1
then
# remote URL plus explicit refspecs; we need to merge them.
if test "$exit" -eq 1
then
- cnt=`{
+ cnt=$({
git diff-files --name-only
git ls-files --unmerged
- } | wc -l`
+ } | wc -l)
if test $best_cnt -le 0 || test $cnt -le $best_cnt
then
best_strategy=$strategy
,t,)
args= existing=
if [ -d "$PACKDIR" ]; then
- for e in `cd "$PACKDIR" && find . -type f -name '*.pack' \
- | sed -e 's/^\.\///' -e 's/\.pack$//'`
+ for e in $(cd "$PACKDIR" && find . -type f -name '*.pack' \
+ | sed -e 's/^\.\///' -e 's/\.pack$//')
do
if [ -e "$PACKDIR/$e.keep" ]; then
: keep
}'
logmsg=$(git show -s --pretty=raw --encoding="$encoding" "$commit")
- set_author_env=`echo "$logmsg" |
- LANG=C LC_ALL=C sed -ne "$pick_author_script"`
+ set_author_env=$(echo "$logmsg" |
+ LANG=C LC_ALL=C sed -ne "$pick_author_script")
eval "$set_author_env"
export GIT_AUTHOR_NAME
export GIT_AUTHOR_EMAIL
esac >.msg
eval GITHEAD_$head=HEAD
-eval GITHEAD_$next='`git show -s \
+eval GITHEAD_$next='$(git show -s \
--pretty=oneline --encoding="$encoding" "$commit" |
- sed -e "s/^[^ ]* //"`'
+ sed -e "s/^[^ ]* //")'
export GITHEAD_$head GITHEAD_$next
# This three way merge is an interesting one. We are at
GIT_SUBTREE_XML := git-subtree.xml
GIT_SUBTREE_TXT := git-subtree.txt
GIT_SUBTREE_HTML := git-subtree.html
+GIT_SUBTREE_TEST := ../../git-subtree
all:: $(GIT_SUBTREE)
$(ASCIIDOC) -b xhtml11 -d manpage -f $(ASCIIDOC_CONF) \
-agit_version=$(GIT_VERSION) $^
-test:
+$(GIT_SUBTREE_TEST): $(GIT_SUBTREE)
+ cp $< $@
+
+test: $(GIT_SUBTREE_TEST)
$(MAKE) -C t/ test
clean:
case "$a" in
START) sq="$b" ;;
git-subtree-mainline:) main="$b" ;;
- git-subtree-split:) sub="$b" ;;
+ git-subtree-split:)
+ sub="$(git rev-parse "$b^0")" ||
+ die "could not rev-parse split hash $b from commit $sq"
+ ;;
END)
if [ -n "$sub" ]; then
if [ -n "$main" ]; then
case "$a" in
START) sq="$b" ;;
git-subtree-mainline:) main="$b" ;;
- git-subtree-split:) sub="$b" ;;
+ git-subtree-split:)
+ sub="$(git rev-parse "$b^0")" ||
+ die "could not rev-parse split hash $b from commit $sq"
+ ;;
END)
debug " Main is: '$main'"
if [ -z "$main" -a -n "$sub" ]; then
CMT_MSG=$(sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}")
DIFF=$(sed -e '1,/^---$/d' "${PATCH}")
-CCS=`echo -e "$CMT_MSG\n$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
- -e 's/^Signed-off-by: \(.*\)/\1,/gp'`
+CCS=$(echo -e "$CMT_MSG\n$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
+ -e 's/^Signed-off-by: \(.*\)/\1,/gp')
echo "$SUBJECT" > $1
echo "Cc: $CCS" >> $1
* translation when the "text" attribute or "auto_crlf" option is set.
*/
+/* Stat bits: When BIN is set, the txt bits are unset */
+#define CONVERT_STAT_BITS_TXT_LF 0x1
+#define CONVERT_STAT_BITS_TXT_CRLF 0x2
+#define CONVERT_STAT_BITS_BIN 0x4
+
enum crlf_action {
CRLF_GUESS = -1,
CRLF_BINARY = 0,
/*
* The same heuristics as diff.c::mmfile_is_binary()
+ * We treat files with bare CR as binary
*/
-static int is_binary(unsigned long size, struct text_stat *stats)
+static int convert_is_binary(unsigned long size, const struct text_stat *stats)
{
-
+ if (stats->cr != stats->crlf)
+ return 1;
if (stats->nul)
return 1;
if ((stats->printable >> 7) < stats->nonprintable)
return 1;
- /*
- * Other heuristics? Average line length might be relevant,
- * as might LF vs CR vs CRLF counts..
- *
- * NOTE! It might be normal to have a low ratio of CRLF to LF
- * (somebody starts with a LF-only file and edits it with an editor
- * that adds CRLF only to lines that are added..). But do we
- * want to support CR-only? Probably not.
- */
return 0;
}
+static unsigned int gather_convert_stats(const char *data, unsigned long size)
+{
+ struct text_stat stats;
+ if (!data || !size)
+ return 0;
+ gather_stats(data, size, &stats);
+ if (convert_is_binary(size, &stats))
+ return CONVERT_STAT_BITS_BIN;
+ else if (stats.crlf && stats.crlf == stats.lf)
+ return CONVERT_STAT_BITS_TXT_CRLF;
+ else if (stats.crlf && stats.lf)
+ return CONVERT_STAT_BITS_TXT_CRLF | CONVERT_STAT_BITS_TXT_LF;
+ else if (stats.lf)
+ return CONVERT_STAT_BITS_TXT_LF;
+ else
+ return 0;
+}
+
+static const char *gather_convert_stats_ascii(const char *data, unsigned long size)
+{
+ unsigned int convert_stats = gather_convert_stats(data, size);
+
+ if (convert_stats & CONVERT_STAT_BITS_BIN)
+ return "-text";
+ switch (convert_stats) {
+ case CONVERT_STAT_BITS_TXT_LF:
+ return "lf";
+ case CONVERT_STAT_BITS_TXT_CRLF:
+ return "crlf";
+ case CONVERT_STAT_BITS_TXT_LF | CONVERT_STAT_BITS_TXT_CRLF:
+ return "mixed";
+ default:
+ return "none";
+ }
+}
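+
A minimal sketch, assuming nothing beyond the counting rules encoded above (the NUL and non-printable heuristics are omitted), of how LF/CRLF counts map to the "lf", "crlf", "mixed" and "-text" labels; the helper below is hypothetical and not part of Git:

	#include <stdio.h>
	#include <string.h>

	static const char *classify(const char *s)
	{
		unsigned lf = 0, crlf = 0, cr = 0;
		size_t i, n = strlen(s);
		for (i = 0; i < n; i++) {
			if (s[i] == '\r') {
				cr++;
				if (i + 1 < n && s[i + 1] == '\n')
					crlf++;
			} else if (s[i] == '\n')
				lf++;
		}
		if (cr != crlf)
			return "-text";		/* bare CR counts as binary */
		if (crlf && crlf == lf)
			return "crlf";
		if (crlf && lf)
			return "mixed";
		if (lf)
			return "lf";
		return "none";
	}

	int main(void)
	{
		printf("%s\n", classify("a\nb\n"));	/* lf */
		printf("%s\n", classify("a\r\nb\r\n"));	/* crlf */
		printf("%s\n", classify("a\r\nb\n"));	/* mixed */
		printf("%s\n", classify("a\rb\n"));	/* -text */
		return 0;
	}
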
+
+const char *get_cached_convert_stats_ascii(const char *path)
+{
+ const char *ret;
+ unsigned long sz;
+ void *data = read_blob_data_from_cache(path, &sz);
+ ret = gather_convert_stats_ascii(data, sz);
+ free(data);
+ return ret;
+}
+
+const char *get_wt_convert_stats_ascii(const char *path)
+{
+ const char *ret = "";
+ struct strbuf sb = STRBUF_INIT;
+ if (strbuf_read_file(&sb, path, 0) >= 0)
+ ret = gather_convert_stats_ascii(sb.buf, sb.len);
+ strbuf_release(&sb);
+ return ret;
+}
+
static enum eol output_eol(enum crlf_action crlf_action)
{
switch (crlf_action) {
gather_stats(src, len, &stats);
if (crlf_action == CRLF_AUTO || crlf_action == CRLF_GUESS) {
- /*
- * We're currently not going to even try to convert stuff
- * that has bare CR characters. Does anybody do that crazy
- * stuff?
- */
- if (stats.cr != stats.crlf)
- return 0;
-
- /*
- * And add some heuristics for binary vs text, of course...
- */
- if (is_binary(len, &stats))
+ if (convert_is_binary(len, &stats))
return 0;
if (crlf_action == CRLF_GUESS) {
return 0;
}
- /* If we have any bare CR characters, we're not going to touch it */
- if (stats.cr != stats.crlf)
- return 0;
-
- if (is_binary(len, &stats))
+ if (convert_is_binary(len, &stats))
return 0;
}
return apply_filter(path, NULL, 0, -1, NULL, ca.drv->clean);
}
+const char *get_convert_attr_ascii(const char *path)
+{
+ struct conv_attrs ca;
+ enum crlf_action crlf_action;
+
+ convert_attrs(&ca, path);
+ crlf_action = input_crlf_action(ca.crlf_action, ca.eol_attr);
+ switch (crlf_action) {
+ case CRLF_GUESS:
+ return "";
+ case CRLF_BINARY:
+ return "-text";
+ case CRLF_TEXT:
+ return "text";
+ case CRLF_INPUT:
+ return "text eol=lf";
+ case CRLF_CRLF:
+ return "text=auto eol=crlf";
+ case CRLF_AUTO:
+ return "text=auto";
+ }
+ return "";
+}
+
int convert_to_git(const char *path, const char *src, size_t len,
struct strbuf *dst, enum safe_crlf checksafe)
{
};
extern enum eol core_eol;
+extern const char *get_cached_convert_stats_ascii(const char *path);
+extern const char *get_wt_convert_stats_ascii(const char *path);
+extern const char *get_convert_attr_ascii(const char *path);
/* returns 1 if *dst was used */
extern int convert_to_git(const char *path, const char *src, size_t len,
static struct strbuf item = STRBUF_INIT;
const char *p;
- strbuf_getline(&item, fh, '\n');
+ strbuf_getline_lf(&item, fh);
if (!skip_prefix(item.buf, "action=", &p))
return error("client sent bogus action line: %s", item.buf);
strbuf_addstr(action, p);
- strbuf_getline(&item, fh, '\n');
+ strbuf_getline_lf(&item, fh);
if (!skip_prefix(item.buf, "timeout=", &p))
return error("client sent bogus timeout line: %s", item.buf);
*timeout = atoi(p);
return found_credential;
}
- while (strbuf_getline(&line, fh, '\n') != EOF) {
+ while (strbuf_getline_lf(&line, fh) != EOF) {
credential_from_url(&entry, line.buf);
if (entry.username && entry.password &&
credential_match(c, &entry)) {
{
struct strbuf line = STRBUF_INIT;
- while (strbuf_getline(&line, fp, '\n') != EOF) {
+ while (strbuf_getline_lf(&line, fp) != EOF) {
char *key = line.buf;
char *value = strchr(key, '=');
return;
}
- while (strbuf_getline(&line, fp, '\n') != EOF) {
+ while (strbuf_getline_lf(&line, fp) != EOF) {
logerror("%s", line.buf);
strbuf_setlen(&line, 0);
}
struct recent_command *rc;
strbuf_detach(&command_buf, NULL);
- stdin_eof = strbuf_getline(&command_buf, stdin, '\n');
+ stdin_eof = strbuf_getline_lf(&command_buf, stdin);
if (stdin_eof)
return EOF;
strbuf_detach(&command_buf, NULL);
for (;;) {
- if (strbuf_getline(&command_buf, stdin, '\n') == EOF)
+ if (strbuf_getline_lf(&command_buf, stdin) == EOF)
die("EOF in data (terminator '%s' not found)", term);
if (term_len == command_buf.len
&& !strcmp(term, command_buf.buf))
if [ $# -eq 0 ]
then
cat <<!
-Usage: `basename $0` git-gui-glossary.txt > git-gui-glossary.pot
+Usage: $(basename $0) git-gui-glossary.txt > git-gui-glossary.pot
!
exit 1;
fi
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
-"POT-Creation-Date: `date +'%Y-%m-%d %H:%M%z'`\n"
+"POT-Creation-Date: $(date +'%Y-%m-%d %H:%M%z')\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
def p4_delete(f):
p4_system(["delete", wildcard_encode(f)])
-def p4_edit(f):
- p4_system(["edit", wildcard_encode(f)])
+def p4_edit(f, *options):
+ p4_system(["edit"] + list(options) + [wildcard_encode(f)])
def p4_revert(f):
p4_system(["revert", wildcard_encode(f)])
diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (self.diffOpts, id, id))
filesToAdd = set()
+ filesToChangeType = set()
filesToDelete = set()
editedFiles = set()
pureRenameCopy = set()
os.unlink(dest)
filesToDelete.add(src)
editedFiles.add(dest)
+ elif modifier == "T":
+ filesToChangeType.add(path)
else:
die("unknown modifier %s for %s" % (modifier, path))
#
system(applyPatchCmd)
+ for f in filesToChangeType:
+ p4_edit(f, "-t", "auto")
for f in filesToAdd:
p4_add(f)
for f in filesToDelete:
GIT_PREFIX_ENVIRONMENT
};
static char *orig_env[4];
-static int saved_environment;
+static int saved_env_before_alias;
-static void save_env(void)
+static void save_env_before_alias(void)
{
int i;
- if (saved_environment)
+ if (saved_env_before_alias)
return;
- saved_environment = 1;
+ saved_env_before_alias = 1;
orig_cwd = xgetcwd();
for (i = 0; i < ARRAY_SIZE(env_names); i++) {
orig_env[i] = getenv(env_names[i]);
}
}
-static void restore_env(void)
+static void restore_env(int external_alias)
{
int i;
- if (orig_cwd && chdir(orig_cwd))
+ if (!external_alias && orig_cwd && chdir(orig_cwd))
die_errno("could not move to %s", orig_cwd);
free(orig_cwd);
for (i = 0; i < ARRAY_SIZE(env_names); i++) {
+ if (external_alias &&
+ !strcmp(env_names[i], GIT_PREFIX_ENVIRONMENT))
+ continue;
if (orig_env[i])
setenv(env_names[i], orig_env[i], 1);
else
static int handle_alias(int *argcp, const char ***argv)
{
int envchanged = 0, ret = 0, saved_errno = errno;
- const char *subdir;
int count, option_count;
const char **new_argv;
const char *alias_command;
char *alias_string;
int unused_nongit;
- subdir = setup_git_directory_gently(&unused_nongit);
+ save_env_before_alias();
+ setup_git_directory_gently(&unused_nongit);
alias_command = (*argv)[0];
alias_string = alias_lookup(alias_command);
int argc = *argcp, i;
commit_pager_choice();
+ restore_env(1);
/* build alias_argv */
alias_argv = xmalloc(sizeof(*alias_argv) * (argc + 1));
ret = 1;
}
- if (subdir && chdir(subdir))
- die_errno("Cannot change to '%s'", subdir);
+ restore_env(0);
errno = saved_errno;
* RUN_SETUP for reading from the configuration file.
*/
#define NEED_WORK_TREE (1<<3)
-#define NO_SETUP (1<<4)
struct cmd_struct {
const char *cmd;
{ "cherry", cmd_cherry, RUN_SETUP },
{ "cherry-pick", cmd_cherry_pick, RUN_SETUP | NEED_WORK_TREE },
{ "clean", cmd_clean, RUN_SETUP | NEED_WORK_TREE },
- { "clone", cmd_clone, NO_SETUP },
+ { "clone", cmd_clone },
{ "column", cmd_column, RUN_SETUP_GENTLY },
{ "commit", cmd_commit, RUN_SETUP | NEED_WORK_TREE },
{ "commit-tree", cmd_commit_tree, RUN_SETUP },
{ "hash-object", cmd_hash_object },
{ "help", cmd_help },
{ "index-pack", cmd_index_pack, RUN_SETUP_GENTLY },
- { "init", cmd_init_db, NO_SETUP },
- { "init-db", cmd_init_db, NO_SETUP },
+ { "init", cmd_init_db },
+ { "init-db", cmd_init_db },
{ "interpret-trailers", cmd_interpret_trailers, RUN_SETUP_GENTLY },
{ "log", cmd_log, RUN_SETUP },
{ "ls-files", cmd_ls_files, RUN_SETUP },
builtin = get_builtin(cmd);
if (builtin) {
- if (saved_environment && (builtin->option & NO_SETUP))
- restore_env();
- else
+ /*
+ * XXX: if we can figure out cases where it is _safe_
+ * to do, we can avoid spawning a new process when
+ * saved_env_before_alias is true
+ * (i.e. setup_git_dir* has been run once)
+ */
+ if (!saved_env_before_alias)
exit(run_builtin(builtin, argc, argv));
}
}
*/
if (done_alias)
break;
- save_env();
if (!handle_alias(argcp, argv))
break;
done_alias = 1;
static long curl_low_speed_time = -1;
static int curl_ftp_no_epsv;
static const char *curl_http_proxy;
+static const char *http_proxy_authmethod;
+static struct {
+ const char *name;
+ long curlauth_param;
+} proxy_authmethods[] = {
+ { "basic", CURLAUTH_BASIC },
+ { "digest", CURLAUTH_DIGEST },
+ { "negotiate", CURLAUTH_GSSNEGOTIATE },
+ { "ntlm", CURLAUTH_NTLM },
+#ifdef LIBCURL_CAN_HANDLE_AUTH_ANY
+ { "anyauth", CURLAUTH_ANY },
+#endif
+ /*
+ * CURLAUTH_DIGEST_IE has no corresponding command-line option in
+ * curl(1) and is not included in CURLAUTH_ANY, so we leave it out
+ * here, too
+ */
+};
+static struct credential proxy_auth = CREDENTIAL_INIT;
+static const char *curl_proxyuserpwd;
static const char *curl_cookie_file;
static int curl_save_cookies;
struct credential http_auth = CREDENTIAL_INIT;
#else
slot->results->auth_avail = 0;
#endif
+
+ curl_easy_getinfo(slot->curl, CURLINFO_HTTP_CONNECTCODE,
+ &slot->results->http_connectcode);
}
/* Run callback if appropriate */
if (!strcmp("http.proxy", var))
return git_config_string(&curl_http_proxy, var, value);
+ if (!strcmp("http.proxyauthmethod", var))
+ return git_config_string(&http_proxy_authmethod, var, value);
+
if (!strcmp("http.cookiefile", var))
return git_config_string(&curl_cookie_file, var, value);
if (!strcmp("http.savecookies", var)) {
#endif
}
+/* *var must be free-able */
+static void var_override(const char **var, char *value)
+{
+ if (value) {
+ free((void *)*var);
+ *var = xstrdup(value);
+ }
+}
+
+static void set_proxyauth_name_password(CURL *result)
+{
+#if LIBCURL_VERSION_NUM >= 0x071301
+ curl_easy_setopt(result, CURLOPT_PROXYUSERNAME,
+ proxy_auth.username);
+ curl_easy_setopt(result, CURLOPT_PROXYPASSWORD,
+ proxy_auth.password);
+#else
+ struct strbuf s = STRBUF_INIT;
+
+ strbuf_addstr_urlencode(&s, proxy_auth.username, 1);
+ strbuf_addch(&s, ':');
+ strbuf_addstr_urlencode(&s, proxy_auth.password, 1);
+ curl_proxyuserpwd = strbuf_detach(&s, NULL);
+ curl_easy_setopt(result, CURLOPT_PROXYUSERPWD, curl_proxyuserpwd);
+#endif
+}
+
+static void init_curl_proxy_auth(CURL *result)
+{
+ if (proxy_auth.username) {
+ if (!proxy_auth.password)
+ credential_fill(&proxy_auth);
+ set_proxyauth_name_password(result);
+ }
+
+ var_override(&http_proxy_authmethod, getenv("GIT_HTTP_PROXY_AUTHMETHOD"));
+
+#if LIBCURL_VERSION_NUM >= 0x070a07 /* CURLOPT_PROXYAUTH and CURLAUTH_ANY */
+ if (http_proxy_authmethod) {
+ int i;
+ for (i = 0; i < ARRAY_SIZE(proxy_authmethods); i++) {
+ if (!strcmp(http_proxy_authmethod, proxy_authmethods[i].name)) {
+ curl_easy_setopt(result, CURLOPT_PROXYAUTH,
+ proxy_authmethods[i].curlauth_param);
+ break;
+ }
+ }
+ if (i == ARRAY_SIZE(proxy_authmethods)) {
+ warning("unsupported proxy authentication method %s: using anyauth",
+ http_proxy_authmethod);
+ curl_easy_setopt(result, CURLOPT_PROXYAUTH, CURLAUTH_ANY);
+ }
+ }
+ else
+ curl_easy_setopt(result, CURLOPT_PROXYAUTH, CURLAUTH_ANY);
+#endif
+}
+
static int has_cert_password(void)
{
if (ssl_cert == NULL || ssl_cert_password_required != 1)
curl_easy_setopt(result, CURLOPT_USE_SSL, CURLUSESSL_TRY);
#endif
+ /*
+ * CURL also examines these variables as a fallback; but we need to query
+ * them here in order to decide whether to prompt for missing password (cf.
+ * init_curl_proxy_auth()).
+ *
+ * Unlike many other common environment variables, these are historically
+ * lowercase only. It appears that CURL did not know this and implemented
+ * only uppercase variants, which was later corrected to take both - with
+ * the exception of http_proxy, which is lowercase only also in CURL. As
+ * the lowercase versions are the historical quasi-standard, they take
+ * precedence here, as in CURL.
+ */
+ if (!curl_http_proxy) {
+ if (!strcmp(http_auth.protocol, "https")) {
+ var_override(&curl_http_proxy, getenv("HTTPS_PROXY"));
+ var_override(&curl_http_proxy, getenv("https_proxy"));
+ } else {
+ var_override(&curl_http_proxy, getenv("http_proxy"));
+ }
+ if (!curl_http_proxy) {
+ var_override(&curl_http_proxy, getenv("ALL_PROXY"));
+ var_override(&curl_http_proxy, getenv("all_proxy"));
+ }
+ }
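+
For illustration only, the fallback order implemented above can be reduced to a chain of "later override wins if set" steps; the helper below is a simplified, non-allocating stand-in for var_override() and covers only the https case:

	#include <stdio.h>
	#include <stdlib.h>

	static void override(const char **var, const char *value)
	{
		if (value)
			*var = value;	/* the real helper frees and duplicates */
	}

	int main(void)
	{
		const char *proxy = NULL;
		override(&proxy, getenv("HTTPS_PROXY"));
		override(&proxy, getenv("https_proxy"));	/* lowercase wins if both are set */
		if (!proxy) {
			override(&proxy, getenv("ALL_PROXY"));
			override(&proxy, getenv("all_proxy"));
		}
		printf("%s\n", proxy ? proxy : "(no proxy configured)");
		return 0;
	}
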
+
if (curl_http_proxy) {
curl_easy_setopt(result, CURLOPT_PROXY, curl_http_proxy);
#if LIBCURL_VERSION_NUM >= 0x071800
curl_easy_setopt(result,
CURLOPT_PROXYTYPE, CURLPROXY_SOCKS4);
#endif
+ if (strstr(curl_http_proxy, "://"))
+ credential_from_url(&proxy_auth, curl_http_proxy);
+ else {
+ struct strbuf url = STRBUF_INIT;
+ strbuf_addf(&url, "http://%s", curl_http_proxy);
+ credential_from_url(&proxy_auth, url.buf);
+ strbuf_release(&url);
+ }
+
+ curl_easy_setopt(result, CURLOPT_PROXY, proxy_auth.host);
}
-#if LIBCURL_VERSION_NUM >= 0x070a07
- curl_easy_setopt(result, CURLOPT_PROXYAUTH, CURLAUTH_ANY);
-#endif
+ init_curl_proxy_auth(result);
set_curl_keepalive(result);
if (remote && remote->http_proxy)
curl_http_proxy = xstrdup(remote->http_proxy);
+ if (remote)
+ var_override(&http_proxy_authmethod, remote->http_proxy_authmethod);
+
pragma_header = curl_slist_append(pragma_header, "Pragma: no-cache");
no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:");
curl_http_proxy = NULL;
}
+ if (proxy_auth.password) {
+ memset(proxy_auth.password, 0, strlen(proxy_auth.password));
+ free(proxy_auth.password);
+ proxy_auth.password = NULL;
+ }
+
+ free((void *)curl_proxyuserpwd);
+ curl_proxyuserpwd = NULL;
+
+ free((void *)http_proxy_authmethod);
+ http_proxy_authmethod = NULL;
+
if (cert_auth.password != NULL) {
memset(cert_auth.password, 0, strlen(cert_auth.password));
free(cert_auth.password);
if (results->curl_result == CURLE_OK) {
credential_approve(&http_auth);
+ if (proxy_auth.password)
+ credential_approve(&proxy_auth);
return HTTP_OK;
} else if (missing_target(results))
return HTTP_MISSING_TARGET;
return HTTP_REAUTH;
}
} else {
+ if (results->http_connectcode == 407)
+ credential_reject(&proxy_auth);
#if LIBCURL_VERSION_NUM >= 0x070c00
if (!curl_errorstr[0])
strlcpy(curl_errorstr,
CURLcode curl_result;
long http_code;
long auth_avail;
+ long http_connectcode;
};
struct active_request_slot {
strerror(errno));
return -1;
}
- if (strbuf_getline(&mailnamebuf, mailname, '\n') == EOF) {
+ if (strbuf_getline(&mailnamebuf, mailname) == EOF) {
if (ferror(mailname))
warning("cannot read /etc/mailname: %s",
strerror(errno));
struct strbuf continuation = STRBUF_INIT;
/* Get the first part of the line. */
- if (strbuf_getline(line, in, '\n'))
+ if (strbuf_getline_lf(line, in))
return 0;
/*
peek = fgetc(in); ungetc(peek, in);
if (peek != ' ' && peek != '\t')
break;
- if (strbuf_getline(&continuation, in, '\n'))
+ if (strbuf_getline_lf(&continuation, in))
break;
continuation.buf[0] = ' ';
strbuf_rtrim(&continuation);
static int find_boundary(struct mailinfo *mi, struct strbuf *line)
{
- while (!strbuf_getline(line, mi->input, '\n')) {
+ while (!strbuf_getline_lf(line, mi->input)) {
if (*(mi->content_top) && is_multipart_boundary(mi, line))
return 1;
}
strbuf_release(&newline);
/* replenish line */
- if (strbuf_getline(line, mi->input, '\n'))
+ if (strbuf_getline_lf(line, mi->input))
return 0;
strbuf_addch(line, '\n');
return 1;
const char *validity)
{
struct strbuf ref = STRBUF_INIT;
- int flags = 0;
+ int flags = NOTES_INIT_WRITABLE;
memset(c, 0, sizeof(*c));
c->validity = xstrdup(validity);
strbuf_addf(&ref, "refs/notes/%s", name);
if (!notes_cache_match_validity(ref.buf, validity))
- flags = NOTES_INIT_EMPTY;
+ flags |= NOTES_INIT_EMPTY;
init_notes(&c->tree, ref.buf, combine_notes_overwrite, flags);
strbuf_release(&ref);
}
unsigned char tree_sha1[20];
unsigned char commit_sha1[20];
- if (!c || !c->tree.initialized || !c->tree.ref || !*c->tree.ref)
+ if (!c || !c->tree.initialized || !c->tree.update_ref ||
+ !*c->tree.update_ref)
return -1;
if (!c->tree.dirty)
return 0;
if (commit_tree(c->validity, strlen(c->validity), tree_sha1, NULL,
commit_sha1, NULL, NULL) < 0)
return -1;
- if (update_ref("update notes cache", c->tree.ref, commit_sha1, NULL,
- 0, UPDATE_REFS_QUIET_ON_ERR) < 0)
+ if (update_ref("update notes cache", c->tree.update_ref, commit_sha1,
+ NULL, 0, UPDATE_REFS_QUIET_ON_ERR) < 0)
return -1;
return 0;
if (!t)
t = &default_notes_tree;
- if (!t->initialized || !t->ref || !*t->ref)
+ if (!t->initialized || !t->update_ref || !*t->update_ref)
die(_("Cannot commit uninitialized/unreferenced notes tree"));
if (!t->dirty)
return; /* don't have to commit an unchanged tree */
create_notes_commit(t, NULL, buf.buf, buf.len, commit_sha1);
strbuf_insert(&buf, 0, "notes: ", 7); /* commit message starts at index 7 */
- update_ref(buf.buf, t->ref, commit_sha1, NULL, 0,
+ update_ref(buf.buf, t->update_ref, commit_sha1, NULL, 0,
UPDATE_REFS_DIE_ON_ERR);
strbuf_release(&buf);
free(c);
return NULL;
}
- c->trees = load_notes_trees(c->refs);
+ c->trees = load_notes_trees(c->refs, NOTES_INIT_WRITABLE);
string_list_clear(c->refs, 0);
free(c->refs);
return c;
t->first_non_note = NULL;
t->prev_non_note = NULL;
t->ref = xstrdup_or_null(notes_ref);
+ t->update_ref = (flags & NOTES_INIT_WRITABLE) ? t->ref : NULL;
t->combine_notes = combine_notes;
t->initialized = 1;
t->dirty = 0;
if (flags & NOTES_INIT_EMPTY || !notes_ref ||
- read_ref(notes_ref, object_sha1))
+ get_sha1_treeish(notes_ref, object_sha1))
return;
+ if (flags & NOTES_INIT_WRITABLE && read_ref(notes_ref, object_sha1))
+ die("Cannot use notes ref %s", notes_ref);
if (get_tree_entry(object_sha1, "", sha1, &mode))
die("Failed to read notes tree referenced by %s (%s)",
notes_ref, sha1_to_hex(object_sha1));
load_subtree(t, &root_tree, t->root, 0);
}
-struct notes_tree **load_notes_trees(struct string_list *refs)
+struct notes_tree **load_notes_trees(struct string_list *refs, int flags)
{
struct string_list_item *item;
int counter = 0;
trees = xmalloc((refs->nr+1) * sizeof(struct notes_tree *));
for_each_string_list_item(item, refs) {
struct notes_tree *t = xcalloc(1, sizeof(struct notes_tree));
- init_notes(t, item->string, combine_notes_ignore, 0);
+ init_notes(t, item->string, combine_notes_ignore, flags);
trees[counter++] = t;
}
trees[counter] = NULL;
item->string);
}
- display_notes_trees = load_notes_trees(&display_notes_refs);
+ display_notes_trees = load_notes_trees(&display_notes_refs, 0);
string_list_clear(&display_notes_refs, 0);
}
else
strbuf_insert(sb, 0, "refs/notes/", 11);
}
+
+void expand_loose_notes_ref(struct strbuf *sb)
+{
+ unsigned char object[20];
+
+ if (get_sha1(sb->buf, object)) {
+ /* fallback to expand_notes_ref */
+ expand_notes_ref(sb);
+ }
+}
struct int_node *root;
struct non_note *first_non_note, *prev_non_note;
char *ref;
+ char *update_ref;
combine_notes_fn combine_notes;
int initialized;
int dirty;
*/
#define NOTES_INIT_EMPTY 1
+/*
+ * By default, the notes tree is only readable, and the notes ref can be
+ * any treeish. The notes tree can however be made writable with this flag,
+ * in which case only strict ref names can be used.
+ */
+#define NOTES_INIT_WRITABLE 2
+
/*
* Initialize the given notes_tree with the notes tree structure at the given
* ref. If given ref is NULL, the value of the $GIT_NOTES_REF environment
* Load the notes tree from each ref listed in 'refs'. The output is
* an array of notes_tree*, terminated by a NULL.
*/
-struct notes_tree **load_notes_trees(struct string_list *refs);
+struct notes_tree **load_notes_trees(struct string_list *refs, int flags);
/*
* Add all refs that match 'glob' to the 'list'.
/* Expand inplace a note ref like "foo" or "notes/foo" into "refs/notes/foo" */
void expand_notes_ref(struct strbuf *sb);
+/*
+ * Similar to expand_notes_ref, but will check whether the ref can be located
+ * via get_sha1 first, and only falls back to expand_notes_ref in the case
+ * where get_sha1 fails.
+ */
+void expand_loose_notes_ref(struct strbuf *sb);
+
#endif
/* Packfile to which this bitmap index belongs to */
struct packed_git *pack;
- /* reverse index for the packfile */
- struct pack_revindex *reverse_index;
-
/*
* Mark the first `reuse_objects` in the packfile as reused:
* they will be sent as-is without using them for repacking
bitmap_git.bitmaps = kh_init_sha1();
bitmap_git.ext_index.positions = kh_init_sha1_pos();
- bitmap_git.reverse_index = revindex_for_pack(bitmap_git.pack);
+ load_pack_revindex(bitmap_git.pack);
if (!(bitmap_git.commits = read_bitmap_1(&bitmap_git)) ||
!(bitmap_git.trees = read_bitmap_1(&bitmap_git)) ||
if (!offset)
return -1;
- return find_revindex_position(bitmap_git.reverse_index, offset);
+ return find_revindex_position(bitmap_git.pack, offset);
}
static int bitmap_position(const unsigned char *sha1)
if (pos + offset < bitmap_git.reuse_objects)
continue;
- entry = &bitmap_git.reverse_index->revindex[pos + offset];
+ entry = &bitmap_git.pack->revindex[pos + offset];
sha1 = nth_packed_object_sha1(bitmap_git.pack, entry->nr);
if (bitmap_git.hashes)
return -1;
bitmap_git.reuse_objects = *entries = reuse_objects;
- *up_to = bitmap_git.reverse_index->revindex[reuse_objects].offset;
+ *up_to = bitmap_git.pack->revindex[reuse_objects].offset;
*packfile = bitmap_git.pack;
return 0;
struct revindex_entry *entry;
struct object_entry *oe;
- entry = &bitmap_git.reverse_index->revindex[i];
+ entry = &bitmap_git.pack->revindex[i];
sha1 = nth_packed_object_sha1(bitmap_git.pack, entry->nr);
oe = packlist_find(mapping, sha1, NULL);
* size is easily available by examining the pack entry header). It is
* also rather expensive to find the sha1 for an object given its offset.
*
- * We build a hashtable of existing packs (pack_revindex), and keep reverse
- * index here -- pack index file is sorted by object name mapping to offset;
- * this pack_revindex[].revindex array is a list of offset/index_nr pairs
+ * The pack index file is sorted by object name mapping to offset;
+ * this revindex array is a list of offset/index_nr pairs
* ordered by offset, so if you know the offset of an object, next offset
* is where its packed representation ends and the index_nr can be used to
* get the object sha1 from the main index.
*/
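
A rough standalone sketch, with made-up offsets and no Git types, of the offset-to-index lookup that the sorted revindex makes possible (the trailing sentinel marks where the last object ends):

	#include <stdio.h>

	struct entry { long offset; unsigned nr; };

	static int position_of(const struct entry *e, int n, long ofs)
	{
		int lo = 0, hi = n;	/* n includes the trailing sentinel */
		while (lo < hi) {
			int mi = lo + (hi - lo) / 2;
			if (e[mi].offset == ofs)
				return mi;
			else if (ofs < e[mi].offset)
				hi = mi;
			else
				lo = mi + 1;
		}
		return -1;	/* offset is not the start of any object */
	}

	int main(void)
	{
		/* offsets sorted ascending; nr is the position in the .idx file */
		struct entry e[] = { {0, 2}, {100, 0}, {250, 1}, {400, (unsigned)-1} };
		printf("%d\n", position_of(e, 4, 250));	/* prints 2 */
		return 0;
	}
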
-static struct pack_revindex *pack_revindex;
-static int pack_revindex_hashsz;
-
-static int pack_revindex_ix(struct packed_git *p)
-{
- unsigned long ui = (unsigned long)(intptr_t)p;
- int i;
-
- ui = ui ^ (ui >> 16); /* defeat structure alignment */
- i = (int)(ui % pack_revindex_hashsz);
- while (pack_revindex[i].p) {
- if (pack_revindex[i].p == p)
- return i;
- if (++i == pack_revindex_hashsz)
- i = 0;
- }
- return -1 - i;
-}
-
-static void init_pack_revindex(void)
-{
- int num;
- struct packed_git *p;
-
- for (num = 0, p = packed_git; p; p = p->next)
- num++;
- if (!num)
- return;
- pack_revindex_hashsz = num * 11;
- pack_revindex = xcalloc(pack_revindex_hashsz, sizeof(*pack_revindex));
- for (p = packed_git; p; p = p->next) {
- num = pack_revindex_ix(p);
- num = - 1 - num;
- pack_revindex[num].p = p;
- }
- /* revindex elements are lazily initialized */
-}
-
/*
* This is a least-significant-digit radix sort.
*
/*
* Ordered list of offsets of objects in the pack.
*/
-static void create_pack_revindex(struct pack_revindex *rix)
+static void create_pack_revindex(struct packed_git *p)
{
- struct packed_git *p = rix->p;
unsigned num_ent = p->num_objects;
unsigned i;
const char *index = p->index_data;
- rix->revindex = xmalloc(sizeof(*rix->revindex) * (num_ent + 1));
+ p->revindex = xmalloc(sizeof(*p->revindex) * (num_ent + 1));
index += 4 * 256;
if (p->index_version > 1) {
for (i = 0; i < num_ent; i++) {
uint32_t off = ntohl(*off_32++);
if (!(off & 0x80000000)) {
- rix->revindex[i].offset = off;
+ p->revindex[i].offset = off;
} else {
- rix->revindex[i].offset =
+ p->revindex[i].offset =
((uint64_t)ntohl(*off_64++)) << 32;
- rix->revindex[i].offset |=
+ p->revindex[i].offset |=
ntohl(*off_64++);
}
- rix->revindex[i].nr = i;
+ p->revindex[i].nr = i;
}
} else {
for (i = 0; i < num_ent; i++) {
uint32_t hl = *((uint32_t *)(index + 24 * i));
- rix->revindex[i].offset = ntohl(hl);
- rix->revindex[i].nr = i;
+ p->revindex[i].offset = ntohl(hl);
+ p->revindex[i].nr = i;
}
}
/* This knows the pack format -- the 20-byte trailer
* follows immediately after the last object data.
*/
- rix->revindex[num_ent].offset = p->pack_size - 20;
- rix->revindex[num_ent].nr = -1;
- sort_revindex(rix->revindex, num_ent, p->pack_size);
+ p->revindex[num_ent].offset = p->pack_size - 20;
+ p->revindex[num_ent].nr = -1;
+ sort_revindex(p->revindex, num_ent, p->pack_size);
}
-struct pack_revindex *revindex_for_pack(struct packed_git *p)
+void load_pack_revindex(struct packed_git *p)
{
- int num;
- struct pack_revindex *rix;
-
- if (!pack_revindex_hashsz)
- init_pack_revindex();
-
- num = pack_revindex_ix(p);
- if (num < 0)
- die("internal error: pack revindex fubar");
-
- rix = &pack_revindex[num];
- if (!rix->revindex)
- create_pack_revindex(rix);
-
- return rix;
+ if (!p->revindex)
+ create_pack_revindex(p);
}
-int find_revindex_position(struct pack_revindex *pridx, off_t ofs)
+int find_revindex_position(struct packed_git *p, off_t ofs)
{
int lo = 0;
- int hi = pridx->p->num_objects + 1;
- struct revindex_entry *revindex = pridx->revindex;
+ int hi = p->num_objects + 1;
+ struct revindex_entry *revindex = p->revindex;
do {
unsigned mi = lo + (hi - lo) / 2;
struct revindex_entry *find_pack_revindex(struct packed_git *p, off_t ofs)
{
- struct pack_revindex *pridx = revindex_for_pack(p);
- int pos = find_revindex_position(pridx, ofs);
+ int pos;
+
+ load_pack_revindex(p);
+ pos = find_revindex_position(p, ofs);
if (pos < 0)
return NULL;
- return pridx->revindex + pos;
+ return p->revindex + pos;
}
#ifndef PACK_REVINDEX_H
#define PACK_REVINDEX_H
+struct packed_git;
+
struct revindex_entry {
off_t offset;
unsigned int nr;
};
-struct pack_revindex {
- struct packed_git *p;
- struct revindex_entry *revindex;
-};
-
-struct pack_revindex *revindex_for_pack(struct packed_git *p);
-int find_revindex_position(struct pack_revindex *pridx, off_t ofs);
+void load_pack_revindex(struct packed_git *p);
+int find_revindex_position(struct packed_git *p, off_t ofs);
struct revindex_entry *find_pack_revindex(struct packed_git *p, off_t ofs);
* by definition never matches what is in the work tree until it
* actually gets added.
*/
- if (ce->ce_flags & CE_INTENT_TO_ADD)
+ if (ce_intent_to_add(ce))
return DATA_CHANGED | TYPE_CHANGED | MODE_CHANGED;
changed = ce_match_stat_basic(ce, st);
if (cache_errno == ENOENT)
fmt = deleted_fmt;
- else if (ce->ce_flags & CE_INTENT_TO_ADD)
+ else if (ce_intent_to_add(ce))
fmt = added_fmt; /* must be before other checks */
else if (changed & TYPE_CHANGED)
fmt = typechange_fmt;
/** rename ref, return 0 on success **/
extern int rename_ref(const char *oldref, const char *newref, const char *logmsg);
-extern int create_symref(const char *ref, const char *refs_heads_master, const char *logmsg);
+extern int create_symref(const char *refname, const char *target, const char *logmsg);
enum action_on_err {
UPDATE_REFS_MSG_ON_ERR,
if (read_ref_full(lock->ref_name,
mustexist ? RESOLVE_REF_READING : 0,
lock->old_oid.hash, NULL)) {
- int save_errno = errno;
- strbuf_addf(err, "can't verify ref %s", lock->ref_name);
- errno = save_errno;
- return -1;
+ if (old_sha1) {
+ int save_errno = errno;
+ strbuf_addf(err, "can't verify ref %s", lock->ref_name);
+ errno = save_errno;
+ return -1;
+ } else {
+ hashclr(lock->old_oid.hash);
+ return 0;
+ }
}
- if (hashcmp(lock->old_oid.hash, old_sha1)) {
+ if (old_sha1 && hashcmp(lock->old_oid.hash, old_sha1)) {
strbuf_addf(err, "ref %s is at %s but expected %s",
lock->ref_name,
sha1_to_hex(lock->old_oid.hash),
const char *orig_refname = refname;
struct ref_lock *lock;
int last_errno = 0;
- int type, lflags;
+ int type;
+ int lflags = 0;
int mustexist = (old_sha1 && !is_null_sha1(old_sha1));
int resolve_flags = 0;
int attempts_remaining = 3;
if (mustexist)
resolve_flags |= RESOLVE_REF_READING;
- if (flags & REF_DELETING) {
+ if (flags & REF_DELETING)
resolve_flags |= RESOLVE_REF_ALLOW_BAD_NAME;
- if (flags & REF_NODEREF)
- resolve_flags |= RESOLVE_REF_NO_RECURSE;
+ if (flags & REF_NODEREF) {
+ resolve_flags |= RESOLVE_REF_NO_RECURSE;
+ lflags |= LOCK_NO_DEREF;
}
refname = resolve_ref_unsafe(refname, resolve_flags,
goto error_return;
}
+
+ if (flags & REF_NODEREF)
+ refname = orig_refname;
+
/*
* If the ref did not exist and we are creating it, make sure
* there is no existing packed ref whose name begins with our
lock->lk = xcalloc(1, sizeof(struct lock_file));
- lflags = 0;
- if (flags & REF_NODEREF) {
- refname = orig_refname;
- lflags |= LOCK_NO_DEREF;
- }
lock->ref_name = xstrdup(refname);
lock->orig_ref_name = xstrdup(orig_refname);
strbuf_git_path(&ref_file, "%s", refname);
goto error_return;
}
}
- if (old_sha1 && verify_lock(lock, old_sha1, mustexist, err)) {
+ if (verify_lock(lock, old_sha1, mustexist, err)) {
last_errno = errno;
goto error_return;
}
return 0;
}
-int create_symref(const char *ref_target, const char *refs_heads_master,
- const char *logmsg)
+static int create_ref_symlink(struct ref_lock *lock, const char *target)
{
- char *lockpath = NULL;
- char ref[1000];
- int fd, len, written;
- char *git_HEAD = git_pathdup("%s", ref_target);
- unsigned char old_sha1[20], new_sha1[20];
- struct strbuf err = STRBUF_INIT;
-
- if (logmsg && read_ref(ref_target, old_sha1))
- hashclr(old_sha1);
-
- if (safe_create_leading_directories(git_HEAD) < 0)
- return error("unable to create directory for %s", git_HEAD);
-
+ int ret = -1;
#ifndef NO_SYMLINK_HEAD
- if (prefer_symlink_refs) {
- unlink(git_HEAD);
- if (!symlink(refs_heads_master, git_HEAD))
- goto done;
+ char *ref_path = get_locked_file_path(lock->lk);
+ unlink(ref_path);
+ ret = symlink(target, ref_path);
+ free(ref_path);
+
+ if (ret)
fprintf(stderr, "no symlink - falling back to symbolic ref\n");
- }
#endif
+ return ret;
+}
- len = snprintf(ref, sizeof(ref), "ref: %s\n", refs_heads_master);
- if (sizeof(ref) <= len) {
- error("refname too long: %s", refs_heads_master);
- goto error_free_return;
- }
- lockpath = mkpathdup("%s.lock", git_HEAD);
- fd = open(lockpath, O_CREAT | O_EXCL | O_WRONLY, 0666);
- if (fd < 0) {
- error("Unable to open %s for writing", lockpath);
- goto error_free_return;
- }
- written = write_in_full(fd, ref, len);
- if (close(fd) != 0 || written != len) {
- error("Unable to write to %s", lockpath);
- goto error_unlink_return;
- }
- if (rename(lockpath, git_HEAD) < 0) {
- error("Unable to create %s", git_HEAD);
- goto error_unlink_return;
- }
- if (adjust_shared_perm(git_HEAD)) {
- error("Unable to fix permissions on %s", lockpath);
- error_unlink_return:
- unlink_or_warn(lockpath);
- error_free_return:
- free(lockpath);
- free(git_HEAD);
- return -1;
+static void update_symref_reflog(struct ref_lock *lock, const char *refname,
+ const char *target, const char *logmsg)
+{
+ struct strbuf err = STRBUF_INIT;
+ unsigned char new_sha1[20];
+ if (logmsg && !read_ref(target, new_sha1) &&
+ log_ref_write(refname, lock->old_oid.hash, new_sha1, logmsg, 0, &err)) {
+ error("%s", err.buf);
+ strbuf_release(&err);
}
- free(lockpath);
+}
-#ifndef NO_SYMLINK_HEAD
- done:
-#endif
- if (logmsg && !read_ref(refs_heads_master, new_sha1) &&
- log_ref_write(ref_target, old_sha1, new_sha1, logmsg, 0, &err)) {
+static int create_symref_locked(struct ref_lock *lock, const char *refname,
+ const char *target, const char *logmsg)
+{
+ if (prefer_symlink_refs && !create_ref_symlink(lock, target)) {
+ update_symref_reflog(lock, refname, target, logmsg);
+ return 0;
+ }
+
+ if (!fdopen_lock_file(lock->lk, "w"))
+ return error("unable to fdopen %s: %s",
+ lock->lk->tempfile.filename.buf, strerror(errno));
+
+ update_symref_reflog(lock, refname, target, logmsg);
+
+ /* no error check; commit_ref will check ferror */
+ fprintf(lock->lk->tempfile.fp, "ref: %s\n", target);
+ if (commit_ref(lock) < 0)
+ return error("unable to write symref for %s: %s", refname,
+ strerror(errno));
+ return 0;
+}
+
+int create_symref(const char *refname, const char *target, const char *logmsg)
+{
+ struct strbuf err = STRBUF_INIT;
+ struct ref_lock *lock;
+ int ret;
+
+ lock = lock_ref_sha1_basic(refname, NULL, NULL, NULL, REF_NODEREF, NULL,
+ &err);
+ if (!lock) {
error("%s", err.buf);
strbuf_release(&err);
+ return -1;
}
- free(git_HEAD);
- return 0;
+ ret = create_symref_locked(lock, refname, target, logmsg);
+ unlock_ref(lock);
+ return ret;
}
int reflog_exists(const char *refname)
die("http transport does not support %s", buf->buf);
strbuf_reset(buf);
- if (strbuf_getline(buf, stdin, '\n') == EOF)
+ if (strbuf_getline_lf(buf, stdin) == EOF)
return;
if (!*buf->buf)
break;
die("http transport does not support %s", buf->buf);
strbuf_reset(buf);
- if (strbuf_getline(buf, stdin, '\n') == EOF)
+ if (strbuf_getline_lf(buf, stdin) == EOF)
goto free_specs;
if (!*buf->buf)
break;
do {
const char *arg;
- if (strbuf_getline(&buf, stdin, '\n') == EOF) {
+ if (strbuf_getline_lf(&buf, stdin) == EOF) {
if (ferror(stdin))
error("remote-curl: error reading command stream from git");
return 1;
fclose(marksfile);
} else {
strbuf_addf(&sb, ":%d ", latestrev);
- while (strbuf_getline(&line, marksfile, '\n') != EOF) {
+ while (strbuf_getline_lf(&line, marksfile) != EOF) {
if (starts_with(line.buf, sb.buf)) {
found++;
break;
marksfilename = marksfilename_sb.buf;
while (1) {
- if (strbuf_getline(&buf, stdin, '\n') == EOF) {
+ if (strbuf_getline_lf(&buf, stdin) == EOF) {
if (ferror(stdin))
die("Error reading command stream");
else
if (!f)
return;
remote->origin = REMOTE_REMOTES;
- while (strbuf_getline(&buf, f, '\n') != EOF) {
+ while (strbuf_getline(&buf, f) != EOF) {
const char *v;
strbuf_rtrim(&buf);
if (!f)
return;
- strbuf_getline(&buf, f, '\n');
+ strbuf_getline_lf(&buf, f);
fclose(f);
strbuf_trim(&buf);
if (!buf.len) {
} else if (!strcmp(subkey, ".proxy")) {
return git_config_string((const char **)&remote->http_proxy,
key, value);
+ } else if (!strcmp(subkey, ".proxyauthmethod")) {
+ return git_config_string((const char **)&remote->http_proxy_authmethod,
+ key, value);
} else if (!strcmp(subkey, ".vcs")) {
return git_config_string(&remote->foreign_vcs, key, value);
}
* for curl remotes only
*/
char *http_proxy;
+ char *http_proxy_authmethod;
};
struct remote *remote_get(const char *name);
static void read_pathspec_from_stdin(struct rev_info *revs, struct strbuf *sb,
struct cmdline_pathspec *prune)
{
- while (strbuf_getwholeline(sb, stdin, '\n') != EOF) {
- int len = sb->len;
- if (len && sb->buf[len - 1] == '\n')
- sb->buf[--len] = '\0';
+ while (strbuf_getline(sb, stdin) != EOF) {
ALLOC_GROW(prune->path, prune->nr + 1, prune->alloc);
prune->path[prune->nr++] = xstrdup(sb->buf);
}
warn_on_object_refname_ambiguity = 0;
strbuf_init(&sb, 1000);
- while (strbuf_getwholeline(&sb, stdin, '\n') != EOF) {
+ while (strbuf_getline(&sb, stdin) != EOF) {
int len = sb.len;
- if (len && sb.buf[len - 1] == '\n')
- sb.buf[--len] = '\0';
if (!len)
break;
if (sb.buf[0] == '-') {
#include "exec_cmd.h"
#include "sigchain.h"
#include "argv-array.h"
+#include "thread-utils.h"
+#include "strbuf.h"
void child_process_init(struct child_process *child)
{
error("waitpid is confused (%s)", argv0);
} else if (WIFSIGNALED(status)) {
code = WTERMSIG(status);
- if (code != SIGINT && code != SIGQUIT)
+ if (code != SIGINT && code != SIGQUIT && code != SIGPIPE)
error("%s died of signal %d", argv0, code);
/*
* This return value is chosen so that code & 0xff
close(cmd->out);
return finish_command(cmd);
}
+
+enum child_state {
+ GIT_CP_FREE,
+ GIT_CP_WORKING,
+ GIT_CP_WAIT_CLEANUP,
+};
+
+struct parallel_processes {
+ void *data;
+
+ int max_processes;
+ int nr_processes;
+
+ get_next_task_fn get_next_task;
+ start_failure_fn start_failure;
+ task_finished_fn task_finished;
+
+ struct {
+ enum child_state state;
+ struct child_process process;
+ struct strbuf err;
+ void *data;
+ } *children;
+ /*
+ * The struct pollfd is logically part of *children,
+ * but the system call expects it as its own array.
+ */
+ struct pollfd *pfd;
+
+ unsigned shutdown : 1;
+
+ int output_owner;
+ struct strbuf buffered_output; /* of finished children */
+};
+
+static int default_start_failure(struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void *pp_task_cb)
+{
+ int i;
+
+ strbuf_addstr(err, "Starting a child failed:");
+ for (i = 0; cp->argv[i]; i++)
+ strbuf_addf(err, " %s", cp->argv[i]);
+
+ return 0;
+}
+
+static int default_task_finished(int result,
+ struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void *pp_task_cb)
+{
+ int i;
+
+ if (!result)
+ return 0;
+
+ strbuf_addf(err, "A child failed with return code %d:", result);
+ for (i = 0; cp->argv[i]; i++)
+ strbuf_addf(err, " %s", cp->argv[i]);
+
+ return 0;
+}
+
+static void kill_children(struct parallel_processes *pp, int signo)
+{
+ int i, n = pp->max_processes;
+
+ for (i = 0; i < n; i++)
+ if (pp->children[i].state == GIT_CP_WORKING)
+ kill(pp->children[i].process.pid, signo);
+}
+
+static struct parallel_processes *pp_for_signal;
+
+static void handle_children_on_signal(int signo)
+{
+ kill_children(pp_for_signal, signo);
+ sigchain_pop(signo);
+ raise(signo);
+}
+
+static void pp_init(struct parallel_processes *pp,
+ int n,
+ get_next_task_fn get_next_task,
+ start_failure_fn start_failure,
+ task_finished_fn task_finished,
+ void *data)
+{
+ int i;
+
+ if (n < 1)
+ n = online_cpus();
+
+ pp->max_processes = n;
+
+ trace_printf("run_processes_parallel: preparing to run up to %d tasks", n);
+
+ pp->data = data;
+ if (!get_next_task)
+ die("BUG: you need to specify a get_next_task function");
+ pp->get_next_task = get_next_task;
+
+ pp->start_failure = start_failure ? start_failure : default_start_failure;
+ pp->task_finished = task_finished ? task_finished : default_task_finished;
+
+ pp->nr_processes = 0;
+ pp->output_owner = 0;
+ pp->shutdown = 0;
+ pp->children = xcalloc(n, sizeof(*pp->children));
+ pp->pfd = xcalloc(n, sizeof(*pp->pfd));
+ strbuf_init(&pp->buffered_output, 0);
+
+ for (i = 0; i < n; i++) {
+ strbuf_init(&pp->children[i].err, 0);
+ child_process_init(&pp->children[i].process);
+ pp->pfd[i].events = POLLIN | POLLHUP;
+ pp->pfd[i].fd = -1;
+ }
+
+ pp_for_signal = pp;
+ sigchain_push_common(handle_children_on_signal);
+}
+
+static void pp_cleanup(struct parallel_processes *pp)
+{
+ int i;
+
+ trace_printf("run_processes_parallel: done");
+ for (i = 0; i < pp->max_processes; i++) {
+ strbuf_release(&pp->children[i].err);
+ child_process_clear(&pp->children[i].process);
+ }
+
+ free(pp->children);
+ free(pp->pfd);
+
+ /*
+ * When get_next_task added messages to the buffer in its last
+	 * iteration, the buffered output is non-empty.
+ */
+ fputs(pp->buffered_output.buf, stderr);
+ strbuf_release(&pp->buffered_output);
+
+ sigchain_pop_common();
+}
+
+/* returns
+ * 0 if a new task was started.
+ * 1 if no new job was started (get_next_task ran out of work, or a
+ *    non-critical problem prevented a new command from starting)
+ * <0 if no new job was started and the caller wishes to shut down early.
+ *    Use the negative code to signal the children.
+ */
+static int pp_start_one(struct parallel_processes *pp)
+{
+ int i, code;
+
+ for (i = 0; i < pp->max_processes; i++)
+ if (pp->children[i].state == GIT_CP_FREE)
+ break;
+ if (i == pp->max_processes)
+ die("BUG: bookkeeping is hard");
+
+ code = pp->get_next_task(&pp->children[i].process,
+ &pp->children[i].err,
+ pp->data,
+ &pp->children[i].data);
+ if (!code) {
+ strbuf_addbuf(&pp->buffered_output, &pp->children[i].err);
+ strbuf_reset(&pp->children[i].err);
+ return 1;
+ }
+ pp->children[i].process.err = -1;
+ pp->children[i].process.stdout_to_stderr = 1;
+ pp->children[i].process.no_stdin = 1;
+
+ if (start_command(&pp->children[i].process)) {
+ code = pp->start_failure(&pp->children[i].process,
+ &pp->children[i].err,
+ pp->data,
+ &pp->children[i].data);
+ strbuf_addbuf(&pp->buffered_output, &pp->children[i].err);
+ strbuf_reset(&pp->children[i].err);
+ if (code)
+ pp->shutdown = 1;
+ return code;
+ }
+
+ pp->nr_processes++;
+ pp->children[i].state = GIT_CP_WORKING;
+ pp->pfd[i].fd = pp->children[i].process.err;
+ return 0;
+}
+
+static void pp_buffer_stderr(struct parallel_processes *pp, int output_timeout)
+{
+ int i;
+
+ while ((i = poll(pp->pfd, pp->max_processes, output_timeout)) < 0) {
+ if (errno == EINTR)
+ continue;
+ pp_cleanup(pp);
+ die_errno("poll");
+ }
+
+ /* Buffer output from all pipes. */
+ for (i = 0; i < pp->max_processes; i++) {
+ if (pp->children[i].state == GIT_CP_WORKING &&
+ pp->pfd[i].revents & (POLLIN | POLLHUP)) {
+ int n = strbuf_read_once(&pp->children[i].err,
+ pp->children[i].process.err, 0);
+ if (n == 0) {
+ close(pp->children[i].process.err);
+ pp->children[i].state = GIT_CP_WAIT_CLEANUP;
+ } else if (n < 0)
+ if (errno != EAGAIN)
+ die_errno("read");
+ }
+ }
+}
+
+static void pp_output(struct parallel_processes *pp)
+{
+ int i = pp->output_owner;
+ if (pp->children[i].state == GIT_CP_WORKING &&
+ pp->children[i].err.len) {
+ fputs(pp->children[i].err.buf, stderr);
+ strbuf_reset(&pp->children[i].err);
+ }
+}
+
+static int pp_collect_finished(struct parallel_processes *pp)
+{
+ int i, code;
+ int n = pp->max_processes;
+ int result = 0;
+
+ while (pp->nr_processes > 0) {
+ for (i = 0; i < pp->max_processes; i++)
+ if (pp->children[i].state == GIT_CP_WAIT_CLEANUP)
+ break;
+ if (i == pp->max_processes)
+ break;
+
+ code = finish_command(&pp->children[i].process);
+
+ code = pp->task_finished(code, &pp->children[i].process,
+ &pp->children[i].err, pp->data,
+ &pp->children[i].data);
+
+ if (code)
+ result = code;
+ if (code < 0)
+ break;
+
+ pp->nr_processes--;
+ pp->children[i].state = GIT_CP_FREE;
+ pp->pfd[i].fd = -1;
+ child_process_init(&pp->children[i].process);
+
+ if (i != pp->output_owner) {
+ strbuf_addbuf(&pp->buffered_output, &pp->children[i].err);
+ strbuf_reset(&pp->children[i].err);
+ } else {
+ fputs(pp->children[i].err.buf, stderr);
+ strbuf_reset(&pp->children[i].err);
+
+ /* Output all other finished child processes */
+ fputs(pp->buffered_output.buf, stderr);
+ strbuf_reset(&pp->buffered_output);
+
+ /*
+ * Pick next process to output live.
+ * NEEDSWORK:
+			 * For now we pick the next one in round-robin
+			 * order. Later we may want to pick the one with
+			 * the most output or the longest or shortest
+			 * running process time.
+ */
+ for (i = 0; i < n; i++)
+ if (pp->children[(pp->output_owner + i) % n].state == GIT_CP_WORKING)
+ break;
+ pp->output_owner = (pp->output_owner + i) % n;
+ }
+ }
+ return result;
+}
+
+int run_processes_parallel(int n,
+ get_next_task_fn get_next_task,
+ start_failure_fn start_failure,
+ task_finished_fn task_finished,
+ void *pp_cb)
+{
+ int i, code;
+ int output_timeout = 100;
+ int spawn_cap = 4;
+ struct parallel_processes pp;
+
+ pp_init(&pp, n, get_next_task, start_failure, task_finished, pp_cb);
+ while (1) {
+ for (i = 0;
+ i < spawn_cap && !pp.shutdown &&
+ pp.nr_processes < pp.max_processes;
+ i++) {
+ code = pp_start_one(&pp);
+ if (!code)
+ continue;
+ if (code < 0) {
+ pp.shutdown = 1;
+ kill_children(&pp, -code);
+ }
+ break;
+ }
+ if (!pp.nr_processes)
+ break;
+ pp_buffer_stderr(&pp, output_timeout);
+ pp_output(&pp);
+ code = pp_collect_finished(&pp);
+ if (code) {
+ pp.shutdown = 1;
+ if (code < 0)
+ kill_children(&pp, -code);
+ }
+ }
+
+ pp_cleanup(&pp);
+ return 0;
+}
int finish_async(struct async *async);
int in_async(void);
+/**
+ * This callback should initialize the child process and preload the
+ * error channel if desired. The preloading is useful if you want to
+ * have a message printed directly before the output of the child process.
+ * pp_cb is the callback cookie as passed to run_processes_parallel.
+ * You can store a child process specific callback cookie in pp_task_cb.
+ *
+ * Even after returning 0 to indicate that there are no more tasks,
+ * this function will be called again until there are no more running
+ * child processes.
+ *
+ * Return 1 if the next child is ready to run.
+ * Return 0 if there are currently no more tasks to be processed.
+ * To abort and send a signal to the other child processes,
+ * return the negative signal number.
+ */
+typedef int (*get_next_task_fn)(struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void **pp_task_cb);
+
+/**
+ * This callback is called whenever there are problems starting
+ * a new process.
+ *
+ * You must not write to stdout or stderr in this function. Add your
+ * message to the strbuf err instead, which will be printed without
+ * messing up the output of the other parallel processes.
+ *
+ * pp_cb is the callback cookie as passed into run_processes_parallel,
+ * pp_task_cb is the callback cookie as passed into get_next_task_fn.
+ *
+ * Return 0 to continue the parallel processing. To abort, return a
+ * non-zero value. To also send a signal to the other child processes,
+ * return the negative signal number.
+ */
+typedef int (*start_failure_fn)(struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void *pp_task_cb);
+
+/**
+ * This callback is called on every child process that finished processing.
+ *
+ * You must not write to stdout or stderr in this function. Add your
+ * message to the strbuf err instead, which will be printed without
+ * messing up the output of the other parallel processes.
+ *
+ * pp_cb is the callback cookie as passed into run_processes_parallel,
+ * pp_task_cb is the callback cookie as passed into get_next_task_fn.
+ *
+ * Return 0 to continue the parallel processing. To abort, return a
+ * non-zero value. To also send a signal to the other child processes,
+ * return the negative signal number.
+ */
+typedef int (*task_finished_fn)(int result,
+ struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void *pp_task_cb);
+
+/**
+ * Runs up to n processes at the same time. Whenever a process can be
+ * started, the callback get_next_task_fn is called to obtain the data
+ * required to start another child process.
+ *
+ * The children started via this function run in parallel. Their output
+ * (both stdout and stderr) is routed to stderr so that output from
+ * different tasks does not interleave.
+ *
+ * If start_failure_fn or task_finished_fn is NULL, default handlers
+ * will be used. The default handlers will print an error message on
+ * error without issuing an emergency stop.
+ */
+int run_processes_parallel(int n,
+ get_next_task_fn,
+ start_failure_fn,
+ task_finished_fn,
+ void *pp_cb);
+
#endif
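
For orientation only (this is not part of the patch): a minimal sketch of a caller
of the new API. Everything prefixed my_ is invented for illustration; the callback
signatures, the child_process/argv_array fields, and the strbuf helpers are the ones
used by the hunks above. The submodule fetch conversion later in this series is the
real in-tree user.

#include "cache.h"
#include "run-command.h"
#include "argv-array.h"

struct my_state {
	int next;
	int total;
};

static int my_next_task(struct child_process *cp, struct strbuf *err,
			void *pp_cb, void **pp_task_cb)
{
	struct my_state *state = pp_cb;

	if (state->next >= state->total)
		return 0;	/* no more tasks to hand out */

	/* preloaded message, shown just before this child's own output */
	strbuf_addf(err, "starting task %d\n", state->next);

	argv_array_push(&cp->args, "echo");
	argv_array_pushf(&cp->args, "task %d done", state->next);

	state->next++;
	return 1;		/* this child is ready to be started */
}

static int my_task_finished(int result, struct child_process *cp,
			    struct strbuf *err, void *pp_cb, void *pp_task_cb)
{
	/* non-zero stops new tasks from being spawned; running ones finish */
	return result ? 1 : 0;
}

int run_my_tasks(int jobs)
{
	struct my_state state = { 0, 4 };

	/* passing NULL selects the default start_failure handler */
	return run_processes_parallel(jobs, my_next_task, NULL,
				      my_task_finished, &state);
}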
if (!f)
return error(_("cannot open %s: %s"), git_path_head_file(),
strerror(errno));
- if (strbuf_getline(&buf, f, '\n')) {
+ if (strbuf_getline_lf(&buf, f)) {
error(_("cannot read %s: %s"), git_path_head_file(),
ferror(f) ? strerror(errno) : _("unexpected end of file"));
fclose(f);
struct strbuf line = STRBUF_INIT;
int found = 0;
- while (strbuf_getline(&line, in, '\n') != EOF) {
+ while (strbuf_getline(&line, in) != EOF) {
if (!strcmp(reference, line.buf)) {
found = 1;
break;
int count;
fprintf(stderr, "git> ");
- if (strbuf_getline(&line, stdin, '\n') == EOF) {
+ if (strbuf_getline_lf(&line, stdin) == EOF) {
fprintf(stderr, "\n");
strbuf_release(&line);
break;
sigchain_push(SIGQUIT, f);
sigchain_push(SIGPIPE, f);
}
+
+void sigchain_pop_common(void)
+{
+ sigchain_pop(SIGPIPE);
+ sigchain_pop(SIGQUIT);
+ sigchain_pop(SIGTERM);
+ sigchain_pop(SIGHUP);
+ sigchain_pop(SIGINT);
+}
int sigchain_pop(int sig);
void sigchain_push_common(sigchain_fun f);
+void sigchain_pop_common(void);
#endif /* SIGCHAIN_H */
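
As a side note (not part of the patch): sigchain_pop_common() is simply the mirror
of the existing sigchain_push_common(), popping the same five signals in reverse
order, so a caller can now bracket a critical section symmetrically. A small sketch;
guarded_work(), my_cleanup_on_signal() and the protected work are invented for this
illustration:

#include "cache.h"
#include "sigchain.h"

static void my_cleanup_on_signal(int signo)
{
	/* release temporary state here, then chain to the previous handler */
	sigchain_pop(signo);
	raise(signo);
}

static void guarded_work(void)
{
	sigchain_push_common(my_cleanup_on_signal);

	/* ... work that must be cleaned up on the common termination signals ... */

	sigchain_pop_common();
}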
return sb->len - oldlen;
}
+ssize_t strbuf_read_once(struct strbuf *sb, int fd, size_t hint)
+{
+ ssize_t cnt;
+
+ strbuf_grow(sb, hint ? hint : 8192);
+ cnt = xread(fd, sb->buf + sb->len, sb->alloc - sb->len - 1);
+ if (cnt > 0)
+ strbuf_setlen(sb, sb->len + cnt);
+ return cnt;
+}
+
#define STRBUF_MAXLINK (2*PATH_MAX)
int strbuf_readlink(struct strbuf *sb, const char *path, size_t hint)
}
#endif
-int strbuf_getline(struct strbuf *sb, FILE *fp, int term)
+static int strbuf_getdelim(struct strbuf *sb, FILE *fp, int term)
{
if (strbuf_getwholeline(sb, fp, term))
return EOF;
- if (sb->buf[sb->len-1] == term)
- strbuf_setlen(sb, sb->len-1);
+ if (sb->buf[sb->len - 1] == term)
+ strbuf_setlen(sb, sb->len - 1);
return 0;
}
+int strbuf_getline(struct strbuf *sb, FILE *fp)
+{
+ if (strbuf_getwholeline(sb, fp, '\n'))
+ return EOF;
+ if (sb->buf[sb->len - 1] == '\n') {
+ strbuf_setlen(sb, sb->len - 1);
+ if (sb->len && sb->buf[sb->len - 1] == '\r')
+ strbuf_setlen(sb, sb->len - 1);
+ }
+ return 0;
+}
+
+int strbuf_getline_lf(struct strbuf *sb, FILE *fp)
+{
+ return strbuf_getdelim(sb, fp, '\n');
+}
+
+int strbuf_getline_nul(struct strbuf *sb, FILE *fp)
+{
+ return strbuf_getdelim(sb, fp, '\0');
+}
+
int strbuf_getwholeline_fd(struct strbuf *sb, int fd, int term)
{
strbuf_reset(sb);
*
* NOTE: The buffer is rewound if the read fails. If -1 is returned,
* `errno` must be consulted, like you would do for `read(3)`.
- * `strbuf_read()`, `strbuf_read_file()` and `strbuf_getline()` has the
- * same behaviour as well.
+ * `strbuf_read()`, `strbuf_read_file()` and `strbuf_getline_*()`
+ * family of functions have the same behaviour as well.
*/
extern size_t strbuf_fread(struct strbuf *, size_t, FILE *);
*/
extern ssize_t strbuf_read(struct strbuf *, int fd, size_t hint);
+/**
+ * Read the contents of a given file descriptor partially, using at most
+ * one xread() call. The third argument can be used to give a hint about
+ * the file size, to avoid reallocs. Returns the number of new bytes
+ * appended to the strbuf.
+ */
+extern ssize_t strbuf_read_once(struct strbuf *, int fd, size_t hint);
+
/**
* Read the contents of a file, specified by its path. The third argument
* can be used to give a hint about the file size, to avoid reallocs.
extern int strbuf_readlink(struct strbuf *sb, const char *path, size_t hint);
/**
- * Read a line from a FILE *, overwriting the existing contents
- * of the strbuf. The second argument specifies the line
- * terminator character, typically `'\n'`.
+ * Read a line from a FILE *, overwriting the existing contents of
+ * the strbuf. The strbuf_getline*() family of functions share
+ * this signature, but have different line termination conventions.
+ *
* Reading stops after the terminator or at EOF. The terminator
* is removed from the buffer before returning. Returns 0 unless
* there was nothing left before EOF, in which case it returns `EOF`.
*/
-extern int strbuf_getline(struct strbuf *, FILE *, int);
+typedef int (*strbuf_getline_fn)(struct strbuf *, FILE *);
+
+/* Uses LF as the line terminator */
+extern int strbuf_getline_lf(struct strbuf *sb, FILE *fp);
+
+/* Uses NUL as the line terminator */
+extern int strbuf_getline_nul(struct strbuf *sb, FILE *fp);
+
+/*
+ * Similar to strbuf_getline_lf(), but additionally treats a CR that
+ * comes immediately before the LF as part of the terminator.
+ * This is the most friendly version to be used to read "text" files
+ * that can come from platforms whose native text format is CRLF
+ * terminated.
+ */
+extern int strbuf_getline(struct strbuf *, FILE *);
+
/**
* Like `strbuf_getline`, but keeps the trailing terminator (if
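
To make the new naming concrete (again, not part of the patch): strbuf_getline() is
meant for user-edited text that may arrive CRLF-terminated, while strbuf_getline_lf()
and strbuf_getline_nul() are for formats with an exact terminator. A sketch of a
reader; read_lines() and its skip-comment convention are made up for this
illustration:

#include "cache.h"

static int read_lines(const char *path)
{
	struct strbuf line = STRBUF_INIT;
	FILE *fp = fopen(path, "r");

	if (!fp)
		return error("cannot open %s: %s", path, strerror(errno));

	/* strips the trailing LF and, if present, the CR before it */
	while (strbuf_getline(&line, fp) != EOF) {
		if (!line.len || line.buf[0] == '#')
			continue;	/* skip blank lines and comments */
		printf("%s\n", line.buf);
	}

	fclose(fp);
	strbuf_release(&line);
	return 0;
}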
#include "sha1-array.h"
#include "argv-array.h"
#include "blob.h"
+#include "thread-utils.h"
static int config_fetch_recurse_submodules = RECURSE_SUBMODULES_ON_DEMAND;
static struct string_list changed_submodule_paths;
initialized_fetch_ref_tips = 0;
}
-int fetch_populated_submodules(const struct argv_array *options,
- const char *prefix, int command_line_option,
- int quiet)
+struct submodule_parallel_fetch {
+ int count;
+ struct argv_array args;
+ const char *work_tree;
+ const char *prefix;
+ int command_line_option;
+ int quiet;
+ int result;
+};
+#define SPF_INIT {0, ARGV_ARRAY_INIT, NULL, NULL, 0, 0, 0}
+
+static int get_next_submodule(struct child_process *cp,
+ struct strbuf *err, void *data, void **task_cb)
{
- int i, result = 0;
- struct child_process cp = CHILD_PROCESS_INIT;
- struct argv_array argv = ARGV_ARRAY_INIT;
- const char *work_tree = get_git_work_tree();
- if (!work_tree)
- goto out;
-
- if (read_cache() < 0)
- die("index file corrupt");
-
- argv_array_push(&argv, "fetch");
- for (i = 0; i < options->argc; i++)
- argv_array_push(&argv, options->argv[i]);
- argv_array_push(&argv, "--recurse-submodules-default");
- /* default value, "--submodule-prefix" and its value are added later */
-
- cp.env = local_repo_env;
- cp.git_cmd = 1;
- cp.no_stdin = 1;
-
- calculate_changed_submodule_paths();
+ int ret = 0;
+ struct submodule_parallel_fetch *spf = data;
- for (i = 0; i < active_nr; i++) {
+ for (; spf->count < active_nr; spf->count++) {
struct strbuf submodule_path = STRBUF_INIT;
struct strbuf submodule_git_dir = STRBUF_INIT;
struct strbuf submodule_prefix = STRBUF_INIT;
- const struct cache_entry *ce = active_cache[i];
+ const struct cache_entry *ce = active_cache[spf->count];
const char *git_dir, *default_argv;
const struct submodule *submodule;
submodule = submodule_from_name(null_sha1, ce->name);
default_argv = "yes";
- if (command_line_option == RECURSE_SUBMODULES_DEFAULT) {
+ if (spf->command_line_option == RECURSE_SUBMODULES_DEFAULT) {
if (submodule &&
submodule->fetch_recurse !=
RECURSE_SUBMODULES_NONE) {
default_argv = "on-demand";
}
}
- } else if (command_line_option == RECURSE_SUBMODULES_ON_DEMAND) {
+ } else if (spf->command_line_option == RECURSE_SUBMODULES_ON_DEMAND) {
if (!unsorted_string_list_lookup(&changed_submodule_paths, ce->name))
continue;
default_argv = "on-demand";
}
- strbuf_addf(&submodule_path, "%s/%s", work_tree, ce->name);
+ strbuf_addf(&submodule_path, "%s/%s", spf->work_tree, ce->name);
strbuf_addf(&submodule_git_dir, "%s/.git", submodule_path.buf);
- strbuf_addf(&submodule_prefix, "%s%s/", prefix, ce->name);
+ strbuf_addf(&submodule_prefix, "%s%s/", spf->prefix, ce->name);
git_dir = read_gitfile(submodule_git_dir.buf);
if (!git_dir)
git_dir = submodule_git_dir.buf;
if (is_directory(git_dir)) {
- if (!quiet)
- printf("Fetching submodule %s%s\n", prefix, ce->name);
- cp.dir = submodule_path.buf;
- argv_array_push(&argv, default_argv);
- argv_array_push(&argv, "--submodule-prefix");
- argv_array_push(&argv, submodule_prefix.buf);
- cp.argv = argv.argv;
- if (run_command(&cp))
- result = 1;
- argv_array_pop(&argv);
- argv_array_pop(&argv);
- argv_array_pop(&argv);
+ child_process_init(cp);
+ cp->dir = strbuf_detach(&submodule_path, NULL);
+ cp->env = local_repo_env;
+ cp->git_cmd = 1;
+ if (!spf->quiet)
+ strbuf_addf(err, "Fetching submodule %s%s\n",
+ spf->prefix, ce->name);
+ argv_array_init(&cp->args);
+ argv_array_pushv(&cp->args, spf->args.argv);
+ argv_array_push(&cp->args, default_argv);
+ argv_array_push(&cp->args, "--submodule-prefix");
+ argv_array_push(&cp->args, submodule_prefix.buf);
+ ret = 1;
}
strbuf_release(&submodule_path);
strbuf_release(&submodule_git_dir);
strbuf_release(&submodule_prefix);
+ if (ret) {
+ spf->count++;
+ return 1;
+ }
}
- argv_array_clear(&argv);
+ return 0;
+}
+
+static int fetch_start_failure(struct child_process *cp,
+ struct strbuf *err,
+ void *cb, void *task_cb)
+{
+ struct submodule_parallel_fetch *spf = cb;
+
+ spf->result = 1;
+
+ return 0;
+}
+
+static int fetch_finish(int retvalue, struct child_process *cp,
+ struct strbuf *err, void *cb, void *task_cb)
+{
+ struct submodule_parallel_fetch *spf = cb;
+
+ if (retvalue)
+ spf->result = 1;
+
+ return 0;
+}
+
+int fetch_populated_submodules(const struct argv_array *options,
+ const char *prefix, int command_line_option,
+ int quiet, int max_parallel_jobs)
+{
+ int i;
+ struct submodule_parallel_fetch spf = SPF_INIT;
+
+ spf.work_tree = get_git_work_tree();
+ spf.command_line_option = command_line_option;
+ spf.quiet = quiet;
+ spf.prefix = prefix;
+
+ if (!spf.work_tree)
+ goto out;
+
+ if (read_cache() < 0)
+ die("index file corrupt");
+
+ argv_array_push(&spf.args, "fetch");
+ for (i = 0; i < options->argc; i++)
+ argv_array_push(&spf.args, options->argv[i]);
+ argv_array_push(&spf.args, "--recurse-submodules-default");
+ /* default value, "--submodule-prefix" and its value are added later */
+
+ calculate_changed_submodule_paths();
+ run_processes_parallel(max_parallel_jobs,
+ get_next_submodule,
+ fetch_start_failure,
+ fetch_finish,
+ &spf);
+
+ argv_array_clear(&spf.args);
out:
string_list_clear(&changed_submodule_paths, 1);
- return result;
+ return spf.result;
}
unsigned is_submodule_modified(const char *path, int ignore_untracked)
void check_for_new_submodule_commits(unsigned char new_sha1[20]);
int fetch_populated_submodules(const struct argv_array *options,
const char *prefix, int command_line_option,
- int quiet);
+ int quiet, int max_parallel_jobs);
unsigned is_submodule_modified(const char *path, int ignore_untracked);
int submodule_uses_gitfile(const char *path);
int ok_to_remove_submodule(const char *path);
test_skip_or_die $GIT_TEST_HTTPD "no web server found at '$LIB_HTTPD_PATH'"
fi
-HTTPD_VERSION=`$LIB_HTTPD_PATH -v | \
- sed -n 's/^Server version: Apache\/\([0-9]*\)\..*$/\1/p; q'`
+HTTPD_VERSION=$($LIB_HTTPD_PATH -v | \
+ sed -n 's/^Server version: Apache\/\([0-9]*\)\..*$/\1/p; q')
if test -n "$HTTPD_VERSION"
then
check_config bare-ancestor-aliased.git/plain-nested/.git false unset
'
+test_expect_success 'No extra GIT_* on alias scripts' '
+ (
+ env | sed -ne "/^GIT_/s/=.*//p" &&
+ echo GIT_PREFIX && # setup.c
+ echo GIT_TEXTDOMAINDIR # wrapper-for-bin.sh
+ ) | sort | uniq >expected &&
+ cat <<-\EOF >script &&
+ #!/bin/sh
+ env | sed -ne "/^GIT_/s/=.*//p" | sort >actual
+ exit 0
+ EOF
+ chmod 755 script &&
+ git config alias.script \!./script &&
+ ( mkdir sub && cd sub && git script ) &&
+ test_cmp expected actual
+'
+
test_expect_success 'plain with GIT_WORK_TREE' '
mkdir plain-wt &&
test_must_fail env GIT_WORK_TREE="$(pwd)/plain-wt" git init plain-wt
test "$SHA" = "$(git rev-list HEAD)"
'
-test_expect_failure 'setup_git_dir twice in subdir' '
+test_expect_success 'setup_git_dir twice in subdir' '
git init sgd &&
(
cd sgd &&
}
create_NNO_files () {
- lfname=$1
- crlfname=$2
- lfmixcrlf=$3
- lfmixcr=$4
- crlfnul=$5
for crlf in false true input
do
for attr in "" auto text -text lf crlf
do
pfx=NNO_${crlf}_attr_${attr} &&
- cp $lfname ${pfx}_LF.txt &&
- cp $crlfname ${pfx}_CRLF.txt &&
- cp $lfmixcrlf ${pfx}_CRLF_mix_LF.txt &&
- cp $lfmixcr ${pfx}_LF_mix_CR.txt &&
- cp $crlfnul ${pfx}_CRLF_nul.txt
+ cp CRLF_mix_LF ${pfx}_LF.txt &&
+ cp CRLF_mix_LF ${pfx}_CRLF.txt &&
+ cp CRLF_mix_LF ${pfx}_CRLF_mix_LF.txt &&
+ cp CRLF_mix_LF ${pfx}_LF_mix_CR.txt &&
+ cp CRLF_mix_LF ${pfx}_CRLF_nul.txt
done
done
}
crlfnul=$7
pfx=crlf_${crlf}_attr_${attr}
create_gitattributes "$attr" &&
- for f in LF CRLF repoMIX LF_mix_CR CRLF_mix_LF LF_nul CRLF_nul
+ for f in LF CRLF LF_mix_CR CRLF_mix_LF LF_nul CRLF_nul
do
fname=${pfx}_$f.txt &&
cp $f $fname &&
'
}
+stats_ascii () {
+ case "$1" in
+ LF)
+ echo lf
+ ;;
+ CRLF)
+ echo crlf
+ ;;
+ CRLF_mix_LF)
+ echo mixed
+ ;;
+ LF_mix_CR|CRLF_nul|LF_nul|CRLF_mix_CR)
+ echo "-text"
+ ;;
+ *)
+ echo error_invalid $1
+ ;;
+ esac
+}
+
check_files_in_repo () {
crlf=$1
attr=$2
create_gitattributes $attr &&
git config core.autocrlf $crlf &&
pfx=eol_${eol}_crlf_${crlf}_attr_${attr}_ &&
- src=crlf_false_attr__ &&
for f in LF CRLF LF_mix_CR CRLF_mix_LF LF_nul
do
- rm $src$f.txt &&
+ rm crlf_false_attr__$f.txt &&
if test -z "$eol"; then
- git checkout $src$f.txt
+ git checkout crlf_false_attr__$f.txt
else
- git -c core.eol=$eol checkout $src$f.txt
+ git -c core.eol=$eol checkout crlf_false_attr__$f.txt
fi
done
+ test_expect_success "ls-files --eol $lfname ${pfx}LF.txt" '
+ test_when_finished "rm expect actual" &&
+ sort <<-EOF >expect &&
+ i/crlf w/$(stats_ascii $crlfname) crlf_false_attr__CRLF.txt
+ i/mixed w/$(stats_ascii $lfmixcrlf) crlf_false_attr__CRLF_mix_LF.txt
+ i/lf w/$(stats_ascii $lfname) crlf_false_attr__LF.txt
+ i/-text w/$(stats_ascii $lfmixcr) crlf_false_attr__LF_mix_CR.txt
+ i/-text w/$(stats_ascii $crlfnul) crlf_false_attr__CRLF_nul.txt
+ i/-text w/$(stats_ascii $crlfnul) crlf_false_attr__LF_nul.txt
+ EOF
+ git ls-files --eol crlf_false_attr__* |
+ sed -e "s!attr/[^ ]*!!g" -e "s/ / /g" -e "s/ */ /g" |
+ sort >actual &&
+ test_cmp expect actual
+ '
test_expect_success "checkout core.eol=$eol core.autocrlf=$crlf gitattributes=$attr file=LF" "
- compare_ws_file $pfx $lfname ${src}LF.txt
+ compare_ws_file $pfx $lfname crlf_false_attr__LF.txt
"
test_expect_success "checkout core.eol=$eol core.autocrlf=$crlf gitattributes=$attr file=CRLF" "
- compare_ws_file $pfx $crlfname ${src}CRLF.txt
+ compare_ws_file $pfx $crlfname crlf_false_attr__CRLF.txt
"
test_expect_success "checkout core.eol=$eol core.autocrlf=$crlf gitattributes=$attr file=CRLF_mix_LF" "
- compare_ws_file $pfx $lfmixcrlf ${src}CRLF_mix_LF.txt
+ compare_ws_file $pfx $lfmixcrlf crlf_false_attr__CRLF_mix_LF.txt
"
test_expect_success "checkout core.eol=$eol core.autocrlf=$crlf gitattributes=$attr file=LF_mix_CR" "
- compare_ws_file $pfx $lfmixcr ${src}LF_mix_CR.txt
+ compare_ws_file $pfx $lfmixcr crlf_false_attr__LF_mix_CR.txt
"
test_expect_success "checkout core.eol=$eol core.autocrlf=$crlf gitattributes=$attr file=LF_nul" "
- compare_ws_file $pfx $crlfnul ${src}LF_nul.txt
+ compare_ws_file $pfx $crlfnul crlf_false_attr__LF_nul.txt
"
}
-#######
+# Test control characters
+# NUL SOH CR EOF==^Z
+test_expect_success 'ls-files --eol -o Text/Binary' '
+ test_when_finished "rm expect actual TeBi_*" &&
+ STRT=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA &&
+ STR=$STRT$STRT$STRT$STRT &&
+ printf "${STR}BBB\001" >TeBi_127_S &&
+ printf "${STR}BBBB\001">TeBi_128_S &&
+ printf "${STR}BBB\032" >TeBi_127_E &&
+ printf "\032${STR}BBB" >TeBi_E_127 &&
+ printf "${STR}BBBB\000">TeBi_128_N &&
+ printf "${STR}BBB\012">TeBi_128_L &&
+ printf "${STR}BBB\015">TeBi_127_C &&
+ printf "${STR}BB\015\012" >TeBi_126_CL &&
+ printf "${STR}BB\015\012\015" >TeBi_126_CLC &&
+ sort <<-\EOF >expect &&
+ i/ w/-text TeBi_127_S
+ i/ w/none TeBi_128_S
+ i/ w/none TeBi_127_E
+ i/ w/-text TeBi_E_127
+ i/ w/-text TeBi_128_N
+ i/ w/lf TeBi_128_L
+ i/ w/-text TeBi_127_C
+ i/ w/crlf TeBi_126_CL
+ i/ w/-text TeBi_126_CLC
+ EOF
+ git ls-files --eol -o |
+ sed -n -e "/TeBi_/{s!attr/[ ]*!!g
+ s! ! !g
+ s! *! !g
+ p
+ }" | sort >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'setup master' '
echo >.gitattributes &&
git checkout -b master &&
checkout_files native false "crlf" CRLF CRLF CRLF CRLF_mix_CR CRLF_nul
checkout_files native true "crlf" CRLF CRLF CRLF CRLF_mix_CR CRLF_nul
+# Should be the last test case: remove some files from the worktree
+test_expect_success 'ls-files --eol -d -z' '
+ rm crlf_false_attr__CRLF.txt crlf_false_attr__CRLF_mix_LF.txt crlf_false_attr__LF.txt .gitattributes &&
+ cat >expect <<-\EOF &&
+ i/crlf w/ crlf_false_attr__CRLF.txt
+ i/lf w/ .gitattributes
+ i/lf w/ crlf_false_attr__LF.txt
+ i/mixed w/ crlf_false_attr__CRLF_mix_LF.txt
+ EOF
+ git ls-files --eol -d |
+ sed -e "s!attr/[^ ]*!!g" -e "s/ / /g" -e "s/ */ /g" |
+ sort >actual &&
+ test_cmp expect actual
+'
+
test_done
test_cmp expect actual
'
+cat >expect <<-EOF
+preloaded output of a child
+Hello
+World
+preloaded output of a child
+Hello
+World
+preloaded output of a child
+Hello
+World
+preloaded output of a child
+Hello
+World
+EOF
+
+test_expect_success 'run_command runs in parallel with more jobs available than tasks' '
+ test-run-command run-command-parallel 5 sh -c "printf \"%s\n%s\n\" Hello World" 2>actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'run_command runs in parallel with as many jobs as tasks' '
+ test-run-command run-command-parallel 4 sh -c "printf \"%s\n%s\n\" Hello World" 2>actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'run_command runs in parallel with more tasks than jobs available' '
+ test-run-command run-command-parallel 3 sh -c "printf \"%s\n%s\n\" Hello World" 2>actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<-EOF
+preloaded output of a child
+asking for a quick stop
+preloaded output of a child
+asking for a quick stop
+preloaded output of a child
+asking for a quick stop
+EOF
+
+test_expect_success 'run_command is asked to abort gracefully' '
+ test-run-command run-command-abort 3 false 2>actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<-EOF
+no further jobs available
+EOF
+
+test_expect_success 'run_command reports "no further jobs available"' '
+ test-run-command run-command-no-jobs 3 sh -c "printf \"%s\n%s\n\" Hello World" 2>actual &&
+ test_cmp expect actual
+'
+
test_done
GIT_COMMITTER_NAME="Committer Name" \
GIT_COMMITTER_EMAIL="committer@email" \
GIT_COMMITTER_DATE="2005-05-26 23:30" \
- TZ=GMT git commit-tree `cat treeid` >commitid 2>/dev/null'
+ TZ=GMT git commit-tree $(cat treeid) >commitid 2>/dev/null'
test_expect_success \
'read commit' \
- 'git cat-file commit `cat commitid` >commit'
+ 'git cat-file commit $(cat commitid) >commit'
test_expect_success \
'compare commit' \
test_expect_success 'symbolic-ref refuses bare sha1' '
echo content >file && git add file && git commit -m one &&
- test_must_fail git symbolic-ref HEAD `git rev-parse HEAD`
+ test_must_fail git symbolic-ref HEAD $(git rev-parse HEAD)
'
reset_to_sane
test_cmp expect actual
'
+test_expect_success 'symbolic-ref does not create ref d/f conflicts' '
+ git checkout -b df &&
+ test_commit df &&
+ test_must_fail git symbolic-ref refs/heads/df/conflict refs/heads/df &&
+ git pack-refs --all --prune &&
+ test_must_fail git symbolic-ref refs/heads/df/conflict refs/heads/df
+'
+
+test_expect_success 'symbolic-ref handles existing pointer to invalid name' '
+ head=$(git rev-parse HEAD) &&
+ git symbolic-ref HEAD refs/heads/outer &&
+ git update-ref refs/heads/outer/inner $head &&
+ git symbolic-ref HEAD refs/heads/unrelated
+'
+
test_done
git add . &&
test_tick && git commit -m rabbit &&
- H=`git rev-parse --verify HEAD` &&
- A=`git rev-parse --verify HEAD:A` &&
- B=`git rev-parse --verify HEAD:A/B` &&
- C=`git rev-parse --verify HEAD:C` &&
- D=`git rev-parse --verify HEAD:A/D` &&
- E=`git rev-parse --verify HEAD:A/B/E` &&
+ H=$(git rev-parse --verify HEAD) &&
+ A=$(git rev-parse --verify HEAD:A) &&
+ B=$(git rev-parse --verify HEAD:A/B) &&
+ C=$(git rev-parse --verify HEAD:C) &&
+ D=$(git rev-parse --verify HEAD:A/D) &&
+ E=$(git rev-parse --verify HEAD:A/B/E) &&
check_fsck &&
test_chmod +x C &&
git add C &&
test_tick && git commit -m dragon &&
- L=`git rev-parse --verify HEAD` &&
+ L=$(git rev-parse --verify HEAD) &&
check_fsck &&
rm -f C A/B/E &&
echo horse >A/G &&
git add F A/G &&
test_tick && git commit -a -m sheep &&
- F=`git rev-parse --verify HEAD:F` &&
- G=`git rev-parse --verify HEAD:A/G` &&
- I=`git rev-parse --verify HEAD:A` &&
- J=`git rev-parse --verify HEAD` &&
+ F=$(git rev-parse --verify HEAD:F) &&
+ G=$(git rev-parse --verify HEAD:A/G) &&
+ I=$(git rev-parse --verify HEAD:A) &&
+ J=$(git rev-parse --verify HEAD) &&
check_fsck &&
rm -f A/G &&
test_tick && git commit -a -m monkey &&
- K=`git rev-parse --verify HEAD` &&
+ K=$(git rev-parse --verify HEAD) &&
check_fsck &&
check_have A B C D E F G H I J K L &&
test_expect_success 'setup' '
echo blob >a-blob &&
- git tag -a -m blob blob-tag `git hash-object -w a-blob` &&
+ git tag -a -m blob blob-tag $(git hash-object -w a-blob) &&
mkdir a-tree &&
echo moreblobs >a-tree/another-blob &&
git add . &&
- TREE_SHA1=`git write-tree` &&
+ TREE_SHA1=$(git write-tree) &&
git tag -a -m tree tree-tag "$TREE_SHA1" &&
git commit -m Initial &&
git tag -a -m commit commit-tag &&
test_expect_success 'ambiguous 40-hex ref' '
TREE=$(git mktree </dev/null) &&
- REF=`git rev-parse HEAD` &&
+ REF=$(git rev-parse HEAD) &&
VAL=$(git commit-tree $TREE </dev/null) &&
git update-ref refs/heads/$REF $VAL &&
- test `git rev-parse $REF 2>err` = $REF &&
+ test $(git rev-parse $REF 2>err) = $REF &&
grep "refname.*${REF}.*ambiguous" err
'
test_expect_success 'ambiguous short sha1 ref' '
TREE=$(git mktree </dev/null) &&
- REF=`git rev-parse --short HEAD` &&
+ REF=$(git rev-parse --short HEAD) &&
VAL=$(git commit-tree $TREE </dev/null) &&
git update-ref refs/heads/$REF $VAL &&
- test `git rev-parse $REF 2>err` = $VAL &&
+ test $(git rev-parse $REF 2>err) = $VAL &&
grep "refname.*${REF}.*ambiguous" err
'
EOF
test_cmp ls-files.expect ls-files.actual &&
- BASE=`test-dump-split-index .git/index | grep "^own" | sed "s/own/base/"` &&
+ BASE=$(test-dump-split-index .git/index | grep "^own" | sed "s/own/base/") &&
test-dump-split-index .git/index | sed "/^own/d" >actual &&
cat >expect <<EOF &&
not a split index
git checkout master --
'
+test_expect_success 'checkout notices failure to lock HEAD' '
+ test_when_finished "rm -f .git/HEAD.lock" &&
+ >.git/HEAD.lock &&
+ test_must_fail git checkout -b other
+'
+
+test_expect_success 'create ref directory/file conflict scenario' '
+ git update-ref refs/heads/outer/inner master &&
+
+ # do not rely on symbolic-ref to get a known state,
+ # as it may use the same code we are testing
+ reset_to_df () {
+ echo "ref: refs/heads/outer" >.git/HEAD
+ }
+'
+
+test_expect_success 'checkout away from d/f HEAD (unpacked, to branch)' '
+ reset_to_df &&
+ git checkout master
+'
+
+test_expect_success 'checkout away from d/f HEAD (unpacked, to detached)' '
+ reset_to_df &&
+ git checkout --detach master
+'
+
+test_expect_success 'pack refs' '
+ git pack-refs --all --prune
+'
+
+test_expect_success 'checkout away from d/f HEAD (packed, to branch)' '
+ reset_to_df &&
+ git checkout master
+'
+
+test_expect_success 'checkout away from d/f HEAD (packed, to detached)' '
+ reset_to_df &&
+ git checkout --detach master
+'
test_done
test_expect_success 'checkout with grafts' '
test_when_finished rm .git/info/grafts &&
test_commit abc &&
- SHA1=`git rev-parse HEAD` &&
+ SHA1=$(git rev-parse HEAD) &&
test_commit def &&
test_commit xyz &&
- echo "`git rev-parse HEAD` $SHA1" >.git/info/grafts &&
+ echo "$(git rev-parse HEAD) $SHA1" >.git/info/grafts &&
cat >expected <<-\EOF &&
xyz
abc
test_expect_success \
'the index entry must still be a symbolic link' '
-case "`git ls-files --stage --cached symlink`" in
+case "$(git ls-files --stage --cached symlink)" in
120000" "*symlink) echo pass;;
*) echo fail; git ls-files --stage --cached symlink; (exit 1);;
esac'
test_ln_s_add e a &&
test_tick &&
git commit -m "rename a->e, symlink a->e" &&
- oln=`printf e | git hash-object --stdin`
+ oln=$(printf e | git hash-object --stdin)
'
test_expect_success 'setup 9' '
echo Mi >path2/baz/b &&
find path? \( -type f -o -type l \) -print |
xargs git update-index --add &&
- tree=`git write-tree` &&
+ tree=$(git write-tree) &&
echo $tree'
test_output () {
echo 222 >path3/2.txt &&
find *.txt path* \( -type f -o -type l \) -print |
xargs git update-index --add &&
- tree=`git write-tree` &&
+ tree=$(git write-tree) &&
echo $tree
'
test_expect_success \
'see if git show-ref works as expected' \
'git branch a &&
- SHA1=`cat .git/refs/heads/a` &&
+ SHA1=$(cat .git/refs/heads/a) &&
echo "$SHA1 refs/heads/a" >expect &&
git show-ref a >result &&
test_cmp expect result'
test_must_fail git notes show HEAD^
'
+test_expect_success 'show notes from treeish' '
+ test "b3" = "$(git notes --ref commits^{tree} show)" &&
+ test "b4" = "$(git notes --ref commits@{1} show)"
+'
+
+test_expect_success 'cannot edit notes from non-ref' '
+ test_must_fail git notes --ref commits^{tree} edit &&
+ test_must_fail git notes --ref commits@{1} edit
+'
+
test_expect_success 'cannot "git notes add -m" where notes already exists' '
test_must_fail git notes add -m "b2" &&
test_path_is_missing .git/NOTES_EDITMSG &&
git notes add -m "Notes on 1st commit" 1st &&
git notes add -m "Notes on 2nd commit" 2nd &&
git notes add -m "Notes on 3rd commit" 3rd &&
- git notes add -m "Notes on 4th commit" 4th
+ git notes add -m "Notes on 4th commit" 4th &&
+ # Copy notes to remote-notes
+ git fetch . refs/notes/*:refs/remote-notes/origin/*
'
commit_sha1=$(git rev-parse 1st^{commit})
'
cp expect_notes_x expect_notes_y
+cp expect_notes_x expect_notes_v
cp expect_log_x expect_log_y
+cp expect_log_x expect_log_v
test_expect_success 'fail to merge empty notes ref into empty notes ref (z => y)' '
test_must_fail git -c "core.notesRef=refs/notes/y" notes merge z
test_must_fail git -c "core.notesRef=refs/notes/foo^{bar" notes merge x
'
-test_expect_success 'fail to merge various non-note-trees' '
- git config core.notesRef refs/notes/y &&
- test_must_fail git notes merge refs/notes &&
- test_must_fail git notes merge refs/notes/ &&
- test_must_fail git notes merge refs/notes/dir &&
- test_must_fail git notes merge refs/notes/dir/ &&
- test_must_fail git notes merge refs/heads/master &&
- test_must_fail git notes merge x: &&
- test_must_fail git notes merge x:foo &&
- test_must_fail git notes merge foo^{bar
+test_expect_success 'merge non-notes ref into empty notes ref (remote-notes/origin/x => v)' '
+ git config core.notesRef refs/notes/v &&
+ git notes merge refs/remote-notes/origin/x &&
+ verify_notes v &&
+ # refs/remote-notes/origin/x and v should point to the same notes commit
+ test "$(git rev-parse refs/remote-notes/origin/x)" = "$(git rev-parse refs/notes/v)"
'
test_expect_success 'merge notes into empty notes ref (x => y)' '
'
test_expect_success 'merge and reference trees equal' '
- test -z "`git diff-tree skip-merge skip-reference`"
+ test -z "$(git diff-tree skip-merge skip-reference)"
'
test_expect_success 'moved back to branch correctly' '
test_expect_success 'cherry-pick -x inserts blank line after one line subject' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-one-line^0` &&
+ sha1=$(git rev-parse mesg-one-line^0) &&
git cherry-pick -x mesg-one-line &&
cat <<-EOF >expect &&
$mesg_one_line
test_expect_success 'cherry-pick -x inserts blank line when conforming footer not found' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-no-footer^0` &&
+ sha1=$(git rev-parse mesg-no-footer^0) &&
git cherry-pick -x mesg-no-footer &&
cat <<-EOF >expect &&
$mesg_no_footer
test_expect_success 'cherry-pick -x -s inserts blank line when conforming footer not found' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-no-footer^0` &&
+ sha1=$(git rev-parse mesg-no-footer^0) &&
git cherry-pick -x -s mesg-no-footer &&
cat <<-EOF >expect &&
$mesg_no_footer
test_expect_success 'cherry-pick -x -s adds sob when last sob doesnt match committer' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-with-footer^0` &&
+ sha1=$(git rev-parse mesg-with-footer^0) &&
git cherry-pick -x -s mesg-with-footer &&
cat <<-EOF >expect &&
$mesg_with_footer
test_expect_success 'cherry-pick -x -s adds sob even when trailing sob exists for committer' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-with-footer-sob^0` &&
+ sha1=$(git rev-parse mesg-with-footer-sob^0) &&
git cherry-pick -x -s mesg-with-footer-sob &&
cat <<-EOF >expect &&
$mesg_with_footer_sob
test_expect_success 'cherry-pick -x treats "(cherry picked from..." line as part of footer' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-with-cherry-footer^0` &&
+ sha1=$(git rev-parse mesg-with-cherry-footer^0) &&
git cherry-pick -x mesg-with-cherry-footer &&
cat <<-EOF >expect &&
$mesg_with_cherry_footer
test_expect_success 'cherry-pick -x -s treats "(cherry picked from..." line as part of footer' '
pristine_detach initial &&
- sha1=`git rev-parse mesg-with-cherry-footer^0` &&
+ sha1=$(git rev-parse mesg-with-cherry-footer^0) &&
git cherry-pick -x -s mesg-with-cherry-footer &&
cat <<-EOF >expect &&
$mesg_with_cherry_footer
git add test-file &&
git commit -m "add file for rm test" &&
git rm test-file > rm-output &&
- test `grep "^rm " rm-output | wc -l` = 1 &&
+ test $(grep "^rm " rm-output | wc -l) = 1 &&
rm -f test-file rm-output &&
git commit -m "remove file from rm test"
'
git add test-file &&
git commit -m "add file for rm --quiet test" &&
git rm --quiet test-file > rm-output &&
- test `wc -l < rm-output` = 0 &&
+ test $(wc -l < rm-output) = 0 &&
rm -f test-file rm-output &&
git commit -m "remove file from rm --quiet test"
'
echo foo >xfoo1 &&
chmod 755 xfoo1 &&
git add xfoo1 &&
- case "`git ls-files --stage xfoo1`" in
+ case "$(git ls-files --stage xfoo1)" in
100644" "*xfoo1) echo pass;;
*) echo fail; git ls-files --stage xfoo1; (exit 1);;
esac'
test_expect_success 'git add: filemode=0 should not get confused by symlink' '
rm -f xfoo1 &&
test_ln_s_add foo xfoo1 &&
- case "`git ls-files --stage xfoo1`" in
+ case "$(git ls-files --stage xfoo1)" in
120000" "*xfoo1) echo pass;;
*) echo fail; git ls-files --stage xfoo1; (exit 1);;
esac
echo foo >xfoo2 &&
chmod 755 xfoo2 &&
git update-index --add xfoo2 &&
- case "`git ls-files --stage xfoo2`" in
+ case "$(git ls-files --stage xfoo2)" in
100644" "*xfoo2) echo pass;;
*) echo fail; git ls-files --stage xfoo2; (exit 1);;
esac'
test_expect_success 'git add: filemode=0 should not get confused by symlink' '
rm -f xfoo2 &&
test_ln_s_add foo xfoo2 &&
- case "`git ls-files --stage xfoo2`" in
+ case "$(git ls-files --stage xfoo2)" in
120000" "*xfoo2) echo pass;;
*) echo fail; git ls-files --stage xfoo2; (exit 1);;
esac
'git update-index --add: Test that executable bit is not used...' \
'git config core.filemode 0 &&
test_ln_s_add xfoo2 xfoo3 && # runs git update-index --add
- case "`git ls-files --stage xfoo3`" in
+ case "$(git ls-files --stage xfoo3)" in
120000" "*xfoo3) echo pass;;
*) echo fail; git ls-files --stage xfoo3; (exit 1);;
esac'
test_expect_success 'git add --refresh' '
>foo && git add foo && git commit -a -m "commit all" &&
- test -z "`git diff-index HEAD -- foo`" &&
+ test -z "$(git diff-index HEAD -- foo)" &&
git read-tree HEAD &&
- case "`git diff-index HEAD -- foo`" in
+ case "$(git diff-index HEAD -- foo)" in
:100644" "*"M foo") echo pass;;
*) echo fail; (exit 1);;
esac &&
git add --refresh -- foo &&
- test -z "`git diff-index HEAD -- foo`"
+ test -z "$(git diff-index HEAD -- foo)"
'
test_expect_success 'git add --refresh with pathspec' '
test_cmp from filtered
'
+test_expect_success 'format-patch format.outputDirectory option' '
+ test_config format.outputDirectory patches &&
+ rm -fr patches &&
+ git format-patch master..side &&
+ test $(git rev-list master..side | wc -l) -eq $(ls patches | wc -l)
+'
+
+test_expect_success 'format-patch -o overrides format.outputDirectory' '
+ test_config format.outputDirectory patches &&
+ rm -fr patches patchset &&
+ git format-patch master..side -o patchset &&
+ test_path_is_missing patches &&
+ test_path_is_dir patchset
+'
+
test_done
test_cmp expect out
'
+test_expect_success !MINGW 'shortlog can read --format=raw output' '
+ git log --format=raw HEAD >log &&
+ GIT_DIR=non-existing git shortlog -w <log >out &&
+ test_cmp expect out
+'
+
test_expect_success 'shortlog should add newline when input line matches wraplen' '
cat >expect <<\EOF &&
A U Thor (2):
git shortlog HEAD~2.. > out &&
test_cmp expect out'
-test_expect_success 'shortlog ignores commits with missing authors' '
- git commit --allow-empty -m normal &&
- git commit --allow-empty -m soon-to-be-broken &&
- git cat-file commit HEAD >commit.tmp &&
- sed "/^author/d" commit.tmp >broken.tmp &&
- commit=$(git hash-object -w -t commit --stdin <broken.tmp) &&
- git update-ref HEAD $commit &&
- cat >expect <<-\EOF &&
- A U Thor (1):
- normal
-
- EOF
- git shortlog HEAD~2.. >actual &&
- test_cmp expect actual
-'
-
test_expect_success 'shortlog with revision pseudo options' '
git shortlog --all &&
git shortlog --branches &&
test_expect_success 'split sample box' \
'git mailsplit -o. "$TEST_DIRECTORY"/t5100/sample.mbox >last &&
- last=`cat last` &&
+ last=$(cat last) &&
echo total is $last &&
- test `cat last` = 17'
+ test $(cat last) = 17'
check_mailinfo () {
mail=$1 opt=$2
}
-for mail in `echo 00*`
+for mail in 00*
do
test_expect_success "mailinfo $mail" '
check_mailinfo $mail "" &&
'mkdir rfc2047 &&
git mailsplit -orfc2047 "$TEST_DIRECTORY"/t5100/rfc2047-samples.mbox \
>rfc2047/last &&
- last=`cat rfc2047/last` &&
+ last=$(cat rfc2047/last) &&
echo total is $last &&
- test `cat rfc2047/last` = 11'
+ test $(cat rfc2047/last) = 11'
-for mail in `echo rfc2047/00*`
+for mail in rfc2047/00*
do
test_expect_success "mailinfo $mail" '
git mailinfo -u $mail-msg $mail-patch <$mail >$mail-info &&
'
. ./test-lib.sh
-TRASH=`pwd`
+TRASH=$(pwd)
test_expect_success \
'setup' \
test-genrandom "seed b" 2097152 > b_big &&
git update-index --add a a_big b b_big c &&
cat c >d && echo foo >>d && git update-index --add d &&
- tree=`git write-tree` &&
- commit=`git commit-tree $tree </dev/null` && {
+ tree=$(git write-tree) &&
+ commit=$(git commit-tree $tree </dev/null) && {
echo $tree &&
echo $commit &&
git ls-tree $tree | sed -e "s/.* \\([0-9a-f]*\\) .*/\\1/"
git diff-tree --root -p $commit &&
while read object
do
- t=`git cat-file -t $object` &&
+ t=$(git cat-file -t $object) &&
git cat-file $t $object || return 1
done <obj-list
} >expect'
git diff-tree --root -p $commit &&
while read object
do
- t=`git cat-file -t $object` &&
+ t=$(git cat-file -t $object) &&
git cat-file $t $object || return 1
done <obj-list
} >current &&
git diff-tree --root -p $commit &&
while read object
do
- t=`git cat-file -t $object` &&
+ t=$(git cat-file -t $object) &&
git cat-file $t $object || return 1
done <obj-list
} >current &&
git diff-tree --root -p $commit &&
while read object
do
- t=`git cat-file -t $object` &&
+ t=$(git cat-file -t $object) &&
git cat-file $t $object || return 1
done <obj-list
} >current &&
test_expect_success \
'verify-pack catches a corrupted sum of the index file itself' \
- 'l=`wc -c <test-3.idx` &&
- l=`expr $l - 20` &&
+ 'l=$(wc -c <test-3.idx) &&
+ l=$(expr $l - 20) &&
cat test-1-${packname_1}.pack >test-3.pack &&
printf "%20s" "" | dd of=test-3.idx count=20 bs=1 conv=notrunc seek=$l &&
if git verify-pack test-3.pack
git update-index --add $i || return 1
done &&
echo d >d && cat c >>d && git update-index --add d &&
- tree=`git write-tree` &&
- commit1=`git commit-tree $tree </dev/null` &&
+ tree=$(git write-tree) &&
+ commit1=$(git commit-tree $tree </dev/null) &&
git update-ref HEAD $commit1 &&
git repack -a -d &&
- test "`git count-objects`" = "0 objects, 0 kilobytes" &&
- pack1=`ls .git/objects/pack/*.pack` &&
+ test "$(git count-objects)" = "0 objects, 0 kilobytes" &&
+ pack1=$(ls .git/objects/pack/*.pack) &&
test -f "$pack1"'
test_expect_success \
'repack -a -d, packedGit{WindowSize,Limit} == 1 page' \
'git config core.packedGitWindowSize 512 &&
git config core.packedGitLimit 512 &&
- commit2=`git commit-tree $tree -p $commit1 </dev/null` &&
+ commit2=$(git commit-tree $tree -p $commit1 </dev/null) &&
git update-ref HEAD $commit2 &&
git repack -a -d &&
- test "`git count-objects`" = "0 objects, 0 kilobytes" &&
- pack2=`ls .git/objects/pack/*.pack` &&
+ test "$(git count-objects)" = "0 objects, 0 kilobytes" &&
+ pack2=$(ls .git/objects/pack/*.pack) &&
test -f "$pack2" &&
test "$pack1" \!= "$pack2"'
i=1 &&
while test $i -le 100
do
- iii=`printf '%03i' $i`
+ iii=$(printf '%03i' $i)
test-genrandom "bar" 200 > wide_delta_$iii &&
test-genrandom "baz $iii" 50 >> wide_delta_$iii &&
test-genrandom "foo"$i 100 > deep_delta_$iii &&
- test-genrandom "foo"`expr $i + 1` 100 >> deep_delta_$iii &&
- test-genrandom "foo"`expr $i + 2` 100 >> deep_delta_$iii &&
+ test-genrandom "foo"$(expr $i + 1) 100 >> deep_delta_$iii &&
+ test-genrandom "foo"$(expr $i + 2) 100 >> deep_delta_$iii &&
echo $iii >file_$iii &&
test-genrandom "$iii" 8192 >>file_$iii &&
git update-index --add file_$iii deep_delta_$iii wide_delta_$iii &&
- i=`expr $i + 1` || return 1
+ i=$(expr $i + 1) || return 1
done &&
{ echo 101 && test-genrandom 100 8192; } >file_101 &&
git update-index --add file_101 &&
- tree=`git write-tree` &&
- commit=`git commit-tree $tree </dev/null` && {
+ tree=$(git write-tree) &&
+ commit=$(git commit-tree $tree </dev/null) && {
echo $tree &&
git ls-tree $tree | sed -e "s/.* \\([0-9a-f]*\\) .*/\\1/"
} >obj-list &&
'[index v1] 2) create a stealth corruption in a delta base reference' \
'# This test assumes file_101 is a delta smaller than 16 bytes.
# It should be against file_100 but we substitute its base for file_099
- sha1_101=`git hash-object file_101` &&
- sha1_099=`git hash-object file_099` &&
- offs_101=`index_obj_offset 1.idx $sha1_101` &&
- nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ sha1_101=$(git hash-object file_101) &&
+ sha1_099=$(git hash-object file_099) &&
+ offs_101=$(index_obj_offset 1.idx $sha1_101) &&
+ nr_099=$(index_obj_nr 1.idx $sha1_099) &&
chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
if=".git/objects/pack/pack-${pack1}.idx" \
'[index v2] 2) create a stealth corruption in a delta base reference' \
'# This test assumes file_101 is a delta smaller than 16 bytes.
# It should be against file_100 but we substitute its base for file_099
- sha1_101=`git hash-object file_101` &&
- sha1_099=`git hash-object file_099` &&
- offs_101=`index_obj_offset 1.idx $sha1_101` &&
- nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ sha1_101=$(git hash-object file_101) &&
+ sha1_099=$(git hash-object file_099) &&
+ offs_101=$(index_obj_offset 1.idx $sha1_101) &&
+ nr_099=$(index_obj_nr 1.idx $sha1_099) &&
chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
if=".git/objects/pack/pack-${pack1}.idx" \
'rm -f .git/objects/pack/* &&
git index-pack --index-version=2 --stdin < "test-1-${pack1}.pack" &&
git verify-pack ".git/objects/pack/pack-${pack1}.pack" &&
- obj=`git hash-object file_001` &&
- nr=`index_obj_nr ".git/objects/pack/pack-${pack1}.idx" $obj` &&
+ obj=$(git hash-object file_001) &&
+ nr=$(index_obj_nr ".git/objects/pack/pack-${pack1}.idx" $obj) &&
chmod +w ".git/objects/pack/pack-${pack1}.idx" &&
printf xxxx | dd of=".git/objects/pack/pack-${pack1}.idx" conv=notrunc \
- bs=1 count=4 seek=$((8 + 256 * 4 + `wc -l <obj-list` * 20 + $nr * 4)) &&
+ bs=1 count=4 seek=$((8 + 256 * 4 + $(wc -l <obj-list) * 20 + $nr * 4)) &&
( while read obj
do git cat-file -p $obj >/dev/null || exit 1
done <obj-list ) &&
create_new_pack() {
rm -rf .git &&
git init &&
- blob_1=`git hash-object -t blob -w file_1` &&
- blob_2=`git hash-object -t blob -w file_2` &&
- blob_3=`git hash-object -t blob -w file_3` &&
- pack=`printf "$blob_1\n$blob_2\n$blob_3\n" |
- git pack-objects $@ .git/objects/pack/pack` &&
+ blob_1=$(git hash-object -t blob -w file_1) &&
+ blob_2=$(git hash-object -t blob -w file_2) &&
+ blob_3=$(git hash-object -t blob -w file_3) &&
+ pack=$(printf "$blob_1\n$blob_2\n$blob_3\n" |
+ git pack-objects $@ .git/objects/pack/pack) &&
pack=".git/objects/pack/pack-${pack}" &&
git verify-pack -v ${pack}.pack
}
do_repack() {
- pack=`printf "$blob_1\n$blob_2\n$blob_3\n" |
- git pack-objects $@ .git/objects/pack/pack` &&
+ pack=$(printf "$blob_1\n$blob_2\n$blob_3\n" |
+ git pack-objects $@ .git/objects/pack/pack) &&
pack=".git/objects/pack/pack-${pack}"
}
do_corrupt_object() {
- ofs=`git show-index < ${pack}.idx | grep $1 | cut -f1 -d" "` &&
+ ofs=$(git show-index < ${pack}.idx | grep $1 | cut -f1 -d" ") &&
ofs=$(($ofs + $2)) &&
chmod +w ${pack}.pack &&
dd of=${pack}.pack bs=1 conv=notrunc seek=$ofs &&
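do_corrupt_object locates an object inside the pack via "git show-index", adds a caller-supplied byte offset, and overwrites that position with whatever arrives on stdin. A minimal sketch of how such a helper might be driven (the two-byte offset and the NUL replacement byte are illustrative, not taken from the surrounding hunks):

	create_new_pack &&
	printf '\0' | do_corrupt_object $blob_1 2 &&
	test_must_fail git cat-file blob $blob_1 >/dev/null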
'
test_expect_success 'prune .git/shallow' '
- SHA1=`echo hi|git commit-tree HEAD^{tree}` &&
+ SHA1=$(echo hi|git commit-tree HEAD^{tree}) &&
echo $SHA1 >.git/shallow &&
git prune --dry-run >out &&
grep $SHA1 .git/shallow &&
test_description='git pack-object --include-tag'
. ./test-lib.sh
-TRASH=`pwd`
+TRASH=$(pwd)
test_expect_success setup '
echo c >d &&
git update-index --add d &&
- tree=`git write-tree` &&
- commit=`git commit-tree $tree </dev/null` &&
+ tree=$(git write-tree) &&
+ commit=$(git commit-tree $tree </dev/null) &&
echo "object $commit" >sig &&
echo "type commit" >>sig &&
echo "tag mytag" >>sig &&
echo "tagger $(git var GIT_COMMITTER_IDENT)" >>sig &&
echo >>sig &&
echo "our test tag" >>sig &&
- tag=`git mktag <sig` &&
+ tag=$(git mktag <sig) &&
rm d sig &&
git update-ref refs/tags/mytag $tag && {
echo $tree &&
add () {
name=$1 &&
text="$@" &&
- branch=`echo $name | sed -e 's/^\(.\).*$/\1/'` &&
+ branch=$(echo $name | sed -e 's/^\(.\).*$/\1/') &&
parents="" &&
shift &&
case "$heads" in *B*)
echo $BTIP > .git/refs/heads/B;;
esac &&
- git symbolic-ref HEAD refs/heads/`echo $heads \
- | sed -e "s/^\(.\).*$/\1/"` &&
+ git symbolic-ref HEAD refs/heads/$(echo $heads \
+ | sed -e "s/^\(.\).*$/\1/") &&
git fsck --full &&
mv .git/objects/pack/pack-* . &&
- p=`ls -1 pack-*.pack` &&
+ p=$(ls -1 pack-*.pack) &&
git unpack-objects <$p &&
git fsck --full &&
- idx=`echo pack-*.idx` &&
- pack_count=`git show-index <$idx | wc -l` &&
+ idx=$(echo pack-*.idx) &&
+ pack_count=$(git show-index <$idx | wc -l) &&
test $pack_count = $count &&
rm -f pack-*
)
test_expect_success 'clone shallow depth 1' '
git clone --no-single-branch --depth 1 "file://$(pwd)/." shallow0 &&
- test "`git --git-dir=shallow0/.git rev-list --count HEAD`" = 1
+ test "$(git --git-dir=shallow0/.git rev-list --count HEAD)" = 1
'
test_expect_success 'clone shallow depth 1 with fsck' '
git config --global fetch.fsckobjects true &&
git clone --no-single-branch --depth 1 "file://$(pwd)/." shallow0fsck &&
- test "`git --git-dir=shallow0fsck/.git rev-list --count HEAD`" = 1 &&
+ test "$(git --git-dir=shallow0fsck/.git rev-list --count HEAD)" = 1 &&
git config --global --unset fetch.fsckobjects
'
'
test_expect_success 'clone shallow depth count' '
- test "`git --git-dir=shallow/.git rev-list --count HEAD`" = 2
+ test "$(git --git-dir=shallow/.git rev-list --count HEAD)" = 2
'
test_expect_success 'clone shallow object count' '
'
test_expect_success 'clone shallow depth count' '
- test "`git --git-dir=shallow/.git rev-list --count HEAD`" = 11
+ test "$(git --git-dir=shallow/.git rev-list --count HEAD)" = 11
'
test_expect_success 'clone shallow object count' '
test_expect_success 'remote forces tracking branches' '
(
cd test &&
- case `git config remote.second.fetch` in
+ case $(git config remote.second.fetch) in
+*) true ;;
*) false ;;
esac
}
repo_fetched() {
- if test "`git log -1 --pretty=format:%s $1 --`" = "`cat mark`"; then
+ if test "$(git log -1 --pretty=format:%s $1 --)" = "$(cat mark)"; then
echo >&2 "repo was fetched: $1"
return 0
fi
. ./test-lib.sh
-D=`pwd`
+D=$(pwd)
test_bundle_object_count () {
git verify-pack -v "$1" >verify.out &&
cd two &&
git fetch &&
test -f .git/refs/heads/one &&
- mine=`git rev-parse refs/heads/one` &&
- his=`cd ../one && git rev-parse refs/heads/master` &&
+ mine=$(git rev-parse refs/heads/one) &&
+ his=$(cd ../one && git rev-parse refs/heads/master) &&
test "z$mine" = "z$his"
'
git fetch &&
test -f .git/refs/heads/two &&
test -f .git/refs/heads/one &&
- master_in_two=`cd ../two && git rev-parse master` &&
- one_in_two=`cd ../two && git rev-parse one` &&
+ master_in_two=$(cd ../two && git rev-parse master) &&
+ one_in_two=$(cd ../two && git rev-parse one) &&
{
echo "$one_in_two "
echo "$master_in_two not-for-merge"
grep refs/tags/magic actual
'
+test_expect_success 'ls-remote --symref' '
+ cat >expect <<-\EOF &&
+ ref: refs/heads/master HEAD
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a HEAD
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/heads/master
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/remotes/origin/HEAD
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/remotes/origin/master
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/tags/mark
+ EOF
+ git ls-remote --symref >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'ls-remote with filtered symref (refname)' '
+ cat >expect <<-\EOF &&
+ ref: refs/heads/master HEAD
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a HEAD
+ EOF
+ git ls-remote --symref . HEAD >actual &&
+ test_cmp expect actual
+'
+
+test_expect_failure 'ls-remote with filtered symref (--heads)' '
+ git symbolic-ref refs/heads/foo refs/tags/mark &&
+ cat >expect <<-\EOF &&
+ ref: refs/tags/mark refs/heads/foo
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/heads/foo
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/heads/master
+ EOF
+ git ls-remote --symref --heads . >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'ls-remote --symref omits filtered-out matches' '
+ cat >expect <<-\EOF &&
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/heads/foo
+ 1bd44cb9d13204b0fe1958db0082f5028a16eb3a refs/heads/master
+ EOF
+ git ls-remote --symref --heads . >actual &&
+ test_cmp expect actual &&
+ git ls-remote --symref . "refs/heads/*" >actual &&
+ test_cmp expect actual
+'
+
+
test_done
case "$cmd" in
'' | '#'*) continue ;;
esac
- test=`echo "$cmd" | sed -e 's|[/ ][/ ]*|_|g'`
- pfx=`printf "%04d" $test_count`
+ test=$(echo "$cmd" | sed -e 's|[/ ][/ ]*|_|g')
+ pfx=$(printf "%04d" $test_count)
expect_f="$TEST_DIRECTORY/t5515/fetch.$test"
actual_f="$pfx-fetch.$test"
expect_r="$TEST_DIRECTORY/t5515/refs.$test"
. ./test-lib.sh
-D=`pwd`
+D=$(pwd)
mk_empty () {
repo_name="$1"
test_expect_success 'push sha1 with non-existent, incomplete dest' '
mk_test testrepo &&
- test_must_fail git push testrepo `git rev-parse master`:foo
+ test_must_fail git push testrepo $(git rev-parse master):foo
'
. ./test-lib.sh
-D=`pwd`
+D=$(pwd)
invert () {
if "$@"; then
test "$(git rev-parse HEAD^2)" = "$(git rev-parse keep-merge)"
'
+test_expect_success 'pull.rebase=interactive' '
+ write_script "$TRASH_DIRECTORY/fake-editor" <<-\EOF &&
+ echo I was here >fake.out &&
+ false
+ EOF
+ test_set_editor "$TRASH_DIRECTORY/fake-editor" &&
+ test_must_fail git pull --rebase=interactive . copy &&
+ test "I was here" = "$(cat fake.out)"
+'
+
test_expect_success 'pull.rebase=invalid fails' '
git reset --hard before-preserve-rebase &&
test_config pull.rebase invalid &&
# git rev-parse --show-cdup printed a path relative to
# clone-repo/subdir/, not subdir-link/. Git rev-parse --show-cdup
# used the correct .git, but when the git pull shell script did
-# "cd `git rev-parse --show-cdup`", it ended up in the wrong
+# "cd $(git rev-parse --show-cdup)", it ended up in the wrong
# directory. A POSIX shell's "cd" works a little differently
# than chdir() in C; "cd -P" is much closer to chdir().
#
git add subfile &&
git commit -m new subfile &&
head2=$(git rev-parse --short HEAD) &&
- echo "From $pwd/submodule" > ../expect.err &&
+ echo "Fetching submodule submodule" > ../expect.err &&
+ echo "From $pwd/submodule" >> ../expect.err &&
echo " $head1..$head2 master -> origin/master" >> ../expect.err
) &&
(
git add deepsubfile &&
git commit -m new deepsubfile &&
head2=$(git rev-parse --short HEAD) &&
+ echo "Fetching submodule submodule/subdir/deepsubmodule" >> ../expect.err
echo "From $pwd/deepsubmodule" >> ../expect.err &&
echo " $head1..$head2 master -> origin/master" >> ../expect.err
)
(
cd downstream &&
git submodule update --init --recursive
- ) &&
- echo "Fetching submodule submodule" > expect.out &&
- echo "Fetching submodule submodule/subdir/deepsubmodule" >> expect.out
+ )
'
test_expect_success "fetch --recurse-submodules recurses into submodules" '
cd downstream &&
git fetch --recurse-submodules >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
+test_expect_success "fetch --recurse-submodules -j2 has the same output behaviour" '
+ add_upstream_commit &&
+ (
+ cd downstream &&
+ GIT_TRACE=$(pwd)/../trace.out git fetch --recurse-submodules -j2 2>../actual.err
+ ) &&
+ test_must_be_empty actual.out &&
+ test_i18ncmp expect.err actual.err &&
+ grep "2 tasks" trace.out
+'
+
test_expect_success "fetch alone only fetches superproject" '
add_upstream_commit &&
(
git config -f .gitmodules submodule.submodule.fetchRecurseSubmodules true &&
git fetch >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
git config --unset -f .gitmodules submodule.submodule.fetchRecurseSubmodules &&
git config --unset submodule.submodule.fetchRecurseSubmodules
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
! test -s actual.err
'
+test_expect_success "--quiet propagates to parallel submodules" '
+ (
+ cd downstream &&
+ git fetch --recurse-submodules -j 2 --quiet >../actual.out 2>../actual.err
+ ) &&
+ ! test -s actual.out &&
+ ! test -s actual.err
+'
+
test_expect_success "--dry-run propagates to submodules" '
add_upstream_commit &&
(
cd downstream &&
git fetch --recurse-submodules --dry-run >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
cd downstream &&
git fetch --recurse-submodules >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
git config fetch.recurseSubmodules true
git fetch >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
) &&
git fetch --recurse-submodules >../actual.out 2>../actual.err
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
git add submodule &&
git commit -m "new submodule" &&
head2=$(git rev-parse --short HEAD) &&
- echo "Fetching submodule submodule" > expect.out.sub &&
echo "From $pwd/." > expect.err.sub &&
echo " $head1..$head2 master -> origin/master" >>expect.err.sub &&
- head -2 expect.err >> expect.err.sub &&
+ head -3 expect.err >> expect.err.sub &&
(
cd downstream &&
git fetch >../actual.out 2>../actual.err
) &&
test_i18ncmp expect.err.sub actual.err &&
- test_i18ncmp expect.out.sub actual.out
+ test_must_be_empty actual.out
'
test_expect_success "Recursion doesn't happen when new superproject commits don't change any submodules" '
)
) &&
test_i18ncmp expect.err.sub actual.err &&
- test_i18ncmp expect.out actual.out
+ test_must_be_empty actual.out
'
test_expect_success "Recursion picks up all submodules when necessary" '
git add subdir/deepsubmodule &&
git commit -m "new deepsubmodule"
head2=$(git rev-parse --short HEAD) &&
- echo "From $pwd/submodule" > ../expect.err.sub &&
+ echo "Fetching submodule submodule" > ../expect.err.sub &&
+ echo "From $pwd/submodule" >> ../expect.err.sub &&
echo " $head1..$head2 master -> origin/master" >> ../expect.err.sub
) &&
head1=$(git rev-parse --short HEAD) &&
echo "From $pwd/." > expect.err.2 &&
echo " $head1..$head2 master -> origin/master" >> expect.err.2 &&
cat expect.err.sub >> expect.err.2 &&
- tail -2 expect.err >> expect.err.2 &&
+ tail -3 expect.err >> expect.err.2 &&
(
cd downstream &&
git fetch >../actual.out 2>../actual.err
) &&
test_i18ncmp expect.err.2 actual.err &&
- test_i18ncmp expect.out actual.out
+ test_must_be_empty actual.out
'
test_expect_success "'--recurse-submodules=on-demand' doesn't recurse when no new commits are fetched in the superproject (and ignores config)" '
git add subdir/deepsubmodule &&
git commit -m "new deepsubmodule" &&
head2=$(git rev-parse --short HEAD) &&
- echo "From $pwd/submodule" > ../expect.err.sub &&
+ echo Fetching submodule submodule > ../expect.err.sub &&
+ echo "From $pwd/submodule" >> ../expect.err.sub &&
echo " $head1..$head2 master -> origin/master" >> ../expect.err.sub
) &&
(
git add submodule &&
git commit -m "new submodule" &&
head2=$(git rev-parse --short HEAD) &&
- tail -2 expect.err > expect.err.deepsub &&
+ tail -3 expect.err > expect.err.deepsub &&
echo "From $pwd/." > expect.err &&
echo " $head1..$head2 master -> origin/master" >>expect.err &&
cat expect.err.sub >> expect.err &&
git config --unset -f .gitmodules submodule.subdir/deepsubmodule.fetchRecursive
)
) &&
- test_i18ncmp expect.out actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err actual.err
'
head2=$(git rev-parse --short HEAD) &&
echo "From $pwd/." > expect.err.2 &&
echo " $head1..$head2 master -> origin/master" >>expect.err.2 &&
- head -2 expect.err >> expect.err.2 &&
+ head -3 expect.err >> expect.err.2 &&
(
cd downstream &&
git config fetch.recurseSubmodules on-demand &&
cd downstream &&
git config --unset fetch.recurseSubmodules
) &&
- test_i18ncmp expect.out.sub actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err.2 actual.err
'
head2=$(git rev-parse --short HEAD) &&
echo "From $pwd/." > expect.err.2 &&
echo " $head1..$head2 master -> origin/master" >>expect.err.2 &&
- head -2 expect.err >> expect.err.2 &&
+ head -3 expect.err >> expect.err.2 &&
(
cd downstream &&
git config submodule.submodule.fetchRecurseSubmodules on-demand &&
cd downstream &&
git config --unset submodule.submodule.fetchRecurseSubmodules
) &&
- test_i18ncmp expect.out.sub actual.out &&
+ test_must_be_empty actual.out &&
test_i18ncmp expect.err.2 actual.err
'
. ./test-lib.sh
-D=`pwd`
+D=$(pwd)
corrupt_repo () {
object_sha1=$(git rev-parse "$1") &&
cat >proxy <<'EOF'
#!/bin/sh
echo >&2 "proxying for $*"
-cmd=`"$PERL_PATH" -e '
+cmd=$("$PERL_PATH" -e '
read(STDIN, $buf, 4);
my $n = hex($buf) - 4;
read(STDIN, $buf, $n);
# drop absolute-path on repo name
$cmd =~ s{ /}{ };
print $cmd;
-'`
+')
echo >&2 "Running '$cmd'"
exec $cmd
EOF
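The proxy above decodes Git's pkt-line framing by hand: the first four bytes are the record length in hexadecimal, and that length counts the four length bytes themselves, so the payload is length minus four. The same framing can be produced from the shell; the snippet below is only a sketch and omits the NUL-separated "host=" part a real git:// request carries:

	payload='git-upload-pack /repo.git'
	printf '%04x%s' $(( ${#payload} + 4 )) "$payload"
	# emits "001dgit-upload-pack /repo.git" (25 payload bytes + 4 = 0x1d)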
test_expect_success 'fetch something upstream has but hidden by clients shallow boundaries' '
# the blob "1" is available in .git but hidden by the
# shallow2/.git/shallow and it should be resent
- ! git --git-dir=shallow2/.git cat-file blob `echo 1|git hash-object --stdin` >/dev/null &&
+ ! git --git-dir=shallow2/.git cat-file blob $(echo 1|git hash-object --stdin) >/dev/null &&
echo 1 >1.t &&
git add 1.t &&
git commit -m add-1-back &&
EOF
test_cmp expect actual
) &&
- git --git-dir=shallow2/.git cat-file blob `echo 1|git hash-object --stdin` >/dev/null
+ git --git-dir=shallow2/.git cat-file blob $(echo 1|git hash-object --stdin) >/dev/null
'
'
test_expect_success 'push from full to shallow' '
- ! git --git-dir=shallow2/.git cat-file blob `echo 1|git hash-object --stdin` &&
+ ! git --git-dir=shallow2/.git cat-file blob $(echo 1|git hash-object --stdin) &&
commit 1 &&
git push shallow2/.git +master:refs/remotes/top/master &&
(
3
EOF
test_cmp expect actual &&
- git cat-file blob `echo 1|git hash-object --stdin` >/dev/null
+ git cat-file blob $(echo 1|git hash-object --stdin) >/dev/null
)
'
test_done
test_expect_success 'fetch notices corrupt pack' '
cp -R "$HTTPD_DOCUMENT_ROOT_PATH"/repo_pack.git "$HTTPD_DOCUMENT_ROOT_PATH"/repo_bad1.git &&
(cd "$HTTPD_DOCUMENT_ROOT_PATH"/repo_bad1.git &&
- p=`ls objects/pack/pack-*.pack` &&
+ p=$(ls objects/pack/pack-*.pack) &&
chmod u+w $p &&
printf %0256d 0 | dd of=$p bs=256 count=1 seek=1 conv=notrunc
) &&
(cd repo_bad1.git &&
git --bare init &&
test_must_fail git --bare fetch $HTTPD_URL/dumb/repo_bad1.git &&
- test 0 = `ls objects/pack/pack-*.pack | wc -l`
+ test 0 = $(ls objects/pack/pack-*.pack | wc -l)
)
'
test_expect_success 'fetch notices corrupt idx' '
cp -R "$HTTPD_DOCUMENT_ROOT_PATH"/repo_pack.git "$HTTPD_DOCUMENT_ROOT_PATH"/repo_bad2.git &&
(cd "$HTTPD_DOCUMENT_ROOT_PATH"/repo_bad2.git &&
- p=`ls objects/pack/pack-*.idx` &&
+ p=$(ls objects/pack/pack-*.idx) &&
chmod u+w $p &&
printf %0256d 0 | dd of=$p bs=256 count=1 seek=1 conv=notrunc
) &&
(cd repo_bad2.git &&
git --bare init &&
test_must_fail git --bare fetch $HTTPD_URL/dumb/repo_bad2.git &&
- test 0 = `ls objects/pack | wc -l`
+ test 0 = $(ls objects/pack | wc -l)
)
'
test_expect_success 'fetch notices corrupt pack' '
cp -R "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_pack.git "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_bad1.git &&
(cd "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_bad1.git &&
- p=`ls objects/pack/pack-*.pack` &&
+ p=$(ls objects/pack/pack-*.pack) &&
chmod u+w $p &&
printf %0256d 0 | dd of=$p bs=256 count=1 seek=1 conv=notrunc
) &&
(cd repo_bad1.git &&
git --bare init &&
test_must_fail git --bare fetch "$GIT_DAEMON_URL/repo_bad1.git" &&
- test 0 = `ls objects/pack/pack-*.pack | wc -l`
+ test 0 = $(ls objects/pack/pack-*.pack | wc -l)
)
'
test_expect_success 'fetch notices corrupt idx' '
cp -R "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_pack.git "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_bad2.git &&
(cd "$GIT_DAEMON_DOCUMENT_ROOT_PATH"/repo_bad2.git &&
- p=`ls objects/pack/pack-*.idx` &&
+ p=$(ls objects/pack/pack-*.idx) &&
chmod u+w $p &&
printf %0256d 0 | dd of=$p bs=256 count=1 seek=1 conv=notrunc
) &&
(cd repo_bad2.git &&
git --bare init &&
test_must_fail git --bare fetch "$GIT_DAEMON_URL/repo_bad2.git" &&
- test 0 = `ls objects/pack | wc -l`
+ test 0 = $(ls objects/pack | wc -l)
)
'
'
+test_expect_success 'clone from hooks' '
+
+ test_create_repo r0 &&
+ cd r0 &&
+ test_commit initial &&
+ cd .. &&
+ git init r1 &&
+ cd r1 &&
+ cat >.git/hooks/pre-commit <<-\EOF &&
+ #!/bin/sh
+ git clone ../r0 ../r2
+ exit 1
+ EOF
+ chmod u+x .git/hooks/pre-commit &&
+ : >file &&
+ git add file &&
+ test_must_fail git commit -m invoke-hook &&
+ cd .. &&
+ test_cmp r0/.git/HEAD r2/.git/HEAD &&
+ test_cmp r0/initial.t r2/initial.t
+
+'
+
test_expect_success 'clone creates intermediate directories' '
git clone src long/path/to/dst &&
'
test_expect_success 'clone separate gitdir: output' '
- echo "gitdir: `pwd`/realgitdir" >expected &&
+ echo "gitdir: $(pwd)/realgitdir" >expected &&
test_cmp expected dst/.git
'
test_description='test clone --reference'
. ./test-lib.sh
-base_dir=`pwd`
+base_dir=$(pwd)
U=$base_dir/UPLOAD_LOG
test_line_count = 0 fsck.log
}
-base_dir=`pwd`
+base_dir=$(pwd)
test_expect_success 'preparing first repository' \
'test_create_repo A && cd A &&
make_bare() {
git init --bare "$1" &&
(cd "$1" &&
- tree=`git hash-object -w -t tree /dev/null` &&
+ tree=$(git hash-object -w -t tree /dev/null) &&
commit=$(echo "$1" | git commit-tree $tree) &&
git update-ref HEAD $commit
)
echo >subdir/fileB fileB &&
git add fileA subdir/fileB &&
git commit -a -m "Initial in one history." &&
- A0=`git rev-parse --verify HEAD` &&
+ A0=$(git rev-parse --verify HEAD) &&
echo >fileA fileA modified &&
git commit -a -m "Second in one history." &&
- A1=`git rev-parse --verify HEAD` &&
+ A1=$(git rev-parse --verify HEAD) &&
echo >subdir/fileB fileB modified &&
git commit -a -m "Third in one history." &&
- A2=`git rev-parse --verify HEAD` &&
+ A2=$(git rev-parse --verify HEAD) &&
rm -f .git/refs/heads/master .git/index &&
echo >subdir/fileB fileB again &&
git add fileA subdir/fileB &&
git commit -a -m "Initial in alternate history." &&
- B0=`git rev-parse --verify HEAD` &&
+ B0=$(git rev-parse --verify HEAD) &&
echo >fileA fileA modified in alternate history &&
git commit -a -m "Second in alternate history." &&
- B1=`git rev-parse --verify HEAD` &&
+ B1=$(git rev-parse --verify HEAD) &&
echo >subdir/fileB fileB modified in alternate history &&
git commit -a -m "Third in alternate history." &&
- B2=`git rev-parse --verify HEAD` &&
+ B2=$(git rev-parse --verify HEAD) &&
: done
'
# Test if bisection size is close to half of list size within
# tolerance.
#
- _bisect_err=`expr $_list_size - $_bisection_size \* 2`
- test "$_bisect_err" -lt 0 && _bisect_err=`expr 0 - $_bisect_err`
- _bisect_err=`expr $_bisect_err / 2` ; # floor
+ _bisect_err=$(expr $_list_size - $_bisection_size \* 2)
+ test "$_bisect_err" -lt 0 && _bisect_err=$(expr 0 - $_bisect_err)
+ _bisect_err=$(expr $_bisect_err / 2) ; # floor
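	# For instance (numbers are illustrative, not from this test run): with
	# _list_size=11 and _bisection_size=5 the first expr yields 1, the
	# sign fix-up is skipped, and the floor division leaves _bisect_err=0,
	# i.e. the split is within half a commit of the ideal midpoint.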
test_expect_success \
"bisection diff $_bisect_option $_head $* <= $_max_diff" \
test_expect_success 'set up --show-all --parents test' '
test_commit one foo.txt &&
- commit1=`git rev-list -1 HEAD` &&
+ commit1=$(git rev-list -1 HEAD) &&
test_commit two bar.txt &&
- commit2=`git rev-list -1 HEAD` &&
+ commit2=$(git rev-list -1 HEAD) &&
test_commit three foo.txt &&
- commit3=`git rev-list -1 HEAD`
+ commit3=$(git rev-list -1 HEAD)
'
test_expect_success '--parents rewrites TREESAME parents correctly' '
make_text() {
echo $1: $2
- for i in `count 20`; do
+ for i in $(count 20); do
echo $1: $i
done
echo $1: $3
test_expect_success 'setup' '
for p in file sub/file sub/sub/file sub/file2 sub/sub/sub/file sub2/file; do
if echo $p | grep /; then
- mkdir -p `dirname $p`
+ mkdir -p $(dirname $p)
fi &&
: >$p &&
git add $p &&
echo b > partA/outline.txt &&
echo c > papers/unsorted/_another &&
git add papers partA &&
- T1=`git write-tree` &&
+ T1=$(git write-tree) &&
git mv papers/unsorted/Thesis.pdf papers/all-papers/moo-blah.pdf &&
- T=`git write-tree` &&
+ T=$(git write-tree) &&
git ls-tree -r $T | verbose grep partA/outline.txt
'
git branch original HEAD
'
-orig_head=`git show-ref --hash --head HEAD`
+orig_head=$(git show-ref --hash --head HEAD)
test_expect_success 'rewrite submodule with another content' '
git filter-branch --tree-filter "test -d submod && {
mkdir submod &&
: > submod/file
} || :" HEAD &&
- test $orig_head != `git show-ref --hash --head HEAD`
+ test $orig_head != $(git show-ref --hash --head HEAD)
'
test_expect_success 'replace submodule revision' '
"if git ls-files --error-unmatch -- submod > /dev/null 2>&1
then git update-index --cacheinfo 160000 0123456789012345678901234567890123456789 submod
fi" HEAD &&
- test $orig_head != `git show-ref --hash --head HEAD`
+ test $orig_head != $(git show-ref --hash --head HEAD)
'
test_expect_success 'filter commit message without trailing newline' '
'
test_expect_success 'listing all tags in an empty tree should output nothing' '
- test `git tag -l | wc -l` -eq 0 &&
- test `git tag | wc -l` -eq 0
+ test $(git tag -l | wc -l) -eq 0 &&
+ test $(git tag | wc -l) -eq 0
'
test_expect_success 'looking for a tag in an empty tree should fail' \
'
test_expect_success 'listing all tags if one exists should output that tag' '
- test `git tag -l` = mytag &&
- test `git tag` = mytag
+ test $(git tag -l) = mytag &&
+ test $(git tag) = mytag
'
# pattern matching:
test_expect_success \
'listing a tag using a matching pattern should output that tag' \
- 'test `git tag -l mytag` = mytag'
+ 'test $(git tag -l mytag) = mytag'
# todo: git tag -l now returns always zero, when fixed, change this test
test_expect_success \
test_expect_success \
'listing tags using a non-matching pattern should output nothing' \
- 'test `git tag -l xxx | wc -l` -eq 0'
+ 'test $(git tag -l xxx | wc -l) -eq 0'
# special cases for creating tags:
test_expect_success \
'trying to create a tag with a non-valid name should fail' '
- test `git tag -l | wc -l` -eq 1 &&
+ test $(git tag -l | wc -l) -eq 1 &&
test_must_fail git tag "" &&
test_must_fail git tag .othertag &&
test_must_fail git tag "other tag" &&
test_must_fail git tag "othertag^" &&
test_must_fail git tag "other~tag" &&
- test `git tag -l | wc -l` -eq 1
+ test $(git tag -l | wc -l) -eq 1
'
test_expect_success 'creating a tag using HEAD directly should succeed' '
echo "foo:initial" >expect &&
>actual &&
test_config pager.external "sed s/^/foo:/ >actual" &&
- test_terminal git --exec-path="`pwd`" external log --format=%s -1 &&
+ test_terminal git --exec-path="$(pwd)" external log --format=%s -1 &&
test_cmp expect actual
'
test_expect_success '"soft" reset is allowed in bare' '
git reset --soft HEAD^ &&
- test "`git show --pretty=format:%s | head -n 1`" = "one"
+ test "$(git show --pretty=format:%s | head -n 1)" = "one"
'
test_done
compare_head()
{
- sha_master=`git rev-list --max-count=1 master`
- sha_head=`git rev-list --max-count=1 HEAD`
+ sha_master=$(git rev-list --max-count=1 master)
+ sha_head=$(git rev-list --max-count=1 HEAD)
test "$sha_master" = "$sha_head"
}
test_description='test clone --reference'
. ./test-lib.sh
-base_dir=`pwd`
+base_dir=$(pwd)
U=$base_dir/UPLOAD_LOG
chmod +x "$HOOK"
commit_msg_is () {
- test "`git log --pretty=format:%s%b -1`" = "$1"
+ test "$(git log --pretty=format:%s%b -1)" = "$1"
}
test_expect_success 'hook edits commit message' '
echo "more" >> file &&
git add file &&
git commit -m "more" &&
- test "`git log -1 --pretty=format:%s`" = "message (no editor)"
+ test "$(git log -1 --pretty=format:%s)" = "message (no editor)"
'
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -e -m "more more" &&
- test "`git log -1 --pretty=format:%s`" = message
+ test "$(git log -1 --pretty=format:%s)" = message
'
echo "more" >> file &&
git add file &&
git commit -t "$(git rev-parse --git-dir)/template" &&
- test "`git log -1 --pretty=format:%s`" = template
+ test "$(git log -1 --pretty=format:%s)" = template
'
echo "more" >> file &&
git add file &&
(echo more | git commit -F -) &&
- test "`git log -1 --pretty=format:%s`" = "message (no editor)"
+ test "$(git log -1 --pretty=format:%s)" = "message (no editor)"
'
echo "more" >> file &&
git add file &&
(echo more more | GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -e -F -) &&
- test "`git log -1 --pretty=format:%s`" = message
+ test "$(git log -1 --pretty=format:%s)" = message
'
test_expect_success 'with hook (-C)' '
- head=`git rev-parse HEAD` &&
+ head=$(git rev-parse HEAD) &&
echo "more" >> file &&
git add file &&
git commit -C $head &&
- test "`git log -1 --pretty=format:%s`" = "$head (no editor)"
+ test "$(git log -1 --pretty=format:%s)" = "$head (no editor)"
'
echo "more more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit &&
- test "`git log -1 --pretty=format:%s`" = default
+ test "$(git log -1 --pretty=format:%s)" = default
'
test_expect_success 'with hook (--amend)' '
- head=`git rev-parse HEAD` &&
+ head=$(git rev-parse HEAD) &&
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit --amend &&
- test "`git log -1 --pretty=format:%s`" = "$head"
+ test "$(git log -1 --pretty=format:%s)" = "$head"
'
test_expect_success 'with hook (-c)' '
- head=`git rev-parse HEAD` &&
+ head=$(git rev-parse HEAD) &&
echo "more" >> file &&
git add file &&
GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -c $head &&
- test "`git log -1 --pretty=format:%s`" = "$head"
+ test "$(git log -1 --pretty=format:%s)" = "$head"
'
git commit -m other &&
git checkout - &&
git merge --no-ff other &&
- test "`git log -1 --pretty=format:%s`" = "merge (no editor)"
+ test "$(git log -1 --pretty=format:%s)" = "merge (no editor)"
'
test_expect_success 'with hook and editor (merge)' '
git commit -m other &&
git checkout - &&
env GIT_EDITOR="\"\$FAKE_EDITOR\"" git merge --no-ff -e other &&
- test "`git log -1 --pretty=format:%s`" = "merge"
+ test "$(git log -1 --pretty=format:%s)" = "merge"
'
cat > "$HOOK" <<'EOF'
test_expect_success 'with failing hook' '
test_when_finished "git checkout -f master" &&
- head=`git rev-parse HEAD` &&
+ head=$(git rev-parse HEAD) &&
echo "more" >> file &&
git add file &&
test_must_fail env GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit -c $head
test_expect_success 'with failing hook (--no-verify)' '
test_when_finished "git checkout -f master" &&
- head=`git rev-parse HEAD` &&
+ head=$(git rev-parse HEAD) &&
echo "more" >> file &&
git add file &&
test_must_fail env GIT_EDITOR="\"\$FAKE_EDITOR\"" git commit --no-verify -c $head
test_cmp expected actual
'
+test_expect_success 'in-place editing with basic patch' '
+ cat basic_message >message &&
+ cat basic_patch >>message &&
+ cat basic_message >expected &&
+ echo >>expected &&
+ cat basic_patch >>expected &&
+ git interpret-trailers --in-place message &&
+ test_cmp expected message
+'
+
+test_expect_success 'in-place editing with additional trailer' '
+ cat basic_message >message &&
+ cat basic_patch >>message &&
+ cat basic_message >expected &&
+ echo >>expected &&
+ cat >>expected <<-\EOF &&
+ Reviewed-by: Alice
+ EOF
+ cat basic_patch >>expected &&
+ git interpret-trailers --trailer "Reviewed-by: Alice" --in-place message &&
+ test_cmp expected message
+'
+
+test_expect_success 'in-place editing on stdin disallowed' '
+ test_must_fail git interpret-trailers --trailer "Reviewed-by: Alice" --in-place < basic_message
+'
+
+test_expect_success 'in-place editing on non-existing file' '
+ test_must_fail git interpret-trailers --trailer "Reviewed-by: Alice" --in-place nonexisting &&
+ test_path_is_missing nonexisting
+'
+
+test_expect_success POSIXPERM,SANITY "in-place editing doesn't clobber original file on error" '
+ cat basic_message >message &&
+ chmod -r message &&
+ test_must_fail git interpret-trailers --trailer "Reviewed-by: Alice" --in-place message &&
+ chmod +r message &&
+ test_cmp message basic_message
+'
+
test_expect_success 'using "where = before"' '
git config trailer.bug.where "before" &&
cat complex_message_body >expected &&
git add c$i.c &&
git commit -m c$i &&
git tag c$i &&
- i=`expr $i + 1` || return 1
+ i=$(expr $i + 1) || return 1
done
'
while test $i -le 30
do
refs="$refs c$i"
- i=`expr $i + 1`
+ i=$(expr $i + 1)
done &&
git merge $refs &&
test "$(git rev-parse c1)" != "$(git rev-parse HEAD)" &&
while test $i -le 30
do
test "$(git rev-parse c$i)" = "$(git rev-parse HEAD^$i)" &&
- i=`expr $i + 1` || return 1
+ i=$(expr $i + 1) || return 1
done &&
git diff --exit-code &&
i=1 &&
while test $i -le 30
do
test -f c$i.c &&
- i=`expr $i + 1` || return 1
+ i=$(expr $i + 1) || return 1
done
'
test_expect_success 'loose objects in alternate ODB are not repacked' '
mkdir alt_objects &&
- echo `pwd`/alt_objects > .git/objects/info/alternates &&
+ echo $(pwd)/alt_objects > .git/objects/info/alternates &&
echo content3 > file3 &&
objsha1=$(GIT_OBJECT_DIRECTORY=alt_objects git hash-object -w file3) &&
git add file3 &&
'
test_expect_success 'local packed unreachable obs that exist in alternate ODB are not loosened' '
- echo `pwd`/alt_objects > .git/objects/info/alternates &&
+ echo $(pwd)/alt_objects > .git/objects/info/alternates &&
echo "$csha1" | git pack-objects --non-empty --all --reflog pack &&
rm -f .git/objects/pack/* &&
mv pack-* .git/objects/pack/ &&
} >non/expect.full &&
echo file2:world >non/expect.sub &&
(
- GIT_CEILING_DIRECTORIES="$(pwd)/non/git" &&
+ GIT_CEILING_DIRECTORIES="$(pwd)/non" &&
export GIT_CEILING_DIRECTORIES &&
cd non/git &&
test_must_fail git grep o &&
git grep --no-index o >../actual.full &&
- test_cmp ../expect.full ../actual.full
+ test_cmp ../expect.full ../actual.full &&
cd sub &&
test_must_fail git grep o &&
git grep --no-index o >../../actual.sub &&
echo ".*o*" >non/git/.gitignore &&
(
- GIT_CEILING_DIRECTORIES="$(pwd)/non/git" &&
+ GIT_CEILING_DIRECTORIES="$(pwd)/non" &&
export GIT_CEILING_DIRECTORIES &&
cd non/git &&
test_must_fail git grep o &&
test_cmp ../expect.full ../actual.full &&
{
- echo ".gitignore:.*o*"
+ echo ".gitignore:.*o*" &&
cat ../expect.full
} >../expect.with.ignored &&
git grep --no-index --no-exclude o >../actual.full &&
)
'
+test_expect_success 'outside of git repository with fallbackToNoIndex' '
+ rm -fr non &&
+ mkdir -p non/git/sub &&
+ echo hello >non/git/file1 &&
+ echo world >non/git/sub/file2 &&
+ cat <<-\EOF >non/expect.full &&
+ file1:hello
+ sub/file2:world
+ EOF
+ echo file2:world >non/expect.sub &&
+ (
+ GIT_CEILING_DIRECTORIES="$(pwd)/non" &&
+ export GIT_CEILING_DIRECTORIES &&
+ cd non/git &&
+ test_must_fail git -c grep.fallbackToNoIndex=false grep o &&
+ git -c grep.fallbackToNoIndex=true grep o >../actual.full &&
+ test_cmp ../expect.full ../actual.full &&
+ cd sub &&
+ test_must_fail git -c grep.fallbackToNoIndex=false grep o &&
+ git -c grep.fallbackToNoIndex=true grep o >../../actual.sub &&
+ test_cmp ../../expect.sub ../../actual.sub
+ ) &&
+
+ echo ".*o*" >non/git/.gitignore &&
+ (
+ GIT_CEILING_DIRECTORIES="$(pwd)/non" &&
+ export GIT_CEILING_DIRECTORIES &&
+ cd non/git &&
+ test_must_fail git -c grep.fallbackToNoIndex=false grep o &&
+ git -c grep.fallbackToNoIndex=true grep --exclude-standard o >../actual.full &&
+ test_cmp ../expect.full ../actual.full &&
+
+ {
+ echo ".gitignore:.*o*" &&
+ cat ../expect.full
+ } >../expect.with.ignored &&
+ git -c grep.fallbackToNoIndex grep --no-exclude o >../actual.full &&
+ test_cmp ../expect.with.ignored ../actual.full
+ )
+'
+
test_expect_success 'inside git repository but with --no-index' '
rm -fr is &&
mkdir -p is/git/sub &&
'
test_expect_success 'blame to a commit with no author name' '
- TREE=`git rev-parse HEAD:` &&
+ TREE=$(git rev-parse HEAD:) &&
cat >badcommit <<EOF &&
tree $TREE
author <noname> 1234567890 +0000
some message
EOF
- COMMIT=`git hash-object -t commit -w badcommit` &&
+ COMMIT=$(git hash-object -t commit -w badcommit) &&
git --no-pager blame $COMMIT -- uno >/dev/null
'
}
test_expect_success $PREREQ 'Extract patches' '
- patches=`git format-patch -s --cc="One <one@example.com>" --cc=two@example.com -n HEAD^1`
+ patches=$(git format-patch -s --cc="One <one@example.com>" --cc=two@example.com -n HEAD^1)
'
# Test no confirm early to ensure remaining tests will not hang
'
test_expect_success $PREREQ 'patches To headers are used by default' '
- patch=`git format-patch -1 --to="bodies@example.com"` &&
+ patch=$(git format-patch -1 --to="bodies@example.com") &&
test_when_finished "rm $patch" &&
git send-email \
--dry-run \
'
test_expect_success $PREREQ 'patches To headers are appended to' '
- patch=`git format-patch -1 --to="bodies@example.com"` &&
+ patch=$(git format-patch -1 --to="bodies@example.com") &&
test_when_finished "rm $patch" &&
git send-email \
--dry-run \
'
test_expect_success $PREREQ 'To headers from files reset each patch' '
- patch1=`git format-patch -1 --to="bodies@example.com"` &&
- patch2=`git format-patch -1 --to="other@example.com" HEAD~` &&
+ patch1=$(git format-patch -1 --to="bodies@example.com") &&
+ patch2=$(git format-patch -1 --to="other@example.com" HEAD~) &&
test_when_finished "rm $patch1 && rm $patch2" &&
git send-email \
--dry-run \
clean_fake_sendmail &&
rm -fr outdir &&
git format-patch --cover-letter -2 -o outdir &&
- cover=`echo outdir/0000-*.patch` &&
+ cover=$(echo outdir/0000-*.patch) &&
mv $cover cover-to-edit.patch &&
perl -pe "s/^From:/$header: extra\@address.com\nFrom:/" cover-to-edit.patch >"$cover" &&
git send-email \
git update-index --add d &&
git commit -m '/bar/d should be in the log' &&
git svn dcommit -i bar &&
- test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" &&
+ test -z \"\$(git diff refs/heads/my-bar refs/remotes/bar)\" &&
mkdir newdir &&
echo new > newdir/dir &&
git update-index --add newdir/dir &&
git commit -m 'add a new directory' &&
git svn dcommit -i bar &&
- test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" &&
+ test -z \"\$(git diff refs/heads/my-bar refs/remotes/bar)\" &&
echo foo >> newdir/dir &&
git update-index newdir/dir &&
git commit -m 'modify a file in new directory' &&
git svn dcommit -i bar &&
- test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\"
+ test -z \"\$(git diff refs/heads/my-bar refs/remotes/bar)\"
"
test_expect_success 'dcommit should not fail with a touched file' '
git update-index d &&
git commit -m 'update /bar/d' &&
git svn set-tree -i bar HEAD &&
- test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\"
+ test -z \"\$(git diff refs/heads/my-bar refs/remotes/bar)\"
"
test_expect_success 'git-svn works in a bare repository' '
EOF
printf "Hello\r\nWorld\r\n" > crlf
- a_crlf=`git hash-object -w crlf`
+ a_crlf=$(git hash-object -w crlf)
printf "Hello\rWorld\r" > cr
- a_cr=`git hash-object -w cr`
+ a_cr=$(git hash-object -w cr)
printf "Hello\nWorld\n" > lf
- a_lf=`git hash-object -w lf`
+ a_lf=$(git hash-object -w lf)
printf "Hello\r\nWorld" > ne_crlf
- a_ne_crlf=`git hash-object -w ne_crlf`
+ a_ne_crlf=$(git hash-object -w ne_crlf)
printf "Hello\nWorld" > ne_lf
- a_ne_lf=`git hash-object -w ne_lf`
+ a_ne_lf=$(git hash-object -w ne_lf)
printf "Hello\rWorld" > ne_cr
- a_ne_cr=`git hash-object -w ne_cr`
+ a_ne_cr=$(git hash-object -w ne_cr)
touch empty
- a_empty=`git hash-object -w empty`
+ a_empty=$(git hash-object -w empty)
printf "\n" > empty_lf
- a_empty_lf=`git hash-object -w empty_lf`
+ a_empty_lf=$(git hash-object -w empty_lf)
printf "\r" > empty_cr
- a_empty_cr=`git hash-object -w empty_cr`
+ a_empty_cr=$(git hash-object -w empty_cr)
printf "\r\n" > empty_crlf
- a_empty_crlf=`git hash-object -w empty_crlf`
+ a_empty_crlf=$(git hash-object -w empty_crlf)
svn_cmd import --no-auto-props -m 'import for git svn' . "$svnrepo" >/dev/null
cd ..
git pull . ${remotes_git_svn}'
expect='/* $Id$ */'
-got="`sed -ne 2p kw.c`"
+got="$(sed -ne 2p kw.c)"
test_expect_success 'raw $Id$ found in kw.c' "test '$expect' = '$got'"
test_expect_success "propset CR on crlf files" '
cd test_wc
printf '$Id$\rHello\rWorld\r' > cr
printf '$Id$\rHello\rWorld' > ne_cr
- a_cr=`printf '$Id$\r\nHello\r\nWorld\r\n' | git hash-object --stdin`
- a_ne_cr=`printf '$Id$\r\nHello\r\nWorld' | git hash-object --stdin`
+ a_cr=$(printf '$Id$\r\nHello\r\nWorld\r\n' | git hash-object --stdin)
+ a_ne_cr=$(printf '$Id$\r\nHello\r\nWorld' | git hash-object --stdin)
test_expect_success 'Set CRLF on cr files' \
'svn_cmd propset svn:eol-style CRLF cr &&
svn_cmd propset svn:eol-style CRLF ne_cr &&
test_expect_success 'fetch and pull latest from svn' \
'git svn fetch && git pull . ${remotes_git_svn}'
-b_cr="`git hash-object cr`"
-b_ne_cr="`git hash-object ne_cr`"
+b_cr="$(git hash-object cr)"
+b_ne_cr="$(git hash-object ne_cr)"
test_expect_success 'CRLF + $Id$' "test '$a_cr' = '$b_cr'"
test_expect_success 'CRLF + $Id$ (no newline)' "test '$a_ne_cr' = '$b_ne_cr'"
test_expect_success 'init and fetch a moved directory' '
git svn init --minimize-url -i thunk "$svnrepo"/thunk &&
git svn fetch -i thunk &&
- test "`git rev-parse --verify refs/remotes/thunk@2`" \
- = "`git rev-parse --verify refs/remotes/thunk~1`" &&
- test "`git cat-file blob refs/remotes/thunk:readme |\
- sed -n -e "3p"`" = goodbye &&
- test -z "`git config --get svn-remote.svn.fetch \
- "^trunk:refs/remotes/thunk@2$"`"
+ test "$(git rev-parse --verify refs/remotes/thunk@2)" \
+ = "$(git rev-parse --verify refs/remotes/thunk~1)" &&
+ test "$(git cat-file blob refs/remotes/thunk:readme |\
+ sed -n -e "3p")" = goodbye &&
+ test -z "$(git config --get svn-remote.svn.fetch \
+ "^trunk:refs/remotes/thunk@2$")"
'
test_expect_success 'init and fetch from one svn-remote' '
git config --add svn-remote.svn.fetch \
thunk:refs/remotes/svn/thunk &&
git svn fetch -i svn/thunk &&
- test "`git rev-parse --verify refs/remotes/svn/trunk`" \
- = "`git rev-parse --verify refs/remotes/svn/thunk~1`" &&
- test "`git cat-file blob refs/remotes/svn/thunk:readme |\
- sed -n -e "3p"`" = goodbye
+ test "$(git rev-parse --verify refs/remotes/svn/trunk)" \
+ = "$(git rev-parse --verify refs/remotes/svn/thunk~1)" &&
+ test "$(git cat-file blob refs/remotes/svn/thunk:readme |\
+ sed -n -e "3p")" = goodbye
'
test_expect_success 'follow deleted parent' '
junk:refs/remotes/svn/junk &&
git svn fetch -i svn/thunk &&
git svn fetch -i svn/junk &&
- test -z "`git diff svn/junk svn/trunk`" &&
- test "`git merge-base svn/junk svn/trunk`" \
- = "`git rev-parse svn/trunk`"
+ test -z "$(git diff svn/junk svn/trunk)" &&
+ test "$(git merge-base svn/junk svn/trunk)" \
+ = "$(git rev-parse svn/trunk)"
'
test_expect_success 'follow larger parent' '
git rev-parse --verify refs/remotes/larger &&
git rev-parse --verify \
refs/remotes/larger-parent &&
- test "`git merge-base \
+ test "$(git merge-base \
refs/remotes/larger-parent \
- refs/remotes/larger`" = \
- "`git rev-parse refs/remotes/larger`"
+ refs/remotes/larger)" = \
+ "$(git rev-parse refs/remotes/larger)"
'
test_expect_success 'follow higher-level parent' '
svn_cmd rm -m "remove glob" "$svnrepo"/glob &&
git svn init --minimize-url -i glob "$svnrepo"/glob &&
git svn fetch -i glob &&
- test "`git cat-file blob refs/remotes/glob:blob/bye`" = hi &&
- test "`git ls-tree refs/remotes/glob | wc -l `" -eq 1
+ test "$(git cat-file blob refs/remotes/glob:blob/bye)" = hi &&
+ test "$(git ls-tree refs/remotes/glob | wc -l )" -eq 1
'
# ref: r9270 of the Subversion repository: (http://svn.collab.net/repos/svn)
git svn init --minimize-url -i r9270-t \
"$svnrepo"/r9270/trunk/subversion/bindings/swig/perl/native/t &&
git svn fetch -i r9270-t &&
- test `git rev-list r9270-t | wc -l` -eq 2 &&
- test "`git ls-tree --name-only r9270-t~1`" = \
- "`git ls-tree --name-only r9270-t`"
+ test $(git rev-list r9270-t | wc -l) -eq 2 &&
+ test "$(git ls-tree --name-only r9270-t~1)" = \
+ "$(git ls-tree --name-only r9270-t)"
'
test_expect_success "track initial change if it was only made to parent" '
git svn init --minimize-url -i r9270-d \
"$svnrepo"/r9270/drunk/subversion/bindings/swig/perl/native/t &&
git svn fetch -i r9270-d &&
- test `git rev-list r9270-d | wc -l` -eq 3 &&
- test "`git ls-tree --name-only r9270-t`" = \
- "`git ls-tree --name-only r9270-d`" &&
- test "`git rev-parse r9270-t`" = \
- "`git rev-parse r9270-d~1`"
+ test $(git rev-list r9270-d | wc -l) -eq 3 &&
+ test "$(git ls-tree --name-only r9270-t)" = \
+ "$(git ls-tree --name-only r9270-d)" &&
+ test "$(git rev-parse r9270-t)" = \
+ "$(git rev-parse r9270-d~1)"
'
test_expect_success "follow-parent is atomic" '
git svn fetch -i stunk &&
git svn init --minimize-url -i flunked "$svnrepo"/flunked &&
git svn fetch -i flunked &&
- test "`git rev-parse --verify refs/remotes/flunk@18`" \
- = "`git rev-parse --verify refs/remotes/stunk`" &&
- test "`git rev-parse --verify refs/remotes/flunk~1`" \
- = "`git rev-parse --verify refs/remotes/stunk`" &&
- test "`git rev-parse --verify refs/remotes/flunked~1`" \
- = "`git rev-parse --verify refs/remotes/stunk~1`"
+ test "$(git rev-parse --verify refs/remotes/flunk@18)" \
+ = "$(git rev-parse --verify refs/remotes/stunk)" &&
+ test "$(git rev-parse --verify refs/remotes/flunk~1)" \
+ = "$(git rev-parse --verify refs/remotes/stunk)" &&
+ test "$(git rev-parse --verify refs/remotes/flunked~1)" \
+ = "$(git rev-parse --verify refs/remotes/stunk~1)"
'
test_expect_success "track multi-parent paths" '
svn_cmd cp -m "resurrect /glob" "$svnrepo"/r9270 "$svnrepo"/glob &&
git svn multi-fetch &&
- test `git cat-file commit refs/remotes/glob | \
- grep "^parent " | wc -l` -eq 2
+ test $(git cat-file commit refs/remotes/glob | \
+ grep "^parent " | wc -l) -eq 2
'
test_expect_success "multi-fetch continues to work" "
git commit -a -m "another"
'
-head=`git rev-parse --verify HEAD^0`
-prev=`git rev-parse --verify HEAD^1`
+head=$(git rev-parse --verify HEAD^0)
+prev=$(git rev-parse --verify HEAD^1)
# the internals of the commit-diff command are the same as the regular
# commit, so only a basic test of functionality is needed since we've
git update-ref -d refs/${remotes_git_svn} refs/${remotes_git_svn}
'
-head=`git rev-parse --verify refs/heads/git-svn-HEAD^0`
+head=$(git rev-parse --verify refs/heads/git-svn-HEAD^0)
test_expect_success 'git-svn-HEAD is a real HEAD' "test -n '$head'"
-svnrepo_escaped=`echo $svnrepo | sed 's/ /%20/'`
+svnrepo_escaped=$(echo $svnrepo | sed 's/ /%20/')
test_expect_success 'initialize old-style (v0) git svn layout' '
mkdir -p "$GIT_DIR"/git-svn/info "$GIT_DIR"/svn/info &&
git rev-parse --verify refs/${remotes_git_svn}^0 &&
git rev-parse --verify refs/remotes/svn^0 &&
test "$(git config --get svn-remote.svn.url)" = "$svnrepo_escaped" &&
- test `git config --get svn-remote.svn.fetch` = \
+ test $(git config --get svn-remote.svn.fetch) = \
":refs/${remotes_git_svn}"
'
git svn init "$svnrepo" -T trunk -t tags -b branches &&
git config --get-all svn-remote.svn.fetch > fetch.out &&
grep "^trunk:refs/remotes/origin/trunk$" fetch.out &&
- test -n "`git config --get svn-remote.svn.branches \
- "^branches/\*:refs/remotes/origin/\*$"`" &&
- test -n "`git config --get svn-remote.svn.tags \
- "^tags/\*:refs/remotes/origin/tags/\*$"`" &&
+ test -n "$(git config --get svn-remote.svn.branches \
+ "^branches/\*:refs/remotes/origin/\*$")" &&
+ test -n "$(git config --get svn-remote.svn.tags \
+ "^tags/\*:refs/remotes/origin/tags/\*$")" &&
git config --unset svn-remote.svn.branches \
"^branches/\*:refs/remotes/origin/\*$" &&
git config --unset svn-remote.svn.tags \
for i in trunk a b tags/0.1 tags/0.2 tags/0.3; do
git rev-parse --verify refs/remotes/origin/\$i^0 >> refs.out || exit 1;
done &&
- test -z \"\`sort < refs.out | uniq -d\`\" &&
+ test -z \"\$(sort < refs.out | uniq -d)\" &&
for i in trunk a b tags/0.1 tags/0.2 tags/0.3; do
for j in trunk a b tags/0.1 tags/0.2 tags/0.3; do
if test \$j != \$i; then continue; fi
- test -z \"\`git diff refs/remotes/origin/\$i \
- refs/remotes/origin/\$j\`\" ||exit 1; done; done
+ test -z \"\$(git diff refs/remotes/origin/\$i \
+ refs/remotes/origin/\$j)\" ||exit 1; done; done
"
test_expect_success 'migrate --minimize on old inited layout' '
git config --unset-all svn-remote.svn.fetch &&
git config --unset-all svn-remote.svn.url &&
rm -rf "$GIT_DIR"/svn &&
- for i in `cat fetch.out`; do
- path=`expr $i : "\([^:]*\):.*$"`
- ref=`expr $i : "[^:]*:\(refs/remotes/.*\)$"`
+ for i in $(cat fetch.out); do
+ path=$(expr $i : "\([^:]*\):.*$")
+ ref=$(expr $i : "[^:]*:\(refs/remotes/.*\)$")
if test -z "$ref"; then continue; fi
if test -n "$path"; then path="/$path"; fi
( mkdir -p "$GIT_DIR"/svn/$ref/info/ &&
echo "$svnrepo"$path > "$GIT_DIR"/svn/$ref/info/url ) || exit 1;
done &&
git svn migrate --minimize &&
- test -z "`git config -l | grep "^svn-remote\.git-svn\."`" &&
+ test -z "$(git config -l | grep "^svn-remote\.git-svn\.")" &&
git config --get-all svn-remote.svn.fetch > fetch.out &&
grep "^trunk:refs/remotes/origin/trunk$" fetch.out &&
grep "^branches/a:refs/remotes/origin/a$" fetch.out &&
git log --pretty=oneline refs/remotes/tags/end | \
sed -e "s/^.\{41\}//" > output.end &&
test_cmp expect.end output.end &&
- test "`git rev-parse refs/remotes/tags/end~1`" = \
- "`git rev-parse refs/remotes/branches/start`" &&
- test "`git rev-parse refs/remotes/branches/start~2`" = \
- "`git rev-parse refs/remotes/trunk`" &&
+ test "$(git rev-parse refs/remotes/tags/end~1)" = \
+ "$(git rev-parse refs/remotes/branches/start)" &&
+ test "$(git rev-parse refs/remotes/branches/start~2)" = \
+ "$(git rev-parse refs/remotes/trunk)" &&
test_must_fail git rev-parse refs/remotes/tags/end@3
'
svn_cmd commit -m "try to try"
) &&
git svn fetch two &&
- test `git rev-list refs/remotes/two/tags/end | wc -l` -eq 6 &&
- test `git rev-list refs/remotes/two/branches/start | wc -l` -eq 3 &&
- test `git rev-parse refs/remotes/two/branches/start~2` = \
- `git rev-parse refs/remotes/two/trunk` &&
- test `git rev-parse refs/remotes/two/tags/end~3` = \
- `git rev-parse refs/remotes/two/branches/start` &&
+ test $(git rev-list refs/remotes/two/tags/end | wc -l) -eq 6 &&
+ test $(git rev-list refs/remotes/two/branches/start | wc -l) -eq 3 &&
+ test $(git rev-parse refs/remotes/two/branches/start~2) = \
+ $(git rev-parse refs/remotes/two/trunk) &&
+ test $(git rev-parse refs/remotes/two/tags/end~3) = \
+ $(git rev-parse refs/remotes/two/branches/start) &&
git log --pretty=oneline refs/remotes/two/tags/end | \
sed -e "s/^.\{41\}//" > output.two &&
test_cmp expect.two output.two
git log --pretty=oneline refs/remotes/tags/end | \
sed -e "s/^.\{41\}//" > output.end &&
test_cmp expect.end output.end &&
- test "`git rev-parse refs/remotes/tags/end~1`" = \
- "`git rev-parse refs/remotes/branches/v1/start`" &&
- test "`git rev-parse refs/remotes/branches/v1/start~2`" = \
- "`git rev-parse refs/remotes/trunk`" &&
+ test "$(git rev-parse refs/remotes/tags/end~1)" = \
+ "$(git rev-parse refs/remotes/branches/v1/start)" &&
+ test "$(git rev-parse refs/remotes/branches/v1/start~2)" = \
+ "$(git rev-parse refs/remotes/trunk)" &&
test_must_fail git rev-parse refs/remotes/tags/end@3
'
svn_cmd commit -m "try to try"
) &&
git svn fetch two &&
- test `git rev-list refs/remotes/two/tags/end | wc -l` -eq 6 &&
- test `git rev-list refs/remotes/two/branches/v1/start | wc -l` -eq 3 &&
- test `git rev-parse refs/remotes/two/branches/v1/start~2` = \
- `git rev-parse refs/remotes/two/trunk` &&
- test `git rev-parse refs/remotes/two/tags/end~3` = \
- `git rev-parse refs/remotes/two/branches/v1/start` &&
+ test $(git rev-list refs/remotes/two/tags/end | wc -l) -eq 6 &&
+ test $(git rev-list refs/remotes/two/branches/v1/start | wc -l) -eq 3 &&
+ test $(git rev-parse refs/remotes/two/branches/v1/start~2) = \
+ $(git rev-parse refs/remotes/two/trunk) &&
+ test $(git rev-parse refs/remotes/two/tags/end~3) = \
+ $(git rev-parse refs/remotes/two/branches/v1/start) &&
git log --pretty=oneline refs/remotes/two/tags/end | \
sed -e "s/^.\{41\}//" > output.two &&
test_cmp expect.two output.two
git config --add svn-remote.four.tags \
"tags/*:refs/remotes/four/tags/*" &&
git svn fetch four &&
- test `git rev-list refs/remotes/four/tags/next | wc -l` -eq 5 &&
- test `git rev-list refs/remotes/four/branches/v2/start | wc -l` -eq 3 &&
- test `git rev-parse refs/remotes/four/branches/v2/start~2` = \
- `git rev-parse refs/remotes/four/trunk` &&
- test `git rev-parse refs/remotes/four/tags/next~2` = \
- `git rev-parse refs/remotes/four/branches/v2/start` &&
+ test $(git rev-list refs/remotes/four/tags/next | wc -l) -eq 5 &&
+ test $(git rev-list refs/remotes/four/branches/v2/start | wc -l) -eq 3 &&
+ test $(git rev-parse refs/remotes/four/branches/v2/start~2) = \
+ $(git rev-parse refs/remotes/four/trunk) &&
+ test $(git rev-parse refs/remotes/four/tags/next~2) = \
+ $(git rev-parse refs/remotes/four/branches/v2/start) &&
git log --pretty=oneline refs/remotes/four/tags/next | \
sed -e "s/^.\{41\}//" > output.four &&
test_cmp expect.four output.four
test_expect_success 'find commit based on SVN revision number' "
git svn find-rev r12 |
- grep `git rev-parse HEAD`
+ grep $(git rev-parse HEAD)
"
test_expect_success 'empty rebase' "
test_debug 'gitk --all & sleep 1'
test_expect_success 'verify pre-merge ancestry' "
- test x\`git rev-parse --verify refs/heads/svn^2\` = \
- x\`git rev-parse --verify refs/heads/merge\` &&
+ test x\$(git rev-parse --verify refs/heads/svn^2) = \
+ x\$(git rev-parse --verify refs/heads/merge) &&
git cat-file commit refs/heads/svn^ | grep '^friend$'
"
test_debug 'gitk --all & sleep 1'
test_expect_success 'verify post-merge ancestry' "
- test x\`git rev-parse --verify refs/heads/svn\` = \
- x\`git rev-parse --verify refs/remotes/origin/trunk \` &&
- test x\`git rev-parse --verify refs/heads/svn^2\` = \
- x\`git rev-parse --verify refs/heads/merge\` &&
+ test x\$(git rev-parse --verify refs/heads/svn) = \
+ x\$(git rev-parse --verify refs/remotes/origin/trunk) &&
+ test x\$(git rev-parse --verify refs/heads/svn^2) = \
+ x\$(git rev-parse --verify refs/heads/merge) &&
git cat-file commit refs/heads/svn^ | grep '^friend$'
"
# SVN 1.7 will truncate "not-a%40{0]" to just "not-a".
# Look at what SVN wound up naming the branch and use that.
# Be sure to escape the @ if it shows up.
-non_reflog=`svn_cmd ls "$svnrepo/pr ject/branches" | grep not-a | sed 's/\///' | sed 's/@/%40/'`
+non_reflog=$(svn_cmd ls "$svnrepo/pr ject/branches" | grep not-a | sed 's/\///' | sed 's/@/%40/')
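The pipeline simply normalizes whatever name "svn ls" reports: it strips the trailing slash that ls appends to directories and escapes an "@" (which git would otherwise read as reflog syntax) as %40. A quick illustration with a made-up branch name:

	echo 'not-a@funky/' | sed 's/\///' | sed 's/@/%40/'
	# prints "not-a%40funky"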
test_expect_success 'test clone with funky branch names' '
git svn clone -s "$svnrepo/pr ject" project &&
# Tested with: svn, version 1.4.4 (r25188)
# Tested with: svn, version 1.6.[12345689]
-v=`svn_cmd --version | sed -n -e 's/^svn, version \(1\.[0-9]*\.[0-9]*\).*$/\1/p'`
+v=$(svn_cmd --version | sed -n -e 's/^svn, version \(1\.[0-9]*\.[0-9]*\).*$/\1/p')
case $v in
1.[456].*)
;;
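The sed expression reduces svn's version banner to a bare "major.minor.patch" triple so the case statement can whitelist the releases the test is known to work with. For example, with a made-up banner:

	echo 'svn, version 1.6.11 (r934486)' |
	sed -n -e 's/^svn, version \(1\.[0-9]*\.[0-9]*\).*$/\1/p'
	# prints "1.6.11", which falls into the 1.[456].* arm above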
. ./lib-git-svn.sh
compare_git_head_with () {
- nr=`wc -l < "$1"`
+ nr=$(wc -l < "$1")
a=7
b=$(($a + $nr - 1))
git cat-file commit HEAD | sed -ne "$a,${b}p" >current &&
compare_svn_head_with () {
# extract just the log message and strip out committer info.
# don't use --limit here since svn 1.1.x doesn't have it,
- LC_ALL="$a_utf8_locale" svn log `git svn info --url` | perl -w -e '
+ LC_ALL="$a_utf8_locale" svn log $(git svn info --url) | perl -w -e '
use bytes;
$/ = ("-"x72) . "\n";
my @x = <STDIN>;
test_expect_success 'imported 2 revisions successfully' '
(
cd x
- test "`git rev-list refs/remotes/git-svn | wc -l`" -eq 2 &&
+ test "$(git rev-list refs/remotes/git-svn | wc -l)" -eq 2 &&
git rev-list -1 --pretty=raw refs/remotes/git-svn | \
grep "^author BBBBBBB BBBBBBB <bb@example\.com> " &&
git rev-list -1 --pretty=raw refs/remotes/git-svn~1 | \
(
cd x
git svn fetch --authors-file=../svn-authors &&
- test "`git rev-list refs/remotes/git-svn | wc -l`" -eq 4 &&
+ test "$(git rev-list refs/remotes/git-svn | wc -l)" -eq 4 &&
git rev-list -1 --pretty=raw refs/remotes/git-svn | \
grep "^author DDDDDDD DDDDDDD <dd@example\.com> " &&
git rev-list -1 --pretty=raw refs/remotes/git-svn~1 | \
test_expect_success 'failure happened without negative side effects' '
(
cd aa-work &&
- test 6 -eq "`tmp_config_get svn-remote.svn.branches-maxRev`" &&
- test 6 -eq "`tmp_config_get svn-remote.svn.tags-maxRev`"
+ test 6 -eq "$(tmp_config_get svn-remote.svn.branches-maxRev)" &&
+ test 6 -eq "$(tmp_config_get svn-remote.svn.tags-maxRev)"
)
'
(
cd aa-work &&
git svn fetch --authors-file=../svn-authors &&
- test 8 -eq "`tmp_config_get svn-remote.svn.branches-maxRev`" &&
- test 8 -eq "`tmp_config_get svn-remote.svn.tags-maxRev`"
+ test 8 -eq "$(tmp_config_get svn-remote.svn.branches-maxRev)" &&
+ test 8 -eq "$(tmp_config_get svn-remote.svn.tags-maxRev)"
)
'
test_expect_success SYMLINKS '"bar" is a symlink that points to "asdf"' '
test -L x/bar &&
- (cd x && test xasdf = x"`git cat-file blob HEAD:bar`")
+ (cd x && test xasdf = x"$(git cat-file blob HEAD:bar)")
'
test_expect_success 'get "bar" => symlink fix from svn' '
test_expect_success SYMLINKS '"bar" remains a proper symlink' '
test -L x/bar &&
- (cd x && test xdoink = x"`git cat-file blob HEAD:bar`")
+ (cd x && test xdoink = x"$(git cat-file blob HEAD:bar)")
'
test_done
'
test_expect_success '(supposedly) non-conflicting change from SVN' '
- test x"`sed -n -e 58p < file`" = x58 &&
- test x"`sed -n -e 61p < file`" = x61 &&
+ test x"$(sed -n -e 58p < file)" = x58 &&
+ test x"$(sed -n -e 61p < file)" = x61 &&
svn_cmd co "$svnrepo" tmp &&
(cd tmp &&
perl -i.bak -p -e "s/^58$/5588/" file &&
perl -i.bak -p -e "s/^61$/6611/" file &&
poke file &&
- test x"`sed -n -e 58p < file`" = x5588 &&
- test x"`sed -n -e 61p < file`" = x6611 &&
+ test x"$(sed -n -e 58p < file)" = x5588 &&
+ test x"$(sed -n -e 61p < file)" = x6611 &&
svn_cmd commit -m "58 => 5588, 61 => 6611"
)
'
"
test_expect_success 'change file but in unrelated area' "
- test x\"\`sed -n -e 4p < file\`\" = x4 &&
- test x\"\`sed -n -e 7p < file\`\" = x7 &&
+ test x\"\$(sed -n -e 4p < file)\" = x4 &&
+ test x\"\$(sed -n -e 7p < file)\" = x7 &&
perl -i.bak -p -e 's/^4\$/4444/' file &&
perl -i.bak -p -e 's/^7\$/7777/' file &&
- test x\"\`sed -n -e 4p < file\`\" = x4444 &&
- test x\"\`sed -n -e 7p < file\`\" = x7777 &&
+ test x\"\$(sed -n -e 4p < file)\" = x4444 &&
+ test x\"\$(sed -n -e 7p < file)\" = x7777 &&
git commit -m '4 => 4444, 7 => 7777' file &&
git svn dcommit &&
svn_cmd up tmp &&
cd tmp &&
- test x\"\`sed -n -e 4p < file\`\" = x4444 &&
- test x\"\`sed -n -e 7p < file\`\" = x7777 &&
- test x\"\`sed -n -e 58p < file\`\" = x5588 &&
- test x\"\`sed -n -e 61p < file\`\" = x6611
+ test x\"\$(sed -n -e 4p < file)\" = x4444 &&
+ test x\"\$(sed -n -e 7p < file)\" = x7777 &&
+ test x\"\$(sed -n -e 58p < file)\" = x5588 &&
+ test x\"\$(sed -n -e 61p < file)\" = x6611
"
test_expect_success 'attempt to dcommit with a dirty index' '
test_expect_success 'imported 6 revisions successfully' '
(
cd x
- test "`git rev-list refs/remotes/git-svn | wc -l`" -eq 6
+ test "$(git rev-list refs/remotes/git-svn | wc -l)" -eq 6
)
'
git svn clone -s "$svnrepo" g &&
(
cd g &&
- test x`git rev-parse --verify refs/remotes/origin/trunk^0` = \
- x`git rev-parse --verify refs/heads/master^0`
+ test x$(git rev-parse --verify refs/remotes/origin/trunk^0) = \
+ x$(git rev-parse --verify refs/heads/master^0)
)
'
uuid=b48289b2-9c08-4d72-af37-0358a40b9c15
test_expect_success 'svk merges were represented coming in' "
- [ `git cat-file commit HEAD | grep parent | wc -l` -eq 2 ]
+ [ $(git cat-file commit HEAD | grep parent | wc -l) -eq 2 ]
"
test_done
test_expect_success 'A: verify marks output' '
cat >expect <<-EOF &&
- :2 `git rev-parse --verify master:file2`
- :3 `git rev-parse --verify master:file3`
- :4 `git rev-parse --verify master:file4`
- :5 `git rev-parse --verify master^0`
+ :2 $(git rev-parse --verify master:file2)
+ :3 $(git rev-parse --verify master:file3)
+ :4 $(git rev-parse --verify master:file4)
+ :5 $(git rev-parse --verify master^0)
EOF
test_cmp expect marks.out
'
EOF
git diff-tree -M -r master verify--import-marks >actual &&
compare_diff_raw expect actual &&
- test `git rev-parse --verify master:file2` \
- = `git rev-parse --verify verify--import-marks:copy-of-file2`
+ test $(git rev-parse --verify master:file2) \
+ = $(git rev-parse --verify verify--import-marks:copy-of-file2)
'
test_expect_success 'A: export marks with large values' '
git prune" &&
git fast-import <input &&
test -f .git/TEMP_TAG &&
- test `git rev-parse master` = `git rev-parse TEMP_TAG^`
+ test $(git rev-parse master) = $(git rev-parse TEMP_TAG^)
'
test_expect_success 'B: accept empty committer' '
###
test_expect_success 'C: incremental import create pack from stdin' '
- newf=`echo hi newf | git hash-object -w --stdin` &&
- oldf=`git rev-parse --verify master:file2` &&
+ newf=$(echo hi newf | git hash-object -w --stdin) &&
+ oldf=$(git rev-parse --verify master:file2) &&
test_tick &&
cat >input <<-INPUT_END &&
commit refs/heads/branch
'
test_expect_success 'C: validate reuse existing blob' '
- test $newf = `git rev-parse --verify branch:file2/newf` &&
- test $oldf = `git rev-parse --verify branch:file2/oldf`
+ test $newf = $(git rev-parse --verify branch:file2/newf) &&
+ test $oldf = $(git rev-parse --verify branch:file2/oldf)
'
test_expect_success 'C: verify commit' '
cat >expect <<-EOF &&
- parent `git rev-parse --verify master^0`
+ parent $(git rev-parse --verify master^0)
author $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
###
test_expect_success 'F: non-fast-forward update skips' '
- old_branch=`git rev-parse --verify branch^0` &&
+ old_branch=$(git rev-parse --verify branch^0) &&
test_tick &&
cat >input <<-INPUT_END &&
commit refs/heads/branch
test_must_fail git fast-import <input &&
# branch must remain unaffected
- test $old_branch = `git rev-parse --verify branch^0`
+ test $old_branch = $(git rev-parse --verify branch^0)
'
test_expect_success 'F: verify pack' '
test_expect_success 'F: verify other commit' '
cat >expect <<-EOF &&
- tree `git rev-parse branch~1^{tree}`
- parent `git rev-parse branch~1`
+ tree $(git rev-parse branch~1^{tree})
+ parent $(git rev-parse branch~1)
author $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
###
test_expect_success 'G: non-fast-forward update forced' '
- old_branch=`git rev-parse --verify branch^0` &&
+ old_branch=$(git rev-parse --verify branch^0) &&
test_tick &&
cat >input <<-INPUT_END &&
commit refs/heads/branch
'
test_expect_success 'G: branch changed, but logged' '
- test $old_branch != `git rev-parse --verify branch^0` &&
- test $old_branch = `git rev-parse --verify branch@{1}`
+ test $old_branch != $(git rev-parse --verify branch^0) &&
+ test $old_branch = $(git rev-parse --verify branch@{1})
'
###
test_expect_success 'I: verify edge list' '
cat >expect <<-EOF &&
- .git/objects/pack/pack-.pack: `git rev-parse --verify export-boundary`
+ .git/objects/pack/pack-.pack: $(git rev-parse --verify export-boundary)
EOF
sed -e s/pack-.*pack/pack-.pack/ edges.list >actual &&
test_cmp expect actual
git fast-import <input
'
test_expect_success 'J: branch has 1 commit, empty tree' '
- test 1 = `git rev-list J | wc -l` &&
- test 0 = `git ls-tree J | wc -l`
+ test 1 = $(git rev-list J | wc -l) &&
+ test 0 = $(git ls-tree J | wc -l)
'
test_expect_success 'J: tag must fail on empty branch' '
git fast-import <input
'
test_expect_success 'K: verify K^1 = branch^1' '
- test `git rev-parse --verify branch^1` \
- = `git rev-parse --verify K^1`
+ test $(git rev-parse --verify branch^1) \
+ = $(git rev-parse --verify K^1)
'
###
git ls-tree L2 g/b/ >tmp &&
cat tmp | cut -f 2 >actual &&
test_cmp expect actual &&
- git fsck `git rev-parse L2`
+ git fsck $(git rev-parse L2)
'
###
INPUT_END
git fast-import <input &&
- test `git rev-parse N2^{tree}` = `git rev-parse N3^{tree}`
+ test $(git rev-parse N2^{tree}) = $(git rev-parse N3^{tree})
'
test_expect_success 'N: copy directory by id' '
INPUT_END
git fast-import <input &&
- test `git rev-parse N3` = `git rev-parse O1`
+ test $(git rev-parse N3) = $(git rev-parse O1)
'
test_expect_success 'O: blank lines not necessary after data commands' '
INPUT_END
git fast-import <input &&
- test `git rev-parse N3` = `git rev-parse O2`
+ test $(git rev-parse N3) = $(git rev-parse O2)
'
test_expect_success 'O: repack before next test' '
INPUT_END
git fast-import <input &&
- test 8 = `find .git/objects/pack -type f | wc -l` &&
- test `git rev-parse refs/tags/O3-2nd` = `git rev-parse O3^` &&
+ test 8 = $(find .git/objects/pack -type f | wc -l) &&
+ test $(git rev-parse refs/tags/O3-2nd) = $(git rev-parse O3^) &&
git log --reverse --pretty=oneline O3 | sed s/^.*z// >actual &&
test_cmp expect actual
'
data <<DATAEND
[submodule "sub"]
path = sub
- url = "`pwd`/sub"
+ url = "$(pwd)/sub"
DATAEND
commit refs/heads/subuse1
data <<DATAEND
[submodule "sub"]
path = sub
- url = "`pwd`/sub"
+ url = "$(pwd)/sub"
DATAEND
commit refs/heads/subuse2
test_expect_success 'Q: verify second notes commit' '
cat >expect <<-EOF &&
- parent `git rev-parse --verify refs/notes/foobar~2`
+ parent $(git rev-parse --verify refs/notes/foobar~2)
author $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
test_expect_success 'Q: verify fourth notes commit' '
cat >expect <<-EOF &&
- parent `git rev-parse --verify refs/notes/foobar^`
+ parent $(git rev-parse --verify refs/notes/foobar^)
author $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
git add file &&
git commit -m sub_initial
) &&
- git submodule add "`pwd`/sub" sub &&
+ git submodule add "$(pwd)/sub" sub &&
git commit -m initial &&
test_tick &&
(
test_expect_success 'set-up a few more tags for tag export tests' '
git checkout -f master &&
- HEAD_TREE=`git show -s --pretty=raw HEAD | grep tree | sed "s/tree //"` &&
+ HEAD_TREE=$(git show -s --pretty=raw HEAD | grep tree | sed "s/tree //") &&
git tag tree_tag -m "tagging a tree" $HEAD_TREE &&
git tag -a tree_tag-obj -m "tagging a tree" $HEAD_TREE &&
git tag tag-obj_tag -m "tagging a tag" tree_tag-obj &&
test_expect_success 'fast-export quotes pathnames' '
git init crazy-paths &&
(cd crazy-paths &&
- blob=`echo foo | git hash-object -w --stdin` &&
+ blob=$(echo foo | git hash-object -w --stdin) &&
git update-index --add \
--cacheinfo 100644 $blob "$(printf "path with\\nnewline")" \
--cacheinfo 100644 $blob "path with \"quote\"" \
echo object > tag-object &&
git add tag-object &&
test_tick && git commit -m "Object to be tagged" &&
- git tag tagged-object `git hash-object tag-object` &&
+ git tag tagged-object $(git hash-object tag-object) &&
gitweb_run "p=.git;a=snapshot;h=tagged-object;sf=tgz" &&
grep "400 - Object is not a tree-ish" gitweb.output
'
test_debug 'cat gitweb.output'
test_expect_success 'snapshots: good object id' '
- ID=`git rev-parse --verify HEAD` &&
+ ID=$(git rev-parse --verify HEAD) &&
gitweb_run "p=.git;a=snapshot;h=$ID;sf=tgz" &&
grep "Status: 200 OK" gitweb.output
'
test_debug 'cat gitweb.headers'
test_expect_success DATE_PARSER 'modification: tree snapshot' '
- ID=`git rev-parse --verify HEAD^{tree}` &&
+ ID=$(git rev-parse --verify HEAD^{tree}) &&
HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
export HTTP_IF_MODIFIED_SINCE &&
test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
--- /dev/null
+#!/bin/sh
+
+test_description='git p4 support for file type change'
+
+. ./lib-git-p4.sh
+
+test_expect_success 'start p4d' '
+ start_p4d
+'
+
+test_expect_success 'create files' '
+ (
+ cd "$cli" &&
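+		# force unix line endings in the p4 client spec so the
+		# file contents stay predictable across platforms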
+ p4 client -o | sed "/LineEnd/s/:.*/:unix/" | p4 client -i &&
+ cat >file1 <<-EOF &&
+ text without any funny substitution business
+ EOF
+ cat >file2 <<-EOF &&
+ second file whose type will change
+ EOF
+ p4 add file1 file2 &&
+ p4 submit -d "add files"
+ )
+'
+
+test_expect_success SYMLINKS 'change file to symbolic link' '
+ git p4 clone --dest="$git" //depot@all &&
+ test_when_finished cleanup_git &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+
+ rm file2 &&
+ ln -s file1 file2 &&
+ git add file2 &&
+		git commit -m "turn file2 into a symlink to file1" &&
+ git p4 submit &&
+ p4 filelog -m 1 //depot/file2 >filelog &&
+ grep "(symlink)" filelog
+ )
+'
+
+test_expect_success SYMLINKS 'change symbolic link to file' '
+ git p4 clone --dest="$git" //depot@all &&
+ test_when_finished cleanup_git &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+
+ rm file2 &&
+ cat >file2 <<-EOF &&
+	This is different content for the second file.
+ EOF
+ git add file2 &&
+ git commit -m "re-write file2" &&
+ git p4 submit &&
+ p4 filelog -m 1 //depot/file2 >filelog &&
+ grep "(text)" filelog
+ )
+'
+
+test_expect_success 'kill p4d' '
+ kill_p4d
+'
+
+test_done
echo fake: "$@"
EOF
chmod +x "fake browser" &&
- git config browser.w3m.path "`pwd`/fake browser" &&
+ git config browser.w3m.path "$(pwd)/fake browser" &&
test_web_browse w3m http://example.com/foo
'
#include "git-compat-util.h"
#include "run-command.h"
+#include "argv-array.h"
+#include "strbuf.h"
#include <string.h>
#include <errno.h>
+static int number_callbacks;
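+/*
+ * get_next_task callback for run_processes_parallel(): hand out the
+ * same command (passed in through the cb pointer) at most four times,
+ * pre-loading a canned line of output through err each time.
+ */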
+static int parallel_next(struct child_process *cp,
+ struct strbuf *err,
+ void *cb,
+ void **task_cb)
+{
+ struct child_process *d = cb;
+ if (number_callbacks >= 4)
+ return 0;
+
+ argv_array_pushv(&cp->args, d->argv);
+ strbuf_addf(err, "preloaded output of a child\n");
+ number_callbacks++;
+ return 1;
+}
+
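+/* get_next_task callback that only reports "no further jobs available". */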
+static int no_job(struct child_process *cp,
+ struct strbuf *err,
+ void *cb,
+ void **task_cb)
+{
+ strbuf_addf(err, "no further jobs available\n");
+ return 0;
+}
+
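+/*
+ * task_finished callback; returning non-zero here asks the parallel
+ * machinery to abort early (exercised by the run-command-abort mode).
+ */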
+static int task_finished(int result,
+ struct child_process *cp,
+ struct strbuf *err,
+ void *pp_cb,
+ void *pp_task_cb)
+{
+ strbuf_addf(err, "asking for a quick stop\n");
+ return 1;
+}
+
int main(int argc, char **argv)
{
struct child_process proc = CHILD_PROCESS_INIT;
+ int jobs;
if (argc < 3)
return 1;
- proc.argv = (const char **)argv+2;
+ proc.argv = (const char **)argv + 2;
if (!strcmp(argv[1], "start-command-ENOENT")) {
if (start_command(&proc) < 0 && errno == ENOENT)
if (!strcmp(argv[1], "run-command"))
exit(run_command(&proc));
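+	/* parallel modes: argv[2] is the job count, argv[3..] the command */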
+ jobs = atoi(argv[2]);
+ proc.argv = (const char **)argv + 3;
+
+ if (!strcmp(argv[1], "run-command-parallel"))
+ exit(run_processes_parallel(jobs, parallel_next,
+ NULL, NULL, &proc));
+
+ if (!strcmp(argv[1], "run-command-abort"))
+ exit(run_processes_parallel(jobs, parallel_next,
+ NULL, task_finished, &proc));
+
+ if (!strcmp(argv[1], "run-command-no-jobs"))
+ exit(run_processes_parallel(jobs, no_job,
+ NULL, task_finished, &proc));
+
fprintf(stderr, "check usage\n");
return 1;
}
struct sha1_array array = SHA1_ARRAY_INIT;
struct strbuf line = STRBUF_INIT;
- while (strbuf_getline(&line, stdin, '\n') != EOF) {
+ while (strbuf_getline(&line, stdin) != EOF) {
const char *arg;
unsigned char sha1[20];
while read expect cnt pfx
do
case "$expect" in '#'*) continue ;; esac
- actual=`
+ actual=$(
{
test -z "$pfx" || echo "$pfx"
dd if=/dev/zero bs=1048576 count=$cnt 2>/dev/null |
perl -pe 'y/\000/g/'
} | ./test-sha1 $cnt
- `
+ )
if test "$expect" = "$actual"
then
echo "OK: $expect $cnt $pfx"
while read cnt pfx
do
- actual=`
+ actual=$(
{
test -z "$pfx" || echo "$pfx"
dd if=/dev/zero bs=1048576 count=$cnt 2>/dev/null |
perl -pe 'y/\000/g/'
} | sha1sum |
sed -e 's/ .*//'
- `
+ )
echo "$actual $cnt $pfx"
done <<EOF
0
#include "string-list.h"
#include "run-command.h"
#include "commit.h"
+#include "tempfile.h"
#include "trailer.h"
/*
* Copyright (c) 2013, 2014 Christian Couder <chriscool@tuxfamily.org>
return '\0';
}
-static void print_tok_val(const char *tok, const char *val)
+static void print_tok_val(FILE *outfile, const char *tok, const char *val)
{
char c = last_non_space_char(tok);
if (!c)
return;
if (strchr(separators, c))
- printf("%s%s\n", tok, val);
+ fprintf(outfile, "%s%s\n", tok, val);
else
- printf("%s%c %s\n", tok, separators[0], val);
+ fprintf(outfile, "%s%c %s\n", tok, separators[0], val);
}
-static void print_all(struct trailer_item *first, int trim_empty)
+static void print_all(FILE *outfile, struct trailer_item *first, int trim_empty)
{
struct trailer_item *item;
for (item = first; item; item = item->next) {
if (!trim_empty || strlen(item->value) > 0)
- print_tok_val(item->token, item->value);
+ print_tok_val(outfile, item->token, item->value);
}
}
return 0;
}
-static void print_lines(struct strbuf **lines, int start, int end)
+static void print_lines(FILE *outfile, struct strbuf **lines, int start, int end)
{
int i;
for (i = start; lines[i] && i < end; i++)
- printf("%s", lines[i]->buf);
+ fprintf(outfile, "%s", lines[i]->buf);
}
-static int process_input_file(struct strbuf **lines,
+static int process_input_file(FILE *outfile,
+ struct strbuf **lines,
struct trailer_item **in_tok_first,
struct trailer_item **in_tok_last)
{
trailer_start = find_trailer_start(lines, trailer_end);
/* Print lines before the trailers as is */
- print_lines(lines, 0, trailer_start);
+ print_lines(outfile, lines, 0, trailer_start);
if (!has_blank_line_before(lines, trailer_start - 1))
- printf("\n");
+ fprintf(outfile, "\n");
/* Parse trailer lines */
for (i = trailer_start; i < trailer_end; i++) {
}
}
-void process_trailers(const char *file, int trim_empty, struct string_list *trailers)
+static struct tempfile trailers_tempfile;
+
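+/*
+ * For in-place editing, write the result to a temporary file in the
+ * same directory as the original, so it can later be renamed over
+ * the original once processing is complete.
+ */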
+static FILE *create_in_place_tempfile(const char *file)
+{
+ struct stat st;
+ struct strbuf template = STRBUF_INIT;
+ const char *tail;
+ FILE *outfile;
+
+ if (stat(file, &st))
+ die_errno(_("could not stat %s"), file);
+ if (!S_ISREG(st.st_mode))
+ die(_("file %s is not a regular file"), file);
+ if (!(st.st_mode & S_IWUSR))
+ die(_("file %s is not writable by user"), file);
+
+ /* Create temporary file in the same directory as the original */
+ tail = strrchr(file, '/');
+ if (tail != NULL)
+ strbuf_add(&template, file, tail - file + 1);
+ strbuf_addstr(&template, "git-interpret-trailers-XXXXXX");
+
+ xmks_tempfile_m(&trailers_tempfile, template.buf, st.st_mode);
+ strbuf_release(&template);
+ outfile = fdopen_tempfile(&trailers_tempfile, "w");
+ if (!outfile)
+ die_errno(_("could not open temporary file"));
+
+ return outfile;
+}
+
+void process_trailers(const char *file, int in_place, int trim_empty, struct string_list *trailers)
{
struct trailer_item *in_tok_first = NULL;
struct trailer_item *in_tok_last = NULL;
struct trailer_item *arg_tok_first;
struct strbuf **lines;
int trailer_end;
+ FILE *outfile = stdout;
/* Default config must be setup first */
git_config(git_trailer_default_config, NULL);
lines = read_input_file(file);
+ if (in_place)
+ outfile = create_in_place_tempfile(file);
+
/* Print the lines before the trailers */
- trailer_end = process_input_file(lines, &in_tok_first, &in_tok_last);
+ trailer_end = process_input_file(outfile, lines, &in_tok_first, &in_tok_last);
arg_tok_first = process_command_line_args(trailers);
process_trailers_lists(&in_tok_first, &in_tok_last, &arg_tok_first);
- print_all(in_tok_first, trim_empty);
+ print_all(outfile, in_tok_first, trim_empty);
free_all(&in_tok_first);
/* Print the lines after the trailers as is */
- print_lines(lines, trailer_end, INT_MAX);
+ print_lines(outfile, lines, trailer_end, INT_MAX);
+
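+	/* Replace the original file with the completed temporary file. */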
+ if (in_place)
+ if (rename_tempfile(&trailers_tempfile, file))
+ die_errno(_("could not rename temporary file to %s"), file);
strbuf_list_free(lines);
}
#ifndef TRAILER_H
#define TRAILER_H
-void process_trailers(const char *file, int trim_empty, struct string_list *trailers);
+void process_trailers(const char *file, int in_place, int trim_empty,
+ struct string_list *trailers);
#endif /* TRAILER_H */
strbuf_reset(buffer);
if (debug)
fprintf(stderr, "Debug: Remote helper: Waiting...\n");
- if (strbuf_getline(buffer, helper, '\n') == EOF) {
+ if (strbuf_getline(buffer, helper) == EOF) {
if (debug)
fprintf(stderr, "Debug: Remote helper quit.\n");
return 1;
data->no_disconnect_req = 0;
/*
- * Open the output as FILE* so strbuf_getline() can be used.
+	 * Open the output as FILE* so the strbuf_getline_*() family of
+	 * functions can be used.
* Do this with duped fd because fclose() will close the fd,
* and stuff like taking over will require the fd to remain.
*/
struct tree_desc_x *tx = xcalloc(n, sizeof(*tx));
struct strbuf base = STRBUF_INIT;
int interesting = 1;
+ char *traverse_path;
for (i = 0; i < n; i++)
tx[i].d = t[i];
make_traverse_path(base.buf, info->prev, &info->name);
base.buf[info->pathlen-1] = '/';
strbuf_setlen(&base, info->pathlen);
+ traverse_path = xstrndup(base.buf, info->pathlen);
+ } else {
+ traverse_path = xstrndup(info->name.path, info->pathlen);
}
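+	/*
+	 * Remember the full path prefix of this traversal level so that
+	 * callers such as do_compare_entry() in unpack-trees.c can
+	 * compare cache entries against it with a single strncmp().
+	 */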
+ info->traverse_path = traverse_path;
for (;;) {
int trees_used;
unsigned long mask, dirmask;
for (i = 0; i < n; i++)
free_extended_entry(tx + i);
free(tx);
+ free(traverse_path);
+ info->traverse_path = NULL;
strbuf_release(&base);
return error;
}
enum follow_symlinks_result get_tree_entry_follow_symlinks(unsigned char *tree_sha1, const char *name, unsigned char *result, struct strbuf *result_path, unsigned *mode);
struct traverse_info {
+ const char *traverse_path;
struct traverse_info *prev;
struct name_entry name;
int pathlen;
#!/bin/sh
-echo >&2 "fatal: git was built without support for `basename $0` (@@REASON@@)."
+echo >&2 "fatal: git was built without support for $(basename $0) (@@REASON@@)."
exit 128
* itself - the caller needs to do the final check for the cache
* entry having more data at the end!
*/
-static int do_compare_entry(const struct cache_entry *ce, const struct traverse_info *info, const struct name_entry *n)
+static int do_compare_entry_piecewise(const struct cache_entry *ce, const struct traverse_info *info, const struct name_entry *n)
{
int len, pathlen, ce_len;
const char *ce_name;
if (info->prev) {
- int cmp = do_compare_entry(ce, info->prev, &info->name);
+ int cmp = do_compare_entry_piecewise(ce, info->prev,
+ &info->name);
if (cmp)
return cmp;
}
return df_name_compare(ce_name, ce_len, S_IFREG, n->path, len, n->mode);
}
+static int do_compare_entry(const struct cache_entry *ce,
+ const struct traverse_info *info,
+ const struct name_entry *n)
+{
+ int len, pathlen, ce_len;
+ const char *ce_name;
+ int cmp;
+
+	/*
+	 * If the traverse path has not been precomputed, it is cheaper
+	 * to compare the path piecewise than to build it here just for
+	 * this comparison.  If it has been precomputed, comparing
+	 * against it directly is faster.
+	 */
+ if (!info->traverse_path)
+ return do_compare_entry_piecewise(ce, info, n);
+
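+	/* Compare the already-built leading path in one go. */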
+ cmp = strncmp(ce->name, info->traverse_path, info->pathlen);
+ if (cmp)
+ return cmp;
+
+ pathlen = info->pathlen;
+ ce_len = ce_namelen(ce);
+
+ if (ce_len < pathlen)
+ return -1;
+
+ ce_len -= pathlen;
+ ce_name = ce->name + pathlen;
+
+ len = tree_entry_len(n);
+ return df_name_compare(ce_name, ce_len, S_IFREG, n->path, len, n->mode);
+}
+
static int compare_entry(const struct cache_entry *ce, const struct traverse_info *info, const struct name_entry *n)
{
int cmp = do_compare_entry(ce, info, n);
++o->cache_bottom;
continue;
}
- if (!ce_in_traverse_path(ce, info))
+ if (!ce_in_traverse_path(ce, info)) {
+ /*
+ * Check if we can skip future cache checks
+ * (because we're already past all possible
+ * entries in the traverse path).
+ */
+ if (info->traverse_path) {
+ if (strncmp(ce->name, info->traverse_path,
+ info->pathlen) > 0)
+ break;
+ }
continue;
+ }
ce_name = ce->name + pfxlen;
ce_slash = strchr(ce_name, '/');
if (ce_slash)
char *rf_one = NULL;
char *tg_one;
- if (strbuf_getline(&buf, stdin, '\n') == EOF)
+ if (strbuf_getline_lf(&buf, stdin) == EOF)
break;
tg_one = buf.buf;
rf_one = strchr(tg_one, '\t');
len = MAX_IO_SIZE;
while (1) {
nr = read(fd, buf, len);
- if ((nr < 0) && (errno == EAGAIN || errno == EINTR))
- continue;
+ if (nr < 0) {
+ if (errno == EINTR)
+ continue;
+ if (errno == EAGAIN || errno == EWOULDBLOCK) {
+ struct pollfd pfd;
+ pfd.events = POLLIN;
+ pfd.fd = fd;
+ /*
+				 * It is OK if this poll() fails; we
+ * want to leave this infinite loop
+ * only when read() returns with
+ * success, or an expected failure,
+ * which would be checked by the next
+ * call to read(2).
+ */
+ poll(&pfd, 1, -1);
+ }
+ }
return nr;
}
}
strbuf_release(&buf);
return NULL;
}
- strbuf_getline(&buf, fp, '\n');
+ strbuf_getline_lf(&buf, fp);
if (!fclose(fp)) {
return strbuf_detach(&buf, NULL);
} else {
if (!f)
die_errno("Could not open file %s for reading",
git_path("%s", fname));
- while (!strbuf_getline(&line, f, '\n')) {
+ while (!strbuf_getline_lf(&line, f)) {
if (line.len && line.buf[0] == comment_line_char)
continue;
strbuf_trim(&line);