--- /dev/null
+# Defaults
+
+# Use tabs whenever we need to fill whitespace that spans at least from one tab
+# stop to the next one.
+UseTab: Always
+TabWidth: 8
+IndentWidth: 8
+ContinuationIndentWidth: 8
+ColumnLimit: 80
+
+# C Language specifics
+Language: Cpp
+
+# Align parameters on the open bracket
+# someLongFunction(argument1,
+#                  argument2);
+AlignAfterOpenBracket: Align
+
+# Don't align consecutive assignments
+# int aaaa = 12;
+# int b = 14;
+AlignConsecutiveAssignments: false
+
+# Don't align consecutive declarations
+# int aaaa = 12;
+# double b = 3.14;
+AlignConsecutiveDeclarations: false
+
+# Align escaped newlines as far left as possible
+# #define A   \
+#   int aaaa; \
+#   int b;    \
+#   int cccccccc;
+AlignEscapedNewlines: Left
+
+# Align operands of binary and ternary expressions
+# int aaa = bbbbbbbbbbb +
+#           cccccc;
+AlignOperands: true
+
+# Don't align trailing comments
+# int a; // Comment a
+# int b = 2; // Comment b
+AlignTrailingComments: false
+
+# By default don't allow putting parameters onto the next line
+# myFunction(foo, bar, baz);
+AllowAllParametersOfDeclarationOnNextLine: false
+
+# Don't allow short braced statements to be on a single line
+# if (a)          not    if (a) return;
+#   return;
+AllowShortBlocksOnASingleLine: false
+AllowShortCaseLabelsOnASingleLine: false
+AllowShortFunctionsOnASingleLine: false
+AllowShortIfStatementsOnASingleLine: false
+AllowShortLoopsOnASingleLine: false
+
+# By default don't add a line break after the return type of top-level functions
+# int foo();
+AlwaysBreakAfterReturnType: None
+
+# Pack as many parameters or arguments onto the same line as possible
+# int myFunction(int aaaaaaaaaaaa, int bbbbbbbb,
+#                int cccc);
+BinPackArguments: true
+BinPackParameters: true
+
+# Attach braces to surrounding context except break before braces on function
+# definitions.
+# void foo()
+# {
+#	if (true) {
+#	} else {
+#	}
+# }
+BreakBeforeBraces: Linux
+
+# Break after operators
+# int value = aaaaaaaaaaaaa +
+#             bbbbbb -
+#             ccccccccccc;
+BreakBeforeBinaryOperators: None
+BreakBeforeTernaryOperators: false
+
+# Don't break string literals
+BreakStringLiterals: false
+
+# Use the same indentation level as for the switch statement.
+# Switch statement body is always indented one level more than case labels.
+IndentCaseLabels: false
+
+# Don't indent a function definition or declaration if it is wrapped after the
+# type
+IndentWrappedFunctionNames: false
+
+# Align pointer to the right
+# int *a;
+PointerAlignment: Right
+
+# Don't insert a space after a cast
+# x = (int32)y; not x = (int32) y;
+SpaceAfterCStyleCast: false
+
+# Insert spaces before and after assignment operators
+# int a = 5;    not    int a=5;
+# a += 42;             a+=42;
+SpaceBeforeAssignmentOperators: true
+
+# Put a space before opening parentheses only after control statement keywords.
+# void f() {
+#	if (true) {
+#		f();
+#	}
+# }
+SpaceBeforeParens: ControlStatements
+
+# Don't insert spaces inside empty '()'
+SpaceInEmptyParentheses: false
+
+# The number of spaces before trailing line comments (// - comments).
+# This does not affect trailing block comments (/* - comments).
+SpacesBeforeTrailingComments: 1
+
+# Don't insert spaces in casts
+# x = (int32) y; not x = ( int32 ) y;
+SpacesInCStyleCastParentheses: false
+
+# Don't insert spaces inside container literals
+# var arr = [1, 2, 3]; not var arr = [ 1, 2, 3 ];
+SpacesInContainerLiterals: false
+
+# Don't insert spaces after '(' or before ')'
+# f(arg); not f( arg );
+SpacesInParentheses: false
+
+# Don't insert spaces after '[' or before ']'
+# int a[5]; not int a[ 5 ];
+SpacesInSquareBrackets: false
+
+# Insert a space after '{' and before '}' in struct initializers
+Cpp11BracedListStyle: false
+
+# A list of macros that should be interpreted as foreach loops instead of as
+# function calls.
+ForEachMacros: ['for_each_string_list_item']
+
+# The maximum number of consecutive empty lines to keep.
+MaxEmptyLinesToKeep: 1
+
+# No empty line at the start of a block.
+KeepEmptyLinesAtTheStartOfBlocks: false
+
+# Penalties
+# This decides what order things should be done if a line is too long
+PenaltyBreakAssignment: 100
+PenaltyBreakBeforeFirstCallParameter: 100
+PenaltyBreakComment: 100
+PenaltyBreakFirstLessLess: 0
+PenaltyBreakString: 100
+PenaltyExcessCharacter: 5
+PenaltyReturnTypeOnItsOwnLine: 0
+
+# Don't sort #include's
+SortIncludes: false
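+
+# Usage sketch (assuming clang-format and its git integration are installed):
+# "clang-format -i --style=file foo.c" reformats a file in place using the
+# rules above, and "git clang-format --style file --diff --extensions c,h"
+# (what the "make style" target added alongside this file runs) prints the
+# style fixes suggested for your outstanding changes.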
services:
- docker
before_install:
- - docker pull daald/ubuntu32:xenial
before_script:
- script:
- - >
- docker run
- --interactive
- --env DEVELOPER
- --env DEFAULT_TEST_TARGET
- --env GIT_PROVE_OPTS
- --env GIT_TEST_OPTS
- --env GIT_TEST_CLONE_2GB
- --volume "${PWD}:/usr/src/git"
- daald/ubuntu32:xenial
- /usr/src/git/ci/run-linux32-build.sh $(id -u $USER)
- # Use the following command to debug the docker build locally:
- # $ docker run -itv "${PWD}:/usr/src/git" --entrypoint /bin/bash daald/ubuntu32:xenial
- # root@container:/# /usr/src/git/ci/run-linux32-build.sh
+ script: ci/run-linux32-docker.sh
- env: Static Analysis
os: linux
compiler:
packages:
- coccinelle
before_install:
- script:
- # "before_script" that builds Git is inherited from base job
- - make coccicheck
+ # "before_script" that builds Git is inherited from base job
+ script: ci/run-static-analysis.sh
after_failure:
- env: Documentation
os: linux
- asciidoc
- xmlto
before_install:
- before_script: gem install asciidoctor
+ before_script:
script: ci/test-documentation.sh
after_failure:
-before_install:
- - >
- case "${TRAVIS_OS_NAME:-linux}" in
- linux)
- export GIT_TEST_HTTPD=YesPlease
-
- mkdir --parents custom/p4
- pushd custom/p4
- wget --quiet http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION/bin.linux26x86_64/p4d
- wget --quiet http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION/bin.linux26x86_64/p4
- chmod u+x p4d
- chmod u+x p4
- export PATH="$(pwd):$PATH"
- popd
- mkdir --parents custom/git-lfs
- pushd custom/git-lfs
- wget --quiet https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION/git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz
- tar --extract --gunzip --file "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
- cp git-lfs-$LINUX_GIT_LFS_VERSION/git-lfs .
- export PATH="$(pwd):$PATH"
- popd
- ;;
- osx)
- brew update --quiet
- # Uncomment this if you want to run perf tests:
- # brew install gnu-time
- brew install git-lfs gettext
- brew link --force gettext
- brew install caskroom/cask/perforce
- ;;
- esac;
- echo "$(tput setaf 6)Perforce Server Version$(tput sgr0)";
- p4d -V | grep Rev.;
- echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)";
- p4 -V | grep Rev.;
- echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)";
- git-lfs version;
-
-before_script: make --jobs=2
-
-script:
- - >
- mkdir -p $HOME/travis-cache;
- ln -s $HOME/travis-cache/.prove t/.prove;
- make --quiet test;
-
-after_failure:
- - >
- : '<-- Click here to see detailed test output! ';
- for TEST_EXIT in t/test-results/*.exit;
- do
- if [ "$(cat "$TEST_EXIT")" != "0" ];
- then
- TEST_OUT="${TEST_EXIT%exit}out";
- echo "------------------------------------------------------------------------";
- echo "$(tput setaf 1)${TEST_OUT}...$(tput sgr0)";
- echo "------------------------------------------------------------------------";
- cat "${TEST_OUT}";
- fi;
- done;
+before_install: ci/install-dependencies.sh
+before_script: ci/run-build.sh
+script: ci/run-tests.sh
+after_failure: ci/print-test-failures.sh
notifications:
email: false
--- /dev/null
+Git v2.10.5 Release Notes
+=========================
+
+Fixes since v2.10.4
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts did not use safe_pipe_capture() instead of
+ backticks, leaving them susceptible to end-user input. They have
+ been corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.11.4 Release Notes
+=========================
+
+Fixes since v2.11.3
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts did not use safe_pipe_capture() instead of
+ backticks, leaving them susceptible to end-user input. They have
+ been corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.12.5 Release Notes
+=========================
+
+Fixes since v2.12.4
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts did not use safe_pipe_capture() instead of
+ backticks, leaving them susceptible to end-user input. They have
+ been corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.13.6 Release Notes
+=========================
+
+Fixes since v2.13.5
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts did not use safe_pipe_capture() instead of
+ backticks, leaving them susceptible to end-user input. They have
+ been corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
* "git archive" did not work well with pathspecs and the
export-ignore attribute.
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts did not use safe_pipe_capture() instead of
+ backticks, leaving them susceptible to end-user input. They have
+ been corrected.
+
Also contains various documentation updates and code clean-ups.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
* The codepath to call external process filter for smudge/clean
operation learned to show the progress meter.
+ * "git rev-parse" learned "--is-shallow-repository", that is to be
+ used in a way similar to existing "--is-bare-repository" and
+ friends.
+
Performance, Internal Implementation, Development Support etc.
* Many leaks of strbuf have been fixed.
+ * "git imap-send" has our own implementation of the protocol and also
+ can use more recent libCurl with the imap protocol support. Update
+ the latter so that it can use the credential subsystem, and then
+ make it the default option to use, so that we can eventually
+ deprecate and remove the former.
+
+ * "make style" runs git-clang-format to help developers by pointing
+ out coding style issues.
+
+ * A test to demonstrate "git mv" failing to adjust nested submodules
+ has been added.
+ (merge c514167df2 hv/mv-nested-submodules-test later to maint).
+
+ * On Cygwin, "ulimit -s" has no effect yet does not report failure,
+   causing some tests that expect failure under a limited stack to
+   pass unexpectedly. This has been fixed.
+
+ * Many codepaths have been updated to squelch -Wimplicit-fallthrough
+   warnings from GCC 7 (which is good code hygiene).
+
+ * Add a helper for DLL loading in anticipation of its need in a
+   future topic RSN.
+
Also contains various documentation updates and code clean-ups.
to match the behaviour of the former.
(merge c818e74332 rk/commit-tree-make-F-verbatim later to maint).
+ * Many codepaths did not diagnose write failures correctly when the
+   disk goes full, due to their misuse of the write_in_full() helper
+   function; they have been corrected.
+ (merge f48ecd38cb jk/write-in-full-fix later to maint).
+
+ * "git help co" now says "co is aliased to ...", not "git co is".
+ (merge b3a8076e0d ks/help-alias-label later to maint).
+
+ * "git archive", especially when used with pathspec, stored an empty
+ directory in its output, even though Git itself never does so.
+ This has been fixed.
+ (merge 4318094047 rs/archive-excluded-directory later to maint).
+
+ * API error-proofing which happens to also squelch warnings from GCC.
+ (merge c788c54cde tg/refs-allowed-flags later to maint).
+
+ * The explanation of the cut-line in the commit log editor has been
+ slightly tweaked.
+ (merge 8c4b1a3593 ks/commit-do-not-touch-cut-line later to maint).
+
+ * "git gc" tries to avoid running two instances at the same time by
+ reading and writing pid/host from and to a lock file; it used to
+ use an incorrect fscanf() format when reading, which has been
+ corrected.
+ (merge afe2fab72c aw/gc-lockfile-fscanf-fix later to maint).
+
+ * The scripts to drive Travis CI have been reorganized, and an
+   optimization to avoid spending cycles on a branch whose tip is
+   tagged has been implemented.
+ (merge 8376eb4a8f ls/travis-scriptify later to maint).
+
+ * The test linter has been taught that we do not like "echo -e".
+ (merge 1a6d46895d tb/test-lint-echo-e later to maint).
+
+ * Code cmp.std.c nitpick.
+ (merge ac7da78ede mh/for-each-string-list-item-empty-fix later to maint).
+
+ * A fix for a regression in 2.11 that made the code that reads the
+   list of alternate object stores overrun the end of the string.
+ (merge f0f7bebef7 jk/info-alternates-fix later to maint).
+
+ * "git describe --match" learned to take multiple patterns in v2.13
+ series, but the feature ignored the patterns after the first one
+ and did not work at all. This has been fixed.
+ (merge da769d2986 jk/describe-omit-some-refs later to maint).
+
+ * "git filter-branch" cannot reproduce a history with a tag without
+ the tagger field, which only ancient versions of Git allowed to be
+ created. This has been corrected.
+ (merge b2c1ca6b4b ic/fix-filter-branch-to-handle-tag-without-tagger later to maint).
+
+ * "git cat-file --textconv" started segfaulting recently, which
+ has been corrected.
+ (merge cc0ea7c9e5 jk/diff-blob later to maint).
+
+ * The built-in pattern to detect the "function header" for HTML did
+ not match <H1>..<H6> elements without any attributes, which has
+ been fixed.
+ (merge 9c03caca2c ik/userdiff-html-h-element-fix later to maint).
+
+ * "git mailinfo" was loose in decoding quoted printable and produced
+ garbage when the two letters after the equal sign are not
+ hexadecimal. This has been fixed.
+ (merge c8cf423eab rs/mailinfo-qp-decode-fix later to maint).
+
+ * The machinery to create xdelta used in pack files received the
+   sizes of the data as size_t, but lost the higher bits by storing
+   them in "unsigned int" during the computation, which has been
+   fixed.
+
+ * The delta format used in the packfile cannot reference data at an
+   offset larger than what can be expressed in 4 bytes, but the
+   generator for the data failed to make sure the offset does not
+   overflow. This has been corrected.
+
+ * The documentation for '-X<option>' for merges was misleadingly
+ written to suggest that "-s theirs" exists, which is not the case.
+ (merge c25d98b2a7 jc/merge-x-theirs-docfix later to maint).
+
* Other minor doc, test and build updates and code cleanups.
(merge f094b89a4d ma/parse-maybe-bool later to maint).
(merge 39b00fa4d4 jk/drop-sha1-entry-pos later to maint).
(merge 276d0e35c0 ma/split-symref-update-fix later to maint).
(merge 3bc4b8f7c7 bb/doc-eol-dirty later to maint).
(merge c1bb33c99c jk/system-path-cleanup later to maint).
+ (merge ab46e6fc72 cc/subprocess-handshake-missing-capabilities later to maint).
+ (merge f7a32dd97f kd/doc-for-each-ref later to maint).
+ (merge be94568bc7 ez/doc-duplicated-words-fix later to maint).
+ (merge 01e4be6c3d ks/test-readme-phrasofix later to maint).
+ (merge 217bb56d4f hn/typofix later to maint).
+ (merge c08fd6388c jk/doc-read-tree-table-asciidoctor-fix later to maint).
+ (merge c3342b362e ks/doc-use-camelcase-for-config-name later to maint).
all changes made to the branch ref, enabling use of date
based sha1 expressions such as "<branchname>@\{yesterday}".
Note that in non-bare repositories, reflogs are usually
- enabled by default by the `core.logallrefupdates` config option.
+ enabled by default by the `core.logAllRefUpdates` config option.
The negated form `--no-create-reflog` only overrides an earlier
`--create-reflog`, but currently does not negate the setting of
- `core.logallrefupdates`.
+ `core.logAllRefUpdates`.
-f::
--force::
The 40-hex object name of the object.
`objecttype`::
- The type of of the object (the same as `cat-file -t` reports).
+ The type of the object (the same as `cat-file -t` reports).
`objectsize`::
The size, in bytes, of the object (the same as `cat-file -s`
------------
+
You could omit <branch>, in which case the command degenerates to
-"check out the current branch", which is a glorified no-op with a
+"check out the current branch", which is a glorified no-op with
rather expensive side-effects to show only the tracking information,
if exists, for the current branch.
--match <pattern>::
Only consider tags matching the given `glob(7)` pattern,
- excluding the "refs/tags/" prefix. This can be used to avoid
- leaking private tags from the repository. If given multiple times, a
- list of patterns will be accumulated, and tags matching any of the
- patterns will be considered. Use `--no-match` to clear and reset the
- list of patterns.
+ excluding the "refs/tags/" prefix. If used with `--all`, it also
+ considers local branches and remote-tracking references matching the
+ pattern, excluding respectively "refs/heads/" and "refs/remotes/"
+ prefix; references of other types are never considered. If given
+ multiple times, a list of patterns will be accumulated, and tags
+ matching any of the patterns will be considered. Use `--no-match` to
+ clear and reset the list of patterns.
--exclude <pattern>::
Do not consider tags matching the given `glob(7)` pattern, excluding
- the "refs/tags/" prefix. This can be used to narrow the tag space and
- find only tags matching some meaningful criteria. If given multiple
- times, a list of patterns will be accumulated and tags matching any
- of the patterns will be excluded. When combined with --match a tag will
- be considered when it matches at least one --match pattern and does not
+ the "refs/tags/" prefix. If used with `--all`, it also does not consider
+ local branches and remote-tracking references matching the pattern,
+ excluding respectively "refs/heads/" and "refs/remotes/" prefix;
+ references of other types are never considered. If given multiple times,
+ a list of patterns will be accumulated and tags matching any of the
+ patterns will be excluded. When combined with --match a tag will be
+ considered when it matches at least one --match pattern and does not
match any of the --exclude patterns. Use `--no-exclude` to clear and
reset the list of patterns.
[--commit-filter <command>] [--tag-name-filter <command>]
[--subdirectory-filter <directory>] [--prune-empty]
[--original <namespace>] [-d <directory>] [-f | --force]
- [--] [<rev-list options>...]
+ [--state-branch <branch>] [--] [<rev-list options>...]
DESCRIPTION
-----------
directory or when there are already refs starting with
'refs/original/', unless forced.
+--state-branch <branch>::
+	This option will cause the mapping from old to new objects to
+	be loaded from the named branch upon startup and saved as a new
+	commit to that branch upon exit, enabling incremental rewriting
+	of large trees. If '<branch>' does not exist it will be created.
+
<rev-list options>...::
Arguments for 'git rev-list'. All positive refs included by
these options are rewritten. You may also specify options
[verse]
'git for-each-ref' [--count=<count>] [--shell|--perl|--python|--tcl]
[(--sort=<key>)...] [--format=<format>] [<pattern>...]
- [--points-at <object>] [(--merged | --no-merged) [<object>]]
- [--contains [<object>]] [--no-contains [<object>]]
+ [--points-at=<object>]
+ (--merged[=<object>] | --no-merged[=<object>])
+ [--contains[=<object>]] [--no-contains[=<object>]]
DESCRIPTION
-----------
OPTIONS
-------
-<count>::
+<pattern>...::
+ If one or more patterns are given, only refs are shown that
+ match against at least one pattern, either using fnmatch(3) or
+ literally, in the latter case matching completely or from the
+ beginning up to a slash.
+
+--count=<count>::
By default the command shows all refs that match
`<pattern>`. This option makes it stop after showing
that many refs.
-<key>::
+--sort=<key>::
A field name to sort on. Prefix `-` to sort in
descending order of the value. When unspecified,
`refname` is used. You may use the --sort=<key> option
multiple times, in which case the last key becomes the primary
key.
-<format>::
+--format=<format>::
A string that interpolates `%(fieldname)` from a ref being shown
and the object it points at. If `fieldname`
is prefixed with an asterisk (`*`) and the ref points
`xx`; for example `%00` interpolates to `\0` (NUL),
`%09` to `\t` (TAB) and `%0a` to `\n` (LF).
-<pattern>...::
- If one or more patterns are given, only refs are shown that
- match against at least one pattern, either using fnmatch(3) or
- literally, in the latter case matching completely or from the
- beginning up to a slash.
-
--shell::
--perl::
--python::
the specified host language. This is meant to produce
a scriptlet that can directly be `eval`ed.
---points-at <object>::
+--points-at=<object>::
Only list refs which points at the given object.
---merged [<object>]::
+--merged[=<object>]::
Only list refs whose tips are reachable from the
specified commit (HEAD if not specified),
incompatible with `--no-merged`.
---no-merged [<object>]::
+--no-merged[=<object>]::
Only list refs whose tips are not reachable from the
specified commit (HEAD if not specified),
incompatible with `--merged`.
---contains [<object>]::
+--contains[=<object>]::
Only list refs which contain the specified commit (HEAD if not
specified).
---no-contains [<object>]::
+--no-contains[=<object>]::
Only list refs which don't contain the specified commit (HEAD
if not specified).
object that does not have notes attached to it.
--stdin::
- Also read the object names to remove notes from from the standard
+ Also read the object names to remove notes from the standard
input (there is no reason you cannot combine this with object
names from the command line).
"clean" means that index and work tree coincide, and "exists"/"nothing"
refer to the presence of a path in the specified commit:
+....
I H M Result
-------------------------------------------------------
0 nothing nothing nothing (does not happen)
19 no no yes exists exists keep index
20 yes yes no exists exists use M
21 no yes no exists exists fail
+....
In all "keep index" cases, the index entry stays as in the
original index file. If the entry is not up to date,
--is-bare-repository::
When the repository is bare print "true", otherwise "false".
+--is-shallow-repository::
+ When the repository is shallow print "true", otherwise "false".
+
--resolve-git-dir <path>::
Check if <path> is a valid repository or a gitfile that
points at a valid repository, and print the location of the
$ chmod +x $HOME/git-shell-commands/no-interactive-login
----------------
+To enable git-cvsserver access (which should generally have the
+`no-interactive-login` example above as a prerequisite, as creating
+the git-shell-commands directory allows interactive logins):
+
+----------------
+$ cat >$HOME/git-shell-commands/cvs <<\EOF
+if test $# != 1 || test "$1" != "server"
+then
+ echo >&2 "git-cvsserver only handles \"server\""
+ exit 1
+fi
+exec git cvsserver server
+EOF
+$ chmod +x $HOME/git-shell-commands/cvs
+----------------
+
SEE ALSO
--------
ssh(1),
`core.logAllRefUpdates` in linkgit:git-config[1].
The negated form `--no-create-reflog` only overrides an earlier
`--create-reflog`, but currently does not negate the setting of
- `core.logallrefupdates`.
+ `core.logAllRefUpdates`.
<tagname>::
The name of the tag to create, delete, or describe.
+
Version 4 performs a simple pathname compression that reduces index
size by 30%-50% on large repositories, which results in faster load
-time. Version 4 is relatively young (first released in in 1.8.0 in
+time. Version 4 is relatively young (first released in 1.8.0 in
October 2012). Other Git implementations such as JGit and libgit2
may not support it yet.
commit-msg
~~~~~~~~~~
-This hook is invoked by 'git commit', and can be bypassed
-with the `--no-verify` option. It takes a single parameter, the
-name of the file that holds the proposed commit log message.
-Exiting with a non-zero status causes the 'git commit' to
-abort.
+This hook is invoked by 'git commit' and 'git merge', and can be
+bypassed with the `--no-verify` option. It takes a single parameter,
+the name of the file that holds the proposed commit log message.
+Exiting with a non-zero status causes the command to abort.
The hook is allowed to edit the message file in place, and can be used
to normalize the message into some project standard format. It
the other tree did, declaring 'our' history contains all that happened in it.
theirs;;
- This is the opposite of 'ours'.
+ This is the opposite of 'ours'; note that, unlike 'ours', there is
+	no 'theirs' merge strategy to confuse this merge option with.
patience;;
With this option, 'merge-recursive' spends a little extra time
.PHONY: sparse $(SP_OBJ)
sparse: $(SP_OBJ)
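+# Report coding-style deviations in outstanding changes using the rules in
+# .clang-format; requires clang-format and its git integration
+# (git-clang-format).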
+.PHONY: style
+style:
+ git clang-format --style file --diff --extensions c,h
+
check: common-cmds.h
@if sparse; \
then \
if (plen && (ws_rule & WS_BLANK_AT_EOF) &&
ws_blank_line(patch + 1, plen, ws_rule))
is_blank_context = 1;
+ /* fallthrough */
case '-':
memcpy(old, patch + 1, plen);
add_line_info(&preimage, old, plen,
old += plen;
if (first == '-')
break;
- /* Fall-through for ' ' */
+ /* fallthrough */
case '+':
/* --no-add does not add new lines */
if (first == '+' && state->no_add)
return check && ATTR_TRUE(check->items[1].value);
}
-static int should_queue_directories(const struct archiver_args *args)
-{
- return args->pathspec.has_wildcard;
-}
-
static int write_archive_entry(const unsigned char *sha1, const char *base,
int baselen, const char *filename, unsigned mode, int stage,
void *context)
strbuf_addch(&path, '/');
path_without_prefix = path.buf + args->baselen;
- if (!S_ISDIR(mode) || !should_queue_directories(args)) {
+ if (!S_ISDIR(mode)) {
const struct attr_check *check;
check = get_archive_attrs(path_without_prefix);
if (check_attr_export_ignore(check))
return write_entry(args, sha1, path.buf, path.len, mode);
}
-static int write_archive_entry_buf(const unsigned char *sha1, struct strbuf *base,
- const char *filename, unsigned mode, int stage,
- void *context)
-{
- return write_archive_entry(sha1, base->buf, base->len,
- filename, mode, stage, context);
-}
-
static void queue_directory(const unsigned char *sha1,
struct strbuf *base, const char *filename,
unsigned mode, int stage, struct archiver_context *c)
}
err = read_tree_recursive(args->tree, "", 0, 0, &args->pathspec,
- should_queue_directories(args) ?
- queue_or_write_archive_entry :
- write_archive_entry_buf,
+ queue_or_write_archive_entry,
&context);
if (err == READ_TREE_RECURSIVE)
err = 0;
/* Clean up objects used, as they will be reused. */
clear_commit_marks_for_object_array(&pending_copy, ALL_REV_FLAGS);
- free(pending_copy.objects);
+
+ object_array_clear(&pending_copy);
return res;
}
if (!attr_only) {
const char *head;
- struct object_id oid;
- head = resolve_ref_unsafe("HEAD", 0, oid.hash, NULL);
+ head = resolve_ref_unsafe("HEAD", 0, NULL, NULL);
if (!is_bare_repository() && head && !strcmp(head, ref->buf))
die(_("Cannot force update the current branch."));
}
return !has_object_file(&oid);
case 'w':
- if (!path[0])
+ if (!path)
die("git cat-file --filters %s: <object> must be "
"<sha1:path>", obj_name);
break;
case 'c':
- if (!path[0])
+ if (!path)
die("git cat-file --textconv %s: <object> must be <sha1:path>",
obj_name);
if (textconv_object(path, obj_context.mode, &oid, 1, &buf, &size))
break;
+ /* else fallthrough */
case 'p':
type = sha1_object_info(oid.hash, NULL);
* update paths in the work tree, and we cannot revert
* them.
*/
+ /* fallthrough */
case 0:
return 0;
default:
for_each_ref(add_pending_uninteresting_ref, &revs);
add_pending_oid(&revs, "HEAD", &new->object.oid, UNINTERESTING);
+ /* Save pending objects, so they can be cleaned up later. */
refs = revs.pending;
revs.leak_pending = 1;
+ /*
+ * prepare_revision_walk (together with .leak_pending = 1) makes us
+ * the sole owner of the list of pending objects.
+ */
if (prepare_revision_walk(&revs))
die(_("internal error in revision walk"));
if (!(old->object.flags & UNINTERESTING))
else
describe_detached_head(_("Previous HEAD position was"), old);
+ /* Clean up objects used, as they will be reused. */
clear_commit_marks_for_object_array(&refs, ALL_REV_FLAGS);
- free(refs.objects);
+
+ object_array_clear(&refs);
}
static int switch_branches(const struct checkout_opts *opts,
static const char *prepare_index(int argc, const char **argv, const char *prefix,
const struct commit *current_head, int is_status)
{
- struct string_list partial;
+ struct string_list partial = STRING_LIST_INIT_DUP;
struct pathspec pathspec;
int refresh_flags = REFRESH_QUIET;
const char *ret;
warning(_("Failed to update main cache tree"));
commit_style = COMMIT_NORMAL;
- return get_lock_file_path(&index_lock);
+ ret = get_lock_file_path(&index_lock);
+ goto out;
}
/*
if (write_locked_index(&the_index, &index_lock, CLOSE_LOCK))
die(_("unable to write new_index file"));
commit_style = COMMIT_NORMAL;
- return get_lock_file_path(&index_lock);
+ ret = get_lock_file_path(&index_lock);
+ goto out;
}
/*
rollback_lock_file(&index_lock);
}
commit_style = COMMIT_AS_IS;
- return get_index_file();
+ ret = get_index_file();
+ goto out;
}
/*
die(_("cannot do a partial commit during a cherry-pick."));
}
- string_list_init(&partial, 1);
if (list_paths(&partial, !current_head ? NULL : "HEAD", prefix, &pathspec))
exit(1);
discard_cache();
ret = get_lock_file_path(&false_lock);
read_cache_from(ret);
+out:
+ string_list_clear(&partial, 0);
+ clear_pathspec(&pathspec);
return ret;
}
struct rev_info rev;
struct commit *commit;
struct strbuf format = STRBUF_INIT;
- struct object_id junk_oid;
const char *head;
struct pretty_print_context pctx = {0};
struct strbuf author_ident = STRBUF_INIT;
rev.diffopt.break_opt = 0;
diff_setup_done(&rev.diffopt);
- head = resolve_ref_unsafe("HEAD", 0, junk_oid.hash, NULL);
+ head = resolve_ref_unsafe("HEAD", 0, NULL, NULL);
if (!strcmp(head, "HEAD"))
head = _("detached HEAD");
else
static int get_name(const char *path, const struct object_id *oid, int flag, void *cb_data)
{
- int is_tag = starts_with(path, "refs/tags/");
+ int is_tag = 0;
struct object_id peeled;
int is_annotated, prio;
-
- /* Reject anything outside refs/tags/ unless --all */
- if (!all && !is_tag)
+ const char *path_to_match = NULL;
+
+ if (skip_prefix(path, "refs/tags/", &path_to_match)) {
+ is_tag = 1;
+ } else if (all) {
+ if ((exclude_patterns.nr || patterns.nr) &&
+ !skip_prefix(path, "refs/heads/", &path_to_match) &&
+ !skip_prefix(path, "refs/remotes/", &path_to_match)) {
+ /* Only accept reference of known type if there are match/exclude patterns */
+ return 0;
+ }
+ } else {
+ /* Reject anything outside refs/tags/ unless --all */
return 0;
+ }
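+	/*
+	 * At this point path_to_match points just past the refs/tags/,
+	 * refs/heads/ or refs/remotes/ prefix (it stays NULL only when
+	 * --all is given without any patterns), so the match and exclude
+	 * patterns below are applied to the short refname.
+	 */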
/*
* If we're given exclude patterns, first exclude any tag which match
if (exclude_patterns.nr) {
struct string_list_item *item;
- if (!is_tag)
- return 0;
-
for_each_string_list_item(item, &exclude_patterns) {
- if (!wildmatch(item->string, path + 10, 0))
+ if (!wildmatch(item->string, path_to_match, 0))
return 0;
}
}
* pattern.
*/
if (patterns.nr) {
+ int found = 0;
struct string_list_item *item;
- if (!is_tag)
- return 0;
-
for_each_string_list_item(item, &patterns) {
- if (!wildmatch(item->string, path + 10, 0))
+ if (!wildmatch(item->string, path_to_match, 0)) {
+ found = 1;
break;
+ }
+ }
- /* If we get here, no pattern matched. */
+ if (!found)
return 0;
- }
}
/* Is it annotated? */
struct diff_options *options, void *data)
{
int i;
+ struct string_list *changed = data;
/*
* Handle files below a directory first, in case they are all deleted
case DIFF_STATUS_DELETED:
printf("D ");
print_path(spec->path);
+ string_list_insert(changed, spec->path);
putchar('\n');
break;
case DIFF_STATUS_COPIED:
case DIFF_STATUS_RENAMED:
- printf("%c ", q->queue[i]->status);
- print_path(ospec->path);
- putchar(' ');
- print_path(spec->path);
- putchar('\n');
-
- if (!oidcmp(&ospec->oid, &spec->oid) &&
- ospec->mode == spec->mode)
- break;
+ /*
+ * If a change in the file corresponding to ospec->path
+ * has been observed, we cannot trust its contents
+ * because the diff is calculated based on the prior
+ * contents, not the current contents. So, declare a
+ * copy or rename only if there was no change observed.
+ */
+ if (!string_list_has_string(changed, ospec->path)) {
+ printf("%c ", q->queue[i]->status);
+ print_path(ospec->path);
+ putchar(' ');
+ print_path(spec->path);
+ string_list_insert(changed, spec->path);
+ putchar('\n');
+
+ if (!oidcmp(&ospec->oid, &spec->oid) &&
+ ospec->mode == spec->mode)
+ break;
+ }
/* fallthrough */
case DIFF_STATUS_TYPE_CHANGED:
get_object_mark(object));
}
print_path(spec->path);
+ string_list_insert(changed, spec->path);
putchar('\n');
break;
*end = out->buf + out->len;
}
-static void handle_commit(struct commit *commit, struct rev_info *rev)
+static void handle_commit(struct commit *commit, struct rev_info *rev,
+ struct string_list *paths_of_changed_objects)
{
int saved_output_format = rev->diffopt.output_format;
const char *commit_buffer;
if (full_tree)
printf("deleteall\n");
log_tree_diff_flush(rev);
+ string_list_clear(paths_of_changed_objects, 0);
rev->diffopt.output_format = saved_output_format;
printf("\n");
return strbuf_detach(&out, len);
}
-static void handle_tail(struct object_array *commits, struct rev_info *revs)
+static void handle_tail(struct object_array *commits, struct rev_info *revs,
+ struct string_list *paths_of_changed_objects)
{
struct commit *commit;
while (commits->nr) {
- commit = (struct commit *)commits->objects[commits->nr - 1].item;
+ commit = (struct commit *)object_array_pop(commits);
if (has_unshown_parent(commit))
return;
- handle_commit(commit, revs);
- commits->nr--;
+ handle_commit(commit, revs, paths_of_changed_objects);
}
}
char *export_filename = NULL, *import_filename = NULL;
uint32_t lastimportid;
struct string_list refspecs_list = STRING_LIST_INIT_NODUP;
+ struct string_list paths_of_changed_objects = STRING_LIST_INIT_DUP;
struct option options[] = {
OPT_INTEGER(0, "progress", &progress,
N_("show progress after <n> objects")),
if (prepare_revision_walk(&revs))
die("revision walk setup failed");
revs.diffopt.format_callback = show_filemodify;
+ revs.diffopt.format_callback_data = &paths_of_changed_objects;
DIFF_OPT_SET(&revs.diffopt, RECURSIVE);
while ((commit = get_revision(&revs))) {
if (has_unshown_parent(commit)) {
add_object_array(&commit->object, NULL, &commits);
}
else {
- handle_commit(commit, &revs);
- handle_tail(&commits, &revs);
+ handle_commit(commit, &revs, &paths_of_changed_objects);
+ handle_tail(&commits, &revs, &paths_of_changed_objects);
}
}
if (show_progress)
progress = start_delayed_progress(_("Checking connectivity"), 0);
while (pending.nr) {
- struct object_array_entry *entry;
- struct object *obj;
-
- entry = pending.objects + --pending.nr;
- obj = entry->item;
- result |= traverse_one_object(obj);
+ result |= traverse_one_object(object_array_pop(&pending));
display_progress(progress, ++nr);
}
stop_progress(&progress);
int should_exit;
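+	/*
+	 * Use "%<len>s" rather than "%<len>c" for the hostname: %c reads a
+	 * fixed number of characters and does not NUL-terminate the buffer,
+	 * while %s stops at whitespace and terminates the string it scanned.
+	 */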
if (!scan_fmt)
- scan_fmt = xstrfmt("%s %%%dc", "%"SCNuMAX, HOST_NAME_MAX);
+ scan_fmt = xstrfmt("%s %%%ds", "%"SCNuMAX, HOST_NAME_MAX);
fp = fopen(pidfile_path, "r");
memset(locking_host, 0, sizeof(locking_host));
should_exit =
if (!skip_prefix(content, "52 comment=", &comment))
return 1;
- n = write_in_full(1, comment, 41);
- if (n < 41)
+ if (write_in_full(1, comment, 41) < 0)
die_errno("git get-tar-commit-id: write error");
return 0;
alias = alias_lookup(cmd);
if (alias) {
- printf_ln(_("`git %s' is aliased to `%s'"), cmd, alias);
+ printf_ln(_("'%s' is aliased to '%s'"), cmd, alias);
free(alias);
exit(0);
}
check_head = 1;
if (check_head) {
- struct object_id oid;
const char *ref, *v;
ref = resolve_ref_unsafe("HEAD", RESOLVE_REF_READING,
- oid.hash, NULL);
+ NULL, NULL);
if (ref && skip_prefix(ref, "refs/heads/", &v))
branch_name = xstrdup(v);
else
return want;
}
- for (entry = packed_git_mru->head; entry; entry = entry->next) {
+ for (entry = packed_git_mru.head; entry; entry = entry->next) {
struct packed_git *p = entry->item;
off_t offset;
}
want = want_found_object(exclude, p);
if (!exclude && want > 0)
- mru_mark(packed_git_mru, entry);
+ mru_mark(&packed_git_mru, entry);
if (want != -1)
return want;
}
};
struct in_pack {
- int alloc;
- int nr;
+ unsigned int alloc;
+ unsigned int nr;
struct in_pack_object *array;
};
size_t n;
if (feed(feed_state, &buf, &n))
break;
- if (write_in_full(proc.in, buf, n) != n)
+ if (write_in_full(proc.in, buf, n) < 0)
break;
}
close(proc.in);
const char *dst_name;
struct string_list_item *item;
struct command *dst_cmd;
- unsigned char sha1[GIT_MAX_RAWSZ];
int flag;
strbuf_addf(&buf, "%s%s", get_git_namespace(), cmd->ref_name);
- dst_name = resolve_ref_unsafe(buf.buf, 0, sha1, &flag);
+ dst_name = resolve_ref_unsafe(buf.buf, 0, NULL, &flag);
strbuf_release(&buf);
if (!(flag & REF_ISSYMREF))
struct commit *c;
struct commit_list *parent;
- c = (struct commit *)study.objects[--study.nr].item;
+ c = (struct commit *)object_array_pop(&study);
if (!c->object.parsed && !parse_object(&c->object.oid))
c->object.flags |= INCOMPLETE;
found.objects[i].item->flags |= SEEN;
}
/* free object arrays */
- free(study.objects);
- free(found.objects);
+ object_array_clear(&study);
+ object_array_clear(&found);
return !is_incomplete;
}
special = str[rpos];
if (rpos == 1)
break;
- /* Fall-through to error. */
+ /* fallthrough */
default:
die("Bad remote-ext placeholder '%%%c'.",
str[rpos]);
struct strbuf buf = STRBUF_INIT;
struct string_list_item *item;
int flag;
- struct object_id orig_oid;
const char *symref;
strbuf_addf(&buf, "refs/remotes/%s/", rename->old);
if (starts_with(refname, buf.buf)) {
item = string_list_append(rename->remote_branches, xstrdup(refname));
symref = resolve_ref_unsafe(refname, RESOLVE_REF_READING,
- orig_oid.hash, &flag);
+ NULL, &flag);
if (flag & REF_ISSYMREF)
item->util = xstrdup(symref);
else
{
int i;
for (i = 0; i < nbuf; i++)
- if (write_in_full(1, ptr[i].ptr, ptr[i].size) != ptr[i].size)
+ if (write_in_full(1, ptr[i].ptr, ptr[i].size) < 0)
return -1;
return 0;
}
: "false");
continue;
}
+ if (!strcmp(arg, "--is-shallow-repository")) {
+ printf("%s\n", is_repository_shallow() ? "true"
+ : "false");
+ continue;
+ }
if (!strcmp(arg, "--shared-index-path")) {
if (read_cache() < 0)
die(_("Could not read the index"));
static char *get_default_remote(void)
{
char *dest = NULL, *ret;
- unsigned char sha1[20];
struct strbuf sb = STRBUF_INIT;
- const char *refname = resolve_ref_unsafe("HEAD", 0, sha1, NULL);
+ const char *refname = resolve_ref_unsafe("HEAD", 0, NULL, NULL);
if (!refname)
die(_("No such ref: %s"), "HEAD");
return "master";
if (!strcmp(branch, ".")) {
- unsigned char sha1[20];
- const char *refname = resolve_ref_unsafe("HEAD", 0, sha1, NULL);
+ const char *refname = resolve_ref_unsafe("HEAD", 0, NULL, NULL);
if (!refname)
die(_("No such ref: %s"), "HEAD");
break;
die("HEAD does not match the named branch in the superproject");
}
+ /* fallthrough */
default:
die("src refspec '%s' must name a ref",
rs->src);
static int check_symref(const char *HEAD, int quiet, int shorten, int print)
{
- unsigned char sha1[20];
int flag;
- const char *refname = resolve_ref_unsafe(HEAD, 0, sha1, &flag);
+ const char *refname = resolve_ref_unsafe(HEAD, 0, NULL, &flag);
if (!refname)
die("No such ref: %s", HEAD);
xsnprintf(path, sizeof(path), ".merge_file_XXXXXX");
fd = xmkstemp(path);
- if (write_in_full(fd, buf, size) != size)
+ if (write_in_full(fd, buf, size) < 0)
die_errno("unable to write temp-file");
close(fd);
return path;
req_nr = revs.pending.nr;
setup_revisions(2, argv, &revs, NULL);
+ /* Save pending objects, so they can be cleaned up later. */
refs = revs.pending;
revs.leak_pending = 1;
+ /*
+ * prepare_revision_walk (together with .leak_pending = 1) makes us
+ * the sole owner of the list of pending objects.
+ */
if (prepare_revision_walk(&revs))
die(_("revision walk setup failed"));
refs.objects[i].name);
}
+ /* Clean up objects used, as they will be reused. */
clear_commit_marks_for_object_array(&refs, ALL_REV_FLAGS);
- free(refs.objects);
+
+ object_array_clear(&refs);
if (verbose) {
struct ref_list *r;
#include "git-compat-util.h"
#include "strbuf.h"
#include "hashmap.h"
+#include "mru.h"
#include "advice.h"
#include "gettext.h"
#include "convert.h"
*/
static inline int hex2chr(const char *s)
{
- int val = hexval(s[0]);
- return (val < 0) ? val : (val << 4) | hexval(s[1]);
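+	/*
+	 * hexval() reports an invalid digit by returning a value with bits
+	 * above 0xf set, so test those bits instead of relying on a signed
+	 * comparison.
+	 */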
+ unsigned int val = hexval(s[0]);
+ return (val & ~0xf) ? val : (val << 4) | hexval(s[1]);
}
/* Convert to/from hex/sha1 representation */
* A most-recently-used ordered version of the packed_git list, which can
* be iterated instead of packed_git (and marked via mru_mark).
*/
-struct mru;
-extern struct mru *packed_git_mru;
+extern struct mru packed_git_mru;
struct pack_entry {
off_t offset;
--- /dev/null
+#!/usr/bin/env bash
+#
+# Install dependencies required to build and test Git on Linux and macOS
+#
+
+. ${0%/*}/lib-travisci.sh
+
+P4WHENCE=http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION
+LFSWHENCE=https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION
+
+case "${TRAVIS_OS_NAME:-linux}" in
+linux)
+ export GIT_TEST_HTTPD=YesPlease
+
+ mkdir --parents custom/p4
+ pushd custom/p4
+ wget --quiet "$P4WHENCE/bin.linux26x86_64/p4d"
+ wget --quiet "$P4WHENCE/bin.linux26x86_64/p4"
+ chmod u+x p4d
+ chmod u+x p4
+ export PATH="$(pwd):$PATH"
+ popd
+ mkdir --parents custom/git-lfs
+ pushd custom/git-lfs
+ wget --quiet "$LFSWHENCE/git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
+ tar --extract --gunzip --file "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
+ cp git-lfs-$LINUX_GIT_LFS_VERSION/git-lfs .
+ export PATH="$(pwd):$PATH"
+ popd
+ ;;
+osx)
+ brew update --quiet
+ # Uncomment this if you want to run perf tests:
+ # brew install gnu-time
+ brew install git-lfs gettext
+ brew link --force gettext
+ brew install caskroom/cask/perforce
+ ;;
+esac
+
+echo "$(tput setaf 6)Perforce Server Version$(tput sgr0)"
+p4d -V | grep Rev.
+echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)"
+p4 -V | grep Rev.
+echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)"
+git-lfs version
--- /dev/null
+# Library of functions shared by all CI scripts
+
+skip_branch_tip_with_tag () {
+ # Sometimes, a branch is pushed at the same time the tag that points
+ # at the same commit as the tip of the branch is pushed, and building
+ # both at the same time is a waste.
+ #
+ # Travis gives a tagname e.g. v2.14.0 in $TRAVIS_BRANCH when
+ # the build is triggered by a push to a tag. Let's see if
+ # $TRAVIS_BRANCH is exactly at a tag, and if so, if it is
+ # different from $TRAVIS_BRANCH. That way, we can tell if
+ # we are building the tip of a branch that is tagged and
+ # we can skip the build because we won't be skipping a build
+ # of a tag.
+
+ if TAG=$(git describe --exact-match "$TRAVIS_BRANCH" 2>/dev/null) &&
+ test "$TAG" != "$TRAVIS_BRANCH"
+ then
+ echo "Tip of $TRAVIS_BRANCH is exactly at $TAG"
+ exit 0
+ fi
+}
+
+# Set 'exit on error' for all CI scripts to let the caller know that
+# something went wrong
+set -e
+
+skip_branch_tip_with_tag
--- /dev/null
+#!/bin/sh
+#
+# Print output of failing tests
+#
+
+. ${0%/*}/lib-travisci.sh
+
+for TEST_EXIT in t/test-results/*.exit
+do
+ if [ "$(cat "$TEST_EXIT")" != "0" ]
+ then
+ TEST_OUT="${TEST_EXIT%exit}out"
+ echo "------------------------------------------------------------------------"
+ echo "$(tput setaf 1)${TEST_OUT}...$(tput sgr0)"
+ echo "------------------------------------------------------------------------"
+ cat "${TEST_OUT}"
+ fi
+done
--- /dev/null
+#!/bin/sh
+#
+# Build Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+make --jobs=2
--- /dev/null
+#!/bin/sh
+#
+# Download and run Docker image to build and test 32-bit Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+docker pull daald/ubuntu32:xenial
+
+# Use the following command to debug the docker build locally:
+# $ docker run -itv "${PWD}:/usr/src/git" --entrypoint /bin/bash daald/ubuntu32:xenial
+# root@container:/# /usr/src/git/ci/run-linux32-build.sh
+
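+# Note: "--env NAME" without a value forwards the variable from the host
+# environment, so the Travis settings listed below reach the container.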
+docker run \
+ --interactive \
+ --env DEVELOPER \
+ --env DEFAULT_TEST_TARGET \
+ --env GIT_PROVE_OPTS \
+ --env GIT_TEST_OPTS \
+ --env GIT_TEST_CLONE_2GB \
+ --volume "${PWD}:/usr/src/git" \
+ daald/ubuntu32:xenial \
+ /usr/src/git/ci/run-linux32-build.sh $(id -u $USER)
--- /dev/null
+#!/bin/sh
+#
+# Perform various static code analysis checks
+#
+
+. ${0%/*}/lib-travisci.sh
+
+make coccicheck
--- /dev/null
+#!/bin/sh
+#
+# Test Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+mkdir -p $HOME/travis-cache
+ln -s $HOME/travis-cache/.prove t/.prove
+make --quiet test
# supported) and a commit hash.
#
+. ${0%/*}/lib-travisci.sh
+
test $# -ne 2 && echo "Unexpected number of parameters" && exit 1
test -z "$GFW_CI_TOKEN" && echo "GFW_CI_TOKEN not defined" && exit
# Perform sanity checks on documentation and build it.
#
-set -e
+. ${0%/*}/lib-travisci.sh
+
+gem install asciidoctor
make check-builtins
make check-docs
\
static MAYBE_UNUSED void clear_ ##slabname(struct slabname *s) \
{ \
- int i; \
+ unsigned int i; \
for (i = 0; i < s->slab_count; i++) \
free(s->slab[i]); \
s->slab_count = 0; \
const struct commit *c, \
int add_if_missing) \
{ \
- int nth_slab, nth_slot; \
+ unsigned int nth_slab, nth_slot; \
\
nth_slab = c->index / s->slab_size; \
nth_slot = c->index % s->slab_size; \
\
if (s->slab_count <= nth_slab) { \
- int i; \
+ unsigned int i; \
if (!add_if_missing) \
return NULL; \
REALLOC_ARRAY(s->slab, nth_slab + 1); \
num_head = remove_redundant(array, num_head);
for (i = 0; i < num_head; i++)
tail = &commit_list_insert(array[i], tail)->next;
+ free(array);
return result;
}
--- /dev/null
+#ifndef LAZYLOAD_H
+#define LAZYLOAD_H
+
+/*
+ * A pair of macros to simplify loading of DLL functions. Example:
+ *
+ * DECLARE_PROC_ADDR(kernel32.dll, BOOL, CreateHardLinkW,
+ * LPCWSTR, LPCWSTR, LPSECURITY_ATTRIBUTES);
+ *
+ * if (!INIT_PROC_ADDR(CreateHardLinkW))
+ *	return error("Could not find CreateHardLinkW() function");
+ *
+ * if (!CreateHardLinkW(source, target, NULL))
+ * return error("could not create hardlink from %S to %S",
+ * source, target);
+ */
+
+struct proc_addr {
+ const char *const dll;
+ const char *const function;
+ FARPROC pfunction;
+ unsigned initialized : 1;
+};
+
+/* Declares a function to be loaded dynamically from a DLL. */
+#define DECLARE_PROC_ADDR(dll, rettype, function, ...) \
+ static struct proc_addr proc_addr_##function = \
+ { #dll, #function, NULL, 0 }; \
+ static rettype (WINAPI *function)(__VA_ARGS__)
+
+/*
+ * Loads a function from a DLL (once-only).
+ * Returns non-NULL function pointer on success.
+ * Returns NULL + errno == ENOSYS on failure.
+ * This function is not thread-safe.
+ */
+#define INIT_PROC_ADDR(function) \
+ (function = get_proc_addr(&proc_addr_##function))
+
+static inline void *get_proc_addr(struct proc_addr *proc)
+{
+ /* only do this once */
+ if (!proc->initialized) {
+ HANDLE hnd;
+ proc->initialized = 1;
+ hnd = LoadLibraryExA(proc->dll, NULL,
+ LOAD_LIBRARY_SEARCH_SYSTEM32);
+ if (hnd)
+ proc->pfunction = GetProcAddress(hnd, proc->function);
+ }
+ /* set ENOSYS if DLL or function was not found */
+ if (!proc->pfunction)
+ errno = ENOSYS;
+ return proc->pfunction;
+}
+
+#endif
size_t *offset;
unsigned int offset_alloc;
enum { START, SECTION_SEEN, SECTION_END_SEEN, KEY_SEEN } state;
- int seen;
+ unsigned int seen;
} store;
static int matches(const char *key, const char *value)
return 4;
}
-static int store_write_section(int fd, const char *key)
+static ssize_t write_section(int fd, const char *key)
{
const char *dot;
- int i, success;
+ int i;
+ ssize_t ret;
struct strbuf sb = STRBUF_INIT;
dot = memchr(key, '.', store.baselen);
strbuf_addf(&sb, "[%.*s]\n", store.baselen, key);
}
- success = write_in_full(fd, sb.buf, sb.len) == sb.len;
+ ret = write_in_full(fd, sb.buf, sb.len);
strbuf_release(&sb);
- return success;
+ return ret;
}
-static int store_write_pair(int fd, const char *key, const char *value)
+static ssize_t write_pair(int fd, const char *key, const char *value)
{
- int i, success;
+ int i;
+ ssize_t ret;
int length = strlen(key + store.baselen + 1);
const char *quote = "";
struct strbuf sb = STRBUF_INIT;
case '"':
case '\\':
strbuf_addch(&sb, '\\');
+ /* fallthrough */
default:
strbuf_addch(&sb, value[i]);
break;
}
strbuf_addf(&sb, "%s\n", quote);
- success = write_in_full(fd, sb.buf, sb.len) == sb.len;
+ ret = write_in_full(fd, sb.buf, sb.len);
strbuf_release(&sb);
- return success;
+ return ret;
}
static ssize_t find_beginning_of_line(const char *contents, size_t size,
}
store.key = (char *)key;
- if (!store_write_section(fd, key) ||
- !store_write_pair(fd, key, value))
+ if (write_section(fd, key) < 0 ||
+ write_pair(fd, key, value) < 0)
goto write_err_out;
} else {
struct stat st;
/* write the first part of the config */
if (copy_end > copy_begin) {
if (write_in_full(fd, contents + copy_begin,
- copy_end - copy_begin) <
- copy_end - copy_begin)
+ copy_end - copy_begin) < 0)
goto write_err_out;
if (new_line &&
- write_str_in_full(fd, "\n") != 1)
+ write_str_in_full(fd, "\n") < 0)
goto write_err_out;
}
copy_begin = store.offset[i];
/* write the pair (value == NULL means unset) */
if (value != NULL) {
if (store.state == START) {
- if (!store_write_section(fd, key))
+ if (write_section(fd, key) < 0)
goto write_err_out;
}
- if (!store_write_pair(fd, key, value))
+ if (write_pair(fd, key, value) < 0)
goto write_err_out;
}
/* write the rest of the config */
if (copy_begin < contents_sz)
if (write_in_full(fd, contents + copy_begin,
- contents_sz - copy_begin) <
- contents_sz - copy_begin)
+ contents_sz - copy_begin) < 0)
goto write_err_out;
munmap(contents, contents_sz);
continue;
}
store.baselen = strlen(new_name);
- if (!store_write_section(out_fd, new_name)) {
+ if (write_section(out_fd, new_name) < 0) {
ret = write_error(get_lock_file_path(lock));
goto out;
}
if (remove)
continue;
length = strlen(output);
- if (write_in_full(out_fd, output, length) != length) {
+ if (write_in_full(out_fd, output, length) < 0) {
ret = write_error(get_lock_file_path(lock));
goto out;
}
switch (ident->state) {
default:
strbuf_add(&ident->left, head, ident->state);
+ /* fallthrough */
case IDENT_SKIPPING:
- /* fallthru */
+ /* fallthrough */
case IDENT_DRAINING:
ident_drain(ident, &output, osize_p);
}
const void *trg_buf, unsigned long trg_size,
unsigned long *delta_size, unsigned long max_size)
{
- unsigned int i, outpos, outsize, moff, msize, val;
+ unsigned int i, val;
+ off_t outpos, moff;
+ size_t l, outsize, msize;
int inscnt;
const unsigned char *ref_data, *ref_top, *data, *top;
unsigned char *out;
return NULL;
/* store reference buffer size */
- i = index->src_size;
- while (i >= 0x80) {
- out[outpos++] = i | 0x80;
- i >>= 7;
+ l = index->src_size;
+ while (l >= 0x80) {
+ out[outpos++] = l | 0x80;
+ l >>= 7;
}
- out[outpos++] = i;
+ out[outpos++] = l;
/* store target buffer size */
- i = trg_size;
- while (i >= 0x80) {
- out[outpos++] = i | 0x80;
- i >>= 7;
+ l = trg_size;
+ while (l >= 0x80) {
+ out[outpos++] = l | 0x80;
+ l >>= 7;
}
- out[outpos++] = i;
+ out[outpos++] = l;
ref_data = index->src_buf;
ref_top = ref_data + index->src_size;
moff += msize;
msize = left;
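+	/*
+	 * A copy operation in the delta format encodes the source offset in
+	 * at most four bytes, so a match starting beyond 0xffffffff cannot
+	 * be referenced; drop it and let the data be emitted as literal
+	 * inserts instead.
+	 */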
+ if (moff > 0xffffffff)
+ msize = 0;
+
if (msize < 4096) {
int j;
val = 0;
rev.diffopt.flags |= diff_flags;
rev.diffopt.ita_invisible_in_index = ita_invisible_in_index;
run_diff_index(&rev, 1);
- if (rev.pending.alloc)
- free(rev.pending.objects);
+ object_array_clear(&rev.pending);
return (DIFF_OPT_TST(&rev.diffopt, HAS_CHANGES) != 0);
}
struct diff_words_buffer {
mmfile_t text;
- long alloc;
+ unsigned long alloc;
struct diff_words_orig {
const char *begin, *end;
} *orig;
blob = buf.buf;
size = buf.len;
}
- if (write_in_full(temp->tempfile->fd, blob, size) != size ||
+ if (write_in_full(temp->tempfile->fd, blob, size) < 0 ||
close_tempfile_gently(temp->tempfile))
die_errno("unable to write temp-file");
temp->name = get_tempfile_path(temp->tempfile);
static enum path_treatment read_directory_recursive(struct dir_struct *dir,
struct index_state *istate, const char *path, int len,
struct untracked_cache_dir *untracked,
- int check_only, const struct pathspec *pathspec);
+ int check_only, int stop_at_first_file, const struct pathspec *pathspec);
static int get_dtype(struct dirent *de, struct index_state *istate,
const char *path, int len);
untracked = lookup_untracked(dir->untracked, untracked,
dirname + baselen, len - baselen);
+
+ /*
+ * If this is an excluded directory, then we only need to check if
+ * the directory contains any files.
+ */
return read_directory_recursive(dir, istate, dirname, len,
- untracked, 1, pathspec);
+ untracked, 1, exclude, pathspec);
}
/*
* with check_only set.
*/
return read_directory_recursive(dir, istate, path->buf, path->len,
- cdir->ucd, 1, pathspec);
+ cdir->ucd, 1, 0, pathspec);
/*
* We get path_recurse in the first run when
* directory_exists_in_index() returns index_nonexistent. We
* Also, we ignore the name ".git" (even if it is not a directory).
* That likely will not change.
*
+ * If 'stop_at_first_file' is specified, 'path_excluded' is returned
+ * to signal that a file was found. This is the least significant value
+ * that indicates a file was encountered, and it does not depend on
+ * whether an untracked or excluded path happened to be seen first.
+ *
* Returns the most significant path_treatment value encountered in the scan.
+ * If 'stop_at_first_file' is specified, `path_excluded` is the most
+ * significant path_treatment value that will be returned.
*/
+
static enum path_treatment read_directory_recursive(struct dir_struct *dir,
struct index_state *istate, const char *base, int baselen,
struct untracked_cache_dir *untracked, int check_only,
- const struct pathspec *pathspec)
+ int stop_at_first_file, const struct pathspec *pathspec)
{
struct cached_dir cdir;
enum path_treatment state, subdir_state, dir_state = path_none;
subdir_state =
read_directory_recursive(dir, istate, path.buf,
path.len, ud,
- check_only, pathspec);
+ check_only, stop_at_first_file, pathspec);
if (subdir_state > dir_state)
dir_state = subdir_state;
}
if (check_only) {
+ if (stop_at_first_file) {
+ /*
+ * If stopping at first file, then
+ * signal that a file was found by
+ * returning `path_excluded`. This is
+ * to return a consistent value
+ * regardless of whether an ignored or
+ * excluded file happened to be
+ * encountered 1st.
+ *
+ * In current usage, the
+ * `stop_at_first_file` is passed when
+ * an ancestor directory has matched
+ * an exclude pattern, so any found
+ * files will be excluded.
+ */
+ if (dir_state >= path_excluded) {
+ dir_state = path_excluded;
+ break;
+ }
+ }
+
/* abort early if maximum state has been reached */
if (dir_state == path_untracked) {
if (cdir.fdir)
*/
dir->untracked = NULL;
if (!len || treat_leading_path(dir, istate, path, len, pathspec))
- read_directory_recursive(dir, istate, path, len, untracked, 0, pathspec);
+ read_directory_recursive(dir, istate, path, len, untracked, 0, 0, pathspec);
QSORT(dir->entries, dir->nr, cmp_dir_entry);
QSORT(dir->ignored, dir->ignored_nr, cmp_dir_entry);
char *new;
struct strbuf buf = STRBUF_INIT;
unsigned long size;
- size_t wrote, newsize = 0;
+ ssize_t wrote;
+ size_t newsize = 0;
struct stat st;
const struct submodule *sub;
fstat_done = fstat_output(fd, state, &st);
close(fd);
free(new);
- if (wrote != size)
+ if (wrote < 0)
return error("unable to write file %s", path);
break;
case S_IFGITLINK:
static void cat_blob_write(const char *buf, unsigned long size)
{
- if (write_in_full(cat_blob_fd, buf, size) != size)
+ if (write_in_full(cat_blob_fd, buf, size) < 0)
die_errno("Write to frontend failed");
}
case S_IFREG | 0664:
if (!options->strict)
break;
+ /* fallthrough */
default:
has_bad_modes = 1;
}
# check that we actually know about the branch
next unless -e "$git_dir/refs/heads/$branch";
- my $mergebase = `git-merge-base $branch $ps->{branch}`;
+ my $mergebase = safe_pipe_capture(qw(git-merge-base), $branch, $ps->{branch});
if ($?) {
# Don't die here, Arch supports one-way cherry-picking
# between branches with no common base (or any relationship
sub git_rev_parse {
my $name = shift;
- my $val = `git-rev-parse $name`;
+ my $val = safe_pipe_capture(qw(git-rev-parse), $name);
die "Error: git-rev-parse $name" if $?;
chomp $val;
return $val;
static inline size_t xsize_t(off_t len)
{
- if (len > (size_t) len)
+ size_t size = (size_t) len;
+
+ if (len != (off_t) size)
die("Cannot handle files this big");
- return (size_t)len;
+ return size;
}
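The reworked xsize_t() narrows the value first and then checks that widening it reproduces the original, rather than comparing across signedness as before. The same round-trip idea with explicit fixed-width types, as a self-contained sketch (fits_in_uint32 is an invented name used only for illustration):

#include <stdint.h>
#include <stdio.h>

/*
 * Narrow, widen again, and compare: if the value survives the round
 * trip, the conversion was lossless.
 */
static int fits_in_uint32(uint64_t value, uint32_t *out)
{
	uint32_t narrowed = (uint32_t)value;

	if ((uint64_t)narrowed != value)
		return 0; /* would truncate */
	*out = narrowed;
	return 1;
}

int main(void)
{
	uint32_t v;

	printf("%d\n", fits_in_uint32((uint64_t)1 << 40, &v)); /* 0: too big */
	printf("%d\n", fits_in_uint32(12345, &v));             /* 1: fits */
	return 0;
}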
__attribute__((format (printf, 3, 4)))
sub get_headref ($) {
my $name = shift;
+ $name =~ s/'/'\\''/g;
my $r = `git rev-parse --verify '$name' 2>/dev/null`;
return undef unless $? == 0;
chomp $r;
return 0;
}
- my @gitvars = `git config -l`;
+ my @gitvars = safe_pipe_capture(qw(git config -l));
if ($?) {
print "E problems executing git-config on the server -- this is not a git repository or the PATH is not set correctly.\n";
print "E \n";
# Save the file data in $state
$state->{entries}{$state->{directory}.$data}{modified_filename} = $filename;
$state->{entries}{$state->{directory}.$data}{modified_mode} = $mode;
- $state->{entries}{$state->{directory}.$data}{modified_hash} = `git hash-object $filename`;
+ $state->{entries}{$state->{directory}.$data}{modified_hash} = safe_pipe_capture('git','hash-object',$filename);
$state->{entries}{$state->{directory}.$data}{modified_hash} =~ s/\s.*$//s;
#$log->debug("req_Modified : file=$data mode=$mode size=$size");
# Provide list of modules, if -c was used.
if (exists $state->{opt}{c}) {
- my $showref = `git show-ref --heads`;
+ my $showref = safe_pipe_capture(qw(git show-ref --heads));
for my $line (split '\n', $showref) {
if ( $line =~ m% refs/heads/(.*)$% ) {
print "M $1\t$1\n";
# projects (heads in this case) to checkout.
#
if ($state->{module} eq '') {
- my $showref = `git show-ref --heads`;
+ my $showref = safe_pipe_capture(qw(git show-ref --heads));
print "E cvs update: Updating .\n";
for my $line (split '\n', $showref) {
if ( $line =~ m% refs/heads/(.*)$% ) {
# transmit file, format is single integer on a line by itself (file
# size) followed by the file contents
# TODO : we should copy files in blocks
- my $data = `cat $mergedFile`;
+ my $data = safe_pipe_capture('cat', $mergedFile);
$log->debug("File size : " . length($data));
print length($data) . "\n";
print $data;
$branchRef = "refs/heads/$stickyInfo->{tag}";
}
- $parenthash = `git show-ref -s $branchRef`;
+ $parenthash = safe_pipe_capture('git', 'show-ref', '-s', $branchRef);
chomp $parenthash;
if ($parenthash !~ /^[0-9a-f]{40}$/)
{
return;
}
- my $treehash = `git write-tree`;
+ my $treehash = safe_pipe_capture(qw(git write-tree));
chomp $treehash;
$log->debug("Treehash : $treehash, Parenthash : $parenthash");
}
close $msg_fh;
- my $commithash = `git commit-tree $treehash -p $parenthash < $msg_filename`;
+ my $commithash = safe_pipe_capture('git', 'commit-tree', $treehash, '-p', $parenthash, '-F', $msg_filename);
chomp($commithash);
$log->info("Commit hash : $commithash");
die "Need filehash" unless ( defined ( $filehash ) and $filehash =~ /^[a-zA-Z0-9]{40}$/ );
- my $type = `git cat-file -t $filehash`;
+ my $type = safe_pipe_capture('git', 'cat-file', '-t', $filehash);
chomp $type;
die ( "Invalid type '$type' (expected 'blob')" ) unless ( defined ( $type ) and $type eq "blob" );
- my $size = `git cat-file -s $filehash`;
+ my $size = safe_pipe_capture('git', 'cat-file', '-s', $filehash);
chomp $size;
$log->debug("transmitfile($filehash) size=$size, type=$type");
chdir $work->{emptyDir} or
die "Unable to chdir to $work->{emptyDir}\n";
- my $ver = `git show-ref -s refs/heads/$state->{module}`;
+ my $ver = safe_pipe_capture('git', 'show-ref', '-s', "refs/heads/$state->{module}");
chomp $ver;
if ($ver !~ /^[0-9a-f]{40}$/)
{
die "Need filehash\n";
}
- my $type = `git cat-file -t $name`;
+ my $type = safe_pipe_capture('git', 'cat-file', '-t', $name);
chomp $type;
unless ( defined ( $type ) and $type eq "blob" )
die ( "Invalid type '$type' (expected 'blob')" )
}
- my $size = `git cat-file -s $name`;
+ my $size = safe_pipe_capture('git', 'cat-file', '-s', $name);
chomp $size;
$log->debug("open_blob_or_die($name) size=$size, type=$type");
return $out;
}
+# An alternative to `command` backticks: the command and its arguments are
+# passed as a list so no shell is involved, which avoids quoting problems
+# with unusual characters in the arguments.
+
+sub safe_pipe_capture {
+
+ my @output;
+
+ if (my $pid = open my $child, '-|') {
+ @output = (<$child>);
+ close $child or die join(' ',@_).": $! $?";
+ } else {
+ exec(@_) or die "$! $?"; # exec() can fail if the executable can't be found
+ }
+ return wantarray ? @output : join('',@output);
+}
+
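The point of safe_pipe_capture() is that the command and its arguments never pass through a shell, so a branch name containing quotes, backticks or $(...) is passed literally instead of being reinterpreted. A rough C analogue of the same pattern, purely for illustration (run_and_print is a made-up helper, not part of git):

#include <stdio.h>
#include <sys/wait.h>
#include <unistd.h>

/*
 * Run an argument vector directly and stream its stdout; no shell is
 * involved anywhere, so metacharacters in argv[] stay literal.
 */
static int run_and_print(char *const argv[])
{
	int pipefd[2];
	pid_t pid;
	int status;

	if (pipe(pipefd) < 0)
		return -1;
	pid = fork();
	if (pid < 0) {
		close(pipefd[0]);
		close(pipefd[1]);
		return -1;
	}
	if (pid == 0) {
		dup2(pipefd[1], 1);	/* child: stdout goes into the pipe */
		close(pipefd[0]);
		close(pipefd[1]);
		execvp(argv[0], argv);	/* no /bin/sh in between */
		_exit(127);		/* exec failed */
	}
	close(pipefd[1]);
	for (;;) {
		char buf[4096];
		ssize_t n = read(pipefd[0], buf, sizeof(buf));
		if (n <= 0)
			break;
		fwrite(buf, 1, n, stdout);
	}
	close(pipefd[0]);
	waitpid(pid, &status, 0);
	return status;
}

Calling it as run_and_print((char *[]){ "git", "rev-parse", name, NULL }) would behave like the Perl helper above, minus the output capture.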
package GITCVS::log;
# first lets get the commit list
$ENV{GIT_DIR} = $self->{git_path};
- my $commitsha1 = `git rev-parse $self->{module}`;
+ my $commitsha1 = ::safe_pipe_capture('git', 'rev-parse', $self->{module});
chomp $commitsha1;
- my $commitinfo = `git cat-file commit $self->{module} 2>&1`;
+ my $commitinfo = ::safe_pipe_capture('git', 'cat-file', 'commit', $self->{module});
unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ )
{
die("Invalid module '$self->{module}'");
# several candidate merge bases. let's assume
# that the first one is the best one.
my $base = eval {
- safe_pipe_capture('git', 'merge-base',
+ ::safe_pipe_capture('git', 'merge-base',
$lastpicked, $parent);
};
# The two branches may not be related at all,
return $retVal;
}
- my($fileHash)=safe_pipe_capture("git","rev-parse","$revCommit:$filename");
+ my($fileHash) = ::safe_pipe_capture("git","rev-parse","$revCommit:$filename");
chomp $fileHash;
if(!($fileHash=~/^[0-9a-f]{40}$/))
{
return $commitHash;
}
- $commitHash=safe_pipe_capture("git","rev-parse","--verify","--quiet",
- $self->unescapeRefName($ref));
+ $commitHash = ::safe_pipe_capture("git","rev-parse","--verify","--quiet",
+ $self->unescapeRefName($ref));
$commitHash=~s/\s*$//;
if(!($commitHash=~/^[0-9a-f]{40}$/))
{
if( defined($commitHash) )
{
- my $type=safe_pipe_capture("git","cat-file","-t",$commitHash);
+ my $type = ::safe_pipe_capture("git","cat-file","-t",$commitHash);
if( ! ($type=~/^commit\s*$/ ) )
{
$commitHash=undef;
return $message;
}
- my @lines = safe_pipe_capture("git", "cat-file", "commit", $commithash);
+ my @lines = ::safe_pipe_capture("git", "cat-file", "commit", $commithash);
shift @lines while ( $lines[0] =~ /\S/ );
$message = join("",@lines);
$message .= " " if ( $message =~ /\n$/ );
return $retval;
}
-=head2 safe_pipe_capture
-
-an alternative to `command` that allows input to be passed as an array
-to work around shell problems with weird characters in arguments
-
-=cut
-sub safe_pipe_capture {
-
- my @output;
-
- if (my $pid = open my $child, '-|') {
- @output = (<$child>);
- close $child or die join(' ',@_).": $! $?";
- } else {
- exec(@_) or die "$! $?"; # exec() can fail the executable can't be found
- }
- return wantarray ? @output : join('',@output);
-}
-
=head2 mangle_dirname
create a string from a directory name that is suitable to use as
[--parent-filter <command>] [--msg-filter <command>]
[--commit-filter <command>] [--tag-name-filter <command>]
[--subdirectory-filter <directory>] [--original <namespace>]
- [-d <directory>] [-f | --force]
+ [-d <directory>] [-f | --force] [--state-branch <branch>]
[--] [<rev-list options>...]"
OPTIONS_SPEC=
filter_commit=
filter_tag_name=
filter_subdir=
+state_branch=
orig_namespace=refs/original/
force=
prune_empty=
--original)
orig_namespace=$(expr "$OPTARG/" : '\(.*[^/]\)/*$')/
;;
+ --state-branch)
+ state_branch="$OPTARG"
+ ;;
*)
usage
;;
ORIG_GIT_DIR="$GIT_DIR"
ORIG_GIT_WORK_TREE="$GIT_WORK_TREE"
ORIG_GIT_INDEX_FILE="$GIT_INDEX_FILE"
+ORIG_GIT_AUTHOR_NAME="$GIT_AUTHOR_NAME"
+ORIG_GIT_AUTHOR_EMAIL="$GIT_AUTHOR_EMAIL"
+ORIG_GIT_AUTHOR_DATE="$GIT_AUTHOR_DATE"
+ORIG_GIT_COMMITTER_NAME="$GIT_COMMITTER_NAME"
+ORIG_GIT_COMMITTER_EMAIL="$GIT_COMMITTER_EMAIL"
+ORIG_GIT_COMMITTER_DATE="$GIT_COMMITTER_DATE"
+
GIT_WORK_TREE=.
export GIT_DIR GIT_WORK_TREE
# map old->new commit ids for rewriting parents
mkdir ../map || die "Could not create map/ directory"
+if test -n "$state_branch"
+then
+ state_commit=$(git rev-parse --no-flags --revs-only "$state_branch")
+ if test -n "$state_commit"
+ then
+ echo "Populating map from $state_branch ($state_commit)" 1>&2
+ perl -e'open(MAP, "-|", "git show $ARGV[0]:filter.map") or die;
+ while (<MAP>) {
+ m/(.*):(.*)/ or die;
+ open F, ">../map/$1" or die;
+ print F "$2" or die;
+ close(F) or die;
+ }
+ close(MAP) or die;' "$state_commit" \
+ || die "Unable to load state from $state_branch:filter.map"
+ else
+ echo "Branch $state_branch does not exist. Will create" 1>&2
+ fi
+fi
+
# we need "--" only if there are no path arguments in $@
nonrevs=$(git rev-parse --no-revs "$@") || exit
if test -z "$nonrevs"
}' \
-e '/^-----BEGIN PGP SIGNATURE-----/q' \
-e 'p' ) |
- git mktag) ||
+ git hash-object -t tag -w --stdin) ||
die "Could not create new tag object for $ref"
if git cat-file tag "$ref" | \
sane_grep '^-----BEGIN PGP SIGNATURE-----' >/dev/null 2>&1
done
fi
-cd "$orig_dir"
-rm -rf "$tempdir"
-
-trap - 0
-
unset GIT_DIR GIT_WORK_TREE GIT_INDEX_FILE
+unset GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE
+unset GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GIT_COMMITTER_DATE
test -z "$ORIG_GIT_DIR" || {
GIT_DIR="$ORIG_GIT_DIR" && export GIT_DIR
}
GIT_INDEX_FILE="$ORIG_GIT_INDEX_FILE" &&
export GIT_INDEX_FILE
}
+test -z "$ORIG_GIT_AUTHOR_NAME" || {
+ GIT_AUTHOR_NAME="$ORIG_GIT_AUTHOR_NAME" &&
+ export GIT_AUTHOR_NAME
+}
+test -z "$ORIG_GIT_AUTHOR_EMAIL" || {
+ GIT_AUTHOR_EMAIL="$ORIG_GIT_AUTHOR_EMAIL" &&
+ export GIT_AUTHOR_EMAIL
+}
+test -z "$ORIG_GIT_AUTHOR_DATE" || {
+ GIT_AUTHOR_DATE="$ORIG_GIT_AUTHOR_DATE" &&
+ export GIT_AUTHOR_DATE
+}
+test -z "$ORIG_GIT_COMMITTER_NAME" || {
+ GIT_COMMITTER_NAME="$ORIG_GIT_COMMITTER_NAME" &&
+ export GIT_COMMITTER_NAME
+}
+test -z "$ORIG_GIT_COMMITTER_EMAIL" || {
+ GIT_COMMITTER_EMAIL="$ORIG_GIT_COMMITTER_EMAIL" &&
+ export GIT_COMMITTER_EMAIL
+}
+test -z "$ORIG_GIT_COMMITTER_DATE" || {
+ GIT_COMMITTER_DATE="$ORIG_GIT_COMMITTER_DATE" &&
+ export GIT_COMMITTER_DATE
+}
+
+if test -n "$state_branch"
+then
+ echo "Saving rewrite state to $state_branch" 1>&2
+ state_blob=$(
+ perl -e'opendir D, "../map" or die;
+ open H, "|-", "git hash-object -w --stdin" or die;
+ foreach (sort readdir(D)) {
+ next if m/^\.\.?$/;
+ open F, "<../map/$_" or die;
+ chomp($f = <F>);
+ print H "$_:$f\n" or die;
+ }
+ close(H) or die;' || die "Unable to save state")
+ state_tree=$(printf '100644 blob %s\tfilter.map\n' "$state_blob" | git mktree)
+ if test -n "$state_commit"
+ then
+ state_commit=$(echo "Sync" | git commit-tree "$state_tree" -p "$state_commit")
+ else
+ state_commit=$(echo "Sync" | git commit-tree "$state_tree")
+ fi
+ git update-ref "$state_branch" "$state_commit"
+fi
+
+cd "$orig_dir"
+rm -rf "$tempdir"
+
+trap - 0
if [ "$(is_bare_repository)" = false ]; then
git read-tree -u -m HEAD || exit
die("zlib error inflating request, result %d", ret);
n = stream.total_out - cnt;
- if (write_in_full(out, out_buf, n) != n)
+ if (write_in_full(out, out_buf, n) < 0)
die("%s aborted reading request", prog_name);
cnt += n;
ssize_t n = read_request(0, &buf);
if (n < 0)
die_errno("error reading request body");
- if (write_in_full(out, buf, n) != n)
+ if (write_in_full(out, buf, n) < 0)
die("%s aborted reading request", prog_name);
close(out);
free(buf);
struct strbuf *buf = cb_data;
if (flag & REF_ISSYMREF) {
- struct object_id unused;
const char *target = resolve_ref_unsafe(refname,
RESOLVE_REF_READING,
- unused.hash, NULL);
+ NULL, NULL);
if (target)
strbuf_addf(buf, "ref: %s\n", strip_namespace(target));
break;
case HTTP_ERROR:
error("unable to access '%s': %s", url, curl_errorstr);
+ /* fallthrough */
default:
ret = -1;
}
switch (type) {
case CURLINFO_TEXT:
trace_printf_key(&trace_curl, "== Info: %s", data);
- default: /* we ignore unknown types by default */
- return 0;
-
+ break;
case CURLINFO_HEADER_OUT:
text = "=> Send header";
curl_dump_header(text, (unsigned char *)data, size, DO_FILTER);
text = "<= Recv SSL data";
curl_dump_data(text, (unsigned char *)data, size);
break;
+
+ default: /* we ignore unknown types by default */
+ return 0;
}
return 0;
}
#include "http.h"
#endif
-#if defined(USE_CURL_FOR_IMAP_SEND) && defined(NO_OPENSSL)
-/* only available option */
+#if defined(USE_CURL_FOR_IMAP_SEND)
+/* Always default to curl if it's available. */
#define USE_CURL_DEFAULT 1
#else
-/* strictly opt in */
+/* We don't have curl, so continue to use the historical implementation */
#define USE_CURL_DEFAULT 0
#endif
return 0;
}
+static void server_fill_credential(struct imap_server_conf *srvc, struct credential *cred)
+{
+ if (srvc->user && srvc->pass)
+ return;
+
+ cred->protocol = xstrdup(srvc->use_ssl ? "imaps" : "imap");
+ cred->host = xstrdup(srvc->host);
+
+ cred->username = xstrdup_or_null(srvc->user);
+ cred->password = xstrdup_or_null(srvc->pass);
+
+ credential_fill(cred);
+
+ if (!srvc->user)
+ srvc->user = xstrdup(cred->username);
+ if (!srvc->pass)
+ srvc->pass = xstrdup(cred->password);
+}
+
static struct imap_store *imap_open_store(struct imap_server_conf *srvc, char *folder)
{
struct credential cred = CREDENTIAL_INIT;
}
#endif
imap_info("Logging in...\n");
- if (!srvc->user || !srvc->pass) {
- cred.protocol = xstrdup(srvc->use_ssl ? "imaps" : "imap");
- cred.host = xstrdup(srvc->host);
-
- cred.username = xstrdup_or_null(srvc->user);
- cred.password = xstrdup_or_null(srvc->pass);
-
- credential_fill(&cred);
-
- if (!srvc->user)
- srvc->user = xstrdup(cred.username);
- if (!srvc->pass)
- srvc->pass = xstrdup(cred.password);
- }
+ server_fill_credential(srvc, &cred);
if (srvc->auth_method) {
struct imap_cmd_cb cb;
}
#ifdef USE_CURL_FOR_IMAP_SEND
-static CURL *setup_curl(struct imap_server_conf *srvc)
+static CURL *setup_curl(struct imap_server_conf *srvc, struct credential *cred)
{
CURL *curl;
struct strbuf path = STRBUF_INIT;
if (!curl)
die("curl_easy_init failed");
+ server_fill_credential(&server, cred);
curl_easy_setopt(curl, CURLOPT_USERNAME, server.user);
curl_easy_setopt(curl, CURLOPT_PASSWORD, server.pass);
struct buffer msgbuf = { STRBUF_INIT, 0 };
CURL *curl;
CURLcode res = CURLE_OK;
+ struct credential cred = CREDENTIAL_INIT;
- curl = setup_curl(server);
+ curl = setup_curl(server, &cred);
curl_easy_setopt(curl, CURLOPT_READDATA, &msgbuf);
fprintf(stderr, "sending %d message%s\n", total, (total != 1) ? "s" : "");
curl_easy_cleanup(curl);
curl_global_cleanup();
- return 0;
+ if (cred.username) {
+ if (res == CURLE_OK)
+ credential_approve(&cred);
+#if LIBCURL_VERSION_NUM >= 0x070d01
+ else if (res == CURLE_LOGIN_DENIED)
+#else
+ else
+#endif
+ credential_reject(&cred);
+ }
+
+ credential_clear(&cred);
+
+ return res != CURLE_OK;
}
#endif
*/
static void range_set_check_invariants(struct range_set *rs)
{
- int i;
+ unsigned int i;
if (!rs)
return;
*/
void sort_and_merge_range_set(struct range_set *rs)
{
- int i;
- int o = 0; /* output cursor */
+ unsigned int i;
+ unsigned int o = 0; /* output cursor */
QSORT(rs->ranges, rs->nr, range_cmp);
static void range_set_union(struct range_set *out,
struct range_set *a, struct range_set *b)
{
- int i = 0, j = 0;
+ unsigned int i = 0, j = 0;
struct range *ra = a->ranges;
struct range *rb = b->ranges;
/* cannot make an alias of out->ranges: it may change during grow */
static void range_set_difference(struct range_set *out,
struct range_set *a, struct range_set *b)
{
- int i, j = 0;
+ unsigned int i, j = 0;
for (i = 0; i < a->nr; i++) {
long start = a->ranges[i].start;
long end = a->ranges[i].end;
struct diff_ranges *diff,
struct range_set *rs)
{
- int i, j = 0;
+ unsigned int i, j = 0;
assert(out->target.nr == 0);
struct range_set *rs,
struct diff_ranges *diff)
{
- int i, j = 0;
+ unsigned int i, j = 0;
long offset = 0;
struct range *src = rs->ranges;
struct range *target = diff->target.ranges;
static void dump_diff_hacky_one(struct rev_info *rev, struct line_log_data *range)
{
- int i, j = 0;
+ unsigned int i, j = 0;
long p_lines, t_lines;
unsigned long *p_ends = NULL, *t_ends = NULL;
struct diff_filepair *pair = range->pair;
long t_start = range->ranges.ranges[i].start;
long t_end = range->ranges.ranges[i].end;
long t_cur = t_start;
- int j_last;
+ unsigned int j_last;
while (j < diff->target.nr && diff->target.ranges[j].end < t_start)
j++;
/* A set of ranges. The ranges must always be disjoint and sorted. */
struct range_set {
- int alloc, nr;
+ unsigned int alloc, nr;
struct range *ranges;
};
xsnprintf(path, len, ".merge_file_XXXXXX");
fd = xmkstemp(path);
- if (write_in_full(fd, src->ptr, src->size) != src->size)
+ if (write_in_full(fd, src->ptr, src->size) < 0)
die_errno("unable to write temp-file");
close(fd);
}
{
const struct name_decoration *list, *head = NULL;
const char *branch_name = NULL;
- struct object_id unused;
int rru_flags;
/* First find HEAD */
return NULL;
/* Now resolve and find the matching current branch */
- branch_name = resolve_ref_unsafe("HEAD", 0, unused.hash, &rru_flags);
+ branch_name = resolve_ref_unsafe("HEAD", 0, NULL, &rru_flags);
if (!(rru_flags & REF_ISSYMREF))
return NULL;
while ((c = *in++) != 0) {
if (c == '=') {
- int d = *in++;
+ int ch, d = *in;
if (d == '\n' || !d)
break; /* drop trailing newline */
- strbuf_addch(out, (hexval(d) << 4) | hexval(*in++));
- continue;
+ ch = hex2chr(in);
+ if (ch >= 0) {
+ strbuf_addch(out, ch);
+ in += 2;
+ continue;
+ }
+ /* garbage -- fall through */
}
if (rfc2047 && c == '_') /* rfc2047 4.2 (2) */
c = 0x20;
if (!handle_commit_msg(mi, line))
break;
mi->filter_stage++;
+ /* fallthrough */
case 1:
handle_patch(mi, line);
break;
fd = xopen(path, O_WRONLY | O_EXCL | O_CREAT, 0666);
while (size > 0) {
- long ret = write_in_full(fd, buf, size);
+ ssize_t ret = write_in_full(fd, buf, size);
if (ret < 0) {
/* Ignore epipe */
if (errno == EPIPE)
free(ent->path);
}
+struct object *object_array_pop(struct object_array *array)
+{
+ struct object *ret;
+
+ if (!array->nr)
+ return NULL;
+
+ ret = array->objects[array->nr - 1].item;
+ object_array_release_entry(&array->objects[array->nr - 1]);
+ array->nr--;
+ return ret;
+}
+
void object_array_filter(struct object_array *array,
object_array_each_func_t want, void *cb_data)
{
void add_object_array(struct object *obj, const char *name, struct object_array *array);
void add_object_array_with_path(struct object *obj, const char *name, struct object_array *array, unsigned mode, const char *path);
+/*
+ * Returns NULL if the array is empty. Otherwise, returns the last object
+ * after removing its entry from the array. Other resources associated
+ * with that object are left in an unspecified state and should not be
+ * examined.
+ */
+struct object *object_array_pop(struct object_array *array);
+
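A caller-side sketch of the contract described above, with toy types instead of git's struct object_array: pop returns the most recently added element and shrinks the array, or NULL once it is empty, so a pending list can be drained with a single while loop (as the shallow-walk code later in this series does).

#include <stdio.h>

struct item { const char *name; };

struct item_stack {
	struct item *items[16];
	int nr;
};

/* Same shape as object_array_pop(): NULL on empty, else the last item. */
static struct item *item_stack_pop(struct item_stack *s)
{
	if (!s->nr)
		return NULL;
	return s->items[--s->nr];
}

int main(void)
{
	struct item a = { "a" }, b = { "b" };
	struct item_stack stack = { { &a, &b }, 2 };
	struct item *it;

	while ((it = item_stack_pop(&stack)))	/* pops "b", then "a" */
		printf("%s\n", it->name);
	return 0;
}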
typedef int (*object_array_each_func_t)(struct object_array_entry *, void *);
/*
traverse_commit_list(&revs, show_commit, show_object, base);
- revs.pending.nr = 0;
- revs.pending.alloc = 0;
- revs.pending.objects = NULL;
+ object_array_clear(&revs.pending);
stored->bitmap = bitmap_to_ewah(base);
need_reset = 0;
int prepare_bitmap_walk(struct rev_info *revs)
{
unsigned int i;
- unsigned int pending_nr = revs->pending.nr;
- struct object_array_entry *pending_e = revs->pending.objects;
struct object_list *wants = NULL;
struct object_list *haves = NULL;
return -1;
}
- for (i = 0; i < pending_nr; ++i) {
- struct object *object = pending_e[i].item;
+ for (i = 0; i < revs->pending.nr; ++i) {
+ struct object *object = revs->pending.objects[i].item;
if (object->type == OBJ_NONE)
parse_object_or_die(&object->oid, NULL);
if (!bitmap_git.loaded && load_pack_bitmap() < 0)
return -1;
- revs->pending.nr = 0;
- revs->pending.alloc = 0;
- revs->pending.objects = NULL;
+ object_array_clear(&revs->pending);
if (haves) {
revs->ignore_missing_links = 1;
static size_t peak_pack_mapped;
static size_t pack_mapped;
struct packed_git *packed_git;
-
-static struct mru packed_git_mru_storage;
-struct mru *packed_git_mru = &packed_git_mru_storage;
+struct mru packed_git_mru;
#define SZ_FMT PRIuMAX
static inline uintmax_t sz_fmt(size_t s) { return s; }
{
struct packed_git *p;
- mru_clear(packed_git_mru);
+ mru_clear(&packed_git_mru);
for (p = packed_git; p; p = p->next)
- mru_append(packed_git_mru, p);
+ mru_append(&packed_git_mru, p);
}
static int prepare_packed_git_run_once = 0;
if (!packed_git)
return 0;
- for (p = packed_git_mru->head; p; p = p->next) {
+ for (p = packed_git_mru.head; p; p = p->next) {
if (fill_pack_entry(sha1, e, p->item)) {
- mru_mark(packed_git_mru, p);
+ mru_mark(&packed_git_mru, p);
return 1;
}
}
pattern, sb.buf);
}
-/*
- * Given command line arguments and a prefix, convert the input to
- * pathspec. die() if any magic in magic_mask is used.
- */
void parse_pathspec(struct pathspec *pathspec,
unsigned magic_mask, unsigned flags,
const char *prefix, const char **argv)
*/
#define PATHSPEC_LITERAL_PATH (1<<6)
+/*
+ * Given command line arguments and a prefix, convert the input to
+ * pathspec. die() if any magic in magic_mask is used.
+ *
+ * Any arguments used are copied. It is safe for the caller to modify
+ * or free 'prefix' and 'argv' after calling this function.
+ */
extern void parse_pathspec(struct pathspec *pathspec,
unsigned magic_mask,
unsigned flags,
int packet_flush_gently(int fd)
{
packet_trace("0000", 4, 1);
- if (write_in_full(fd, "0000", 4) == 4)
- return 0;
- return error("flush packet write failed");
+ if (write_in_full(fd, "0000", 4) < 0)
+ return error("flush packet write failed");
+ return 0;
}
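For context, these writers produce git's pkt-line framing: each packet starts with a four-digit lowercase hex length that counts the four header bytes themselves, and a bare "0000" is the flush packet written above. A toy framing routine, assuming that standard format (frame_pkt_line is invented for illustration; pkt-line.c is the real implementation):

#include <stdio.h>
#include <string.h>

/* Prefix a payload with its pkt-line length header. */
static int frame_pkt_line(const char *payload, char *out, size_t outlen)
{
	size_t len = strlen(payload) + 4;	/* header counts itself */

	if (len > 0xffff || len + 1 > outlen)
		return -1;			/* too big for one packet */
	snprintf(out, outlen, "%04x%s", (unsigned int)len, payload);
	return 0;
}

int main(void)
{
	char buf[64];

	if (!frame_pkt_line("hello\n", buf, sizeof(buf)))
		fputs(buf, stdout);	/* prints "000ahello\n" */
	puts("0000");			/* flush packet */
	return 0;
}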
void packet_buf_flush(struct strbuf *buf)
const char *fmt, va_list args)
{
static struct strbuf buf = STRBUF_INIT;
- ssize_t count;
strbuf_reset(&buf);
format_packet(&buf, fmt, args);
- count = write_in_full(fd, buf.buf, buf.len);
- if (count == buf.len)
- return 0;
-
- if (!gently) {
- check_pipe(errno);
- die_errno("packet write with format failed");
+ if (write_in_full(fd, buf.buf, buf.len) < 0) {
+ if (!gently) {
+ check_pipe(errno);
+ die_errno("packet write with format failed");
+ }
+ return error("packet write with format failed");
}
- return error("packet write with format failed");
+
+ return 0;
}
void packet_write_fmt(int fd, const char *fmt, ...)
packet_size = size + 4;
set_packet_header(packet_write_buffer, packet_size);
memcpy(packet_write_buffer + 4, buf, size);
- if (write_in_full(fd_out, packet_write_buffer, packet_size) == packet_size)
- return 0;
- return error("packet write failed");
+ if (write_in_full(fd_out, packet_write_buffer, packet_size) < 0)
+ return error("packet write failed");
+ return 0;
}
void packet_buf_write(struct strbuf *buf, const char *fmt, ...)
case '{': case '}':
case '$': case '\\': case '"':
strbuf_addch(sb, '\\');
+ /* fallthrough */
default:
strbuf_addch(sb, c);
break;
case S_IFDIR:
if (S_ISGITLINK(ce->ce_mode))
return ce_compare_gitlink(ce) ? DATA_CHANGED : 0;
+ /* else fallthrough */
default:
return TYPE_CHANGED;
}
unsigned int buffered = write_buffer_len;
if (buffered) {
git_SHA1_Update(context, write_buffer, buffered);
- if (write_in_full(fd, write_buffer, buffered) != buffered)
+ if (write_in_full(fd, write_buffer, buffered) < 0)
return -1;
write_buffer_len = 0;
}
/* Flush first if not enough space for SHA1 signature */
if (left + 20 > WRITE_BUFFER_SIZE) {
- if (write_in_full(fd, write_buffer, left) != left)
+ if (write_in_full(fd, write_buffer, left) < 0)
return -1;
left = 0;
}
git_SHA1_Final(write_buffer + left, context);
hashcpy(sha1, write_buffer + left);
left += 20;
- return (write_in_full(fd, write_buffer, left) != left) ? -1 : 0;
+ return (write_in_full(fd, write_buffer, left) < 0) ? -1 : 0;
}
static void ce_smudge_racily_clean_entry(struct cache_entry *ce)
if (!result)
result = ce_write(c, fd, to_remove_vi, prefix_size);
if (!result)
- result = ce_write(c, fd, ce->name + common, ce_namelen(ce) - common + 1);
+ result = ce_write(c, fd, ce->name + common, ce_namelen(ce) - common);
+ if (!result)
+ result = ce_write(c, fd, padding, 1);
strbuf_splice(previous_name, common, to_remove,
ce->name + common, ce_namelen(ce) - common);
int ref_exists(const char *refname)
{
- unsigned char sha1[20];
- return !!resolve_ref_unsafe(refname, RESOLVE_REF_READING, sha1, NULL);
+ return !!resolve_ref_unsafe(refname, RESOLVE_REF_READING, NULL, NULL);
}
static int filter_refs(const char *refname, const struct object_id *oid,
{
struct warn_if_dangling_data *d = cb_data;
const char *resolves_to;
- struct object_id junk;
if (!(flags & REF_ISSYMREF))
return 0;
- resolves_to = resolve_ref_unsafe(refname, 0, junk.hash, NULL);
+ resolves_to = resolve_ref_unsafe(refname, 0, NULL, NULL);
if (!resolves_to
|| (d->refname
? strcmp(resolves_to, d->refname)
}
}
- if (write_in_full(fd, buf.buf, buf.len) != buf.len) {
+ if (write_in_full(fd, buf.buf, buf.len) < 0) {
strbuf_addf(err, "could not write to '%s'", filename);
rollback_lock_file(&lock);
goto done;
return -1;
}
+ flags &= REF_TRANSACTION_UPDATE_ALLOWED_FLAGS;
+
flags |= (new_sha1 ? REF_HAVE_NEW : 0) | (old_sha1 ? REF_HAVE_OLD : 0);
ref_transaction_add_update(transaction, refname, flags,
return do_for_each_ref(get_main_ref_store(),
git_replace_ref_base, fn,
strlen(git_replace_ref_base),
- 0, cb_data);
+ DO_FOR_EACH_INCLUDE_BROKEN, cb_data);
}
int for_each_namespaced_ref(each_ref_fn fn, void *cb_data)
unsigned char *sha1, int *flags)
{
static struct strbuf sb_refname = STRBUF_INIT;
+ struct object_id unused_oid;
int unused_flags;
int symref_count;
+ if (!sha1)
+ sha1 = unused_oid.hash;
if (!flags)
flags = &unused_flags;
/*
* Resolve a reference, recursively following symbolic references.
*
- * Store the referred-to object's name in sha1 and return the name of
- * the non-symbolic reference that ultimately pointed at it. The
- * return value, if not NULL, is a pointer into either a static buffer
- * or the input ref.
+ * Return the name of the non-symbolic reference that ultimately pointed
+ * at the resolved object name. The return value, if not NULL, is a
+ * pointer into either a static buffer or the input ref.
+ *
+ * If sha1 is non-NULL, store the referred-to object's name in it.
*
* If the reference cannot be resolved to an object, the behavior
* depends on the RESOLVE_REF_READING flag:
#define REF_NODEREF 0x01
#define REF_FORCE_CREATE_REFLOG 0x40
+/*
+ * Flags that can be passed in to ref_transaction_update
+ */
+#define REF_TRANSACTION_UPDATE_ALLOWED_FLAGS \
+ (REF_ISPRUNING | \
+ REF_FORCE_CREATE_REFLOG | \
+ REF_NODEREF)
+
/*
* Setup reflog before using. Fill in err and return -1 on failure.
*/
written = len <= maxlen ? write_in_full(fd, logrec, len) : -1;
free(logrec);
- if (written != len)
+ if (written < 0)
return -1;
return 0;
return -1;
}
fd = get_lock_file_fd(&lock->lk);
- if (write_in_full(fd, oid_to_hex(oid), GIT_SHA1_HEXSZ) != GIT_SHA1_HEXSZ ||
- write_in_full(fd, &term, 1) != 1 ||
+ if (write_in_full(fd, oid_to_hex(oid), GIT_SHA1_HEXSZ) < 0 ||
+ write_in_full(fd, &term, 1) < 0 ||
close_ref_gently(lock) < 0) {
strbuf_addf(err,
"couldn't write '%s'", get_lock_file_path(&lock->lk));
* check with HEAD only which should cover 99% of all usage
* scenarios (even 100% of the default ones).
*/
- struct object_id head_oid;
int head_flag;
const char *head_ref;
head_ref = refs_resolve_ref_unsafe(&refs->base, "HEAD",
RESOLVE_REF_READING,
- head_oid.hash, &head_flag);
+ NULL, &head_flag);
if (head_ref && (head_flag & REF_ISSYMREF) &&
!strcmp(head_ref, lock->ref_name)) {
struct strbuf log_err = STRBUF_INIT;
rollback_lock_file(&reflog_lock);
} else if (update &&
(write_in_full(get_lock_file_fd(&lock->lk),
- oid_to_hex(&cb.last_kept_oid), GIT_SHA1_HEXSZ) != GIT_SHA1_HEXSZ ||
- write_str_in_full(get_lock_file_fd(&lock->lk), "\n") != 1 ||
+ oid_to_hex(&cb.last_kept_oid), GIT_SHA1_HEXSZ) < 0 ||
+ write_str_in_full(get_lock_file_fd(&lock->lk), "\n") < 0 ||
close_ref_gently(lock) < 0)) {
status |= error("couldn't write %s",
get_lock_file_path(&lock->lk));
static void read_config(void)
{
static int loaded;
- struct object_id oid;
int flag;
if (loaded)
current_branch = NULL;
if (startup_info->have_repository) {
- const char *head_ref = resolve_ref_unsafe("HEAD", 0, oid.hash, &flag);
+ const char *head_ref = resolve_ref_unsafe("HEAD", 0, NULL, &flag);
if (head_ref && (flag & REF_ISSYMREF) &&
skip_prefix(head_ref, "refs/heads/", &head_ref)) {
current_branch = make_branch(head_ref, 0);
static char *guess_ref(const char *name, struct ref *peer)
{
struct strbuf buf = STRBUF_INIT;
- struct object_id oid;
const char *r = resolve_ref_unsafe(peer->name, RESOLVE_REF_READING,
- oid.hash, NULL);
+ NULL, NULL);
if (!r)
return NULL;
return -1;
if (!dst_value) {
- struct object_id oid;
int flag;
dst_value = resolve_ref_unsafe(matched_src->name,
RESOLVE_REF_READING,
- oid.hash, &flag);
+ NULL, &flag);
if (!dst_value ||
((flag & REF_ISSYMREF) &&
!starts_with(dst_value, "refs/heads/")))
static int ignore_symref_update(const char *refname)
{
- struct object_id oid;
int flag;
- if (!resolve_ref_unsafe(refname, 0, oid.hash, &flag))
+ if (!resolve_ref_unsafe(refname, 0, NULL, &flag))
return 0; /* non-existing refs are OK */
return (flag & REF_ISSYMREF);
}
rerere_id_hex(id),
rr->items[i].string, 0);
- if (write_in_full(out_fd, buf.buf, buf.len) != buf.len)
+ if (write_in_full(out_fd, buf.buf, buf.len) < 0)
die("unable to write rerere record");
strbuf_release(&buf);
#include "bisect.h"
#include "packfile.h"
#include "worktree.h"
+#include "argv-array.h"
volatile show_early_output_fn_t show_early_output;
unsigned flags)
{
struct rev_cmdline_info *info = &revs->cmdline;
- int nr = info->nr;
+ unsigned int nr = info->nr;
ALLOC_GROW(info->rev, nr + 1, info->alloc);
info->rev[nr].item = item;
return 0;
}
-struct cmdline_pathspec {
- int alloc;
- int nr;
- const char **path;
-};
-
-static void append_prune_data(struct cmdline_pathspec *prune, const char **av)
-{
- while (*av) {
- ALLOC_GROW(prune->path, prune->nr + 1, prune->alloc);
- prune->path[prune->nr++] = *(av++);
- }
-}
-
static void read_pathspec_from_stdin(struct rev_info *revs, struct strbuf *sb,
- struct cmdline_pathspec *prune)
+ struct argv_array *prune)
{
- while (strbuf_getline(sb, stdin) != EOF) {
- ALLOC_GROW(prune->path, prune->nr + 1, prune->alloc);
- prune->path[prune->nr++] = xstrdup(sb->buf);
- }
+ while (strbuf_getline(sb, stdin) != EOF)
+ argv_array_push(prune, sb->buf);
}
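The conversion to argv_array buys two things the hand-rolled cmdline_pathspec lacked: the vector stays NULL-terminated at all times and owns copies of its strings, so it can be handed directly to parse_pathspec() and then released with one argv_array_clear() call. A stand-alone toy with the same shape, under invented names (toy_vec is not git's API):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct toy_vec {
	char **v;
	size_t nr, alloc;
};

/* Append a copy of 's', keeping the array NULL-terminated. */
static void toy_vec_push(struct toy_vec *a, const char *s)
{
	if (a->nr + 2 > a->alloc) {
		a->alloc = a->alloc ? a->alloc * 2 : 16;
		a->v = realloc(a->v, a->alloc * sizeof(*a->v));
	}
	a->v[a->nr++] = strdup(s);
	a->v[a->nr] = NULL;
}

/* Free the strings and the vector itself in one call. */
static void toy_vec_clear(struct toy_vec *a)
{
	size_t i;

	for (i = 0; i < a->nr; i++)
		free(a->v[i]);
	free(a->v);
	memset(a, 0, sizeof(*a));
}

int main(void)
{
	struct toy_vec paths = { NULL, 0, 0 };
	char **p;

	toy_vec_push(&paths, "Documentation");
	toy_vec_push(&paths, "t");
	for (p = paths.v; *p; p++)
		printf("%s\n", *p);
	toy_vec_clear(&paths);
	return 0;
}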
static void read_revisions_from_stdin(struct rev_info *revs,
- struct cmdline_pathspec *prune)
+ struct argv_array *prune)
{
struct strbuf sb;
int seen_dashdash = 0;
static void NORETURN diagnose_missing_default(const char *def)
{
- unsigned char sha1[20];
int flags;
const char *refname;
- refname = resolve_ref_unsafe(def, 0, sha1, &flags);
+ refname = resolve_ref_unsafe(def, 0, NULL, &flags);
if (!refname || !(flags & REF_ISSYMREF) || (flags & REF_ISBROKEN))
die(_("your current branch appears to be broken"));
int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct setup_revision_opt *opt)
{
int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0, revarg_opt;
- struct cmdline_pathspec prune_data;
+ struct argv_array prune_data = ARGV_ARRAY_INIT;
const char *submodule = NULL;
- memset(&prune_data, 0, sizeof(prune_data));
if (opt)
submodule = opt->submodule;
argv[i] = NULL;
argc = i;
if (argv[i + 1])
- append_prune_data(&prune_data, argv + i + 1);
+ argv_array_pushv(&prune_data, argv + i + 1);
seen_dashdash = 1;
break;
}
for (j = i; j < argc; j++)
verify_filename(revs->prefix, argv[j], j == i);
- append_prune_data(&prune_data, argv + i);
+ argv_array_pushv(&prune_data, argv + i);
break;
}
else
got_rev_arg = 1;
}
- if (prune_data.nr) {
+ if (prune_data.argc) {
/*
* If we need to introduce the magic "a lone ':' means no
* pathspec whatsoever", here is the place to do so.
* call init_pathspec() to set revs->prune_data here.
* }
*/
- ALLOC_GROW(prune_data.path, prune_data.nr + 1, prune_data.alloc);
- prune_data.path[prune_data.nr++] = NULL;
parse_pathspec(&revs->prune_data, 0, 0,
- revs->prefix, prune_data.path);
+ revs->prefix, prune_data.argv);
}
+ argv_array_clear(&prune_data);
if (revs->def == NULL)
revs->def = opt ? opt->def : NULL;
date_mode_explicit:1,
preserve_subject:1;
unsigned int disable_stdin:1;
+ /*
+ * Set `leak_pending` to prevent `prepare_revision_walk()` from clearing
+ * the array of pending objects (`pending`). It will still forget about
+ * the array and its entries, so they really are leaked. This can be
+ * useful if the `struct object_array` `pending` is copied before
+ * calling `prepare_revision_walk()`. By setting `leak_pending`, you
+ * effectively claim ownership of the old array, so you should most
+ * likely call `object_array_clear(&pending_copy)` once you are done.
+ * Observe that this is about ownership of the array and its entries,
+ * not the commits referenced by those entries.
+ */
unsigned int leak_pending:1;
/* --show-linear-break */
unsigned int track_linear:1,
strbuf_release(&cap_buf);
return atomic_push_failure(args, remote_refs, ref);
}
- /* Fallthrough for non atomic case. */
+ /* else fallthrough */
default:
continue;
}
return end;
}
-static void link_alt_odb_entries(const char *alt, int len, int sep,
+static void link_alt_odb_entries(const char *alt, int sep,
const char *relative_base, int depth)
{
struct strbuf objdirbuf = STRBUF_INIT;
static void read_info_alternates(const char * relative_base, int depth)
{
- char *map;
- size_t mapsz;
- struct stat st;
char *path;
- int fd;
+ struct strbuf buf = STRBUF_INIT;
path = xstrfmt("%s/info/alternates", relative_base);
- fd = git_open(path);
- free(path);
- if (fd < 0)
- return;
- if (fstat(fd, &st) || (st.st_size == 0)) {
- close(fd);
+ if (strbuf_read_file(&buf, path, 1024) < 0) {
+ warn_on_fopen_errors(path);
+ free(path);
return;
}
- mapsz = xsize_t(st.st_size);
- map = xmmap(NULL, mapsz, PROT_READ, MAP_PRIVATE, fd, 0);
- close(fd);
-
- link_alt_odb_entries(map, mapsz, '\n', relative_base, depth);
- munmap(map, mapsz);
+ link_alt_odb_entries(buf.buf, '\n', relative_base, depth);
+ strbuf_release(&buf);
+ free(path);
}
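Reading the alternates file into a strbuf yields a NUL-terminated buffer, which is why link_alt_odb_entries() no longer needs a length argument: the separator scan can simply stop at the terminating NUL. A minimal sketch of that kind of parsing loop (for_each_entry is a made-up name; the real splitting lives in link_alt_odb_entries()):

#include <stdio.h>
#include <string.h>

/* Print each non-empty, 'sep'-delimited entry of a NUL-terminated buffer. */
static void for_each_entry(const char *buf, int sep)
{
	while (*buf) {
		const char *end = strchr(buf, sep);

		if (!end)
			end = buf + strlen(buf);
		if (end != buf)
			printf("entry: %.*s\n", (int)(end - buf), buf);
		buf = *end ? end + 1 : end;
	}
}

int main(void)
{
	for_each_entry("/srv/alt1\n\n/srv/alt2\n", '\n');
	return 0;
}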
struct alternate_object_database *alloc_alt_odb(const char *dir)
if (commit_lock_file(lock))
die_errno("unable to move new alternates file into place");
if (alt_odb_tail)
- link_alt_odb_entries(reference, strlen(reference), '\n', NULL, 0);
+ link_alt_odb_entries(reference, '\n', NULL, 0);
}
free(alts);
}
*/
prepare_alt_odb();
- link_alt_odb_entries(reference, strlen(reference), '\n', NULL, 0);
+ link_alt_odb_entries(reference, '\n', NULL, 0);
}
/*
if (!alt) alt = "";
alt_odb_tail = &alt_odb_list;
- link_alt_odb_entries(alt, strlen(alt), PATH_SEP, NULL, 0);
+ link_alt_odb_entries(alt, PATH_SEP, NULL, 0);
read_info_alternates(get_object_directory(), 0);
}
int read_pack_header(int fd, struct pack_header *header)
{
- if (read_in_full(fd, header, sizeof(*header)) < sizeof(*header))
+ if (read_in_full(fd, header, sizeof(*header)) != sizeof(*header))
/* "eof before pack header was fully read" */
return PH_ERROR_EOF;
cur_depth = 0;
} else {
commit = (struct commit *)
- stack.objects[--stack.nr].item;
+ object_array_pop(&stack);
cur_depth = *(int *)commit->util;
}
}
if (write_shallow_commits(&sb, 0, extra)) {
temp = xmks_tempfile(git_path("shallow_XXXXXX"));
- if (write_in_full(temp->fd, sb.buf, sb.len) != sb.len ||
+ if (write_in_full(temp->fd, sb.buf, sb.len) < 0 ||
close_tempfile_gently(temp) < 0)
die_errno("failed to write to %s",
get_tempfile_path(temp));
LOCK_DIE_ON_ERROR);
check_shallow_file_for_update();
if (write_shallow_commits(&sb, 0, extra)) {
- if (write_in_full(fd, sb.buf, sb.len) != sb.len)
+ if (write_in_full(fd, sb.buf, sb.len) < 0)
die_errno("failed to write to %s",
get_lock_file_path(shallow_lock));
*alternate_shallow_file = get_lock_file_path(shallow_lock);
LOCK_DIE_ON_ERROR);
check_shallow_file_for_update();
if (write_shallow_commits_1(&sb, 0, NULL, SEEN_ONLY)) {
- if (write_in_full(fd, sb.buf, sb.len) != sb.len)
+ if (write_in_full(fd, sb.buf, sb.len) < 0)
die_errno("failed to write to %s",
get_lock_file_path(&shallow_lock));
commit_lock_file(&shallow_lock);
return execv_git_cmd(my_argv);
}
-static int do_cvs_cmd(const char *me, char *arg)
-{
- const char *cvsserver_argv[3] = {
- "cvsserver", "server", NULL
- };
-
- if (!arg || strcmp(arg, "server"))
- die("git-cvsserver only handles server: %s", arg);
-
- setup_path();
- return execv_git_cmd(cvsserver_argv);
-}
-
static int is_valid_cmd_name(const char *cmd)
{
/* Test command contains no . or / characters */
{ "git-receive-pack", do_generic_cmd },
{ "git-upload-pack", do_generic_cmd },
{ "git-upload-archive", do_generic_cmd },
- { "cvs", do_cvs_cmd },
{ NULL },
};
kept = 0;
wrote = write_in_full(fd, buf, readlen);
- if (wrote != readlen)
+ if (wrote < 0)
goto close_and_exit;
}
if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
typedef int (*string_list_each_func_t)(struct string_list_item *, void *);
int for_each_string_list(struct string_list *list,
string_list_each_func_t, void *cb_data);
-#define for_each_string_list_item(item,list) \
- for (item = (list)->items; item < (list)->items + (list)->nr; ++item)
+#define for_each_string_list_item(item,list) \
+ for (item = (list)->items; \
+ item && item < (list)->items + (list)->nr; \
+ ++item)
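The extra "item &&" test matters for a string_list that never had anything added: its items pointer is still NULL, and evaluating items + nr (even with nr == 0) before the comparison is undefined pointer arithmetic. A toy reproduction of the guarded loop, with illustrative stand-in types (toy_list is not git's struct string_list):

#include <stdio.h>
#include <stddef.h>

struct toy_list {
	const char **items;
	size_t nr;
};

/* The NULL check short-circuits before any pointer arithmetic happens. */
#define for_each_toy_item(item, list) \
	for (item = (list)->items; \
	     item && item < (list)->items + (list)->nr; \
	     item++)

int main(void)
{
	struct toy_list empty = { NULL, 0 };
	const char *words[] = { "one", "two" };
	struct toy_list filled = { words, 2 };
	const char **p;

	for_each_toy_item(p, &empty)
		printf("never reached\n");
	for_each_toy_item(p, &filled)
		printf("%s\n", *p);	/* prints "one", then "two" */
	return 0;
}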
/*
* Apply want to each item in list, retaining only the ones for which
if (supported_capabilities)
*supported_capabilities |= capabilities[i].flag;
} else {
- warning("subprocess '%s' requested unsupported capability '%s'",
- process->argv[0], p);
+ die("subprocess '%s' requested unsupported capability '%s'",
+ process->argv[0], p);
}
}
return 0;
}
+struct has_commit_data {
+ int result;
+ const char *path;
+};
+
static int check_has_commit(const struct object_id *oid, void *data)
{
- int *has_commit = data;
+ struct has_commit_data *cb = data;
- if (!lookup_commit_reference(oid))
- *has_commit = 0;
+ enum object_type type = sha1_object_info(oid->hash, NULL);
- return 0;
+ switch (type) {
+ case OBJ_COMMIT:
+ return 0;
+ case OBJ_BAD:
+ /*
+ * Object is missing or invalid. If invalid, an error message
+ * has already been printed.
+ */
+ cb->result = 0;
+ return 0;
+ default:
+ die(_("submodule entry '%s' (%s) is a %s, not a commit"),
+ cb->path, oid_to_hex(oid), typename(type));
+ }
}
static int submodule_has_commits(const char *path, struct oid_array *commits)
{
- int has_commit = 1;
+ struct has_commit_data has_commit = { 1, path };
/*
* Perform a cheap, but incorrect check for the existence of 'commits'.
oid_array_for_each_unique(commits, check_has_commit, &has_commit);
- if (has_commit) {
+ if (has_commit.result) {
/*
* Even if the submodule is checked out and the commit is
* present, make sure it exists in the submodule's object store
cp.dir = path;
if (capture_command(&cp, &out, GIT_MAX_HEXSZ + 1) || out.len)
- has_commit = 0;
+ has_commit.result = 0;
strbuf_release(&out);
}
- return has_commit;
+ return has_commit.result;
}
static int submodule_needs_pushing(const char *path, struct oid_array *commits)
add_object_array(merges.objects[i].item, NULL, result);
}
- free(merges.objects);
+ object_array_clear(&merges);
return result->nr;
}
print_commit((struct commit *) merges.objects[i].item);
}
- free(merges.objects);
+ object_array_clear(&merges);
return 0;
}
/*
* Prepare the "env_array" parameter of a "struct child_process" for executing
- * a submodule by clearing any repo-specific envirionment variables, but
+ * a submodule by clearing any repo-specific environment variables, but
* retaining any config in the environment.
*/
extern void prepare_submodule_repo_env(struct argv_array *out);
$ sh ./t9200-git-cvsexport-commit.sh --run='-3 21'
-As noted above, the test set is built going though items left to
-right, so this:
+As noted above, the test set is built by going through the items
+from left to right, so this:
$ sh ./t9200-git-cvsexport-commit.sh --run='1-4 !3'
-will run tests 1, 2, and 4. Items that comes later have higher
+will run tests 1, 2, and 4. Items that come later have higher
precedence. It means that this:
$ sh ./t9200-git-cvsexport-commit.sh --run='!3 1-4'
while (<>) {
chomp;
/\bsed\s+-i/ and err 'sed -i is not portable';
- /\becho\s+-n/ and err 'echo -n is not portable (please use printf)';
+ /\becho\s+-[neE]/ and err 'echo with option is not portable (please use printf)';
/^\s*declare\s+/ and err 'arrays/declare not portable';
/^\s*[^#]\s*which\s/ and err 'which is not portable (please use type)';
/\btest\s+[^=]*==/ and err '"test a == b" is not portable (please use =)';
/test-svn-fe
/test-urlmatch-normalization
/test-wildmatch
+/test-write-cache
}
fd = open (argv[4], O_WRONLY|O_CREAT|O_TRUNC, 0666);
- if (fd < 0 || write_in_full(fd, out_buf, out_size) != out_size) {
+ if (fd < 0 || write_in_full(fd, out_buf, out_size) < 0) {
perror(argv[4]);
return 1;
}
static void handle_command(const char *command, const char *arg, struct line_buffer *buf)
{
- switch (*command) {
- case 'b':
- if (starts_with(command, "binary ")) {
- struct strbuf sb = STRBUF_INIT;
- strbuf_addch(&sb, '>');
- buffer_read_binary(buf, &sb, strtouint32(arg));
- fwrite(sb.buf, 1, sb.len, stdout);
- strbuf_release(&sb);
- return;
- }
- case 'c':
- if (starts_with(command, "copy ")) {
- buffer_copy_bytes(buf, strtouint32(arg));
- return;
- }
- case 's':
- if (starts_with(command, "skip ")) {
- buffer_skip_bytes(buf, strtouint32(arg));
- return;
- }
- default:
+ if (starts_with(command, "binary ")) {
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_addch(&sb, '>');
+ buffer_read_binary(buf, &sb, strtouint32(arg));
+ fwrite(sb.buf, 1, sb.len, stdout);
+ strbuf_release(&sb);
+ } else if (starts_with(command, "copy ")) {
+ buffer_copy_bytes(buf, strtouint32(arg));
+ } else if (starts_with(command, "skip ")) {
+ buffer_skip_bytes(buf, strtouint32(arg));
+ } else {
die("unrecognized command: %s", command);
}
}
(ulimit -n 32 && "$@")
}
-test_lazy_prereq ULIMIT_FILE_DESCRIPTORS 'run_with_limited_open_files true'
+test_lazy_prereq ULIMIT_FILE_DESCRIPTORS '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_open_files true
+'
test_expect_success ULIMIT_FILE_DESCRIPTORS 'large transaction creating branches does not burst open file limit' '
(
test_cmp expect actual
'
+test_expect_success 'rev-parse --is-shallow-repository in shallow repo' '
+ test_commit test_commit &&
+ echo true >expect &&
+ git clone --depth 1 --no-local . shallow &&
+ test_when_finished "rm -rf shallow" &&
+ git -C shallow rev-parse --is-shallow-repository >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'rev-parse --is-shallow-repository in non-shallow repo' '
+ echo false >expect &&
+ git rev-parse --is-shallow-repository >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'showing the superproject correctly' '
git rev-parse --show-superproject-working-tree >out &&
test_must_be_empty out &&
test_expect_missing archive-pathspec/ignored-by-tree.d
test_expect_missing archive-pathspec/ignored-by-tree.d/file
test_expect_exists archive-pathspec/ignored-by-worktree
-test_expect_missing archive-pathspec/excluded-by-pathspec.d failure
+test_expect_missing archive-pathspec/excluded-by-pathspec.d
test_expect_missing archive-pathspec/excluded-by-pathspec.d/file
test_expect_success 'git archive with wildcard pathspec' '
test_expect_missing archive/deep/and/slashless/foo &&
test_expect_missing archive/deep/with/wildcard/ &&
test_expect_missing archive/deep/with/wildcard/foo &&
-test_expect_exists archive/one-level-lower/
+test_expect_missing archive/one-level-lower/
test_expect_missing archive/one-level-lower/two-levels-lower/ignored-only-if-dir/
test_expect_missing archive/one-level-lower/two-levels-lower/ignored-ony-if-dir/ignored-by-ignored-dir
git archive --format=tar $root_tree >subtree-all.tar &&
make_dir extract &&
"$TAR" xf subtree-all.tar -C extract &&
- check_dir extract sub
+ check_dir extract
'
test_expect_success 'archive empty subtree by direct pathspec' '
git archive --format=tar $root_tree -- sub >subtree-path.tar &&
make_dir extract &&
"$TAR" xf subtree-path.tar -C extract &&
- check_dir extract sub
+ check_dir extract
'
ZIPINFO=zipinfo
)
'
+test_expect_success 'submodule entry pointing at a tag is error' '
+ git -C work/gar/bage tag -a test1 -m "tag" &&
+ tag=$(git -C work/gar/bage rev-parse test1^{tag}) &&
+ git -C work update-index --cacheinfo 160000 "$tag" gar/bage &&
+ git -C work commit -m "bad commit" &&
+ test_when_finished "git -C work reset --hard HEAD^" &&
+ test_must_fail git -C work push --recurse-submodules=on-demand ../pub.git master 2>err &&
+ test_i18ngrep "is a tag, not a commit" err
+'
+
test_expect_success 'push fails if recurse submodules option passed as yes' '
(
cd work/gar/bage &&
check_describe "test2-lightweight-*" --long --tags --match="test2-*" HEAD^
-check_describe "test1-lightweight-*" --long --tags --match="test1-*" --match="test2-*" HEAD^
+check_describe "test2-lightweight-*" --long --tags --match="test1-*" --match="test2-*" HEAD^
check_describe "test2-lightweight-*" --long --tags --match="test1-*" --no-match --match="test2-*" HEAD^
+check_describe "test1-lightweight-*" --long --tags --match="test1-*" --match="test3-*" HEAD
+
+check_describe "test1-lightweight-*" --long --tags --match="test3-*" --match="test1-*" HEAD
+
+test_expect_success 'set-up branches' '
+ git branch branch_A A &&
+ git branch branch_C c &&
+ git update-ref refs/remotes/origin/remote_branch_A "A^{commit}" &&
+ git update-ref refs/remotes/origin/remote_branch_C "c^{commit}" &&
+ git update-ref refs/original/original_branch_A test-annotated~2
+'
+
+check_describe "heads/branch_A*" --all --match="branch_*" --exclude="branch_C" HEAD
+
+check_describe "remotes/origin/remote_branch_A*" --all --match="origin/remote_branch_*" --exclude="origin/remote_branch_C" HEAD
+
+check_describe "original/original_branch_A*" --all test-annotated~1
+
+test_expect_success '--match does not work for other types' '
+ test_must_fail git describe --all --match="*original_branch_*" test-annotated~1
+'
+
+test_expect_success '--exclude does not work for other types' '
+ R=$(git describe --all --exclude="any_pattern_even_not_matching" test-annotated~1) &&
+ case "$R" in
+ *original_branch_A*) echo "fail: Found unknown reference $R with --exclude"
+ false;;
+ *) echo ok: Found some known type;;
+ esac
+'
+
test_expect_success 'name-rev with exact tags' '
echo A >expect &&
tag_object=$(git rev-parse refs/tags/A) &&
test_cmp expect actual
'
+test_expect_success 'name-rev --all' '
+ >expect.unsorted &&
+ for rev in $(git rev-list --all)
+ do
+ git name-rev $rev >>expect.unsorted
+ done &&
+ sort <expect.unsorted >expect &&
+ git name-rev --all >actual.unsorted &&
+ sort <actual.unsorted >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'name-rev --stdin' '
+ >expect.unsorted &&
+ for rev in $(git rev-list --all)
+ do
+ name=$(git name-rev --name-only $rev) &&
+ echo "$rev ($name)" >>expect.unsorted
+ done &&
+ sort <expect.unsorted >expect &&
+ git rev-list --all | git name-rev --stdin >actual.unsorted &&
+ sort <actual.unsorted >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'describe --contains with the exact tags' '
echo "A^0" >expect &&
tag_object=$(git rev-parse refs/tags/A) &&
'
test_expect_success 'describe ignoring a broken submodule' '
git describe --broken >out &&
+ test_when_finished "mv .git/modules/sub_moved .git/modules/sub1" &&
grep broken out
'
+test_expect_failure ULIMIT_STACK_SIZE 'name-rev works in a deep repo' '
+ i=1 &&
+ while test $i -lt 8000
+ do
+ echo "commit refs/heads/master
+committer A U Thor <author@example.com> $((1000000000 + $i * 100)) +0200
+data <<EOF
+commit #$i
+EOF"
+ test $i = 1 && echo "from refs/heads/master^0"
+ i=$(($i + 1))
+ done | git fast-import &&
+ git checkout master &&
+ git tag far-far-away HEAD^ &&
+ echo "HEAD~4000 tags/far-far-away~3999" >expect &&
+ git name-rev HEAD~4000 >actual &&
+ test_cmp expect actual &&
+ run_with_limited_stack git name-rev HEAD~4000 >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success ULIMIT_STACK_SIZE 'describe works in a deep repo' '
+ git tag -f far-far-away HEAD~7999 &&
+ echo "far-far-away" >expect &&
+ git describe --tags --abbrev=0 HEAD~4000 >actual &&
+ test_cmp expect actual &&
+ run_with_limited_stack git describe --tags --abbrev=0 HEAD~4000 >actual &&
+ test_cmp expect actual
+'
+
test_done
test_cmp actual expect
'
+test_expect_failure 'moving nested submodules' '
+ git commit -am "cleanup commit" &&
+ mkdir sub_nested_nested &&
+ (cd sub_nested_nested &&
+ touch nested_level2 &&
+ git init &&
+ git add . &&
+ git commit -m "nested level 2"
+ ) &&
+ mkdir sub_nested &&
+ (cd sub_nested &&
+ touch nested_level1 &&
+ git init &&
+ git add . &&
+ git commit -m "nested level 1" &&
+ git submodule add ../sub_nested_nested &&
+ git commit -m "add nested level 2"
+ ) &&
+ git submodule add ./sub_nested nested_move &&
+ git commit -m "add nested_move" &&
+ git submodule update --init --recursive &&
+ git mv nested_move sub_nested_moved &&
+ git status
+'
+
test_done
git tag -l --sort=version:refname
'
-run_with_limited_stack () {
- (ulimit -s 128 && "$@")
-}
-
-test_lazy_prereq ULIMIT_STACK_SIZE 'run_with_limited_stack true'
-
-# we require ulimit, this excludes Windows
test_expect_success ULIMIT_STACK_SIZE '--contains and --no-contains work in a deep repo' '
>expect &&
i=1 &&
grep "path.*needs.*filters" err
'
+test_expect_success '--textconv/--filters complain without path' '
+ test_must_fail git cat-file --textconv HEAD &&
+ test_must_fail git cat-file --filters HEAD
+'
+
test_expect_success 'cat-file --textconv --batch works' '
sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
test_config diff.txt.textconv "tr A-Za-z N-ZA-Mn-za-m <" &&
. ./test-lib.sh
+if test_have_prereq !PIPE
+then
+ skip_all="svn dumpfile importer testing requires the PIPE prerequisite"
+ test_done
+fi
+
reinit_git () {
- if ! test_declared_prereq PIPE
- then
- echo >&4 "reinit_git: need to declare PIPE prerequisite"
- return 127
- fi
rm -fr .git &&
rm -f stream backflow &&
git init &&
>empty
-test_expect_success PIPE 'empty dump' '
+test_expect_success 'empty dump' '
reinit_git &&
echo "SVN-fs-dump-format-version: 2" >input &&
try_dump input
'
-test_expect_success PIPE 'v4 dumps not supported' '
+test_expect_success 'v4 dumps not supported' '
reinit_git &&
echo "SVN-fs-dump-format-version: 4" >v4.dump &&
try_dump v4.dump must_fail
'
-test_expect_failure PIPE 'empty revision' '
+test_expect_failure 'empty revision' '
reinit_git &&
printf "rev <nobody, nobody@local>: %s\n" "" "" >expect &&
cat >emptyrev.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_success PIPE 'empty properties' '
+test_expect_success 'empty properties' '
reinit_git &&
printf "rev <nobody, nobody@local>: %s\n" "" "" >expect &&
cat >emptyprop.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_success PIPE 'author name and commit message' '
+test_expect_success 'author name and commit message' '
reinit_git &&
echo "<author@example.com, author@example.com@local>" >expect.author &&
cat >message <<-\EOF &&
test_cmp expect.author actual.author
'
-test_expect_success PIPE 'unsupported properties are ignored' '
+test_expect_success 'unsupported properties are ignored' '
reinit_git &&
echo author >expect &&
cat >extraprop.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_failure PIPE 'timestamp and empty file' '
+test_expect_failure 'timestamp and empty file' '
echo author@example.com >expect.author &&
echo 1999-01-01 >expect.date &&
echo file >expect.files &&
test_cmp empty file
'
-test_expect_success PIPE 'directory with files' '
+test_expect_success 'directory with files' '
reinit_git &&
printf "%s\n" directory/file1 directory/file2 >expect.files &&
echo hi >hi &&
test_cmp hi directory/file2
'
-test_expect_success PIPE 'branch name with backslash' '
+test_expect_success 'branch name with backslash' '
reinit_git &&
sort <<-\EOF >expect.branch-files &&
trunk/file1
test_cmp expect.branch-files actual.branch-files
'
-test_expect_success PIPE 'node without action' '
+test_expect_success 'node without action' '
reinit_git &&
cat >inaction.dump <<-\EOF &&
SVN-fs-dump-format-version: 3
try_dump inaction.dump must_fail
'
-test_expect_success PIPE 'action: add node without text' '
+test_expect_success 'action: add node without text' '
reinit_git &&
cat >textless.dump <<-\EOF &&
SVN-fs-dump-format-version: 3
try_dump textless.dump must_fail
'
-test_expect_failure PIPE 'change file mode but keep old content' '
+test_expect_failure 'change file mode but keep old content' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp hello actual.target
'
-test_expect_success PIPE 'NUL in property value' '
+test_expect_success 'NUL in property value' '
reinit_git &&
echo "commit message" >expect.message &&
{
test_cmp expect.message actual.message
'
-test_expect_success PIPE 'NUL in log message, file content, and property name' '
+test_expect_success 'NUL in log message, file content, and property name' '
# Caveat: svnadmin 1.6.16 (r1073529) truncates at \0 in the
# svn:specialQnotreally example.
reinit_git &&
test_cmp expect.hello2 actual.hello2
'
-test_expect_success PIPE 'change file mode and reiterate content' '
+test_expect_success 'change file mode and reiterate content' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp hello actual.target
'
-test_expect_success PIPE 'deltas supported' '
+test_expect_success 'deltas supported' '
reinit_git &&
{
# (old) h + (inline) ello + (old) \n
try_dump delta.dump
'
-test_expect_success PIPE 'property deltas supported' '
+test_expect_success 'property deltas supported' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'properties on /' '
+test_expect_success 'properties on /' '
reinit_git &&
cat <<-\EOF >expect &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'deltas for typechange' '
+test_expect_success 'deltas for typechange' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'deltas need not consume the whole preimage' '
+test_expect_success 'deltas need not consume the whole preimage' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect.3 actual.3
'
-test_expect_success PIPE 'no hang for delta trying to read past end of preimage' '
+test_expect_success 'no hang for delta trying to read past end of preimage' '
reinit_git &&
{
# COPY 1
fi
'
-test_expect_success SVNREPO,PIPE 't9135/svn.dump' '
+test_expect_success SVNREPO 't9135/svn.dump' '
mkdir -p simple-git &&
(
cd simple-git &&
mkdir new &&
git --git-dir=new/.git init &&
git fast-export -C -C --signed-tags=strip --all > output &&
- grep "^C file6 file7\$" output &&
+ grep "^C file2 file4\$" output &&
cat output |
(cd new &&
git fast-import &&
test_cmp expected actual
'
+test_expect_success 'when using -C, do not declare copy when source of copy is also modified' '
+ test_create_repo src &&
+ echo a_line >src/file.txt &&
+ git -C src add file.txt &&
+ git -C src commit -m 1st_commit &&
+
+ cp src/file.txt src/file2.txt &&
+ echo another_line >>src/file.txt &&
+ git -C src add file.txt file2.txt &&
+ git -C src commit -m 2nd_commit &&
+
+ test_create_repo dst &&
+ git -C src fast-export --all -C | git -C dst fast-import &&
+ git -C src show >expected &&
+ git -C dst show >actual &&
+ test_cmp expected actual
+'
+
test_done
test_cmp ../expect ../actual
'
+#------------
+# running via git-shell
+#------------
+
+cd "$WORKDIR"
+
+test_expect_success 'create remote-cvs helper' '
+ write_script remote-cvs <<-\EOF
+ exec git shell -c "cvs server"
+ EOF
+'
+
+test_expect_success 'cvs server does not run with vanilla git-shell' '
+ (
+ cd cvswork &&
+ CVS_SERVER=$WORKDIR/remote-cvs &&
+ export CVS_SERVER &&
+ test_must_fail cvs log merge
+ )
+'
+
+test_expect_success 'configure git shell to run cvs server' '
+ mkdir "$HOME"/git-shell-commands &&
+
+ write_script "$HOME"/git-shell-commands/cvs <<-\EOF &&
+ if ! test $# = 1 || test "$1" != "server"
+ then
+ echo >&2 "git-cvsserver only handles \"server\""
+ exit 1
+ fi
+ exec git cvsserver server
+ EOF
+
+ # Should not be used, but part of the recommended setup
+ write_script "$HOME"/git-shell-commands/no-interactive-login <<-\EOF
+ echo Interactive login forbidden
+ EOF
+'
+
+test_expect_success 'cvs server can run with recommended config' '
+ (
+ cd cvswork &&
+ CVS_SERVER=$WORKDIR/remote-cvs &&
+ export CVS_SERVER &&
+ cvs log merge
+ )
+'
+
test_done
test_lazy_prereq PIPE '
# test whether the filesystem supports FIFOs
- case $(uname -s) in
- CYGWIN*|MINGW*)
- false
- ;;
- *)
- rm -f testfifo && mkfifo testfifo
- ;;
- esac
+ test_have_prereq !MINGW,!CYGWIN &&
+ rm -f testfifo && mkfifo testfifo
'
test_lazy_prereq SYMLINKS '
(ulimit -s 128 && "$@")
}
-test_lazy_prereq CMDLINE_LIMIT 'run_with_limited_cmdline true'
+test_lazy_prereq CMDLINE_LIMIT '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_cmdline true
+'
+
+run_with_limited_stack () {
+ (ulimit -s 128 && "$@")
+}
+
+test_lazy_prereq ULIMIT_STACK_SIZE '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_stack true
+'
build_option () {
git version --build-options |
{
if (debug)
fprintf(stderr, "Debug: Remote helper: -> %s", buffer->buf);
- if (write_in_full(helper->helper->in, buffer->buf, buffer->len)
- != buffer->len)
+ if (write_in_full(helper->helper->in, buffer->buf, buffer->len) < 0)
die_errno("Full write to remote helper failed");
}
{
if (debug)
fprintf(stderr, "Debug: Remote helper: -> %s", str);
- if (write_in_full(fd, str, strlen(str)) != strlen(str))
+ if (write_in_full(fd, str, strlen(str)) < 0)
die_errno("Full write to remote helper failed");
}
const char *localname;
const char *tmp;
const char *remotename;
- unsigned char sha[20];
int flag = 0;
/*
* Check suitability for tracking. Must be successful /
localname = ref->peer_ref->name;
remotename = ref->name;
tmp = resolve_ref_unsafe(localname, RESOLVE_REF_READING,
- sha, &flag);
+ NULL, &flag);
if (tmp && flag & REF_ISSYMREF &&
starts_with(tmp, "refs/heads/"))
localname = tmp;
int retval = MISSING_OBJECT;
struct dir_state *parents = NULL;
size_t parents_alloc = 0;
- ssize_t parents_nr = 0;
+ size_t i, parents_nr = 0;
unsigned char current_tree_sha1[20];
struct strbuf namebuf = STRBUF_INIT;
struct tree_desc t;
int follows_remaining = GET_TREE_ENTRY_FOLLOW_SYMLINKS_MAX_LINKS;
- int i;
init_tree_desc(&t, NULL, 0UL);
strbuf_addstr(&namebuf, name);
}
shallow_nr += shallows.nr;
- free(shallows.objects);
+ object_array_clear(&shallows);
}
/* return non-zero if the ref is hidden, otherwise 0 */
{
const char *symref_target;
struct string_list_item *item;
- struct object_id unused;
if ((flag & REF_ISSYMREF) == 0)
return 0;
- symref_target = resolve_ref_unsafe(refname, 0, unused.hash, &flag);
+ symref_target = resolve_ref_unsafe(refname, 0, NULL, &flag);
if (!symref_target || (flag & REF_ISSYMREF) == 0)
die("'%s' is a symref but it is not?", refname);
item = string_list_append(cb_data, refname);
"|//|\\*\\*|::|[/<>=]="),
IPATTERN("fountain", "^((\\.[^.]|(int|ext|est|int\\.?/ext|i/e)[. ]).*)$",
"[^ \t-]+"),
-PATTERNS("html", "^[ \t]*(<[Hh][1-6][ \t].*>.*)$",
+PATTERNS("html", "^[ \t]*(<[Hh][1-6]([ \t].*)?>.*)$",
"[^<>= \t]+"),
PATTERNS("java",
"!^[ \t]*(catch|do|for|if|instanceof|new|return|switch|throw|while)\n"
for (i = 0; worktrees[i]; i++) {
struct worktree *wt = worktrees[i];
const char *symref_target;
- unsigned char sha1[20];
struct ref_store *refs;
int flags;
refs = get_worktree_ref_store(wt);
symref_target = refs_resolve_ref_unsafe(refs, symref, 0,
- sha1, &flags);
+ NULL, &flags);
if ((flags & REF_ISSYMREF) && !strcmp(symref_target, target)) {
existing = wt;
break;
void write_file_buf(const char *path, const char *buf, size_t len)
{
int fd = xopen(path, O_WRONLY | O_CREAT | O_TRUNC, 0666);
- if (write_in_full(fd, buf, len) != len)
+ if (write_in_full(fd, buf, len) < 0)
die_errno(_("could not write to %s"), path);
if (close(fd))
die_errno(_("could not close %s"), path);
void wt_status_add_cut_line(FILE *fp)
{
- const char *explanation = _("Do not touch the line above.\nEverything below will be removed.");
+ const char *explanation = _("Do not modify or remove the line above.\nEverything below it will be ignored.");
struct strbuf buf = STRBUF_INIT;
fprintf(fp, "%c %s", comment_line_char, cut_line);