Code clean-up.
* rs/resolve-ref-optional-result:
refs: pass NULL to resolve_ref_unsafe() if hash is not needed
refs: pass NULL to refs_resolve_ref_unsafe() if hash is not needed
refs: make sha1 output parameter of refs_resolve_ref_unsafe() optional
--- /dev/null
+# Defaults
+
+# Use tabs whenever we need to fill whitespace that spans at least from one tab
+# stop to the next one.
+UseTab: Always
+TabWidth: 8
+IndentWidth: 8
+ContinuationIndentWidth: 8
+ColumnLimit: 80
+
+# C Language specifics
+Language: Cpp
+
+# Align parameters on the open bracket
+# someLongFunction(argument1,
+#                  argument2);
+AlignAfterOpenBracket: Align
+
+# Don't align consecutive assignments
+# int aaaa = 12;
+# int b = 14;
+AlignConsecutiveAssignments: false
+
+# Don't align consecutive declarations
+# int aaaa = 12;
+# double b = 3.14;
+AlignConsecutiveDeclarations: false
+
+# Align escaped newlines as far left as possible
+# #define A   \
+#   int aaaa; \
+#   int b;    \
+#   int cccccccc;
+AlignEscapedNewlines: Left
+
+# Align operands of binary and ternary expressions
+# int aaa = bbbbbbbbbbb +
+#           cccccc;
+AlignOperands: true
+
+# Don't align trailing comments
+# int a; // Comment a
+# int b = 2; // Comment b
+AlignTrailingComments: false
+
+# By default don't allow putting parameters onto the next line
+# myFunction(foo, bar, baz);
+AllowAllParametersOfDeclarationOnNextLine: false
+
+# Don't allow short braced statements to be on a single line
+#   if (a)           not       if (a) return;
+#     return;
+AllowShortBlocksOnASingleLine: false
+AllowShortCaseLabelsOnASingleLine: false
+AllowShortFunctionsOnASingleLine: false
+AllowShortIfStatementsOnASingleLine: false
+AllowShortLoopsOnASingleLine: false
+
+# By default don't add a line break after the return type of top-level functions
+# int foo();
+AlwaysBreakAfterReturnType: None
+
+# Pack as many parameters or arguments onto the same line as possible
+# int myFunction(int aaaaaaaaaaaa, int bbbbbbbb,
+#                int cccc);
+BinPackArguments: true
+BinPackParameters: true
+
+# Attach braces to surrounding context except break before braces on function
+# definitions.
+# void foo()
+# {
+# if (true) {
+# } else {
+# }
+# };
+BreakBeforeBraces: Linux
+
+# Break after operators
+# int value = aaaaaaaaaaaaa +
+#             bbbbbb -
+#             ccccccccccc;
+BreakBeforeBinaryOperators: None
+BreakBeforeTernaryOperators: false
+
+# Don't break string literals
+BreakStringLiterals: false
+
+# Use the same indentation level as for the switch statement.
+# Switch statement body is always indented one level more than case labels.
+IndentCaseLabels: false
+
+# Don't indent a function definition or declaration if it is wrapped after the
+# type
+IndentWrappedFunctionNames: false
+
+# Align pointer to the right
+# int *a;
+PointerAlignment: Right
+
+# Don't insert a space after a cast
+# x = (int32)y; not x = (int32) y;
+SpaceAfterCStyleCast: false
+
+# Insert spaces before and after assignment operators
+# int a = 5;    not    int a=5;
+# a += 42;             a+=42;
+SpaceBeforeAssignmentOperators: true
+
+# Put a space before opening parentheses only after control statement keywords.
+# void f() {
+# if (true) {
+# f();
+# }
+# }
+SpaceBeforeParens: ControlStatements
+
+# Don't insert spaces inside empty '()'
+SpaceInEmptyParentheses: false
+
+# The number of spaces before trailing line comments (// - comments).
+# This does not affect trailing block comments (/* - comments).
+SpacesBeforeTrailingComments: 1
+
+# Don't insert spaces in casts
+# x = (int32) y; not x = ( int32 ) y;
+SpacesInCStyleCastParentheses: false
+
+# Don't insert spaces inside container literals
+# var arr = [1, 2, 3]; not var arr = [ 1, 2, 3 ];
+SpacesInContainerLiterals: false
+
+# Don't insert spaces after '(' or before ')'
+# f(arg); not f( arg );
+SpacesInParentheses: false
+
+# Don't insert spaces after '[' or before ']'
+# int a[5]; not int a[ 5 ];
+SpacesInSquareBrackets: false
+
+# Insert a space after '{' and before '}' in struct initializers
+Cpp11BracedListStyle: false
+
+# A list of macros that should be interpreted as foreach loops instead of as
+# function calls.
+ForEachMacros: ['for_each_string_list_item']
+
+# The maximum number of consecutive empty lines to keep.
+MaxEmptyLinesToKeep: 1
+
+# No empty line at the start of a block.
+KeepEmptyLinesAtTheStartOfBlocks: false
+
+# Penalties
+# This decides what order things should be done if a line is too long
+PenaltyBreakAssignment: 100
+PenaltyBreakBeforeFirstCallParameter: 100
+PenaltyBreakComment: 100
+PenaltyBreakFirstLessLess: 0
+PenaltyBreakString: 100
+PenaltyExcessCharacter: 5
+PenaltyReturnTypeOnItsOwnLine: 0
+
+# Don't sort #include's
+SortIncludes: false
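
Taken together, the options above describe the kernel-style layout used throughout the Git tree: tab indentation, an 80-column limit, Linux brace placement (break before the brace only on function definitions), right-leaning pointers, and a space after control-statement keywords only. As a rough illustration, a small C fragment formatted under these settings would look like the sketch below; the function and variable names are invented for the example and do not come from the tree.

#include <stddef.h>
#include <string.h>

/* Count how many entries of a string array are equal to the needle. */
static int count_matches(const char *needle, const char **haystack,
			 size_t nr)
{
	size_t i;
	int found = 0;

	for (i = 0; i < nr; i++) {
		if (!strcmp(needle, haystack[i]))
			found++;
	}
	return found;
}
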
services:
- docker
before_install:
- - docker pull daald/ubuntu32:xenial
before_script:
- script:
- - >
- docker run
- --interactive
- --env DEVELOPER
- --env DEFAULT_TEST_TARGET
- --env GIT_PROVE_OPTS
- --env GIT_TEST_OPTS
- --env GIT_TEST_CLONE_2GB
- --volume "${PWD}:/usr/src/git"
- daald/ubuntu32:xenial
- /usr/src/git/ci/run-linux32-build.sh $(id -u $USER)
- # Use the following command to debug the docker build locally:
- # $ docker run -itv "${PWD}:/usr/src/git" --entrypoint /bin/bash daald/ubuntu32:xenial
- # root@container:/# /usr/src/git/ci/run-linux32-build.sh
+ script: ci/run-linux32-docker.sh
- env: Static Analysis
os: linux
compiler:
packages:
- coccinelle
before_install:
- script:
- # "before_script" that builds Git is inherited from base job
- - make coccicheck
+ # "before_script" that builds Git is inherited from base job
+ script: ci/run-static-analysis.sh
after_failure:
- env: Documentation
os: linux
- asciidoc
- xmlto
before_install:
- before_script: gem install asciidoctor
+ before_script:
script: ci/test-documentation.sh
after_failure:
-before_install:
- - >
- case "${TRAVIS_OS_NAME:-linux}" in
- linux)
- export GIT_TEST_HTTPD=YesPlease
-
- mkdir --parents custom/p4
- pushd custom/p4
- wget --quiet http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION/bin.linux26x86_64/p4d
- wget --quiet http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION/bin.linux26x86_64/p4
- chmod u+x p4d
- chmod u+x p4
- export PATH="$(pwd):$PATH"
- popd
- mkdir --parents custom/git-lfs
- pushd custom/git-lfs
- wget --quiet https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION/git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz
- tar --extract --gunzip --file "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
- cp git-lfs-$LINUX_GIT_LFS_VERSION/git-lfs .
- export PATH="$(pwd):$PATH"
- popd
- ;;
- osx)
- brew update --quiet
- # Uncomment this if you want to run perf tests:
- # brew install gnu-time
- brew install git-lfs gettext
- brew link --force gettext
- brew install caskroom/cask/perforce
- ;;
- esac;
- echo "$(tput setaf 6)Perforce Server Version$(tput sgr0)";
- p4d -V | grep Rev.;
- echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)";
- p4 -V | grep Rev.;
- echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)";
- git-lfs version;
-
-before_script: make --jobs=2
-
-script:
- - >
- mkdir -p $HOME/travis-cache;
- ln -s $HOME/travis-cache/.prove t/.prove;
- make --quiet test;
-
-after_failure:
- - >
- : '<-- Click here to see detailed test output! ';
- for TEST_EXIT in t/test-results/*.exit;
- do
- if [ "$(cat "$TEST_EXIT")" != "0" ];
- then
- TEST_OUT="${TEST_EXIT%exit}out";
- echo "------------------------------------------------------------------------";
- echo "$(tput setaf 1)${TEST_OUT}...$(tput sgr0)";
- echo "------------------------------------------------------------------------";
- cat "${TEST_OUT}";
- fi;
- done;
+before_install: ci/install-dependencies.sh
+before_script: ci/run-build.sh
+script: ci/run-tests.sh
+after_failure: ci/print-test-failures.sh
notifications:
email: false
--- /dev/null
+Git v2.10.5 Release Notes
+=========================
+
+Fixes since v2.10.4
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts used backticks instead of safe_pipe_capture(),
+   leaving them susceptible to end-user input.  They have been
+   corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.11.4 Release Notes
+=========================
+
+Fixes since v2.11.3
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts used backticks instead of safe_pipe_capture(),
+   leaving them susceptible to end-user input.  They have been
+   corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.12.5 Release Notes
+=========================
+
+Fixes since v2.12.4
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts used backticks instead of safe_pipe_capture(),
+   leaving them susceptible to end-user input.  They have been
+   corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
--- /dev/null
+Git v2.13.6 Release Notes
+=========================
+
+Fixes since v2.13.5
+-------------------
+
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts used backticks instead of safe_pipe_capture(),
+   leaving them susceptible to end-user input.  They have been
+   corrected.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
+
* "git archive" did not work well with pathspecs and the
export-ignore attribute.
+ * "git cvsserver" no longer is invoked by "git daemon" by default,
+ as it is old and largely unmaintained.
+
+ * Various Perl scripts used backticks instead of safe_pipe_capture(),
+   leaving them susceptible to end-user input.  They have been
+   corrected.
+
Also contains various documentation updates and code clean-ups.
+
+Credits go to joernchen <joernchen@phenoelit.de> for finding the
+unsafe constructs in "git cvsserver", and to Jeff King at GitHub for
+finding and fixing instances of the same issue in other scripts.
* The codepath to call external process filter for smudge/clean
operation learned to show the progress meter.
+ * "git rev-parse" learned "--is-shallow-repository", that is to be
+ used in a way similar to existing "--is-bare-repository" and
+ friends.
+
Performance, Internal Implementation, Development Support etc.
* Many leaks of strbuf have been fixed.
+ * "git imap-send" has our own implementation of the protocol and also
+ can use more recent libCurl with the imap protocol support. Update
+ the latter so that it can use the credential subsystem, and then
+ make it the default option to use, so that we can eventually
+ deprecate and remove the former.
+
+ * "make style" runs git-clang-format to help developers by pointing
+ out coding style issues.
+
+ * A test to demonstrate "git mv" failing to adjust nested submodules
+ has been added.
+ (merge c514167df2 hv/mv-nested-submodules-test later to maint).
+
+ * On Cygwin, "ulimit -s" does not report failure but it does not work
+ at all, which causes an unexpected success of some tests that
+ expect failures under a limited stack situation. This has been
+ fixed.
+
Also contains various documentation updates and code clean-ups.
to match the behaviour of the former.
(merge c818e74332 rk/commit-tree-make-F-verbatim later to maint).
+ * Many codepaths did not diagnose write failures correctly when disks
+   go full, due to their misuse of the write_in_full() helper
+   function; they have been corrected.
+ (merge f48ecd38cb jk/write-in-full-fix later to maint).
+
+ * "git help co" now says "co is aliased to ...", not "git co is".
+ (merge b3a8076e0d ks/help-alias-label later to maint).
+
+ * "git archive", especially when used with pathspec, stored an empty
+ directory in its output, even though Git itself never does so.
+ This has been fixed.
+ (merge 4318094047 rs/archive-excluded-directory later to maint).
+
+ * API error-proofing which happens to also squelch warnings from GCC.
+ (merge c788c54cde tg/refs-allowed-flags later to maint).
+
+ * The explanation of the cut-line in the commit log editor has been
+ slightly tweaked.
+ (merge 8c4b1a3593 ks/commit-do-not-touch-cut-line later to maint).
+
+ * "git gc" tries to avoid running two instances at the same time by
+ reading and writing pid/host from and to a lock file; it used to
+ use an incorrect fscanf() format when reading, which has been
+ corrected.
+ (merge afe2fab72c aw/gc-lockfile-fscanf-fix later to maint).
+
+ * The scripts to drive TravisCI have been reorganized, and an
+   optimization to avoid spending cycles on a branch whose tip is
+   tagged has been implemented.
+ (merge 8376eb4a8f ls/travis-scriptify later to maint).
+
+ * The test linter has been taught that we do not like "echo -e".
+ (merge 1a6d46895d tb/test-lint-echo-e later to maint).
+
+ * Code cmp.std.c nitpick.
+ (merge ac7da78ede mh/for-each-string-list-item-empty-fix later to maint).
+
+ * A fix for a regression in 2.11 that made the code that reads the
+   list of alternate object stores overrun the end of the string.
+ (merge f0f7bebef7 jk/info-alternates-fix later to maint).
+
* Other minor doc, test and build updates and code cleanups.
(merge f094b89a4d ma/parse-maybe-bool later to maint).
(merge 39b00fa4d4 jk/drop-sha1-entry-pos later to maint).
(merge 276d0e35c0 ma/split-symref-update-fix later to maint).
(merge 3bc4b8f7c7 bb/doc-eol-dirty later to maint).
(merge c1bb33c99c jk/system-path-cleanup later to maint).
+ (merge ab46e6fc72 cc/subprocess-handshake-missing-capabilities later to maint).
+ (merge f7a32dd97f kd/doc-for-each-ref later to maint).
+ (merge be94568bc7 ez/doc-duplicated-words-fix later to maint).
+ (merge 01e4be6c3d ks/test-readme-phrasofix later to maint).
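
Many hunks later in this series replace checks of the form "write_in_full(fd, buf, len) != len" with "write_in_full(fd, buf, len) < 0"; the helper either writes the whole buffer or reports an error, so comparing its signed result against an unsigned length is an easy way to pick up sign-conversion bugs, and testing for a negative return is the convention the write-failure note above refers to. Below is a minimal standalone sketch of such a helper and the corrected check, written against plain POSIX write(2) rather than Git's actual implementation; the name write_all and the demo program are made up for the example.

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

/*
 * Keep calling write(2) until the whole buffer has been written,
 * or return -1 on error (a simplified stand-in for write_in_full()).
 */
static ssize_t write_all(int fd, const void *buf, size_t count)
{
	const char *p = buf;
	size_t total = 0;

	while (total < count) {
		ssize_t written = write(fd, p + total, count - total);
		if (written < 0) {
			if (errno == EINTR)
				continue;	/* retry interrupted writes */
			return -1;		/* report real errors */
		}
		total += written;
	}
	return total;
}

int main(void)
{
	const char msg[] = "hello\n";

	/* Check for a negative return, not for "!= length". */
	if (write_all(1, msg, strlen(msg)) < 0) {
		perror("write");
		return 1;
	}
	return 0;
}
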
The 40-hex object name of the object.
`objecttype`::
- The type of of the object (the same as `cat-file -t` reports).
+ The type of the object (the same as `cat-file -t` reports).
`objectsize`::
The size, in bytes, of the object (the same as `cat-file -s`
------------
+
You could omit <branch>, in which case the command degenerates to
-"check out the current branch", which is a glorified no-op with a
+"check out the current branch", which is a glorified no-op with
rather expensive side-effects to show only the tracking information,
if exists, for the current branch.
[--commit-filter <command>] [--tag-name-filter <command>]
[--subdirectory-filter <directory>] [--prune-empty]
[--original <namespace>] [-d <directory>] [-f | --force]
- [--] [<rev-list options>...]
+ [--state-branch <branch>] [--] [<rev-list options>...]
DESCRIPTION
-----------
directory or when there are already refs starting with
'refs/original/', unless forced.
+--state-branch <branch>::
+	This option will cause the mapping from old to new objects to
+	be loaded from the named branch upon startup and saved as a new
+	commit to that branch upon exit, enabling incremental rewriting
+	of large trees. If '<branch>' does not exist it will be created.
+
<rev-list options>...::
Arguments for 'git rev-list'. All positive refs included by
these options are rewritten. You may also specify options
[verse]
'git for-each-ref' [--count=<count>] [--shell|--perl|--python|--tcl]
[(--sort=<key>)...] [--format=<format>] [<pattern>...]
- [--points-at <object>] [(--merged | --no-merged) [<object>]]
- [--contains [<object>]] [--no-contains [<object>]]
+ [--points-at=<object>]
+ (--merged[=<object>] | --no-merged[=<object>])
+ [--contains[=<object>]] [--no-contains[=<object>]]
DESCRIPTION
-----------
OPTIONS
-------
-<count>::
+<pattern>...::
+ If one or more patterns are given, only refs are shown that
+ match against at least one pattern, either using fnmatch(3) or
+ literally, in the latter case matching completely or from the
+ beginning up to a slash.
+
+--count=<count>::
By default the command shows all refs that match
`<pattern>`. This option makes it stop after showing
that many refs.
-<key>::
+--sort=<key>::
A field name to sort on. Prefix `-` to sort in
descending order of the value. When unspecified,
`refname` is used. You may use the --sort=<key> option
multiple times, in which case the last key becomes the primary
key.
-<format>::
+--format=<format>::
A string that interpolates `%(fieldname)` from a ref being shown
and the object it points at. If `fieldname`
is prefixed with an asterisk (`*`) and the ref points
`xx`; for example `%00` interpolates to `\0` (NUL),
`%09` to `\t` (TAB) and `%0a` to `\n` (LF).
-<pattern>...::
- If one or more patterns are given, only refs are shown that
- match against at least one pattern, either using fnmatch(3) or
- literally, in the latter case matching completely or from the
- beginning up to a slash.
-
--shell::
--perl::
--python::
the specified host language. This is meant to produce
a scriptlet that can directly be `eval`ed.
---points-at <object>::
+--points-at=<object>::
Only list refs which points at the given object.
---merged [<object>]::
+--merged[=<object>]::
Only list refs whose tips are reachable from the
specified commit (HEAD if not specified),
incompatible with `--no-merged`.
---no-merged [<object>]::
+--no-merged[=<object>]::
Only list refs whose tips are not reachable from the
specified commit (HEAD if not specified),
incompatible with `--merged`.
---contains [<object>]::
+--contains[=<object>]::
Only list refs which contain the specified commit (HEAD if not
specified).
---no-contains [<object>]::
+--no-contains[=<object>]::
Only list refs which don't contain the specified commit (HEAD
if not specified).
object that does not have notes attached to it.
--stdin::
- Also read the object names to remove notes from from the standard
+ Also read the object names to remove notes from the standard
input (there is no reason you cannot combine this with object
names from the command line).
"clean" means that index and work tree coincide, and "exists"/"nothing"
refer to the presence of a path in the specified commit:
+....
I H M Result
-------------------------------------------------------
0 nothing nothing nothing (does not happen)
19 no no yes exists exists keep index
20 yes yes no exists exists use M
21 no yes no exists exists fail
+....
In all "keep index" cases, the index entry stays as in the
original index file. If the entry is not up to date,
--is-bare-repository::
When the repository is bare print "true", otherwise "false".
+--is-shallow-repository::
+ When the repository is shallow print "true", otherwise "false".
+
--resolve-git-dir <path>::
Check if <path> is a valid repository or a gitfile that
points at a valid repository, and print the location of the
$ chmod +x $HOME/git-shell-commands/no-interactive-login
----------------
+To enable git-cvsserver access (which should generally have the
+`no-interactive-login` example above as a prerequisite, as creating
+the git-shell-commands directory allows interactive logins):
+
+----------------
+$ cat >$HOME/git-shell-commands/cvs <<\EOF
+if test $# != 1 || test "$1" != "server"
+then
+ echo >&2 "git-cvsserver only handles \"server\""
+ exit 1
+fi
+exec git cvsserver server
+EOF
+$ chmod +x $HOME/git-shell-commands/cvs
+----------------
+
SEE ALSO
--------
ssh(1),
+
Version 4 performs a simple pathname compression that reduces index
size by 30%-50% on large repositories, which results in faster load
-time. Version 4 is relatively young (first released in in 1.8.0 in
+time. Version 4 is relatively young (first released in 1.8.0 in
October 2012). Other Git implementations such as JGit and libgit2
may not support it yet.
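
The pathname compression mentioned here stores each index entry's name relative to the previous entry: a count of trailing characters to drop from the previous path, followed by the new suffix (on disk the count is a variable-width integer and the suffix is NUL-terminated; a read-cache.c hunk later in this series touches that suffix-writing code path). A conceptual sketch of the idea only, not the exact on-disk encoding:

#include <stdio.h>
#include <string.h>

/*
 * Given the previous and current pathnames, compute how many characters
 * must be stripped from the end of the previous name and which suffix
 * must be appended to reconstruct the current one.
 */
static void compress_path(const char *prev, const char *cur,
			  size_t *strip, const char **suffix)
{
	size_t common = 0;

	while (prev[common] && prev[common] == cur[common])
		common++;
	*strip = strlen(prev) - common;	/* characters to remove */
	*suffix = cur + common;		/* characters to append */
}

int main(void)
{
	size_t strip;
	const char *suffix;

	compress_path("Documentation/git-add.txt",
		      "Documentation/git-am.txt", &strip, &suffix);
	/* prints: strip 6, append "m.txt" */
	printf("strip %zu, append \"%s\"\n", strip, suffix);
	return 0;
}
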
.PHONY: sparse $(SP_OBJ)
sparse: $(SP_OBJ)
+.PHONY: style
+style:
+ git clang-format --style file --diff --extensions c,h
+
check: common-cmds.h
@if sparse; \
then \
if (plen && (ws_rule & WS_BLANK_AT_EOF) &&
ws_blank_line(patch + 1, plen, ws_rule))
is_blank_context = 1;
+ /* fallthrough */
case '-':
memcpy(old, patch + 1, plen);
add_line_info(&preimage, old, plen,
old += plen;
if (first == '-')
break;
- /* Fall-through for ' ' */
+ /* fallthrough */
case '+':
/* --no-add does not add new lines */
if (first == '+' && state->no_add)
return check && ATTR_TRUE(check->items[1].value);
}
-static int should_queue_directories(const struct archiver_args *args)
-{
- return args->pathspec.has_wildcard;
-}
-
static int write_archive_entry(const unsigned char *sha1, const char *base,
int baselen, const char *filename, unsigned mode, int stage,
void *context)
strbuf_addch(&path, '/');
path_without_prefix = path.buf + args->baselen;
- if (!S_ISDIR(mode) || !should_queue_directories(args)) {
+ if (!S_ISDIR(mode)) {
const struct attr_check *check;
check = get_archive_attrs(path_without_prefix);
if (check_attr_export_ignore(check))
return write_entry(args, sha1, path.buf, path.len, mode);
}
-static int write_archive_entry_buf(const unsigned char *sha1, struct strbuf *base,
- const char *filename, unsigned mode, int stage,
- void *context)
-{
- return write_archive_entry(sha1, base->buf, base->len,
- filename, mode, stage, context);
-}
-
static void queue_directory(const unsigned char *sha1,
struct strbuf *base, const char *filename,
unsigned mode, int stage, struct archiver_context *c)
}
err = read_tree_recursive(args->tree, "", 0, 0, &args->pathspec,
- should_queue_directories(args) ?
- queue_or_write_archive_entry :
- write_archive_entry_buf,
+ queue_or_write_archive_entry,
&context);
if (err == READ_TREE_RECURSIVE)
err = 0;
return !has_object_file(&oid);
case 'w':
- if (!path[0])
+ if (!path)
die("git cat-file --filters %s: <object> must be "
"<sha1:path>", obj_name);
break;
case 'c':
- if (!path[0])
+ if (!path)
die("git cat-file --textconv %s: <object> must be <sha1:path>",
obj_name);
if (textconv_object(path, obj_context.mode, &oid, 1, &buf, &size))
break;
+ /* else fallthrough */
case 'p':
type = sha1_object_info(oid.hash, NULL);
* update paths in the work tree, and we cannot revert
* them.
*/
+ /* fallthrough */
case 0:
return 0;
default:
* pattern.
*/
if (patterns.nr) {
+ int found = 0;
struct string_list_item *item;
if (!is_tag)
return 0;
for_each_string_list_item(item, &patterns) {
- if (!wildmatch(item->string, path + 10, 0))
+ if (!wildmatch(item->string, path + 10, 0)) {
+ found = 1;
break;
+ }
+ }
- /* If we get here, no pattern matched. */
+ if (!found)
return 0;
- }
}
/* Is it annotated? */
int should_exit;
if (!scan_fmt)
- scan_fmt = xstrfmt("%s %%%dc", "%"SCNuMAX, HOST_NAME_MAX);
+ scan_fmt = xstrfmt("%s %%%ds", "%"SCNuMAX, HOST_NAME_MAX);
fp = fopen(pidfile_path, "r");
memset(locking_host, 0, sizeof(locking_host));
should_exit =
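
The builtin/gc.c hunk above builds the lock-file scan format at run time, and the one-character change matters: a width-limited "%<N>c" conversion consumes a fixed number of characters, does not stop at whitespace, and does not NUL-terminate its destination, whereas "%<N>s" stops at the first whitespace and appends a terminating NUL, so the buffer only needs to be one byte larger than the width. A small self-contained illustration of the corrected style, using an invented buffer size rather than HOST_NAME_MAX:

#include <inttypes.h>
#include <stdio.h>

int main(void)
{
	/* A gc lock file holds a line of the form "<pid> <hostname>". */
	const char line[] = "12345 build-host.example.com";
	uintmax_t pid;
	char host[64];

	/*
	 * "%63s" stops at whitespace and NUL-terminates host[]; the
	 * corresponding "%63c" form would neither stop at whitespace
	 * nor terminate the string, which is the bug being fixed.
	 */
	if (sscanf(line, "%" SCNuMAX " %63s", &pid, host) == 2)
		printf("pid %" PRIuMAX " locked by %s\n", pid, host);
	return 0;
}
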
if (!skip_prefix(content, "52 comment=", &comment))
return 1;
- n = write_in_full(1, comment, 41);
- if (n < 41)
+ if (write_in_full(1, comment, 41) < 0)
die_errno("git get-tar-commit-id: write error");
return 0;
alias = alias_lookup(cmd);
if (alias) {
- printf_ln(_("`git %s' is aliased to `%s'"), cmd, alias);
+ printf_ln(_("'%s' is aliased to '%s'"), cmd, alias);
free(alias);
exit(0);
}
return want;
}
- for (entry = packed_git_mru->head; entry; entry = entry->next) {
+ for (entry = packed_git_mru.head; entry; entry = entry->next) {
struct packed_git *p = entry->item;
off_t offset;
}
want = want_found_object(exclude, p);
if (!exclude && want > 0)
- mru_mark(packed_git_mru, entry);
+ mru_mark(&packed_git_mru, entry);
if (want != -1)
return want;
}
size_t n;
if (feed(feed_state, &buf, &n))
break;
- if (write_in_full(proc.in, buf, n) != n)
+ if (write_in_full(proc.in, buf, n) < 0)
break;
}
close(proc.in);
special = str[rpos];
if (rpos == 1)
break;
- /* Fall-through to error. */
+ /* fallthrough */
default:
die("Bad remote-ext placeholder '%%%c'.",
str[rpos]);
{
int i;
for (i = 0; i < nbuf; i++)
- if (write_in_full(1, ptr[i].ptr, ptr[i].size) != ptr[i].size)
+ if (write_in_full(1, ptr[i].ptr, ptr[i].size) < 0)
return -1;
return 0;
}
: "false");
continue;
}
+ if (!strcmp(arg, "--is-shallow-repository")) {
+ printf("%s\n", is_repository_shallow() ? "true"
+ : "false");
+ continue;
+ }
if (!strcmp(arg, "--shared-index-path")) {
if (read_cache() < 0)
die(_("Could not read the index"));
break;
die("HEAD does not match the named branch in the superproject");
}
+ /* fallthrough */
default:
die("src refspec '%s' must name a ref",
rs->src);
xsnprintf(path, sizeof(path), ".merge_file_XXXXXX");
fd = xmkstemp(path);
- if (write_in_full(fd, buf, size) != size)
+ if (write_in_full(fd, buf, size) < 0)
die_errno("unable to write temp-file");
close(fd);
return path;
#include "git-compat-util.h"
#include "strbuf.h"
#include "hashmap.h"
+#include "mru.h"
#include "advice.h"
#include "gettext.h"
#include "convert.h"
* A most-recently-used ordered version of the packed_git list, which can
* be iterated instead of packed_git (and marked via mru_mark).
*/
-struct mru;
-extern struct mru *packed_git_mru;
+extern struct mru packed_git_mru;
struct pack_entry {
off_t offset;
--- /dev/null
+#!/usr/bin/env bash
+#
+# Install dependencies required to build and test Git on Linux and macOS
+#
+
+. ${0%/*}/lib-travisci.sh
+
+P4WHENCE=http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION
+LFSWHENCE=https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION
+
+case "${TRAVIS_OS_NAME:-linux}" in
+linux)
+ export GIT_TEST_HTTPD=YesPlease
+
+ mkdir --parents custom/p4
+ pushd custom/p4
+ wget --quiet "$P4WHENCE/bin.linux26x86_64/p4d"
+ wget --quiet "$P4WHENCE/bin.linux26x86_64/p4"
+ chmod u+x p4d
+ chmod u+x p4
+ export PATH="$(pwd):$PATH"
+ popd
+ mkdir --parents custom/git-lfs
+ pushd custom/git-lfs
+ wget --quiet "$LFSWHENCE/git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
+ tar --extract --gunzip --file "git-lfs-linux-amd64-$LINUX_GIT_LFS_VERSION.tar.gz"
+ cp git-lfs-$LINUX_GIT_LFS_VERSION/git-lfs .
+ export PATH="$(pwd):$PATH"
+ popd
+ ;;
+osx)
+ brew update --quiet
+ # Uncomment this if you want to run perf tests:
+ # brew install gnu-time
+ brew install git-lfs gettext
+ brew link --force gettext
+ brew install caskroom/cask/perforce
+ ;;
+esac
+
+echo "$(tput setaf 6)Perforce Server Version$(tput sgr0)"
+p4d -V | grep Rev.
+echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)"
+p4 -V | grep Rev.
+echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)"
+git-lfs version
--- /dev/null
+# Library of functions shared by all CI scripts
+
+skip_branch_tip_with_tag () {
+ # Sometimes, a branch is pushed at the same time the tag that points
+ # at the same commit as the tip of the branch is pushed, and building
+ # both at the same time is a waste.
+ #
+ # Travis gives a tagname e.g. v2.14.0 in $TRAVIS_BRANCH when
+ # the build is triggered by a push to a tag. Let's see if
+ # $TRAVIS_BRANCH is exactly at a tag, and if so, if it is
+ # different from $TRAVIS_BRANCH. That way, we can tell if
+ # we are building the tip of a branch that is tagged and
+ # we can skip the build because we won't be skipping a build
+ # of a tag.
+
+ if TAG=$(git describe --exact-match "$TRAVIS_BRANCH" 2>/dev/null) &&
+ test "$TAG" != "$TRAVIS_BRANCH"
+ then
+ echo "Tip of $TRAVIS_BRANCH is exactly at $TAG"
+ exit 0
+ fi
+}
+
+# Set 'exit on error' for all CI scripts to let the caller know that
+# something went wrong
+set -e
+
+skip_branch_tip_with_tag
--- /dev/null
+#!/bin/sh
+#
+# Print output of failing tests
+#
+
+. ${0%/*}/lib-travisci.sh
+
+for TEST_EXIT in t/test-results/*.exit
+do
+ if [ "$(cat "$TEST_EXIT")" != "0" ]
+ then
+ TEST_OUT="${TEST_EXIT%exit}out"
+ echo "------------------------------------------------------------------------"
+ echo "$(tput setaf 1)${TEST_OUT}...$(tput sgr0)"
+ echo "------------------------------------------------------------------------"
+ cat "${TEST_OUT}"
+ fi
+done
--- /dev/null
+#!/bin/sh
+#
+# Build Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+make --jobs=2
--- /dev/null
+#!/bin/sh
+#
+# Download and run Docker image to build and test 32-bit Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+docker pull daald/ubuntu32:xenial
+
+# Use the following command to debug the docker build locally:
+# $ docker run -itv "${PWD}:/usr/src/git" --entrypoint /bin/bash daald/ubuntu32:xenial
+# root@container:/# /usr/src/git/ci/run-linux32-build.sh
+
+docker run \
+ --interactive \
+ --env DEVELOPER \
+ --env DEFAULT_TEST_TARGET \
+ --env GIT_PROVE_OPTS \
+ --env GIT_TEST_OPTS \
+ --env GIT_TEST_CLONE_2GB \
+ --volume "${PWD}:/usr/src/git" \
+ daald/ubuntu32:xenial \
+ /usr/src/git/ci/run-linux32-build.sh $(id -u $USER)
--- /dev/null
+#!/bin/sh
+#
+# Perform various static code analysis checks
+#
+
+. ${0%/*}/lib-travisci.sh
+
+make coccicheck
--- /dev/null
+#!/bin/sh
+#
+# Test Git
+#
+
+. ${0%/*}/lib-travisci.sh
+
+mkdir -p $HOME/travis-cache
+ln -s $HOME/travis-cache/.prove t/.prove
+make --quiet test
# supported) and a commit hash.
#
+. ${0%/*}/lib-travisci.sh
+
test $# -ne 2 && echo "Unexpected number of parameters" && exit 1
test -z "$GFW_CI_TOKEN" && echo "GFW_CI_TOKEN not defined" && exit
# Perform sanity checks on documentation and build it.
#
-set -e
+. ${0%/*}/lib-travisci.sh
+
+gem install asciidoctor
make check-builtins
make check-docs
return 4;
}
-static int store_write_section(int fd, const char *key)
+static ssize_t write_section(int fd, const char *key)
{
const char *dot;
- int i, success;
+ int i;
+ ssize_t ret;
struct strbuf sb = STRBUF_INIT;
dot = memchr(key, '.', store.baselen);
strbuf_addf(&sb, "[%.*s]\n", store.baselen, key);
}
- success = write_in_full(fd, sb.buf, sb.len) == sb.len;
+ ret = write_in_full(fd, sb.buf, sb.len);
strbuf_release(&sb);
- return success;
+ return ret;
}
-static int store_write_pair(int fd, const char *key, const char *value)
+static ssize_t write_pair(int fd, const char *key, const char *value)
{
- int i, success;
+ int i;
+ ssize_t ret;
int length = strlen(key + store.baselen + 1);
const char *quote = "";
struct strbuf sb = STRBUF_INIT;
case '"':
case '\\':
strbuf_addch(&sb, '\\');
+ /* fallthrough */
default:
strbuf_addch(&sb, value[i]);
break;
}
strbuf_addf(&sb, "%s\n", quote);
- success = write_in_full(fd, sb.buf, sb.len) == sb.len;
+ ret = write_in_full(fd, sb.buf, sb.len);
strbuf_release(&sb);
- return success;
+ return ret;
}
static ssize_t find_beginning_of_line(const char *contents, size_t size,
}
store.key = (char *)key;
- if (!store_write_section(fd, key) ||
- !store_write_pair(fd, key, value))
+ if (write_section(fd, key) < 0 ||
+ write_pair(fd, key, value) < 0)
goto write_err_out;
} else {
struct stat st;
/* write the first part of the config */
if (copy_end > copy_begin) {
if (write_in_full(fd, contents + copy_begin,
- copy_end - copy_begin) <
- copy_end - copy_begin)
+ copy_end - copy_begin) < 0)
goto write_err_out;
if (new_line &&
- write_str_in_full(fd, "\n") != 1)
+ write_str_in_full(fd, "\n") < 0)
goto write_err_out;
}
copy_begin = store.offset[i];
/* write the pair (value == NULL means unset) */
if (value != NULL) {
if (store.state == START) {
- if (!store_write_section(fd, key))
+ if (write_section(fd, key) < 0)
goto write_err_out;
}
- if (!store_write_pair(fd, key, value))
+ if (write_pair(fd, key, value) < 0)
goto write_err_out;
}
/* write the rest of the config */
if (copy_begin < contents_sz)
if (write_in_full(fd, contents + copy_begin,
- contents_sz - copy_begin) <
- contents_sz - copy_begin)
+ contents_sz - copy_begin) < 0)
goto write_err_out;
munmap(contents, contents_sz);
continue;
}
store.baselen = strlen(new_name);
- if (!store_write_section(out_fd, new_name)) {
+ if (write_section(out_fd, new_name) < 0) {
ret = write_error(get_lock_file_path(lock));
goto out;
}
if (remove)
continue;
length = strlen(output);
- if (write_in_full(out_fd, output, length) != length) {
+ if (write_in_full(out_fd, output, length) < 0) {
ret = write_error(get_lock_file_path(lock));
goto out;
}
switch (ident->state) {
default:
strbuf_add(&ident->left, head, ident->state);
+ /* fallthrough */
case IDENT_SKIPPING:
- /* fallthru */
+ /* fallthrough */
case IDENT_DRAINING:
ident_drain(ident, &output, osize_p);
}
blob = buf.buf;
size = buf.len;
}
- if (write_in_full(temp->tempfile->fd, blob, size) != size ||
+ if (write_in_full(temp->tempfile->fd, blob, size) < 0 ||
close_tempfile_gently(temp->tempfile))
die_errno("unable to write temp-file");
temp->name = get_tempfile_path(temp->tempfile);
char *new;
struct strbuf buf = STRBUF_INIT;
unsigned long size;
- size_t wrote, newsize = 0;
+ ssize_t wrote;
+ size_t newsize = 0;
struct stat st;
const struct submodule *sub;
fstat_done = fstat_output(fd, state, &st);
close(fd);
free(new);
- if (wrote != size)
+ if (wrote < 0)
return error("unable to write file %s", path);
break;
case S_IFGITLINK:
static void cat_blob_write(const char *buf, unsigned long size)
{
- if (write_in_full(cat_blob_fd, buf, size) != size)
+ if (write_in_full(cat_blob_fd, buf, size) < 0)
die_errno("Write to frontend failed");
}
case S_IFREG | 0664:
if (!options->strict)
break;
+ /* fallthrough */
default:
has_bad_modes = 1;
}
# check that we actually know about the branch
next unless -e "$git_dir/refs/heads/$branch";
- my $mergebase = `git-merge-base $branch $ps->{branch}`;
+ my $mergebase = safe_pipe_capture(qw(git-merge-base), $branch, $ps->{branch});
if ($?) {
# Don't die here, Arch supports one-way cherry-picking
# between branches with no common base (or any relationship
sub git_rev_parse {
my $name = shift;
- my $val = `git-rev-parse $name`;
+ my $val = safe_pipe_capture(qw(git-rev-parse), $name);
die "Error: git-rev-parse $name" if $?;
chomp $val;
return $val;
sub get_headref ($) {
my $name = shift;
+	$name =~ s/'/'\\''/g;
my $r = `git rev-parse --verify '$name' 2>/dev/null`;
return undef unless $? == 0;
chomp $r;
return 0;
}
- my @gitvars = `git config -l`;
+ my @gitvars = safe_pipe_capture(qw(git config -l));
if ($?) {
print "E problems executing git-config on the server -- this is not a git repository or the PATH is not set correctly.\n";
print "E \n";
# Save the file data in $state
$state->{entries}{$state->{directory}.$data}{modified_filename} = $filename;
$state->{entries}{$state->{directory}.$data}{modified_mode} = $mode;
- $state->{entries}{$state->{directory}.$data}{modified_hash} = `git hash-object $filename`;
+ $state->{entries}{$state->{directory}.$data}{modified_hash} = safe_pipe_capture('git','hash-object',$filename);
$state->{entries}{$state->{directory}.$data}{modified_hash} =~ s/\s.*$//s;
#$log->debug("req_Modified : file=$data mode=$mode size=$size");
# Provide list of modules, if -c was used.
if (exists $state->{opt}{c}) {
- my $showref = `git show-ref --heads`;
+ my $showref = safe_pipe_capture(qw(git show-ref --heads));
for my $line (split '\n', $showref) {
if ( $line =~ m% refs/heads/(.*)$% ) {
print "M $1\t$1\n";
# projects (heads in this case) to checkout.
#
if ($state->{module} eq '') {
- my $showref = `git show-ref --heads`;
+ my $showref = safe_pipe_capture(qw(git show-ref --heads));
print "E cvs update: Updating .\n";
for my $line (split '\n', $showref) {
if ( $line =~ m% refs/heads/(.*)$% ) {
# transmit file, format is single integer on a line by itself (file
# size) followed by the file contents
# TODO : we should copy files in blocks
- my $data = `cat $mergedFile`;
+ my $data = safe_pipe_capture('cat', $mergedFile);
$log->debug("File size : " . length($data));
print length($data) . "\n";
print $data;
$branchRef = "refs/heads/$stickyInfo->{tag}";
}
- $parenthash = `git show-ref -s $branchRef`;
+ $parenthash = safe_pipe_capture('git', 'show-ref', '-s', $branchRef);
chomp $parenthash;
if ($parenthash !~ /^[0-9a-f]{40}$/)
{
return;
}
- my $treehash = `git write-tree`;
+ my $treehash = safe_pipe_capture(qw(git write-tree));
chomp $treehash;
$log->debug("Treehash : $treehash, Parenthash : $parenthash");
}
close $msg_fh;
- my $commithash = `git commit-tree $treehash -p $parenthash < $msg_filename`;
+ my $commithash = safe_pipe_capture('git', 'commit-tree', $treehash, '-p', $parenthash, '-F', $msg_filename);
chomp($commithash);
$log->info("Commit hash : $commithash");
die "Need filehash" unless ( defined ( $filehash ) and $filehash =~ /^[a-zA-Z0-9]{40}$/ );
- my $type = `git cat-file -t $filehash`;
+ my $type = safe_pipe_capture('git', 'cat-file', '-t', $filehash);
chomp $type;
die ( "Invalid type '$type' (expected 'blob')" ) unless ( defined ( $type ) and $type eq "blob" );
- my $size = `git cat-file -s $filehash`;
+ my $size = safe_pipe_capture('git', 'cat-file', '-s', $filehash);
chomp $size;
$log->debug("transmitfile($filehash) size=$size, type=$type");
chdir $work->{emptyDir} or
die "Unable to chdir to $work->{emptyDir}\n";
- my $ver = `git show-ref -s refs/heads/$state->{module}`;
+ my $ver = safe_pipe_capture('git', 'show-ref', '-s', "refs/heads/$state->{module}");
chomp $ver;
if ($ver !~ /^[0-9a-f]{40}$/)
{
die "Need filehash\n";
}
- my $type = `git cat-file -t $name`;
+ my $type = safe_pipe_capture('git', 'cat-file', '-t', $name);
chomp $type;
unless ( defined ( $type ) and $type eq "blob" )
die ( "Invalid type '$type' (expected 'blob')" )
}
- my $size = `git cat-file -s $name`;
+ my $size = safe_pipe_capture('git', 'cat-file', '-s', $name);
chomp $size;
$log->debug("open_blob_or_die($name) size=$size, type=$type");
return $out;
}
+# an alternative to `command` that allows input to be passed as an array
+# to work around shell problems with weird characters in arguments
+
+sub safe_pipe_capture {
+
+ my @output;
+
+ if (my $pid = open my $child, '-|') {
+ @output = (<$child>);
+ close $child or die join(' ',@_).": $! $?";
+ } else {
+        exec(@_) or die "$! $?"; # exec() can fail if the executable can't be found
+ }
+ return wantarray ? @output : join('',@output);
+}
+
package GITCVS::log;
# first lets get the commit list
$ENV{GIT_DIR} = $self->{git_path};
- my $commitsha1 = `git rev-parse $self->{module}`;
+ my $commitsha1 = ::safe_pipe_capture('git', 'rev-parse', $self->{module});
chomp $commitsha1;
- my $commitinfo = `git cat-file commit $self->{module} 2>&1`;
+ my $commitinfo = ::safe_pipe_capture('git', 'cat-file', 'commit', $self->{module});
unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ )
{
die("Invalid module '$self->{module}'");
# several candidate merge bases. let's assume
# that the first one is the best one.
my $base = eval {
- safe_pipe_capture('git', 'merge-base',
+ ::safe_pipe_capture('git', 'merge-base',
$lastpicked, $parent);
};
# The two branches may not be related at all,
return $retVal;
}
- my($fileHash)=safe_pipe_capture("git","rev-parse","$revCommit:$filename");
+ my($fileHash) = ::safe_pipe_capture("git","rev-parse","$revCommit:$filename");
chomp $fileHash;
if(!($fileHash=~/^[0-9a-f]{40}$/))
{
return $commitHash;
}
- $commitHash=safe_pipe_capture("git","rev-parse","--verify","--quiet",
- $self->unescapeRefName($ref));
+ $commitHash = ::safe_pipe_capture("git","rev-parse","--verify","--quiet",
+ $self->unescapeRefName($ref));
$commitHash=~s/\s*$//;
if(!($commitHash=~/^[0-9a-f]{40}$/))
{
if( defined($commitHash) )
{
- my $type=safe_pipe_capture("git","cat-file","-t",$commitHash);
+ my $type = ::safe_pipe_capture("git","cat-file","-t",$commitHash);
if( ! ($type=~/^commit\s*$/ ) )
{
$commitHash=undef;
return $message;
}
- my @lines = safe_pipe_capture("git", "cat-file", "commit", $commithash);
+ my @lines = ::safe_pipe_capture("git", "cat-file", "commit", $commithash);
shift @lines while ( $lines[0] =~ /\S/ );
$message = join("",@lines);
$message .= " " if ( $message =~ /\n$/ );
return $retval;
}
-=head2 safe_pipe_capture
-
-an alternative to `command` that allows input to be passed as an array
-to work around shell problems with weird characters in arguments
-
-=cut
-sub safe_pipe_capture {
-
- my @output;
-
- if (my $pid = open my $child, '-|') {
- @output = (<$child>);
- close $child or die join(' ',@_).": $! $?";
- } else {
- exec(@_) or die "$! $?"; # exec() can fail the executable can't be found
- }
- return wantarray ? @output : join('',@output);
-}
-
=head2 mangle_dirname
create a string from a directory name that is suitable to use as
[--parent-filter <command>] [--msg-filter <command>]
[--commit-filter <command>] [--tag-name-filter <command>]
[--subdirectory-filter <directory>] [--original <namespace>]
- [-d <directory>] [-f | --force]
+ [-d <directory>] [-f | --force] [--state-branch <branch>]
[--] [<rev-list options>...]"
OPTIONS_SPEC=
filter_commit=
filter_tag_name=
filter_subdir=
+state_branch=
orig_namespace=refs/original/
force=
prune_empty=
--original)
orig_namespace=$(expr "$OPTARG/" : '\(.*[^/]\)/*$')/
;;
+ --state-branch)
+ state_branch="$OPTARG"
+ ;;
*)
usage
;;
ORIG_GIT_DIR="$GIT_DIR"
ORIG_GIT_WORK_TREE="$GIT_WORK_TREE"
ORIG_GIT_INDEX_FILE="$GIT_INDEX_FILE"
+ORIG_GIT_AUTHOR_NAME="$GIT_AUTHOR_NAME"
+ORIG_GIT_AUTHOR_EMAIL="$GIT_AUTHOR_EMAIL"
+ORIG_GIT_AUTHOR_DATE="$GIT_AUTHOR_DATE"
+ORIG_GIT_COMMITTER_NAME="$GIT_COMMITTER_NAME"
+ORIG_GIT_COMMITTER_EMAIL="$GIT_COMMITTER_EMAIL"
+ORIG_GIT_COMMITTER_DATE="$GIT_COMMITTER_DATE"
+
GIT_WORK_TREE=.
export GIT_DIR GIT_WORK_TREE
# map old->new commit ids for rewriting parents
mkdir ../map || die "Could not create map/ directory"
+if test -n "$state_branch"
+then
+ state_commit=$(git rev-parse --no-flags --revs-only "$state_branch")
+ if test -n "$state_commit"
+ then
+ echo "Populating map from $state_branch ($state_commit)" 1>&2
+ perl -e'open(MAP, "-|", "git show $ARGV[0]:filter.map") or die;
+ while (<MAP>) {
+ m/(.*):(.*)/ or die;
+ open F, ">../map/$1" or die;
+ print F "$2" or die;
+ close(F) or die;
+ }
+ close(MAP) or die;' "$state_commit" \
+ || die "Unable to load state from $state_branch:filter.map"
+ else
+ echo "Branch $state_branch does not exist. Will create" 1>&2
+ fi
+fi
+
# we need "--" only if there are no path arguments in $@
nonrevs=$(git rev-parse --no-revs "$@") || exit
if test -z "$nonrevs"
}' \
-e '/^-----BEGIN PGP SIGNATURE-----/q' \
-e 'p' ) |
- git mktag) ||
+ git hash-object -t tag -w --stdin) ||
die "Could not create new tag object for $ref"
if git cat-file tag "$ref" | \
sane_grep '^-----BEGIN PGP SIGNATURE-----' >/dev/null 2>&1
done
fi
-cd "$orig_dir"
-rm -rf "$tempdir"
-
-trap - 0
-
unset GIT_DIR GIT_WORK_TREE GIT_INDEX_FILE
+unset GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE
+unset GIT_COMMITTER_NAME GIT_COMMITTER_EMAIL GIT_COMMITTER_DATE
test -z "$ORIG_GIT_DIR" || {
GIT_DIR="$ORIG_GIT_DIR" && export GIT_DIR
}
GIT_INDEX_FILE="$ORIG_GIT_INDEX_FILE" &&
export GIT_INDEX_FILE
}
+test -z "$ORIG_GIT_AUTHOR_NAME" || {
+ GIT_AUTHOR_NAME="$ORIG_GIT_AUTHOR_NAME" &&
+ export GIT_AUTHOR_NAME
+}
+test -z "$ORIG_GIT_AUTHOR_EMAIL" || {
+ GIT_AUTHOR_EMAIL="$ORIG_GIT_AUTHOR_EMAIL" &&
+ export GIT_AUTHOR_EMAIL
+}
+test -z "$ORIG_GIT_AUTHOR_DATE" || {
+ GIT_AUTHOR_DATE="$ORIG_GIT_AUTHOR_DATE" &&
+ export GIT_AUTHOR_DATE
+}
+test -z "$ORIG_GIT_COMMITTER_NAME" || {
+ GIT_COMMITTER_NAME="$ORIG_GIT_COMMITTER_NAME" &&
+ export GIT_COMMITTER_NAME
+}
+test -z "$ORIG_GIT_COMMITTER_EMAIL" || {
+ GIT_COMMITTER_EMAIL="$ORIG_GIT_COMMITTER_EMAIL" &&
+ export GIT_COMMITTER_EMAIL
+}
+test -z "$ORIG_GIT_COMMITTER_DATE" || {
+ GIT_COMMITTER_DATE="$ORIG_GIT_COMMITTER_DATE" &&
+ export GIT_COMMITTER_DATE
+}
+
+if test -n "$state_branch"
+then
+ echo "Saving rewrite state to $state_branch" 1>&2
+ state_blob=$(
+ perl -e'opendir D, "../map" or die;
+ open H, "|-", "git hash-object -w --stdin" or die;
+ foreach (sort readdir(D)) {
+ next if m/^\.\.?$/;
+ open F, "<../map/$_" or die;
+ chomp($f = <F>);
+ print H "$_:$f\n" or die;
+ }
+ close(H) or die;' || die "Unable to save state")
+ state_tree=$(/bin/echo -e "100644 blob $state_blob\tfilter.map" | git mktree)
+ if test -n "$state_commit"
+ then
+ state_commit=$(/bin/echo "Sync" | git commit-tree "$state_tree" -p "$state_commit")
+ else
+ state_commit=$(/bin/echo "Sync" | git commit-tree "$state_tree" )
+ fi
+ git update-ref "$state_branch" "$state_commit"
+fi
+
+cd "$orig_dir"
+rm -rf "$tempdir"
+
+trap - 0
if [ "$(is_bare_repository)" = false ]; then
git read-tree -u -m HEAD || exit
die("zlib error inflating request, result %d", ret);
n = stream.total_out - cnt;
- if (write_in_full(out, out_buf, n) != n)
+ if (write_in_full(out, out_buf, n) < 0)
die("%s aborted reading request", prog_name);
cnt += n;
ssize_t n = read_request(0, &buf);
if (n < 0)
die_errno("error reading request body");
- if (write_in_full(out, buf, n) != n)
+ if (write_in_full(out, buf, n) < 0)
die("%s aborted reading request", prog_name);
close(out);
free(buf);
break;
case HTTP_ERROR:
error("unable to access '%s': %s", url, curl_errorstr);
+ /* fallthrough */
default:
ret = -1;
}
switch (type) {
case CURLINFO_TEXT:
trace_printf_key(&trace_curl, "== Info: %s", data);
- default: /* we ignore unknown types by default */
- return 0;
-
+ break;
case CURLINFO_HEADER_OUT:
text = "=> Send header";
curl_dump_header(text, (unsigned char *)data, size, DO_FILTER);
text = "<= Recv SSL data";
curl_dump_data(text, (unsigned char *)data, size);
break;
+
+ default: /* we ignore unknown types by default */
+ return 0;
}
return 0;
}
#include "http.h"
#endif
-#if defined(USE_CURL_FOR_IMAP_SEND) && defined(NO_OPENSSL)
-/* only available option */
+#if defined(USE_CURL_FOR_IMAP_SEND)
+/* Always default to curl if it's available. */
#define USE_CURL_DEFAULT 1
#else
-/* strictly opt in */
+/* We don't have curl, so continue to use the historical implementation */
#define USE_CURL_DEFAULT 0
#endif
return 0;
}
+static void server_fill_credential(struct imap_server_conf *srvc, struct credential *cred)
+{
+ if (srvc->user && srvc->pass)
+ return;
+
+ cred->protocol = xstrdup(srvc->use_ssl ? "imaps" : "imap");
+ cred->host = xstrdup(srvc->host);
+
+ cred->username = xstrdup_or_null(srvc->user);
+ cred->password = xstrdup_or_null(srvc->pass);
+
+ credential_fill(cred);
+
+ if (!srvc->user)
+ srvc->user = xstrdup(cred->username);
+ if (!srvc->pass)
+ srvc->pass = xstrdup(cred->password);
+}
+
static struct imap_store *imap_open_store(struct imap_server_conf *srvc, char *folder)
{
struct credential cred = CREDENTIAL_INIT;
}
#endif
imap_info("Logging in...\n");
- if (!srvc->user || !srvc->pass) {
- cred.protocol = xstrdup(srvc->use_ssl ? "imaps" : "imap");
- cred.host = xstrdup(srvc->host);
-
- cred.username = xstrdup_or_null(srvc->user);
- cred.password = xstrdup_or_null(srvc->pass);
-
- credential_fill(&cred);
-
- if (!srvc->user)
- srvc->user = xstrdup(cred.username);
- if (!srvc->pass)
- srvc->pass = xstrdup(cred.password);
- }
+ server_fill_credential(srvc, &cred);
if (srvc->auth_method) {
struct imap_cmd_cb cb;
}
#ifdef USE_CURL_FOR_IMAP_SEND
-static CURL *setup_curl(struct imap_server_conf *srvc)
+static CURL *setup_curl(struct imap_server_conf *srvc, struct credential *cred)
{
CURL *curl;
struct strbuf path = STRBUF_INIT;
if (!curl)
die("curl_easy_init failed");
+ server_fill_credential(&server, cred);
curl_easy_setopt(curl, CURLOPT_USERNAME, server.user);
curl_easy_setopt(curl, CURLOPT_PASSWORD, server.pass);
struct buffer msgbuf = { STRBUF_INIT, 0 };
CURL *curl;
CURLcode res = CURLE_OK;
+ struct credential cred = CREDENTIAL_INIT;
- curl = setup_curl(server);
+ curl = setup_curl(server, &cred);
curl_easy_setopt(curl, CURLOPT_READDATA, &msgbuf);
fprintf(stderr, "sending %d message%s\n", total, (total != 1) ? "s" : "");
curl_easy_cleanup(curl);
curl_global_cleanup();
- return 0;
+ if (cred.username) {
+ if (res == CURLE_OK)
+ credential_approve(&cred);
+#if LIBCURL_VERSION_NUM >= 0x070d01
+ else if (res == CURLE_LOGIN_DENIED)
+#else
+ else
+#endif
+ credential_reject(&cred);
+ }
+
+ credential_clear(&cred);
+
+ return res != CURLE_OK;
}
#endif
xsnprintf(path, len, ".merge_file_XXXXXX");
fd = xmkstemp(path);
- if (write_in_full(fd, src->ptr, src->size) != src->size)
+ if (write_in_full(fd, src->ptr, src->size) < 0)
die_errno("unable to write temp-file");
close(fd);
}
while ((c = *in++) != 0) {
if (c == '=') {
- int d = *in++;
+ int ch, d = *in;
if (d == '\n' || !d)
break; /* drop trailing newline */
- strbuf_addch(out, (hexval(d) << 4) | hexval(*in++));
- continue;
+ ch = hex2chr(in);
+ if (ch >= 0) {
+ strbuf_addch(out, ch);
+ in += 2;
+ continue;
+ }
+ /* garbage -- fall through */
}
if (rfc2047 && c == '_') /* rfc2047 4.2 (2) */
c = 0x20;
if (!handle_commit_msg(mi, line))
break;
mi->filter_stage++;
+ /* fallthrough */
case 1:
handle_patch(mi, line);
break;
fd = xopen(path, O_WRONLY | O_EXCL | O_CREAT, 0666);
while (size > 0) {
- long ret = write_in_full(fd, buf, size);
+ ssize_t ret = write_in_full(fd, buf, size);
if (ret < 0) {
/* Ignore epipe */
if (errno == EPIPE)
static size_t peak_pack_mapped;
static size_t pack_mapped;
struct packed_git *packed_git;
-
-static struct mru packed_git_mru_storage;
-struct mru *packed_git_mru = &packed_git_mru_storage;
+struct mru packed_git_mru;
#define SZ_FMT PRIuMAX
static inline uintmax_t sz_fmt(size_t s) { return s; }
{
struct packed_git *p;
- mru_clear(packed_git_mru);
+ mru_clear(&packed_git_mru);
for (p = packed_git; p; p = p->next)
- mru_append(packed_git_mru, p);
+ mru_append(&packed_git_mru, p);
}
static int prepare_packed_git_run_once = 0;
if (!packed_git)
return 0;
- for (p = packed_git_mru->head; p; p = p->next) {
+ for (p = packed_git_mru.head; p; p = p->next) {
if (fill_pack_entry(sha1, e, p->item)) {
- mru_mark(packed_git_mru, p);
+ mru_mark(&packed_git_mru, p);
return 1;
}
}
pattern, sb.buf);
}
-/*
- * Given command line arguments and a prefix, convert the input to
- * pathspec. die() if any magic in magic_mask is used.
- */
void parse_pathspec(struct pathspec *pathspec,
unsigned magic_mask, unsigned flags,
const char *prefix, const char **argv)
*/
#define PATHSPEC_LITERAL_PATH (1<<6)
+/*
+ * Given command line arguments and a prefix, convert the input to
+ * pathspec. die() if any magic in magic_mask is used.
+ *
+ * Any arguments used are copied. It is safe for the caller to modify
+ * or free 'prefix' and 'args' after calling this function.
+ */
extern void parse_pathspec(struct pathspec *pathspec,
unsigned magic_mask,
unsigned flags,
int packet_flush_gently(int fd)
{
packet_trace("0000", 4, 1);
- if (write_in_full(fd, "0000", 4) == 4)
- return 0;
- return error("flush packet write failed");
+ if (write_in_full(fd, "0000", 4) < 0)
+ return error("flush packet write failed");
+ return 0;
}
void packet_buf_flush(struct strbuf *buf)
const char *fmt, va_list args)
{
static struct strbuf buf = STRBUF_INIT;
- ssize_t count;
strbuf_reset(&buf);
format_packet(&buf, fmt, args);
- count = write_in_full(fd, buf.buf, buf.len);
- if (count == buf.len)
- return 0;
-
- if (!gently) {
- check_pipe(errno);
- die_errno("packet write with format failed");
+ if (write_in_full(fd, buf.buf, buf.len) < 0) {
+ if (!gently) {
+ check_pipe(errno);
+ die_errno("packet write with format failed");
+ }
+ return error("packet write with format failed");
}
- return error("packet write with format failed");
+
+ return 0;
}
void packet_write_fmt(int fd, const char *fmt, ...)
packet_size = size + 4;
set_packet_header(packet_write_buffer, packet_size);
memcpy(packet_write_buffer + 4, buf, size);
- if (write_in_full(fd_out, packet_write_buffer, packet_size) == packet_size)
- return 0;
- return error("packet write failed");
+ if (write_in_full(fd_out, packet_write_buffer, packet_size) < 0)
+ return error("packet write failed");
+ return 0;
}
void packet_buf_write(struct strbuf *buf, const char *fmt, ...)
case '{': case '}':
case '$': case '\\': case '"':
strbuf_addch(sb, '\\');
+ /* fallthrough */
default:
strbuf_addch(sb, c);
break;
case S_IFDIR:
if (S_ISGITLINK(ce->ce_mode))
return ce_compare_gitlink(ce) ? DATA_CHANGED : 0;
+ /* else fallthrough */
default:
return TYPE_CHANGED;
}
unsigned int buffered = write_buffer_len;
if (buffered) {
git_SHA1_Update(context, write_buffer, buffered);
- if (write_in_full(fd, write_buffer, buffered) != buffered)
+ if (write_in_full(fd, write_buffer, buffered) < 0)
return -1;
write_buffer_len = 0;
}
/* Flush first if not enough space for SHA1 signature */
if (left + 20 > WRITE_BUFFER_SIZE) {
- if (write_in_full(fd, write_buffer, left) != left)
+ if (write_in_full(fd, write_buffer, left) < 0)
return -1;
left = 0;
}
git_SHA1_Final(write_buffer + left, context);
hashcpy(sha1, write_buffer + left);
left += 20;
- return (write_in_full(fd, write_buffer, left) != left) ? -1 : 0;
+ return (write_in_full(fd, write_buffer, left) < 0) ? -1 : 0;
}
static void ce_smudge_racily_clean_entry(struct cache_entry *ce)
if (!result)
result = ce_write(c, fd, to_remove_vi, prefix_size);
if (!result)
- result = ce_write(c, fd, ce->name + common, ce_namelen(ce) - common + 1);
+ result = ce_write(c, fd, ce->name + common, ce_namelen(ce) - common);
+ if (!result)
+ result = ce_write(c, fd, padding, 1);
strbuf_splice(previous_name, common, to_remove,
ce->name + common, ce_namelen(ce) - common);
}
}
- if (write_in_full(fd, buf.buf, buf.len) != buf.len) {
+ if (write_in_full(fd, buf.buf, buf.len) < 0) {
strbuf_addf(err, "could not write to '%s'", filename);
rollback_lock_file(&lock);
goto done;
return -1;
}
+ flags &= REF_TRANSACTION_UPDATE_ALLOWED_FLAGS;
+
flags |= (new_sha1 ? REF_HAVE_NEW : 0) | (old_sha1 ? REF_HAVE_OLD : 0);
ref_transaction_add_update(transaction, refname, flags,
return do_for_each_ref(get_main_ref_store(),
git_replace_ref_base, fn,
strlen(git_replace_ref_base),
- 0, cb_data);
+ DO_FOR_EACH_INCLUDE_BROKEN, cb_data);
}
int for_each_namespaced_ref(each_ref_fn fn, void *cb_data)
#define REF_NODEREF 0x01
#define REF_FORCE_CREATE_REFLOG 0x40
+/*
+ * Flags that can be passed in to ref_transaction_update
+ */
+#define REF_TRANSACTION_UPDATE_ALLOWED_FLAGS \
+ REF_ISPRUNING | \
+ REF_FORCE_CREATE_REFLOG | \
+ REF_NODEREF
+
/*
* Setup reflog before using. Fill in err and return -1 on failure.
*/
written = len <= maxlen ? write_in_full(fd, logrec, len) : -1;
free(logrec);
- if (written != len)
+ if (written < 0)
return -1;
return 0;
return -1;
}
fd = get_lock_file_fd(&lock->lk);
- if (write_in_full(fd, oid_to_hex(oid), GIT_SHA1_HEXSZ) != GIT_SHA1_HEXSZ ||
- write_in_full(fd, &term, 1) != 1 ||
+ if (write_in_full(fd, oid_to_hex(oid), GIT_SHA1_HEXSZ) < 0 ||
+ write_in_full(fd, &term, 1) < 0 ||
close_ref_gently(lock) < 0) {
strbuf_addf(err,
"couldn't write '%s'", get_lock_file_path(&lock->lk));
rollback_lock_file(&reflog_lock);
} else if (update &&
(write_in_full(get_lock_file_fd(&lock->lk),
- oid_to_hex(&cb.last_kept_oid), GIT_SHA1_HEXSZ) != GIT_SHA1_HEXSZ ||
- write_str_in_full(get_lock_file_fd(&lock->lk), "\n") != 1 ||
+ oid_to_hex(&cb.last_kept_oid), GIT_SHA1_HEXSZ) < 0 ||
+ write_str_in_full(get_lock_file_fd(&lock->lk), "\n") < 1 ||
close_ref_gently(lock) < 0)) {
status |= error("couldn't write %s",
get_lock_file_path(&lock->lk));
rerere_id_hex(id),
rr->items[i].string, 0);
- if (write_in_full(out_fd, buf.buf, buf.len) != buf.len)
+ if (write_in_full(out_fd, buf.buf, buf.len) < 0)
die("unable to write rerere record");
strbuf_release(&buf);
#include "bisect.h"
#include "packfile.h"
#include "worktree.h"
+#include "argv-array.h"
volatile show_early_output_fn_t show_early_output;
return 0;
}
-struct cmdline_pathspec {
- int alloc;
- int nr;
- const char **path;
-};
-
-static void append_prune_data(struct cmdline_pathspec *prune, const char **av)
-{
- while (*av) {
- ALLOC_GROW(prune->path, prune->nr + 1, prune->alloc);
- prune->path[prune->nr++] = *(av++);
- }
-}
-
static void read_pathspec_from_stdin(struct rev_info *revs, struct strbuf *sb,
- struct cmdline_pathspec *prune)
+ struct argv_array *prune)
{
- while (strbuf_getline(sb, stdin) != EOF) {
- ALLOC_GROW(prune->path, prune->nr + 1, prune->alloc);
- prune->path[prune->nr++] = xstrdup(sb->buf);
- }
+ while (strbuf_getline(sb, stdin) != EOF)
+ argv_array_push(prune, sb->buf);
}
static void read_revisions_from_stdin(struct rev_info *revs,
- struct cmdline_pathspec *prune)
+ struct argv_array *prune)
{
struct strbuf sb;
int seen_dashdash = 0;
int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct setup_revision_opt *opt)
{
int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0, revarg_opt;
- struct cmdline_pathspec prune_data;
+ struct argv_array prune_data = ARGV_ARRAY_INIT;
const char *submodule = NULL;
- memset(&prune_data, 0, sizeof(prune_data));
if (opt)
submodule = opt->submodule;
argv[i] = NULL;
argc = i;
if (argv[i + 1])
- append_prune_data(&prune_data, argv + i + 1);
+ argv_array_pushv(&prune_data, argv + i + 1);
seen_dashdash = 1;
break;
}
for (j = i; j < argc; j++)
verify_filename(revs->prefix, argv[j], j == i);
- append_prune_data(&prune_data, argv + i);
+ argv_array_pushv(&prune_data, argv + i);
break;
}
else
got_rev_arg = 1;
}
- if (prune_data.nr) {
+ if (prune_data.argc) {
/*
* If we need to introduce the magic "a lone ':' means no
* pathspec whatsoever", here is the place to do so.
* call init_pathspec() to set revs->prune_data here.
* }
*/
- ALLOC_GROW(prune_data.path, prune_data.nr + 1, prune_data.alloc);
- prune_data.path[prune_data.nr++] = NULL;
parse_pathspec(&revs->prune_data, 0, 0,
- revs->prefix, prune_data.path);
+ revs->prefix, prune_data.argv);
}
+ argv_array_clear(&prune_data);
if (revs->def == NULL)
revs->def = opt ? opt->def : NULL;
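
The revision.c hunks replace the hand-rolled cmdline_pathspec growth logic with argv-array, which keeps a NULL-terminated vector of copied strings and releases everything in one call. The following standalone model mirrors that idiom; the names echo the argv-array API, but this is an illustrative reimplementation, not Git's.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct str_array {
	const char **argv;	/* NULL-terminated, like argv-array */
	int argc;
	int alloc;
};
#define STR_ARRAY_INIT { NULL, 0, 0 }

static void str_array_push(struct str_array *a, const char *s)
{
	if (a->argc + 2 > a->alloc) {
		a->alloc = a->alloc ? 2 * a->alloc : 16;
		a->argv = realloc(a->argv, a->alloc * sizeof(*a->argv));
	}
	a->argv[a->argc++] = strdup(s);	/* store a copy of the string */
	a->argv[a->argc] = NULL;	/* keep the vector NULL-terminated */
}

static void str_array_pushv(struct str_array *a, const char **v)
{
	for (; *v; v++)
		str_array_push(a, *v);
}

static void str_array_clear(struct str_array *a)
{
	for (int i = 0; i < a->argc; i++)
		free((char *)a->argv[i]);
	free(a->argv);
	a->argv = NULL;
	a->argc = a->alloc = 0;
}

int main(void)
{
	struct str_array prune = STR_ARRAY_INIT;
	const char *paths[] = { "Documentation", "t", NULL };

	str_array_pushv(&prune, paths);
	str_array_push(&prune, "refs.c");
	for (int i = 0; i < prune.argc; i++)
		printf("%s\n", prune.argv[i]);
	str_array_clear(&prune);
	return 0;
}
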
strbuf_release(&cap_buf);
return atomic_push_failure(args, remote_refs, ref);
}
- /* Fallthrough for non atomic case. */
+ /* else fallthrough */
default:
continue;
}
return end;
}
-static void link_alt_odb_entries(const char *alt, int len, int sep,
+static void link_alt_odb_entries(const char *alt, int sep,
const char *relative_base, int depth)
{
struct strbuf objdirbuf = STRBUF_INIT;
static void read_info_alternates(const char * relative_base, int depth)
{
- char *map;
- size_t mapsz;
- struct stat st;
char *path;
- int fd;
+ struct strbuf buf = STRBUF_INIT;
path = xstrfmt("%s/info/alternates", relative_base);
- fd = git_open(path);
- free(path);
- if (fd < 0)
- return;
- if (fstat(fd, &st) || (st.st_size == 0)) {
- close(fd);
+ if (strbuf_read_file(&buf, path, 1024) < 0) {
+ warn_on_fopen_errors(path);
+ free(path);
return;
}
- mapsz = xsize_t(st.st_size);
- map = xmmap(NULL, mapsz, PROT_READ, MAP_PRIVATE, fd, 0);
- close(fd);
-
- link_alt_odb_entries(map, mapsz, '\n', relative_base, depth);
- munmap(map, mapsz);
+ link_alt_odb_entries(buf.buf, '\n', relative_base, depth);
+ strbuf_release(&buf);
+ free(path);
}
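
read_info_alternates() now slurps the small info/alternates file into a strbuf rather than fstat()+mmap()ing it, and because the buffer is NUL-terminated, link_alt_odb_entries() can drop its explicit length parameter. A rough standalone model of that read-whole-file pattern, with read_file() standing in for strbuf_read_file() and most error handling trimmed:

#include <stdio.h>
#include <stdlib.h>

static char *read_file(const char *path, size_t *len_out)
{
	FILE *fp = fopen(path, "rb");
	char *buf = NULL;
	size_t len = 0, alloc = 0;

	if (!fp)
		return NULL;	/* caller decides whether to warn, as the hunk does */
	for (;;) {
		size_t n;

		if (len + 1024 + 1 > alloc) {
			alloc = alloc ? 2 * alloc : 2048;
			buf = realloc(buf, alloc);
		}
		n = fread(buf + len, 1, 1024, fp);
		len += n;
		if (n < 1024)
			break;	/* EOF or read error; good enough for a sketch */
	}
	fclose(fp);
	buf[len] = '\0';	/* NUL-terminate so the caller can parse line by line */
	if (len_out)
		*len_out = len;
	return buf;
}

int main(int argc, char **argv)
{
	size_t len;
	char *buf = argc > 1 ? read_file(argv[1], &len) : NULL;

	if (!buf)
		return 1;
	fwrite(buf, 1, len, stdout);
	free(buf);
	return 0;
}
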
struct alternate_object_database *alloc_alt_odb(const char *dir)
if (commit_lock_file(lock))
die_errno("unable to move new alternates file into place");
if (alt_odb_tail)
- link_alt_odb_entries(reference, strlen(reference), '\n', NULL, 0);
+ link_alt_odb_entries(reference, '\n', NULL, 0);
}
free(alts);
}
*/
prepare_alt_odb();
- link_alt_odb_entries(reference, strlen(reference), '\n', NULL, 0);
+ link_alt_odb_entries(reference, '\n', NULL, 0);
}
/*
if (!alt) alt = "";
alt_odb_tail = &alt_odb_list;
- link_alt_odb_entries(alt, strlen(alt), PATH_SEP, NULL, 0);
+ link_alt_odb_entries(alt, PATH_SEP, NULL, 0);
read_info_alternates(get_object_directory(), 0);
}
int read_pack_header(int fd, struct pack_header *header)
{
- if (read_in_full(fd, header, sizeof(*header)) < sizeof(*header))
+ if (read_in_full(fd, header, sizeof(*header)) != sizeof(*header))
/* "eof before pack header was fully read" */
return PH_ERROR_EOF;
if (write_shallow_commits(&sb, 0, extra)) {
temp = xmks_tempfile(git_path("shallow_XXXXXX"));
- if (write_in_full(temp->fd, sb.buf, sb.len) != sb.len ||
+ if (write_in_full(temp->fd, sb.buf, sb.len) < 0 ||
close_tempfile_gently(temp) < 0)
die_errno("failed to write to %s",
get_tempfile_path(temp));
LOCK_DIE_ON_ERROR);
check_shallow_file_for_update();
if (write_shallow_commits(&sb, 0, extra)) {
- if (write_in_full(fd, sb.buf, sb.len) != sb.len)
+ if (write_in_full(fd, sb.buf, sb.len) < 0)
die_errno("failed to write to %s",
get_lock_file_path(shallow_lock));
*alternate_shallow_file = get_lock_file_path(shallow_lock);
LOCK_DIE_ON_ERROR);
check_shallow_file_for_update();
if (write_shallow_commits_1(&sb, 0, NULL, SEEN_ONLY)) {
- if (write_in_full(fd, sb.buf, sb.len) != sb.len)
+ if (write_in_full(fd, sb.buf, sb.len) < 0)
die_errno("failed to write to %s",
get_lock_file_path(&shallow_lock));
commit_lock_file(&shallow_lock);
return execv_git_cmd(my_argv);
}
-static int do_cvs_cmd(const char *me, char *arg)
-{
- const char *cvsserver_argv[3] = {
- "cvsserver", "server", NULL
- };
-
- if (!arg || strcmp(arg, "server"))
- die("git-cvsserver only handles server: %s", arg);
-
- setup_path();
- return execv_git_cmd(cvsserver_argv);
-}
-
static int is_valid_cmd_name(const char *cmd)
{
/* Test command contains no . or / characters */
{ "git-receive-pack", do_generic_cmd },
{ "git-upload-pack", do_generic_cmd },
{ "git-upload-archive", do_generic_cmd },
- { "cvs", do_cvs_cmd },
{ NULL },
};
kept = 0;
wrote = write_in_full(fd, buf, readlen);
- if (wrote != readlen)
+ if (wrote < 0)
goto close_and_exit;
}
if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
typedef int (*string_list_each_func_t)(struct string_list_item *, void *);
int for_each_string_list(struct string_list *list,
string_list_each_func_t, void *cb_data);
-#define for_each_string_list_item(item,list) \
- for (item = (list)->items; item < (list)->items + (list)->nr; ++item)
+#define for_each_string_list_item(item,list) \
+ for (item = (list)->items; \
+ item && item < (list)->items + (list)->nr; \
+ ++item)
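
The added "item &&" test matters for a string_list that was initialized but never populated: its items pointer is still NULL, and forming "NULL + 0" for the loop bound is undefined behaviour even though the body would never execute. A compilable illustration of the guarded form, with the types reduced to the bare minimum:

#include <stdio.h>

struct item { const char *string; };
struct list { struct item *items; int nr; };

/* Guarded iteration: skip the pointer arithmetic entirely when items is NULL. */
#define for_each_item(it, l) \
	for (it = (l)->items; \
	     it && it < (l)->items + (l)->nr; \
	     ++it)

int main(void)
{
	struct list empty = { NULL, 0 };
	struct item *it;
	int n = 0;

	for_each_item(it, &empty)
		n++;
	printf("%d items\n", n);	/* prints 0, with no arithmetic on NULL */
	return 0;
}
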
/*
* Apply want to each item in list, retaining only the ones for which
if (supported_capabilities)
*supported_capabilities |= capabilities[i].flag;
} else {
- warning("subprocess '%s' requested unsupported capability '%s'",
- process->argv[0], p);
+ die("subprocess '%s' requested unsupported capability '%s'",
+ process->argv[0], p);
}
}
return 0;
}
+struct has_commit_data {
+ int result;
+ const char *path;
+};
+
static int check_has_commit(const struct object_id *oid, void *data)
{
- int *has_commit = data;
+ struct has_commit_data *cb = data;
- if (!lookup_commit_reference(oid))
- *has_commit = 0;
+ enum object_type type = sha1_object_info(oid->hash, NULL);
- return 0;
+ switch (type) {
+ case OBJ_COMMIT:
+ return 0;
+ case OBJ_BAD:
+ /*
+ * Object is missing or invalid. If invalid, an error message
+ * has already been printed.
+ */
+ cb->result = 0;
+ return 0;
+ default:
+ die(_("submodule entry '%s' (%s) is a %s, not a commit"),
+ cb->path, oid_to_hex(oid), typename(type));
+ }
}
static int submodule_has_commits(const char *path, struct oid_array *commits)
{
- int has_commit = 1;
+ struct has_commit_data has_commit = { 1, path };
/*
* Perform a cheap, but incorrect check for the existence of 'commits'.
oid_array_for_each_unique(commits, check_has_commit, &has_commit);
- if (has_commit) {
+ if (has_commit.result) {
/*
* Even if the submodule is checked out and the commit is
* present, make sure it exists in the submodule's object store
cp.dir = path;
if (capture_command(&cp, &out, GIT_MAX_HEXSZ + 1) || out.len)
- has_commit = 0;
+ has_commit.result = 0;
strbuf_release(&out);
}
- return has_commit;
+ return has_commit.result;
}
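
check_has_commit() used to receive a bare int through its cb_data pointer; carrying the submodule path as well lets it die with a useful message, so the callback now takes a small struct instead. In isolation, threading extra context through a void * callback argument looks like the following generic sketch (not Git's oid_array API):

#include <stdio.h>

struct has_commit_data {
	int result;
	const char *path;
};

/* A callback that gets its context back through an opaque pointer. */
static int check_one(int value, void *data)
{
	struct has_commit_data *cb = data;

	if (value < 0) {
		fprintf(stderr, "bad entry under '%s'\n", cb->path);
		cb->result = 0;
	}
	return 0;
}

static void for_each_value(const int *values, int nr,
			   int (*fn)(int, void *), void *cb_data)
{
	for (int i = 0; i < nr; i++)
		if (fn(values[i], cb_data))
			break;
}

int main(void)
{
	const int values[] = { 1, -1, 2 };
	struct has_commit_data cb = { 1, "gar/bage" };

	for_each_value(values, 3, check_one, &cb);
	printf("result: %d\n", cb.result);	/* 0: one entry was bad */
	return 0;
}
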
static int submodule_needs_pushing(const char *path, struct oid_array *commits)
/*
* Prepare the "env_array" parameter of a "struct child_process" for executing
- * a submodule by clearing any repo-specific envirionment variables, but
+ * a submodule by clearing any repo-specific environment variables, but
* retaining any config in the environment.
*/
extern void prepare_submodule_repo_env(struct argv_array *out);
$ sh ./t9200-git-cvsexport-commit.sh --run='-3 21'
-As noted above, the test set is built going though items left to
-right, so this:
+As noted above, the test set is built by going through the items
+from left to right, so this:
$ sh ./t9200-git-cvsexport-commit.sh --run='1-4 !3'
-will run tests 1, 2, and 4. Items that comes later have higher
+will run tests 1, 2, and 4. Items that come later have higher
precedence. It means that this:
$ sh ./t9200-git-cvsexport-commit.sh --run='!3 1-4'
while (<>) {
chomp;
/\bsed\s+-i/ and err 'sed -i is not portable';
- /\becho\s+-n/ and err 'echo -n is not portable (please use printf)';
+ /\becho\s+-[neE]/ and err 'echo with option is not portable (please use printf)';
/^\s*declare\s+/ and err 'arrays/declare not portable';
/^\s*[^#]\s*which\s/ and err 'which is not portable (please use type)';
/\btest\s+[^=]*==/ and err '"test a == b" is not portable (please use =)';
/test-svn-fe
/test-urlmatch-normalization
/test-wildmatch
+/test-write-cache
}
fd = open (argv[4], O_WRONLY|O_CREAT|O_TRUNC, 0666);
- if (fd < 0 || write_in_full(fd, out_buf, out_size) != out_size) {
+ if (fd < 0 || write_in_full(fd, out_buf, out_size) < 0) {
perror(argv[4]);
return 1;
}
static void handle_command(const char *command, const char *arg, struct line_buffer *buf)
{
- switch (*command) {
- case 'b':
- if (starts_with(command, "binary ")) {
- struct strbuf sb = STRBUF_INIT;
- strbuf_addch(&sb, '>');
- buffer_read_binary(buf, &sb, strtouint32(arg));
- fwrite(sb.buf, 1, sb.len, stdout);
- strbuf_release(&sb);
- return;
- }
- case 'c':
- if (starts_with(command, "copy ")) {
- buffer_copy_bytes(buf, strtouint32(arg));
- return;
- }
- case 's':
- if (starts_with(command, "skip ")) {
- buffer_skip_bytes(buf, strtouint32(arg));
- return;
- }
- default:
+ if (starts_with(command, "binary ")) {
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_addch(&sb, '>');
+ buffer_read_binary(buf, &sb, strtouint32(arg));
+ fwrite(sb.buf, 1, sb.len, stdout);
+ strbuf_release(&sb);
+ } else if (starts_with(command, "copy ")) {
+ buffer_copy_bytes(buf, strtouint32(arg));
+ } else if (starts_with(command, "skip ")) {
+ buffer_skip_bytes(buf, strtouint32(arg));
+ } else {
die("unrecognized command: %s", command);
}
}
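
The handle_command() rewrite drops the switch on the first command character, whose implicit case fallthrough was easy to misread, in favour of a plain starts_with() chain. For reference, a standalone starts_with() in the spirit of Git's helper:

#include <stdio.h>

/* Return 1 if 'str' begins with 'prefix', 0 otherwise. */
static int starts_with(const char *str, const char *prefix)
{
	for (; ; str++, prefix++) {
		if (!*prefix)
			return 1;
		if (*str != *prefix)
			return 0;
	}
}

int main(void)
{
	printf("%d\n", starts_with("binary 42", "binary "));	/* 1 */
	printf("%d\n", starts_with("copy 7", "binary "));	/* 0 */
	return 0;
}
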
(ulimit -n 32 && "$@")
}
-test_lazy_prereq ULIMIT_FILE_DESCRIPTORS 'run_with_limited_open_files true'
+test_lazy_prereq ULIMIT_FILE_DESCRIPTORS '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_open_files true
+'
test_expect_success ULIMIT_FILE_DESCRIPTORS 'large transaction creating branches does not burst open file limit' '
(
test_cmp expect actual
'
+test_expect_success 'rev-parse --is-shallow-repository in shallow repo' '
+ test_commit test_commit &&
+ echo true >expect &&
+ git clone --depth 1 --no-local . shallow &&
+ test_when_finished "rm -rf shallow" &&
+ git -C shallow rev-parse --is-shallow-repository >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'rev-parse --is-shallow-repository in non-shallow repo' '
+ echo false >expect &&
+ git rev-parse --is-shallow-repository >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'showing the superproject correctly' '
git rev-parse --show-superproject-working-tree >out &&
test_must_be_empty out &&
test_expect_missing archive-pathspec/ignored-by-tree.d
test_expect_missing archive-pathspec/ignored-by-tree.d/file
test_expect_exists archive-pathspec/ignored-by-worktree
-test_expect_missing archive-pathspec/excluded-by-pathspec.d failure
+test_expect_missing archive-pathspec/excluded-by-pathspec.d
test_expect_missing archive-pathspec/excluded-by-pathspec.d/file
test_expect_success 'git archive with wildcard pathspec' '
test_expect_missing archive/deep/and/slashless/foo &&
test_expect_missing archive/deep/with/wildcard/ &&
test_expect_missing archive/deep/with/wildcard/foo &&
-test_expect_exists archive/one-level-lower/
+test_expect_missing archive/one-level-lower/
test_expect_missing archive/one-level-lower/two-levels-lower/ignored-only-if-dir/
test_expect_missing archive/one-level-lower/two-levels-lower/ignored-only-if-dir/ignored-by-ignored-dir
git archive --format=tar $root_tree >subtree-all.tar &&
make_dir extract &&
"$TAR" xf subtree-all.tar -C extract &&
- check_dir extract sub
+ check_dir extract
'
test_expect_success 'archive empty subtree by direct pathspec' '
git archive --format=tar $root_tree -- sub >subtree-path.tar &&
make_dir extract &&
"$TAR" xf subtree-path.tar -C extract &&
- check_dir extract sub
+ check_dir extract
'
ZIPINFO=zipinfo
)
'
+test_expect_success 'submodule entry pointing at a tag is error' '
+ git -C work/gar/bage tag -a test1 -m "tag" &&
+ tag=$(git -C work/gar/bage rev-parse test1^{tag}) &&
+ git -C work update-index --cacheinfo 160000 "$tag" gar/bage &&
+ git -C work commit -m "bad commit" &&
+ test_when_finished "git -C work reset --hard HEAD^" &&
+ test_must_fail git -C work push --recurse-submodules=on-demand ../pub.git master 2>err &&
+ test_i18ngrep "is a tag, not a commit" err
+'
+
test_expect_success 'push fails if recurse submodules option passed as yes' '
(
cd work/gar/bage &&
check_describe "test2-lightweight-*" --long --tags --match="test2-*" HEAD^
-check_describe "test1-lightweight-*" --long --tags --match="test1-*" --match="test2-*" HEAD^
+check_describe "test2-lightweight-*" --long --tags --match="test1-*" --match="test2-*" HEAD^
check_describe "test2-lightweight-*" --long --tags --match="test1-*" --no-match --match="test2-*" HEAD^
+check_describe "test1-lightweight-*" --long --tags --match="test1-*" --match="test3-*" HEAD
+
+check_describe "test1-lightweight-*" --long --tags --match="test3-*" --match="test1-*" HEAD
+
test_expect_success 'name-rev with exact tags' '
echo A >expect &&
tag_object=$(git rev-parse refs/tags/A) &&
test_cmp expect actual
'
+test_expect_success 'name-rev --all' '
+ >expect.unsorted &&
+ for rev in $(git rev-list --all)
+ do
+ git name-rev $rev >>expect.unsorted
+ done &&
+ sort <expect.unsorted >expect &&
+ git name-rev --all >actual.unsorted &&
+ sort <actual.unsorted >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'name-rev --stdin' '
+ >expect.unsorted &&
+ for rev in $(git rev-list --all)
+ do
+ name=$(git name-rev --name-only $rev) &&
+ echo "$rev ($name)" >>expect.unsorted
+ done &&
+ sort <expect.unsorted >expect &&
+ git rev-list --all | git name-rev --stdin >actual.unsorted &&
+ sort <actual.unsorted >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'describe --contains with the exact tags' '
echo "A^0" >expect &&
tag_object=$(git rev-parse refs/tags/A) &&
'
test_expect_success 'describe ignoring a broken submodule' '
git describe --broken >out &&
+ test_when_finished "mv .git/modules/sub_moved .git/modules/sub1" &&
grep broken out
'
+test_expect_failure ULIMIT_STACK_SIZE 'name-rev works in a deep repo' '
+ i=1 &&
+ while test $i -lt 8000
+ do
+ echo "commit refs/heads/master
+committer A U Thor <author@example.com> $((1000000000 + $i * 100)) +0200
+data <<EOF
+commit #$i
+EOF"
+ test $i = 1 && echo "from refs/heads/master^0"
+ i=$(($i + 1))
+ done | git fast-import &&
+ git checkout master &&
+ git tag far-far-away HEAD^ &&
+ echo "HEAD~4000 tags/far-far-away~3999" >expect &&
+ git name-rev HEAD~4000 >actual &&
+ test_cmp expect actual &&
+ run_with_limited_stack git name-rev HEAD~4000 >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success ULIMIT_STACK_SIZE 'describe works in a deep repo' '
+ git tag -f far-far-away HEAD~7999 &&
+ echo "far-far-away" >expect &&
+ git describe --tags --abbrev=0 HEAD~4000 >actual &&
+ test_cmp expect actual &&
+ run_with_limited_stack git describe --tags --abbrev=0 HEAD~4000 >actual &&
+ test_cmp expect actual
+'
+
test_done
test_cmp actual expect
'
+test_expect_failure 'moving nested submodules' '
+ git commit -am "cleanup commit" &&
+ mkdir sub_nested_nested &&
+ (cd sub_nested_nested &&
+ touch nested_level2 &&
+ git init &&
+ git add . &&
+ git commit -m "nested level 2"
+ ) &&
+ mkdir sub_nested &&
+ (cd sub_nested &&
+ touch nested_level1 &&
+ git init &&
+ git add . &&
+ git commit -m "nested level 1"
+ git submodule add ../sub_nested_nested &&
+ git commit -m "add nested level 2"
+ ) &&
+ git submodule add ./sub_nested nested_move &&
+ git commit -m "add nested_move" &&
+ git submodule update --init --recursive &&
+ git mv nested_move sub_nested_moved &&
+ git status
+'
+
test_done
git tag -l --sort=version:refname
'
-run_with_limited_stack () {
- (ulimit -s 128 && "$@")
-}
-
-test_lazy_prereq ULIMIT_STACK_SIZE 'run_with_limited_stack true'
-
-# we require ulimit, this excludes Windows
test_expect_success ULIMIT_STACK_SIZE '--contains and --no-contains work in a deep repo' '
>expect &&
i=1 &&
grep "path.*needs.*filters" err
'
+test_expect_success '--textconv/--filters complain without path' '
+ test_must_fail git cat-file --textconv HEAD &&
+ test_must_fail git cat-file --filters HEAD
+'
+
test_expect_success 'cat-file --textconv --batch works' '
sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
test_config diff.txt.textconv "tr A-Za-z N-ZA-Mn-za-m <" &&
. ./test-lib.sh
+if test_have_prereq !PIPE
+then
+ skip_all="svn dumpfile importer testing requires the PIPE prerequisite"
+ test_done
+fi
+
reinit_git () {
- if ! test_declared_prereq PIPE
- then
- echo >&4 "reinit_git: need to declare PIPE prerequisite"
- return 127
- fi
rm -fr .git &&
rm -f stream backflow &&
git init &&
>empty
-test_expect_success PIPE 'empty dump' '
+test_expect_success 'empty dump' '
reinit_git &&
echo "SVN-fs-dump-format-version: 2" >input &&
try_dump input
'
-test_expect_success PIPE 'v4 dumps not supported' '
+test_expect_success 'v4 dumps not supported' '
reinit_git &&
echo "SVN-fs-dump-format-version: 4" >v4.dump &&
try_dump v4.dump must_fail
'
-test_expect_failure PIPE 'empty revision' '
+test_expect_failure 'empty revision' '
reinit_git &&
printf "rev <nobody, nobody@local>: %s\n" "" "" >expect &&
cat >emptyrev.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_success PIPE 'empty properties' '
+test_expect_success 'empty properties' '
reinit_git &&
printf "rev <nobody, nobody@local>: %s\n" "" "" >expect &&
cat >emptyprop.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_success PIPE 'author name and commit message' '
+test_expect_success 'author name and commit message' '
reinit_git &&
echo "<author@example.com, author@example.com@local>" >expect.author &&
cat >message <<-\EOF &&
test_cmp expect.author actual.author
'
-test_expect_success PIPE 'unsupported properties are ignored' '
+test_expect_success 'unsupported properties are ignored' '
reinit_git &&
echo author >expect &&
cat >extraprop.dump <<-\EOF &&
test_cmp expect actual
'
-test_expect_failure PIPE 'timestamp and empty file' '
+test_expect_failure 'timestamp and empty file' '
echo author@example.com >expect.author &&
echo 1999-01-01 >expect.date &&
echo file >expect.files &&
test_cmp empty file
'
-test_expect_success PIPE 'directory with files' '
+test_expect_success 'directory with files' '
reinit_git &&
printf "%s\n" directory/file1 directory/file2 >expect.files &&
echo hi >hi &&
test_cmp hi directory/file2
'
-test_expect_success PIPE 'branch name with backslash' '
+test_expect_success 'branch name with backslash' '
reinit_git &&
sort <<-\EOF >expect.branch-files &&
trunk/file1
test_cmp expect.branch-files actual.branch-files
'
-test_expect_success PIPE 'node without action' '
+test_expect_success 'node without action' '
reinit_git &&
cat >inaction.dump <<-\EOF &&
SVN-fs-dump-format-version: 3
try_dump inaction.dump must_fail
'
-test_expect_success PIPE 'action: add node without text' '
+test_expect_success 'action: add node without text' '
reinit_git &&
cat >textless.dump <<-\EOF &&
SVN-fs-dump-format-version: 3
try_dump textless.dump must_fail
'
-test_expect_failure PIPE 'change file mode but keep old content' '
+test_expect_failure 'change file mode but keep old content' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp hello actual.target
'
-test_expect_success PIPE 'NUL in property value' '
+test_expect_success 'NUL in property value' '
reinit_git &&
echo "commit message" >expect.message &&
{
test_cmp expect.message actual.message
'
-test_expect_success PIPE 'NUL in log message, file content, and property name' '
+test_expect_success 'NUL in log message, file content, and property name' '
# Caveat: svnadmin 1.6.16 (r1073529) truncates at \0 in the
# svn:specialQnotreally example.
reinit_git &&
test_cmp expect.hello2 actual.hello2
'
-test_expect_success PIPE 'change file mode and reiterate content' '
+test_expect_success 'change file mode and reiterate content' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp hello actual.target
'
-test_expect_success PIPE 'deltas supported' '
+test_expect_success 'deltas supported' '
reinit_git &&
{
# (old) h + (inline) ello + (old) \n
try_dump delta.dump
'
-test_expect_success PIPE 'property deltas supported' '
+test_expect_success 'property deltas supported' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'properties on /' '
+test_expect_success 'properties on /' '
reinit_git &&
cat <<-\EOF >expect &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'deltas for typechange' '
+test_expect_success 'deltas for typechange' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect actual
'
-test_expect_success PIPE 'deltas need not consume the whole preimage' '
+test_expect_success 'deltas need not consume the whole preimage' '
reinit_git &&
cat >expect <<-\EOF &&
OBJID
test_cmp expect.3 actual.3
'
-test_expect_success PIPE 'no hang for delta trying to read past end of preimage' '
+test_expect_success 'no hang for delta trying to read past end of preimage' '
reinit_git &&
{
# COPY 1
fi
'
-test_expect_success SVNREPO,PIPE 't9135/svn.dump' '
+test_expect_success SVNREPO 't9135/svn.dump' '
mkdir -p simple-git &&
(
cd simple-git &&
test_cmp ../expect ../actual
'
+#------------
+# running via git-shell
+#------------
+
+cd "$WORKDIR"
+
+test_expect_success 'create remote-cvs helper' '
+ write_script remote-cvs <<-\EOF
+ exec git shell -c "cvs server"
+ EOF
+'
+
+test_expect_success 'cvs server does not run with vanilla git-shell' '
+ (
+ cd cvswork &&
+ CVS_SERVER=$WORKDIR/remote-cvs &&
+ export CVS_SERVER &&
+ test_must_fail cvs log merge
+ )
+'
+
+test_expect_success 'configure git shell to run cvs server' '
+ mkdir "$HOME"/git-shell-commands &&
+
+ write_script "$HOME"/git-shell-commands/cvs <<-\EOF &&
+ if ! test $# = 1 || ! test "$1" = "server"
+ then
+ echo >&2 "git-cvsserver only handles \"server\""
+ exit 1
+ fi
+ exec git cvsserver server
+ EOF
+
+ # Should not be used, but part of the recommended setup
+ write_script "$HOME"/git-shell-commands/no-interactive-login <<-\EOF
+ echo Interactive login forbidden
+ EOF
+'
+
+test_expect_success 'cvs server can run with recommended config' '
+ (
+ cd cvswork &&
+ CVS_SERVER=$WORKDIR/remote-cvs &&
+ export CVS_SERVER &&
+ cvs log merge
+ )
+'
+
test_done
test_lazy_prereq PIPE '
# test whether the filesystem supports FIFOs
- case $(uname -s) in
- CYGWIN*|MINGW*)
- false
- ;;
- *)
- rm -f testfifo && mkfifo testfifo
- ;;
- esac
+ test_have_prereq !MINGW,!CYGWIN &&
+ rm -f testfifo && mkfifo testfifo
'
test_lazy_prereq SYMLINKS '
(ulimit -s 128 && "$@")
}
-test_lazy_prereq CMDLINE_LIMIT 'run_with_limited_cmdline true'
+test_lazy_prereq CMDLINE_LIMIT '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_cmdline true
+'
+
+run_with_limited_stack () {
+ (ulimit -s 128 && "$@")
+}
+
+test_lazy_prereq ULIMIT_STACK_SIZE '
+ test_have_prereq !MINGW,!CYGWIN &&
+ run_with_limited_stack true
+'
build_option () {
git version --build-options |
{
if (debug)
fprintf(stderr, "Debug: Remote helper: -> %s", buffer->buf);
- if (write_in_full(helper->helper->in, buffer->buf, buffer->len)
- != buffer->len)
+ if (write_in_full(helper->helper->in, buffer->buf, buffer->len) < 0)
die_errno("Full write to remote helper failed");
}
{
if (debug)
fprintf(stderr, "Debug: Remote helper: -> %s", str);
- if (write_in_full(fd, str, strlen(str)) != strlen(str))
+ if (write_in_full(fd, str, strlen(str)) < 0)
die_errno("Full write to remote helper failed");
}
"|//|\\*\\*|::|[/<>=]="),
IPATTERN("fountain", "^((\\.[^.]|(int|ext|est|int\\.?/ext|i/e)[. ]).*)$",
"[^ \t-]+"),
-PATTERNS("html", "^[ \t]*(<[Hh][1-6][ \t].*>.*)$",
+PATTERNS("html", "^[ \t]*(<[Hh][1-6]([ \t].*)?>.*)$",
"[^<>= \t]+"),
PATTERNS("java",
"!^[ \t]*(catch|do|for|if|instanceof|new|return|switch|throw|while)\n"
void write_file_buf(const char *path, const char *buf, size_t len)
{
int fd = xopen(path, O_WRONLY | O_CREAT | O_TRUNC, 0666);
- if (write_in_full(fd, buf, len) != len)
+ if (write_in_full(fd, buf, len) < 0)
die_errno(_("could not write to %s"), path);
if (close(fd))
die_errno(_("could not close %s"), path);
void wt_status_add_cut_line(FILE *fp)
{
- const char *explanation = _("Do not touch the line above.\nEverything below will be removed.");
+ const char *explanation = _("Do not modify or remove the line above.\nEverything below it will be ignored.");
struct strbuf buf = STRBUF_INIT;
fprintf(fp, "%c %s", comment_line_char, cut_line);