FORMULA=$1
SHA=$(brew fetch --force $FORMULA 2>&1 | grep ^SHA256: | cut -d ' ' -f 2)
sed -E -i.bak "s/sha256 \"[0-9a-f]{64}\"/sha256 \"$SHA\"/g" \
- /usr/local/Library/Taps/homebrew/homebrew-binary/$FORMULA.rb
+ "$(brew --repository homebrew/homebrew-binary)/$FORMULA.rb"
}
brew update --quiet
brew tap homebrew/binary --quiet
to allow "--submodule=diff" to show the patch between the submodule
commits bound to the superproject.
+ * Even though "git hash-objects", which is a tool to take an
+ on-filesystem data stream and put it into the Git object store,
+ allowed to perform the "outside-world-to-Git" conversions (e.g.
+ end-of-line conversions and application of the clean-filter), and
+ it had the feature on by default from very early days, its reverse
+ operation "git cat-file", which takes an object from the Git object
+ store and externalize for the consumption by the outside world,
+ lacked an equivalent mechanism to run the "Git-to-outside-world"
+ conversion. The command learned the "--filters" option to do so.
+
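   For illustration only (the path "hello.sh" stands in for any tracked
   file), the new option can be invoked as

       $ git cat-file --filters HEAD:hello.sh

   which prints the blob with end-of-line conversion and smudge filters
   applied, i.e. as the working tree would see it.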
+ * Output from "git diff" can be made easier to read by selecting
+ which lines are common and which lines are added/deleted
+ intelligently when the lines before and after the changed section
+ are the same. A command line option is added to help with the
+ experiment to find a good heuristics.
+
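   For illustration only, the experimental heuristic can be tried with

       $ git diff --compaction-heuristic

   and the corresponding diff.compactionHeuristic configuration variable
   (documented later in this patch) can be set to true to enable it by
   default.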
+ * In some projects, it is common to use "[RFC PATCH]" as the subject
+   prefix for a patch meant for discussion rather than application. A
+   new option "--rfc" is a short-hand for "--subject-prefix=RFC PATCH"
+   to help the participants of such projects.
+
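   For illustration only,

       $ git format-patch --rfc -1

   produces a single patch whose subject starts with "[RFC PATCH]"
   instead of "[PATCH]".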
+ * "git add --chmod=+x <pathspec>" added recently only toggled the
+ executable bit for paths that are either new or modified. This has
+ been corrected to flip the executable bit for all paths that match
+ the given pathspec.
+
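   For illustration only (the file name is hypothetical):

       $ git add --chmod=+x build.sh
       $ git ls-files --stage build.sh

   After the first command, the second should report mode 100755 for the
   path even though the file on disk is left untouched.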
Performance, Internal Implementation, Development Support etc.
i.e. the object name recorded in the cache_entry, turns into an
object_id.
+ * JGit can show a fake ref "capabilities^{}" to "git fetch" when it
+ does not advertise any refs, but "git fetch" was not prepared to
+ see such an advertisement. When the other side disconnects without
+ giving any ref advertisement, we used to say "there may not be a
+   repository at that URL", but we may have seen other advertisements
+   such as "shallow" and ".have", in which case we definitely know that
+   a repository is there. The code to detect this case has also been
+ updated.
+
+ * Some codepaths in "git pack-objects" were not ready to use an
+ existing pack bitmap; now they are and as the result they have
+ become faster.
+
Also contains various documentation updates and code clean-ups.
* Clarify various ways to specify the "revision ranges" in the
documentation.
- (merge a117be4 po/range-doc later to maint).
* "diff-highlight" script (in contrib/) learned to work better with
"git log -p --graph" output.
- (merge 3dbfe2b bh/diff-highlight-graph later to maint).
* The test framework left the number of tests and success/failure
count in the t/test-results directory, keyed by the name of the
test script plus the process ID. The latter however turned out not
to serve any useful purpose. The process ID part of the filename
has been removed.
- (merge 5c885c1 jk/test-lib-drop-pid-from-results later to maint).
* Having a submodule whose ".git" repository is somehow corrupt
   caused a few commands that recurse into submodules to loop forever.
- (merge 10f5c52 jc/submodule-anchor-git-dir later to maint).
* "git symbolic-ref -d HEAD" happily removes the symbolic ref, but
the resulting repository becomes an invalid one. Teach the command
to forbid removal of HEAD.
- (merge 12cfa79 jc/forbid-symbolic-ref-d-HEAD later to maint).
* A test spawned a short-lived background process, which sometimes
prevented the test directory from getting removed at the end of the
script on some platforms.
- (merge 5babb5b js/t6026-clean-up later to maint).
* Update a few tests that used to use GIT_CURL_VERBOSE to use the
newer GIT_TRACE_CURL.
- (merge 14e2411 ep/use-git-trace-curl-in-tests later to maint).
* "git pack-objects --include-tag" was taught that when we know that
we are sending an object C, we want a tag B that directly points at
this change.
(merge f14a310 js/git-gui-commit-gpgsign later to maint).
+ * "git add --chmod=+x" added recently lacked documentation, which has
+ been corrected.
+ (merge 7ef7903 et/add-chmod-x later to maint).
+
+ * "git log --cherry-pick" used to include merge commits as candidates
+ to be matched up with other commits, resulting a lot of wasted time.
+ The patch-id generation logic has been updated to ignore merges to
+ avoid the wastage.
+ (merge 7c81040 jk/patch-ids-no-merges later to maint).
+
+ * The http transport (with the curl-multi option, which is the
+   default these days) failed to remove a curl-easy handle from a
+   curlm session, which led to unnecessary API failures.
+ (merge 2abc848 ew/http-do-not-forget-to-call-curl-multi-remove-handle later to maint).
+
+ * There were numerous corner cases in which the configuration files
+   were read and used, or not read at all, depending on the directory
+   from which a Git command was run, leading to inconsistent
+   behaviour. The code to set up repository access at the beginning
+   of a Git process has been updated to fix them.
+ (merge 4d0efa1 jk/setup-sequence-update later to maint).
+
+ * "git diff -W" output needs to extend the context backward to
+ include the header line of the current function and also forward to
+ include the body of the entire current function up to the header
+   line of the next one. This process may have to merge two adjacent
+   hunks, but the code forgot to do so in some cases.
+ (merge 45d2f75 rs/xdiff-merge-overlapping-hunks-for-W-context later to maint).
+
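   For illustration only,

       $ git diff -W

   (or its synonym "--function-context") shows each change together with
   the whole function containing it, which is where such adjacent hunks
   can arise.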
+ * Performance tests run via "t/perf" did not use the same build
+   configuration as the rest of the build if the user relied on
+   autoconf-generated configuration.
+ (merge cd5c281 ks/perf-build-with-autoconf later to maint).
+
+ * "git format-patch --base=..." feature that was recently added
+ showed the base commit information after "-- " e-mail signature
+ line, which turned out to be inconvenient. The base information
+ has been moved above the signature line.
+ (merge 480871e jt/format-patch-base-info-above-sig later to maint).
+
+ * More i18n.
+ (merge 43073f8 va/i18n later to maint).
+
+ * Even when "git pull --rebase=preserve" (and the underlying "git
+ rebase --preserve") can complete without creating any new commit
+ (i.e. fast-forwards), it still insisted on having a usable ident
+ information (read: user.email is set correctly), which was less
+ than nice. As the underlying commands used inside "git rebase"
+ would fail with a more meaningful error message and advice text
+ when the bogus ident matters, this extra check was removed.
+ (merge 1e461c4 jk/rebase-i-drop-ident-check later to maint).
+
+ * "git gc --aggressive" used to limit the delta-chain length to 250,
+ which is way too deep for gaining additional space savings and is
+ detrimental for runtime performance. The limit has been reduced to
+ 50.
+ (merge 07e7dbf jk/reduce-gc-aggressive-depth later to maint).
+
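   For illustration only, users who really want the previous behaviour
   can still override the new default, e.g.

       $ git -c gc.aggressiveDepth=250 gc --aggressive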
+ * Documentation for individual configuration variables to control use
+ of color (like `color.grep`) said that their default value is
+ 'false', instead of saying their default is taken from `color.ui`.
+ When we updated the default value for color.ui from 'false' to
+   'auto' quite a while ago, all of these descriptions became stale.
+   This has been corrected.
+ (merge 14d16e2 mm/config-color-ui-default-to-auto later to maint).
+
+ * The pretty-format specifier "%C(auto)" used by the "log" family of
+ commands to enable coloring of the output is taught to also issue a
+ color-reset sequence to the output.
+ (merge c99ad27 rs/c-auto-resets-attributes later to maint).
+
+ * A shell script example in check-ref-format documentation has been
+ fixed.
+ (merge 92dece7 ep/doc-check-ref-format-example later to maint).
+
+ * "git checkout <word>" does not follow the usual disambiguation
+ rules when the <word> can be both a rev and a path, to allow
+ checking out a branch 'foo' in a project that happens to have a
+ file 'foo' in the working tree without having to disambiguate.
+ This was poorly documented and the check was incorrect when the
+ command was run from a subdirectory.
+ (merge b829b94 nd/checkout-disambiguation later to maint).
+
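   For illustration only, in a repository that has both a branch and a
   file named "foo":

       $ git checkout foo       # takes "foo" as a branch to check out
       $ git checkout -- foo    # takes "foo" as a path to check out of the index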
+ * Some codepaths in "git diff" used regexec(3) on a buffer that was
+ mmap(2)ed, which may not have a terminating NUL, leading to a read
+ beyond the end of the mapped region. This was fixed by introducing
+ a regexec_buf() helper that takes a <ptr,len> pair with REG_STARTEND
+ extension.
+ (merge b7d36ff js/regexec-buf later to maint).
+
+ * The procedure to build Git on Mac OS X for Travis CI hardcoded the
+   internal directory structure we assumed Homebrew uses, which was a
+   no-no. The procedure has been updated to ask Homebrew for the
+   things we need to know instead.
+ (merge f86f49b ls/travis-homebrew-path-fix later to maint).
+
+ * When "git rebase -i" is given a broken instruction, it told the
+ user to fix it with "--edit-todo", but didn't say what the step
+ after that was (i.e. "--continue").
+ (merge 37875b4 rt/rebase-i-broken-insn-advise later to maint).
+
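   For illustration only, the full recovery sequence is

       $ git rebase --edit-todo    # fix the broken instruction
       $ git rebase --continue     # then resume the rebase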
+ * Documentation around tools to import from CVS was fairly outdated.
+ (merge 106b672 jk/doc-cvs-update later to maint).
+
+ * "git clone --recurse-submodules" lost the progress eye-candy in
+ recent update, which has been corrected.
+
* Other minor doc, test and build updates and code cleanups.
- (merge 3e1952e jk/squelch-false-warning-from-gcc-o3 later to maint).
- (merge ca2baa3 rs/compat-strdup later to maint).
- (merge d233097 rs/hex2chr later to maint).
- (merge c00bfc9 js/t9903-chaining later to maint).
- (merge 5e4e5bb sb/xdiff-remove-unused-static-decl later to maint).
- (merge 5cb5fe4 sb/transport-report-missing-submodule-on-stderr later to maint).
- (merge a1c8044 ah/misc-message-fixes later to maint).
- (merge ca9b37e sb/diff-cleanup later to maint).
+ (merge e78d57e bw/pathspec-remove-unused-extern-decl later to maint).
+ (merge ce25e4c rs/checkout-some-states-are-const later to maint).
+ (merge a8342a4 rs/strbuf-remove-fix later to maint).
+ (merge b56aa5b rs/unpack-trees-reduce-file-scope-global later to maint).
+ (merge 5efc60c mr/vcs-svn-printf-ulong later to maint).
+ (merge a22ae75 rs/cocci later to maint).
A boolean to enable/disable color in the output of
linkgit:git-branch[1]. May be set to `always`,
`false` (or `never`) or `auto` (or `true`), in which case colors are used
- only when the output is to a terminal. Defaults to false.
+ only when the output is to a terminal. If unset, then the
+ value of `color.ui` is used (`auto` by default).
color.branch.<slot>::
Use customized color for branch coloration. `<slot>` is one of
linkgit:git-log[1], and linkgit:git-show[1] will use color
for all patches. If it is set to `true` or `auto`, those
commands will only use color when output is to the terminal.
- Defaults to false.
+ If unset, then the value of `color.ui` is used (`auto` by
+ default).
+
This does not affect linkgit:git-format-patch[1] or the
'git-diff-{asterisk}' plumbing commands. Can be overridden on the
color.grep::
When set to `always`, always highlight matches. When `false` (or
`never`), never. When set to `true` or `auto`, use color only
- when the output is written to the terminal. Defaults to `false`.
+ when the output is written to the terminal. If unset, then the
+ value of `color.ui` is used (`auto` by default).
color.grep.<slot>::
Use customized color for grep colorization. `<slot>` specifies which
and displays (such as those used by "git-add --interactive" and
"git-clean --interactive"). When false (or `never`), never.
When set to `true` or `auto`, use colors only when the output is
- to the terminal. Defaults to false.
+ to the terminal. If unset, then the value of `color.ui` is
+ used (`auto` by default).
color.interactive.<slot>::
Use customized color for 'git add --interactive' and 'git clean
A boolean to enable/disable color in the output of
linkgit:git-show-branch[1]. May be set to `always`,
`false` (or `never`) or `auto` (or `true`), in which case colors are used
- only when the output is to a terminal. Defaults to false.
+ only when the output is to a terminal. If unset, then the
+ value of `color.ui` is used (`auto` by default).
color.status::
A boolean to enable/disable color in the output of
linkgit:git-status[1]. May be set to `always`,
`false` (or `never`) or `auto` (or `true`), in which case colors are used
- only when the output is to a terminal. Defaults to false.
+ only when the output is to a terminal. If unset, then the
+ value of `color.ui` is used (`auto` by default).
color.status.<slot>::
Use customized color for status colorization. `<slot>` is
gc.aggressiveDepth::
The depth parameter used in the delta compression
algorithm used by 'git gc --aggressive'. This defaults
- to 250.
+ to 50.
gc.aggressiveWindow::
The window size parameter used in the delta compression
include::mergetools-diff.txt[]
+diff.indentHeuristic::
diff.compactionHeuristic::
- Set this option to `true` to enable an experimental heuristic that
- shifts the hunk boundary in an attempt to make the resulting
- patch easier to read.
+ Set one of these options to `true` to enable one of two
+ experimental heuristics that shift diff hunk boundaries to
+ make patches easier to read.
diff.algorithm::
Choose a diff algorithm. The variants are as follows:
--- /dev/null
+--indent-heuristic::
+--no-indent-heuristic::
+--compaction-heuristic::
+--no-compaction-heuristic::
+ These are to help debugging and tuning experimental heuristics
+ (which are off by default) that shift diff hunk boundaries to
+ make patches easier to read.
Synonym for `-p --raw`.
endif::git-format-patch[]
---compaction-heuristic::
---no-compaction-heuristic::
- These are to help debugging and tuning an experimental
- heuristic (which is off by default) that shifts the hunk
- boundary in an attempt to make the resulting patch easier
- to read.
+include::diff-heuristic-options.txt[]
--minimal::
Spend extra time to make sure the smallest possible
'git add' [--verbose | -v] [--dry-run | -n] [--force | -f] [--interactive | -i] [--patch | -p]
[--edit | -e] [--[no-]all | --[no-]ignore-removal | [--update | -u]]
[--intent-to-add | -N] [--refresh] [--ignore-errors] [--ignore-missing]
- [--] [<pathspec>...]
+ [--chmod=(+|-)x] [--] [<pathspec>...]
DESCRIPTION
-----------
be ignored, no matter if they are already present in the work
tree or not.
+--chmod=(+|-)x::
+ Override the executable bit of the added files. The executable
+ bit is only changed in the index, the files on disk are left
+ unchanged.
+
\--::
This option can be used to separate command-line options from
the list of files, (useful when filenames might be mistaken
OPTIONS
-------
include::blame-options.txt[]
+include::diff-heuristic-options.txt[]
SEE ALSO
--------
abbreviated object name, use <n>+1 digits. Note that 1 column
is used for a caret to mark the boundary commit.
+include::diff-heuristic-options.txt[]
+
THE PORCELAIN FORMAT
--------------------
SYNOPSIS
--------
[verse]
-'git cat-file' (-t [--allow-unknown-type]| -s [--allow-unknown-type]| -e | -p | <type> | --textconv ) <object>
-'git cat-file' (--batch | --batch-check) [--follow-symlinks]
+'git cat-file' (-t [--allow-unknown-type]| -s [--allow-unknown-type]| -e | -p | <type> | --textconv | --filters ) [--path=<path>] <object>
+'git cat-file' (--batch | --batch-check) [ --textconv | --filters ] [--follow-symlinks]
DESCRIPTION
-----------
In its first form, the command provides the content or the type of an object in
the repository. The type is required unless `-t` or `-p` is used to find the
-object type, or `-s` is used to find the object size, or `--textconv` is used
-(which implies type "blob").
+object type, or `-s` is used to find the object size, or `--textconv` or
+`--filters` is used (which imply type "blob").
In the second form, a list of objects (separated by linefeeds) is provided on
-stdin, and the SHA-1, type, and size of each object is printed on stdout.
+stdin, and the SHA-1, type, and size of each object is printed on stdout. The
+output format can be overridden using the optional `<format>` argument. If
+either `--textconv` or `--filters` was specified, the input is expected to
+list the object names followed by the path name, separated by a single white
+space, so that the appropriate drivers can be determined.
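For illustration only (`0f3bc5f` and `hello.sh` are a placeholder object
name and path), the two invocations could look like:

------------
$ git cat-file --filters --path=hello.sh 0f3bc5f
$ echo "0f3bc5f hello.sh" | git cat-file --filters --batch
------------

The first form uses the `--path` option described below; the second
feeds an "object path" pair to `--batch` as described above.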
OPTIONS
-------
--textconv::
Show the content as transformed by a textconv filter. In this case,
- <object> has be of the form <tree-ish>:<path>, or :<path> in order
- to apply the filter to the content recorded in the index at <path>.
+ <object> has to be of the form <tree-ish>:<path>, or :<path> in
+ order to apply the filter to the content recorded in the index at
+ <path>.
+
+--filters::
+ Show the content as converted by the filters configured in
+ the current working tree for the given <path> (i.e. smudge filters,
+ end-of-line conversion, etc). In this case, <object> has to be of
+ the form <tree-ish>:<path>, or :<path>.
+
+--path=<path>::
+ For use with --textconv or --filters, to allow specifying an object
+ name and a path separately, e.g. when it is difficult to figure out
+ the revision from which the blob came.
--batch::
--batch=<format>::
Print object information and contents for each object provided
- on stdin. May not be combined with any other options or arguments.
- See the section `BATCH OUTPUT` below for details.
+ on stdin. May not be combined with any other options or arguments
+ except `--textconv` or `--filters`, in which case the input lines
+ also need to specify the path, separated by white space. See the
+ section `BATCH OUTPUT` below for details.
--batch-check::
--batch-check=<format>::
Print object information for each object provided on stdin. May
- not be combined with any other options or arguments. See the
+ not be combined with any other options or arguments except
+ `--textconv` or `--filters`, in which case the input lines also
+ need to specify the path, separated by white space. See the
section `BATCH OUTPUT` below for details.
--batch-all-objects::
* Determine the reference name to use for a new branch:
+
------------
-$ ref=$(git check-ref-format --normalize "refs/heads/$newbranch") ||
-die "we do not like '$newbranch' as a branch name."
+$ ref=$(git check-ref-format --normalize "refs/heads/$newbranch") ||
+{ echo "we do not like '$newbranch' as a branch name." >&2 ; exit 1 ; }
------------
GIT
$ git log -g -2 HEAD
------------
+ARGUMENT DISAMBIGUATION
+-----------------------
+
+When there is only one argument given and it is not `--` (e.g. "git
+checkout abc"), and when the argument is both a valid `<tree-ish>`
+(e.g. a branch "abc" exists) and a valid `<pathspec>` (e.g. a file
+or a directory whose name is "abc" exists), Git would usually ask
+you to disambiguate. Because checking out a branch is so common an
+operation, however, "git checkout abc" takes "abc" as a `<tree-ish>`
+in such a situation. Use `git checkout -- <pathspec>` if you want
+to check these paths out of the index.
+
EXAMPLES
--------
deprecated; it does not work with cvsps version 3 and later. If you are
performing a one-shot import of a CVS repository consider using
http://cvs2svn.tigris.org/cvs2git.html[cvs2git] or
-https://github.com/BartMassey/parsecvs[parsecvs].
+http://www.catb.org/esr/cvs-fast-export/[cvs-fast-export].
Imports a CVS repository into Git. It will either create a new
repository, or incrementally import into an existing one.
[--start-number <n>] [--numbered-files]
[--in-reply-to=Message-Id] [--suffix=.<sfx>]
[--ignore-if-in-upstream]
- [--subject-prefix=Subject-Prefix] [(--reroll-count|-v) <n>]
+ [--rfc] [--subject-prefix=Subject-Prefix]
+ [(--reroll-count|-v) <n>]
[--to=<email>] [--cc=<email>]
[--[no-]cover-letter] [--quiet] [--notes[=<ref>]]
[<common diff options>]
allows for useful naming of a patch series, and can be
combined with the `--numbered` option.
+--rfc::
+ Alias for `--subject-prefix="RFC PATCH"`. RFC means "Request For
+ Comments"; use this when sending an experimental patch for
+ discussion rather than application.
+
-v <n>::
--reroll-count=<n>::
Mark the series as the <n>-th iteration of the topic. The
Importing a CVS archive
-----------------------
+NOTE: These instructions use the `git-cvsimport` script which ships with
+git, but other importers may provide better results. See the note in
+linkgit:git-cvsimport[1] for other options.
+
First, install version 2.1 or higher of cvsps from
-http://www.cobite.com/cvsps/[http://www.cobite.com/cvsps/] and make
+https://github.com/andreyvit/cvsps[https://github.com/andreyvit/cvsps] and make
sure it is in your path. Then cd to a checked out CVS working directory
of the project you are interested in and run linkgit:git-cvsimport[1]:
# crashes due to allocation and free working on different 'heaps'.
# It's defined automatically if USE_NED_ALLOCATOR is set.
#
-# Define NO_REGEX if you have no or inferior regex support in your C library.
+# Define NO_REGEX if your C library lacks regex support with the
+# REG_STARTEND feature.
#
# Define HAVE_DEV_TTY if your system can open /dev/tty to interact with the
# user.
PTHREAD_LIBS = -lpthread
PTHREAD_CFLAGS =
GCOV = gcov
+SPATCH = spatch
export TCL_PATH TCLTK_PATH
exit 1; \
fi
+C_SOURCES = $(patsubst %.o,%.c,$(C_OBJ))
+%.cocci.patch: %.cocci $(C_SOURCES)
+ @echo ' ' SPATCH $<; \
+ for f in $(C_SOURCES); do \
+ $(SPATCH) --sp-file $< $$f; \
+ done >$@ 2>$@.log; \
+ if test -s $@; \
+ then \
+ echo ' ' SPATCH result: $@; \
+ fi
+coccicheck: $(patsubst %.cocci,%.cocci.patch,$(wildcard contrib/coccinelle/*.cocci))
+
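For illustration only: with Coccinelle installed, the new target can be
invoked from the top-level directory as

    $ make coccicheck

and any suggested transformations are written to
contrib/coccinelle/*.cocci.patch files.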
### Installation rules
ifneq ($(filter /%,$(firstword $(template_dir))),)
$(RM) -r $(GIT_TARNAME) .doc-tmp-dir
$(RM) $(GIT_TARNAME).tar.gz git-core_$(GIT_VERSION)-*.tar.gz
$(RM) $(htmldocs).tar.gz $(manpages).tar.gz
+ $(RM) contrib/coccinelle/*.cocci.patch*
$(MAKE) -C Documentation/ clean
ifndef NO_PERL
$(MAKE) -C gitweb clean
static int checkout_target(struct index_state *istate,
struct cache_entry *ce, struct stat *st)
{
- struct checkout costate;
+ struct checkout costate = CHECKOUT_INIT;
- memset(&costate, 0, sizeof(costate));
- costate.base_dir = "";
costate.refresh_cache = 1;
costate.istate = istate;
if (checkout_entry(ce, &costate, NULL) || lstat(ce->name, st))
static int take_worktree_changes;
struct update_callback_data {
- int flags, force_mode;
+ int flags;
int add_errors;
};
+static void chmod_pathspec(struct pathspec *pathspec, int force_mode)
+{
+ int i;
+
+ for (i = 0; i < active_nr; i++) {
+ struct cache_entry *ce = active_cache[i];
+
+ if (pathspec && !ce_path_match(ce, pathspec, NULL))
+ continue;
+
+ if (chmod_cache_entry(ce, force_mode) < 0)
+ fprintf(stderr, "cannot chmod '%s'", ce->name);
+ }
+}
+
static int fix_unmerged_status(struct diff_filepair *p,
struct update_callback_data *data)
{
die(_("unexpected diff status %c"), p->status);
case DIFF_STATUS_MODIFIED:
case DIFF_STATUS_TYPE_CHANGED:
- if (add_file_to_index(&the_index, path,
- data->flags, data->force_mode)) {
+ if (add_file_to_index(&the_index, path, data->flags)) {
if (!(data->flags & ADD_CACHE_IGNORE_ERRORS))
die(_("updating files failed"));
data->add_errors++;
}
}
-int add_files_to_cache(const char *prefix, const struct pathspec *pathspec,
- int flags, int force_mode)
+int add_files_to_cache(const char *prefix,
+ const struct pathspec *pathspec, int flags)
{
struct update_callback_data data;
struct rev_info rev;
memset(&data, 0, sizeof(data));
data.flags = flags;
- data.force_mode = force_mode;
init_revisions(&rev, prefix);
setup_revisions(0, NULL, &rev, NULL);
return git_default_config(var, value, cb);
}
-static int add_files(struct dir_struct *dir, int flags, int force_mode)
+static int add_files(struct dir_struct *dir, int flags)
{
int i, exit_status = 0;
}
for (i = 0; i < dir->nr; i++)
- if (add_file_to_index(&the_index, dir->entries[i]->name,
- flags, force_mode)) {
+ if (add_file_to_index(&the_index, dir->entries[i]->name, flags)) {
if (!ignore_add_errors)
die(_("adding files failed"));
exit_status = 1;
int exit_status = 0;
struct pathspec pathspec;
struct dir_struct dir;
- int flags, force_mode;
+ int flags;
int add_new_files;
int require_pathspec;
char *seen = NULL;
if (!show_only && ignore_missing)
die(_("Option --ignore-missing can only be used together with --dry-run"));
- if (!chmod_arg)
- force_mode = 0;
- else if (!strcmp(chmod_arg, "-x"))
- force_mode = 0666;
- else if (!strcmp(chmod_arg, "+x"))
- force_mode = 0777;
- else
+ if (chmod_arg && ((chmod_arg[0] != '-' && chmod_arg[0] != '+') ||
+ chmod_arg[1] != 'x' || chmod_arg[2]))
die(_("--chmod param '%s' must be either -x or +x"), chmod_arg);
add_new_files = !take_worktree_changes && !refresh_only;
plug_bulk_checkin();
- exit_status |= add_files_to_cache(prefix, &pathspec, flags, force_mode);
+ exit_status |= add_files_to_cache(prefix, &pathspec, flags);
if (add_new_files)
- exit_status |= add_files(&dir, flags, force_mode);
+ exit_status |= add_files(&dir, flags);
+ if (chmod_arg && pathspec.nr)
+ chmod_pathspec(&pathspec, chmod_arg[0]);
unplug_bulk_checkin();
finish:
return 0;
}
+ if (git_diff_heuristic_config(var, value, cb) < 0)
+ return -1;
if (userdiff_config(var, value) < 0)
return -1;
OPT_BIT('s', NULL, &output_option, N_("Suppress author name and timestamp (Default: off)"), OUTPUT_NO_AUTHOR),
OPT_BIT('e', "show-email", &output_option, N_("Show author email instead of name (Default: off)"), OUTPUT_SHOW_EMAIL),
OPT_BIT('w', NULL, &xdl_opts, N_("Ignore whitespace differences"), XDF_IGNORE_WHITESPACE),
+
+ /*
+ * The following two options are parsed by parse_revision_opt()
+ * and are only included here to get included in the "-h"
+ * output:
+ */
+ { OPTION_LOWLEVEL_CALLBACK, 0, "indent-heuristic", NULL, NULL, N_("Use an experimental indent-based heuristic to improve diffs"), PARSE_OPT_NOARG, parse_opt_unknown_cb },
+ { OPTION_LOWLEVEL_CALLBACK, 0, "compaction-heuristic", NULL, NULL, N_("Use an experimental blank-line-based heuristic to improve diffs"), PARSE_OPT_NOARG, parse_opt_unknown_cb },
+
OPT_BIT(0, "minimal", &xdl_opts, N_("Spend extra cycles to find better match"), XDF_NEED_MINIMAL),
OPT_STRING('S', NULL, &revs_file, N_("file"), N_("Use revisions from <file> instead of calling git-rev-list")),
OPT_STRING(0, "contents", &contents_from, N_("file"), N_("Use <file>'s contents as the final image")),
}
parse_done:
no_whole_file_rename = !DIFF_OPT_TST(&revs.diffopt, FOLLOW_RENAMES);
+ xdl_opts |= revs.diffopt.xdl_opts & (XDF_COMPACTION_HEURISTIC | XDF_INDENT_HEURISTIC);
DIFF_OPT_CLR(&revs.diffopt, FOLLOW_RENAMES);
argc = parse_options_end(&ctx);
if (incremental || (output_option & OUTPUT_PORCELAIN)) {
if (show_progress > 0)
- die("--progress can't be used with --incremental or porcelain formats");
+ die(_("--progress can't be used with --incremental or porcelain formats"));
show_progress = 0;
} else if (show_progress < 0)
show_progress = isatty(2);
sb.commits.compare = compare_commits_by_commit_date;
}
else if (contents_from)
- die("--contents and --reverse do not blend well.");
+ die(_("--contents and --reverse do not blend well."));
else {
final_commit_name = prepare_initial(&sb);
sb.commits.compare = compare_commits_by_reverse_commit_date;
add_pending_object(&revs, &(sb.final->object), ":");
}
else if (contents_from)
- die("Cannot use --contents with final commit object name");
+ die(_("cannot use --contents with final commit object name"));
if (reverse && revs.first_parent_only) {
final_commit = find_single_final(sb.revs, NULL);
if (!final_commit)
- die("--reverse and --first-parent together require specified latest commit");
+ die(_("--reverse and --first-parent together require specified latest commit"));
}
/*
}
if (oidcmp(&c->object.oid, &sb.final->object.oid))
- die("--reverse --first-parent together require range along first-parent chain");
+ die(_("--reverse --first-parent together require range along first-parent chain"));
}
if (is_null_oid(&sb.final->object.oid)) {
else {
o = get_origin(&sb, sb.final, path);
if (fill_blob_sha1_and_mode(o))
- die("no such path %s in %s", path, final_commit_name);
+ die(_("no such path %s in %s"), path, final_commit_name);
if (DIFF_OPT_TST(&sb.revs->diffopt, ALLOW_TEXTCONV) &&
textconv_object(path, o->mode, &o->blob_oid, 1, (char **) &sb.final_buf,
&sb.final_buf_size);
if (!sb.final_buf)
- die("Cannot read blob %s for path %s",
+ die(_("cannot read blob %s for path %s"),
oid_to_hex(&o->blob_oid),
path);
}
&bottom, &top, sb.path))
usage(blame_usage);
if (lno < top || ((lno || bottom) && lno < bottom))
- die("file %s has only %lu lines", path, lno);
+ die(Q_("file %s has only %lu line",
+ "file %s has only %lu lines",
+ lno), path, lno);
if (bottom < 1)
bottom = 1;
if (top < 1)
OPT_SET_INT( 0, "set-upstream", &track, N_("change upstream info"),
BRANCH_TRACK_OVERRIDE),
OPT_STRING('u', "set-upstream-to", &new_upstream, N_("upstream"), N_("change the upstream info")),
- OPT_BOOL(0, "unset-upstream", &unset_upstream, "Unset the upstream info"),
+ OPT_BOOL(0, "unset-upstream", &unset_upstream, N_("Unset the upstream info")),
OPT__COLOR(&branch_use_color, N_("use colored output")),
OPT_SET_INT('r', "remotes", &filter.kind, N_("act on remote-tracking branches"),
FILTER_REFS_REMOTES),
int print_contents;
int buffer_output;
int all_objects;
+ int cmdmode; /* may be 'w' or 'c' for --filters or --textconv */
const char *format;
};
+static const char *force_path;
+
+static int filter_object(const char *path, unsigned mode,
+ const struct object_id *oid,
+ char **buf, unsigned long *size)
+{
+ enum object_type type;
+
+ *buf = read_sha1_file(oid->hash, &type, size);
+ if (!*buf)
+ return error(_("cannot read object %s '%s'"),
+ oid_to_hex(oid), path);
+ if ((type == OBJ_BLOB) && S_ISREG(mode)) {
+ struct strbuf strbuf = STRBUF_INIT;
+ if (convert_to_working_tree(path, *buf, *size, &strbuf)) {
+ free(*buf);
+ *size = strbuf.len;
+ *buf = strbuf_detach(&strbuf, NULL);
+ }
+ }
+
+ return 0;
+}
+
static int cat_one_file(int opt, const char *exp_type, const char *obj_name,
int unknown_type)
{
struct object_info oi = {NULL};
struct strbuf sb = STRBUF_INIT;
unsigned flags = LOOKUP_REPLACE_OBJECT;
+ const char *path = force_path;
if (unknown_type)
flags |= LOOKUP_UNKNOWN_OBJECT;
if (get_sha1_with_context(obj_name, 0, oid.hash, &obj_context))
die("Not a valid object name %s", obj_name);
+ if (!path)
+ path = obj_context.path;
+ if (obj_context.mode == S_IFINVALID)
+ obj_context.mode = 0100644;
+
buf = NULL;
switch (opt) {
case 't':
case 'e':
return !has_object_file(&oid);
+ case 'w':
+ if (!path[0])
+ die("git cat-file --filters %s: <object> must be "
+ "<sha1:path>", obj_name);
+
+ if (filter_object(path, obj_context.mode,
+ &oid, &buf, &size))
+ return -1;
+ break;
+
case 'c':
- if (!obj_context.path[0])
+ if (!path[0])
die("git cat-file --textconv %s: <object> must be <sha1:path>",
obj_name);
- if (textconv_object(obj_context.path, obj_context.mode, &oid, 1, &buf, &size))
+ if (textconv_object(path, obj_context.mode, &oid, 1, &buf, &size))
break;
case 'p':
if (data->type == OBJ_BLOB) {
if (opt->buffer_output)
fflush(stdout);
- if (stream_blob_to_fd(1, oid, NULL, 0) < 0)
+ if (opt->cmdmode) {
+ char *contents;
+ unsigned long size;
+
+ if (!data->rest)
+ die("missing path for '%s'", oid_to_hex(oid));
+
+ if (opt->cmdmode == 'w') {
+ if (filter_object(data->rest, 0100644, oid,
+ &contents, &size))
+ die("could not convert '%s' %s",
+ oid_to_hex(oid), data->rest);
+ } else if (opt->cmdmode == 'c') {
+ enum object_type type;
+ if (!textconv_object(data->rest, 0100644, oid,
+ 1, &contents, &size))
+ contents = read_sha1_file(oid->hash, &type,
+ &size);
+ if (!contents)
+ die("could not convert '%s' %s",
+ oid_to_hex(oid), data->rest);
+ } else
+ die("BUG: invalid cmdmode: %c", opt->cmdmode);
+ batch_write(opt, contents, size);
+ free(contents);
+ } else if (stream_blob_to_fd(1, oid, NULL, 0) < 0)
die("unable to stream %s to stdout", oid_to_hex(oid));
}
else {
data.mark_query = 1;
strbuf_expand(&buf, opt->format, expand_format, &data);
data.mark_query = 0;
+ if (opt->cmdmode)
+ data.split_on_whitespace = 1;
if (opt->all_objects) {
struct object_info empty;
}
static const char * const cat_file_usage[] = {
- N_("git cat-file (-t [--allow-unknown-type] | -s [--allow-unknown-type] | -e | -p | <type> | --textconv) <object>"),
- N_("git cat-file (--batch | --batch-check) [--follow-symlinks]"),
+ N_("git cat-file (-t [--allow-unknown-type] | -s [--allow-unknown-type] | -e | -p | <type> | --textconv | --filters) [--path=<path>] <object>"),
+ N_("git cat-file (--batch | --batch-check) [--follow-symlinks] [--textconv | --filters]"),
NULL
};
OPT_CMDMODE('p', NULL, &opt, N_("pretty-print object's content"), 'p'),
OPT_CMDMODE(0, "textconv", &opt,
N_("for blob objects, run textconv on object's content"), 'c'),
+ OPT_CMDMODE(0, "filters", &opt,
+ N_("for blob objects, run filters on object's content"), 'w'),
+ OPT_STRING(0, "path", &force_path, N_("blob"),
+ N_("use a specific path for --textconv/--filters")),
OPT_BOOL(0, "allow-unknown-type", &unknown_type,
N_("allow -s and -t to work with broken/corrupt objects")),
OPT_BOOL(0, "buffer", &batch.buffer_output, N_("buffer --batch output")),
argc = parse_options(argc, argv, prefix, options, cat_file_usage, 0);
if (opt) {
- if (argc == 1)
+ if (batch.enabled && (opt == 'c' || opt == 'w'))
+ batch.cmdmode = opt;
+ else if (argc == 1)
obj_name = argv[0];
else
usage_with_options(cat_file_usage, options);
} else
usage_with_options(cat_file_usage, options);
}
- if (batch.enabled && (opt || argc)) {
- usage_with_options(cat_file_usage, options);
+ if (batch.enabled) {
+ if (batch.cmdmode != opt || argc)
+ usage_with_options(cat_file_usage, options);
+ if (batch.cmdmode && batch.all_objects)
+ die("--batch-all-objects cannot be combined with "
+ "--textconv nor with --filters");
}
if ((batch.follow_symlinks || batch.all_objects) && !batch.enabled) {
usage_with_options(cat_file_usage, options);
}
+ if (force_path && opt != 'c' && opt != 'w') {
+ error("--path=<path> needs --textconv or --filters");
+ usage_with_options(cat_file_usage, options);
+ }
+
+ if (force_path && batch.enabled) {
+ error("--path=<path> incompatible with --batch");
+ usage_with_options(cat_file_usage, options);
+ }
+
if (batch.buffer_output < 0)
batch.buffer_output = batch.all_objects;
static int to_tempfile;
static char topath[4][TEMPORARY_FILENAME_LENGTH + 1];
-static struct checkout state;
+static struct checkout state = CHECKOUT_INIT;
static void write_tempfile_record(const char *name, const char *prefix)
{
return 0;
}
-static int checkout_stage(int stage, struct cache_entry *ce, int pos,
- struct checkout *state)
+static int checkout_stage(int stage, const struct cache_entry *ce, int pos,
+ const struct checkout *state)
{
while (pos < active_nr &&
!strcmp(active_cache[pos]->name, ce->name)) {
return error(_("path '%s' does not have their version"), ce->name);
}
-static int checkout_merged(int pos, struct checkout *state)
+static int checkout_merged(int pos, const struct checkout *state)
{
struct cache_entry *ce = active_cache[pos];
const char *path = ce->name;
const char *revision)
{
int pos;
- struct checkout state;
+ struct checkout state = CHECKOUT_INIT;
static char *ps_matched;
struct object_id rev;
struct commit *head;
return 1;
/* Now we are committed to check them out */
- memset(&state, 0, sizeof(state));
state.force = 1;
state.refresh_cache = 1;
state.istate = &the_index;
* entries in the index.
*/
- add_files_to_cache(NULL, NULL, 0, 0);
+ add_files_to_cache(NULL, NULL, 0);
/*
* NEEDSWORK: carrying over local changes
* when branches have different end-of-line
int recover_with_dwim = dwim_new_local_branch_ok;
if (!has_dash_dash &&
- (check_filename(NULL, arg) || !no_wildcard(arg)))
+ (check_filename(opts->prefix, arg) || !no_wildcard(arg)))
recover_with_dwim = 0;
/*
* Accept "git checkout foo" and "git checkout foo --"
if (!*source_tree) /* case (1): want a tree */
die(_("reference is not a tree: %s"), arg);
- if (!has_dash_dash) {/* case (3).(d) -> (1) */
+ if (!has_dash_dash) { /* case (3).(d) -> (1) */
/*
* Do not complain the most common case
* git checkout branch
* it would be extremely annoying.
*/
if (argc)
- verify_non_filename(NULL, arg);
+ verify_non_filename(opts->prefix, arg);
} else {
argcount++;
argv++;
}
}
-static int checkout(void)
+static int checkout(int submodule_progress)
{
unsigned char sha1[20];
char *head;
if (max_jobs != -1)
argv_array_pushf(&args, "--jobs=%d", max_jobs);
+ if (submodule_progress)
+ argv_array_push(&args, "--progress");
+
err = run_command_v_opt(args.argv, RUN_GIT_CMD);
argv_array_clear(&args);
}
const char *src_ref_prefix = "refs/heads/";
struct remote *remote;
int err = 0, complete_refs_before_fetch = 1;
+ int submodule_progress;
struct refspec *refspec;
const char *fetch_pattern;
update_head(our_head_points_at, remote_head, reflog_msg.buf);
+ /*
+ * We want to show progress for recursive submodule clones iff
+ * we did so for the main clone. But only the transport knows
+ * the final decision for this flag, so we need to rescue the value
+ * before we free the transport.
+ */
+ submodule_progress = transport->progress;
+
transport_unlock_pack(transport);
transport_disconnect(transport);
}
junk_mode = JUNK_LEAVE_REPO;
- err = checkout();
+ err = checkout(submodule_progress);
strbuf_release(&reflog_msg);
strbuf_release(&branch_top);
*/
if (all || (also && pathspec.nr)) {
hold_locked_index(&index_lock, 1);
- add_files_to_cache(also ? prefix : NULL, &pathspec, 0, 0);
+ add_files_to_cache(also ? prefix : NULL, &pathspec, 0);
refresh_cache_or_die(refresh_flags);
update_main_cache_tree(WRITE_TREE_SILENT);
if (write_locked_index(&the_index, &index_lock, CLOSE_LOCK))
value = normalize_value(argv[0], argv[1]);
ret = git_config_set_in_file_gently(given_config_source.file, argv[0], value);
if (ret == CONFIG_NOTHING_SET)
- error("cannot overwrite multiple values with a single value\n"
- " Use a regexp, --add or --replace-all to change %s.", argv[0]);
+ error(_("cannot overwrite multiple values with a single value\n"
+ " Use a regexp, --add or --replace-all to change %s."), argv[0]);
return ret;
}
else if (actions == ACTION_SET_ALL) {
break;
}
- if (!no_index)
- prefix = setup_git_directory_gently(&nongit);
+ prefix = setup_git_directory_gently(&nongit);
- /*
- * Treat git diff with at least one path outside of the
- * repo the same as if the command would have been executed
- * outside of a git repository. In this case it behaves
- * the same way as "git diff --no-index <a> <b>", which acts
- * as a colourful "diff" replacement.
- */
- if (nongit || ((argc == i + 2) &&
- (!path_inside_repo(prefix, argv[i]) ||
- !path_inside_repo(prefix, argv[i + 1]))))
- no_index = DIFF_NO_INDEX_IMPLICIT;
+ if (!no_index) {
+ /*
+ * Treat git diff with at least one path outside of the
+ * repo the same as if the command would have been executed
+ * outside of a git repository. In this case it behaves
+ * the same way as "git diff --no-index <a> <b>", which acts
+ * as a colourful "diff" replacement.
+ */
+ if (nongit || ((argc == i + 2) &&
+ (!path_inside_repo(prefix, argv[i]) ||
+ !path_inside_repo(prefix, argv[i + 1]))))
+ no_index = DIFF_NO_INDEX_IMPLICIT;
+ }
if (!no_index)
gitmodules_config();
for (i = 0; i < subjects.nr; i++)
if (i >= limit)
- strbuf_addf(out, " ...\n");
+ strbuf_addstr(out, " ...\n");
else
strbuf_addf(out, " %s\n", subjects.items[i].string);
static int pack_refs = 1;
static int prune_reflogs = 1;
-static int aggressive_depth = 250;
+static int aggressive_depth = 50;
static int aggressive_window = 250;
static int gc_auto_threshold = 6700;
static int gc_auto_pack_limit = 50;
int stdin_paths = 0;
int no_filters = 0;
int literally = 0;
+ int nongit = 0;
unsigned flags = HASH_FORMAT_CHECK;
const char *vpath = NULL;
const struct option hash_object_options[] = {
argc = parse_options(argc, argv, NULL, hash_object_options,
hash_object_usage, 0);
- if (flags & HASH_WRITE_OBJECT) {
+ if (flags & HASH_WRITE_OBJECT)
prefix = setup_git_directory();
- prefix_length = prefix ? strlen(prefix) : 0;
- if (vpath && prefix)
- vpath = prefix_filename(prefix, prefix_length, vpath);
- }
+ else
+ prefix = setup_git_directory_gently(&nongit);
+
+ prefix_length = prefix ? strlen(prefix) : 0;
+ if (vpath && prefix)
+ vpath = prefix_filename(prefix, prefix_length, vpath);
git_config(git_default_config, NULL);
/* Just look for `init.templatedir` */
git_config(git_init_db_config, NULL);
- /* First copy the templates -- we might have the default
+ /*
+ * First copy the templates -- we might have the default
* config file there, in which case we would want to read
* from it after installing.
+ *
+ * Before reading that config, we also need to clear out any cached
+ * values (since we've just potentially changed what's available on
+ * disk).
*/
copy_templates(template_path);
-
+ git_config_clear();
+ reset_shared_repository();
git_config(git_default_config, NULL);
- is_bare_repository_cfg = init_is_bare_repository;
- /* reading existing config may have overwrote it */
+ /*
+ * We must make sure command-line options continue to override any
+ * values we might have just re-read from the config.
+ */
+ is_bare_repository_cfg = init_is_bare_repository;
if (init_shared_repository != -1)
set_shared_repository(init_shared_repository);
diff_flush(&opts);
fprintf(rev->diffopt.file, "\n");
- print_signature(rev->diffopt.file);
}
static const char *clean_message_id(const char *msg_id)
return 0;
}
+static int rfc_callback(const struct option *opt, const char *arg, int unset)
+{
+ return subject_prefix_callback(opt, "RFC PATCH", unset);
+}
+
static int numbered_cmdline_opt = 0;
static int numbered_callback(const struct option *opt, const char *arg,
return;
/* Show the base commit */
- fprintf(file, "base-commit: %s\n", oid_to_hex(&bases->base_commit));
+ fprintf(file, "\nbase-commit: %s\n", oid_to_hex(&bases->base_commit));
/* Show the prerequisite patches */
for (i = bases->nr_patch_id - 1; i >= 0; i--)
N_("start numbering patches at <n> instead of 1")),
OPT_INTEGER('v', "reroll-count", &reroll_count,
N_("mark the series as Nth re-roll")),
+ { OPTION_CALLBACK, 0, "rfc", &rev, NULL,
+ N_("Use [RFC PATCH] instead of [PATCH]"),
+ PARSE_OPT_NOARG | PARSE_OPT_NONEG, rfc_callback },
{ OPTION_CALLBACK, 0, "subject-prefix", &rev, N_("prefix"),
N_("Use [<prefix>] instead of [PATCH]"),
PARSE_OPT_NONEG, subject_prefix_callback },
if (numbered && keep_subject)
die (_("-n and -k are mutually exclusive."));
if (keep_subject && subject_prefix)
- die (_("--subject-prefix and -k are mutually exclusive."));
+ die (_("--subject-prefix/--rfc and -k are mutually exclusive."));
rev.preserve_subject = keep_subject;
argc = setup_revisions(argc, argv, &rev, &s_r_opt);
make_cover_letter(&rev, use_stdout,
origin, nr, list, branch_name, quiet);
print_bases(&bases, rev.diffopt.file);
+ print_signature(rev.diffopt.file);
total++;
start_number--;
}
if (!use_stdout)
rev.shown_one = 0;
if (shown) {
+ print_bases(&bases, rev.diffopt.file);
if (rev.mime_boundary)
fprintf(rev.diffopt.file, "\n--%s%s--\n\n\n",
mime_boundary_leader,
rev.mime_boundary);
else
print_signature(rev.diffopt.file);
- print_bases(&bases, rev.diffopt.file);
}
if (!use_stdout)
fclose(rev.diffopt.file);
if (!arg[2])
break;
if (parse_merge_opt(&o, arg + 2))
- die("Unknown option %s", arg);
+ die(_("unknown option %s"), arg);
continue;
}
if (bases_count < ARRAY_SIZE(bases)-1) {
struct object_id *oid = xmalloc(sizeof(struct object_id));
if (get_oid(argv[i], oid))
- die("Could not parse object '%s'", argv[i]);
+ die(_("could not parse object '%s'"), argv[i]);
bases[bases_count++] = oid;
}
else
- warning("Cannot handle more than %d bases. "
- "Ignoring %s.",
+ warning(Q_("cannot handle more than %d base. "
+ "Ignoring %s.",
+ "cannot handle more than %d bases. "
+ "Ignoring %s.",
+ (int)ARRAY_SIZE(bases)-1),
(int)ARRAY_SIZE(bases)-1, argv[i]);
}
if (argc - i != 3) /* "--" "<head>" "<remote>" */
- die("Not handling anything other than two heads merge.");
+ die(_("not handling anything other than two heads merge."));
o.branch1 = argv[++i];
o.branch2 = argv[++i];
if (get_oid(o.branch1, &h1))
- die("Could not resolve ref '%s'", o.branch1);
+ die(_("could not resolve ref '%s'"), o.branch1);
if (get_oid(o.branch2, &h2))
- die("Could not resolve ref '%s'", o.branch2);
+ die(_("could not resolve ref '%s'"), o.branch2);
o.branch1 = better_branch_name(o.branch1);
o.branch2 = better_branch_name(o.branch2);
if (o.verbosity >= 3)
- printf("Merging %s with %s\n", o.branch1, o.branch2);
+ printf(_("Merging %s with %s\n"), o.branch1, o.branch2);
failed = merge_recursive_generic(&o, &h1, &h2, bases_count, bases, &result);
if (failed < 0)
strbuf_reset(&buf);
if (fast_forward == FF_NO)
- strbuf_addf(&buf, "no-ff");
+ strbuf_addstr(&buf, "no-ff");
write_file_buf(git_path_merge_mode(), buf.buf, buf.len);
}
strbuf_reset(&d->buf);
if (launch_editor(d->edit_path, &d->buf, NULL)) {
- die(_("Please supply the note contents using either -m or -F option"));
+ die(_("please supply the note contents using either -m or -F option"));
}
strbuf_stripspace(&d->buf, 1);
}
if (write_sha1_file(d->buf.buf, d->buf.len, blob_type, sha1)) {
error(_("unable to write note object"));
if (d->edit_path)
- error(_("The note contents have been left in %s"),
+ error(_("the note contents have been left in %s"),
d->edit_path);
exit(128);
}
strbuf_addch(&d->buf, '\n');
if (get_sha1(arg, object))
- die(_("Failed to resolve '%s' as a valid ref."), arg);
+ die(_("failed to resolve '%s' as a valid ref."), arg);
if (!(buf = read_sha1_file(object, &type, &len))) {
free(buf);
- die(_("Failed to read object '%s'."), arg);
+ die(_("failed to read object '%s'."), arg);
}
if (type != OBJ_BLOB) {
free(buf);
- die(_("Cannot read note data from non-blob object '%s'."), arg);
+ die(_("cannot read note data from non-blob object '%s'."), arg);
}
strbuf_add(&d->buf, buf, len);
free(buf);
split = strbuf_split(&buf, ' ');
if (!split[0] || !split[1])
- die(_("Malformed input line: '%s'."), buf.buf);
+ die(_("malformed input line: '%s'."), buf.buf);
strbuf_rtrim(split[0]);
strbuf_rtrim(split[1]);
if (get_sha1(split[0]->buf, from_obj))
- die(_("Failed to resolve '%s' as a valid ref."), split[0]->buf);
+ die(_("failed to resolve '%s' as a valid ref."), split[0]->buf);
if (get_sha1(split[1]->buf, to_obj))
- die(_("Failed to resolve '%s' as a valid ref."), split[1]->buf);
+ die(_("failed to resolve '%s' as a valid ref."), split[1]->buf);
if (rewrite_cmd)
err = copy_note_for_rewrite(c, from_obj, to_obj);
combine_notes_overwrite);
if (err) {
- error(_("Failed to copy notes from '%s' to '%s'"),
+ error(_("failed to copy notes from '%s' to '%s'"),
split[0]->buf, split[1]->buf);
ret = 1;
}
ref = (flags & NOTES_INIT_WRITABLE) ? t->update_ref : t->ref;
if (!starts_with(ref, "refs/notes/"))
- die("Refusing to %s notes in %s (outside of refs/notes/)",
+ /* TRANSLATORS: the first %s will be replaced by a
+ git notes command: 'add', 'merge', 'remove', etc.*/
+ die(_("refusing to %s notes in %s (outside of refs/notes/)"),
subcommand, ref);
return t;
}
t = init_notes_check("list", 0);
if (argc) {
if (get_sha1(argv[0], object))
- die(_("Failed to resolve '%s' as a valid ref."), argv[0]);
+ die(_("failed to resolve '%s' as a valid ref."), argv[0]);
note = get_note(t, object);
if (note) {
puts(sha1_to_hex(note));
retval = 0;
} else
- retval = error(_("No note found for object %s."),
+ retval = error(_("no note found for object %s."),
sha1_to_hex(object));
} else
retval = for_each_note(t, 0, list_each_note, NULL);
object_ref = argc > 1 ? argv[1] : "HEAD";
if (get_sha1(object_ref, object))
- die(_("Failed to resolve '%s' as a valid ref."), object_ref);
+ die(_("failed to resolve '%s' as a valid ref."), object_ref);
t = init_notes_check("add", NOTES_INIT_WRITABLE);
note = get_note(t, object);
}
if (get_sha1(argv[0], from_obj))
- die(_("Failed to resolve '%s' as a valid ref."), argv[0]);
+ die(_("failed to resolve '%s' as a valid ref."), argv[0]);
object_ref = 1 < argc ? argv[1] : "HEAD";
if (get_sha1(object_ref, object))
- die(_("Failed to resolve '%s' as a valid ref."), object_ref);
+ die(_("failed to resolve '%s' as a valid ref."), object_ref);
t = init_notes_check("copy", NOTES_INIT_WRITABLE);
note = get_note(t, object);
from_note = get_note(t, from_obj);
if (!from_note) {
- retval = error(_("Missing notes on source object %s. Cannot "
+ retval = error(_("missing notes on source object %s. Cannot "
"copy."), sha1_to_hex(from_obj));
goto out;
}
object_ref = 1 < argc ? argv[1] : "HEAD";
if (get_sha1(object_ref, object))
- die(_("Failed to resolve '%s' as a valid ref."), object_ref);
+ die(_("failed to resolve '%s' as a valid ref."), object_ref);
t = init_notes_check(argv[0], NOTES_INIT_WRITABLE);
note = get_note(t, object);
object_ref = argc ? argv[0] : "HEAD";
if (get_sha1(object_ref, object))
- die(_("Failed to resolve '%s' as a valid ref."), object_ref);
+ die(_("failed to resolve '%s' as a valid ref."), object_ref);
t = init_notes_check("show", 0);
note = get_note(t, object);
if (!note)
- retval = error(_("No note found for object %s."),
+ retval = error(_("no note found for object %s."),
sha1_to_hex(object));
else {
const char *show_args[3] = {"show", sha1_to_hex(note), NULL};
*/
if (delete_ref("NOTES_MERGE_PARTIAL", NULL, 0))
- ret += error("Failed to delete ref NOTES_MERGE_PARTIAL");
+ ret += error(_("failed to delete ref NOTES_MERGE_PARTIAL"));
if (delete_ref("NOTES_MERGE_REF", NULL, REF_NODEREF))
- ret += error("Failed to delete ref NOTES_MERGE_REF");
+ ret += error(_("failed to delete ref NOTES_MERGE_REF"));
if (notes_merge_abort(o))
- ret += error("Failed to remove 'git notes merge' worktree");
+ ret += error(_("failed to remove 'git notes merge' worktree"));
return ret;
}
*/
if (get_sha1("NOTES_MERGE_PARTIAL", sha1))
- die("Failed to read ref NOTES_MERGE_PARTIAL");
+ die(_("failed to read ref NOTES_MERGE_PARTIAL"));
else if (!(partial = lookup_commit_reference(sha1)))
- die("Could not find commit from NOTES_MERGE_PARTIAL.");
+ die(_("could not find commit from NOTES_MERGE_PARTIAL."));
else if (parse_commit(partial))
- die("Could not parse commit from NOTES_MERGE_PARTIAL.");
+ die(_("could not parse commit from NOTES_MERGE_PARTIAL."));
if (partial->parents)
hashcpy(parent_sha1, partial->parents->item->object.oid.hash);
o->local_ref = local_ref_to_free =
resolve_refdup("NOTES_MERGE_REF", 0, sha1, NULL);
if (!o->local_ref)
- die("Failed to resolve NOTES_MERGE_REF");
+ die(_("failed to resolve NOTES_MERGE_REF"));
if (notes_merge_commit(o, t, partial, sha1))
- die("Failed to finalize notes merge");
+ die(_("failed to finalize notes merge"));
/* Reuse existing commit message in reflog message */
memset(&pretty_ctx, 0, sizeof(pretty_ctx));
}
if (do_merge && argc != 1) {
- error(_("Must specify a notes ref to merge"));
+ error(_("must specify a notes ref to merge"));
usage_with_options(git_notes_merge_usage, options);
} else if (!do_merge && argc) {
error(_("too many parameters"));
if (strategy) {
if (parse_notes_merge_strategy(strategy, &o.strategy)) {
- error(_("Unknown -s/--strategy: %s"), strategy);
+ error(_("unknown -s/--strategy: %s"), strategy);
usage_with_options(git_notes_merge_usage, options);
}
} else {
/* Store ref-to-be-updated into .git/NOTES_MERGE_REF */
wt = find_shared_symref("NOTES_MERGE_REF", default_notes_ref());
if (wt)
- die(_("A notes merge into %s is already in-progress at %s"),
+ die(_("a notes merge into %s is already in-progress at %s"),
default_notes_ref(), wt->path);
if (create_symref("NOTES_MERGE_REF", default_notes_ref(), NULL))
- die(_("Failed to store link to current notes ref (%s)"),
+ die(_("failed to store link to current notes ref (%s)"),
default_notes_ref());
printf(_("Automatic notes merge failed. Fix conflicts in %s and "
"commit the result with 'git notes merge --commit', or "
else if (!strcmp(argv[0], "get-ref"))
result = get_ref(argc, argv, prefix);
else {
- result = error(_("Unknown subcommand: %s"), argv[0]);
+ result = error(_("unknown subcommand: %s"), argv[0]);
usage_with_options(git_notes_usage, options);
}
static uint32_t reuse_packfile_objects;
static off_t reuse_packfile_offset;
-static int use_bitmap_index = 1;
+static int use_bitmap_index_default = 1;
+static int use_bitmap_index = -1;
static int write_bitmap_index;
static uint16_t write_bitmap_options;
return 1;
}
+static int want_found_object(int exclude, struct packed_git *p)
+{
+ if (exclude)
+ return 1;
+ if (incremental)
+ return 0;
+
+ /*
+ * When asked to do --local (do not include an object that appears in a
+ * pack we borrow from elsewhere) or --honor-pack-keep (do not include
+ * an object that appears in a pack marked with .keep), finding a pack
+ * that matches the criteria is sufficient for us to decide to omit it.
+ * However, even if this pack does not satisfy the criteria, we need to
+	 * However, even if this pack does not satisfy the criteria, we need to
+	 * make sure no copy of this object appears in _any_ pack that would
+	 * make us omit the object, so we need to check all the packs.
+	 *
+	 * We can however first check whether these options can possibly matter;
+	 * if they do not matter we know we want the object in the generated pack.
+ * not know either way, and it needs to check more packs.
+ */
+ if (!ignore_packed_keep &&
+ (!local || !have_non_local_packs))
+ return 1;
+
+ if (local && !p->pack_local)
+ return 0;
+ if (ignore_packed_keep && p->pack_local && p->pack_keep)
+ return 0;
+
+ /* we don't know yet; keep looking for more packs */
+ return -1;
+}
+
/*
* Check whether we want the object in the pack (e.g., we do not want
* objects found in non-local stores if the "--local" option was used).
*
- * As a side effect of this check, we will find the packed version of this
- * object, if any. We therefore pass out the pack information to avoid having
- * to look it up again later.
+ * If the caller already knows an existing pack it wants to take the object
+ * from, that is passed in *found_pack and *found_offset; otherwise this
+ * function finds if there is any pack that has the object and returns the pack
+ * and its offset in these variables.
*/
static int want_object_in_pack(const unsigned char *sha1,
int exclude,
off_t *found_offset)
{
struct packed_git *p;
+ int want;
if (!exclude && local && has_loose_object_nonlocal(sha1))
return 0;
- *found_pack = NULL;
- *found_offset = 0;
+ /*
+	 * If we already know the pack the object lives in, start checks from that
+ * pack - in the usual case when neither --local was given nor .keep files
+ * are present we will determine the answer right now.
+ */
+ if (*found_pack) {
+ want = want_found_object(exclude, *found_pack);
+ if (want != -1)
+ return want;
+ }
for (p = packed_git; p; p = p->next) {
- off_t offset = find_pack_entry_one(sha1, p);
+ off_t offset;
+
+ if (p == *found_pack)
+ offset = *found_offset;
+ else
+ offset = find_pack_entry_one(sha1, p);
+
if (offset) {
if (!*found_pack) {
if (!is_pack_valid(p))
*found_offset = offset;
*found_pack = p;
}
- if (exclude)
- return 1;
- if (incremental)
- return 0;
-
- /*
- * When asked to do --local (do not include an
- * object that appears in a pack we borrow
- * from elsewhere) or --honor-pack-keep (do not
- * include an object that appears in a pack marked
- * with .keep), we need to make sure no copy of this
- * object come from in _any_ pack that causes us to
- * omit it, and need to complete this loop. When
- * neither option is in effect, we know the object
- * we just found is going to be packed, so break
- * out of the loop to return 1 now.
- */
- if (!ignore_packed_keep &&
- (!local || !have_non_local_packs))
- break;
-
- if (local && !p->pack_local)
- return 0;
- if (ignore_packed_keep && p->pack_local && p->pack_keep)
- return 0;
+ want = want_found_object(exclude, p);
+ if (want != -1)
+ return want;
}
}
static int add_object_entry(const unsigned char *sha1, enum object_type type,
const char *name, int exclude)
{
- struct packed_git *found_pack;
- off_t found_offset;
+ struct packed_git *found_pack = NULL;
+ off_t found_offset = 0;
uint32_t index_pos;
if (have_duplicate_entry(sha1, exclude, &index_pos))
if (have_duplicate_entry(sha1, 0, &index_pos))
return 0;
+ if (!want_object_in_pack(sha1, 0, &pack, &offset))
+ return 0;
+
create_object_entry(sha1, type, name_hash, 0, 0, index_pos, pack, offset);
display_progress(progress_state, nr_result);
write_bitmap_options &= ~BITMAP_OPT_HASH_CACHE;
}
if (!strcmp(k, "pack.usebitmaps")) {
- use_bitmap_index = git_config_bool(k, v);
+ use_bitmap_index_default = git_config_bool(k, v);
return 0;
}
if (!strcmp(k, "pack.threads")) {
}
/*
- * This tracks any options which a reader of the pack might
- * not understand, and which would therefore prevent blind reuse
- * of what we have on disk.
+ * This tracks any options which pack-reuse code expects to be on, or which a
+ * reader of the pack might not understand, and which would therefore prevent
+ * blind reuse of what we have on disk.
*/
static int pack_options_allow_reuse(void)
{
- return allow_ofs_delta;
+ return pack_to_stdout && allow_ofs_delta;
}
static int get_object_list_from_bitmap(struct rev_info *revs)
if (!rev_list_all || !rev_list_reflog || !rev_list_index)
unpack_unreachable_expiration = 0;
- if (!use_internal_rev_list || !pack_to_stdout || is_repository_shallow())
+ /*
+ * "soft" reasons not to use bitmaps - for on-disk repack by default we want
+ *
+ * - to produce good pack (with bitmap index not-yet-packed objects are
+ * packed in suboptimal order).
+ *
+ * - to use more robust pack-generation codepath (avoiding possible
+ * bugs in bitmap code and possible bitmap index corruption).
+ */
+ if (!pack_to_stdout)
+ use_bitmap_index_default = 0;
+
+ if (use_bitmap_index < 0)
+ use_bitmap_index = use_bitmap_index_default;
+
+ /* "hard" reasons not to use bitmaps; these just won't work at all */
+ if (!use_internal_rev_list || (!pack_to_stdout && write_bitmap_index) || is_repository_shallow())
use_bitmap_index = 0;
if (pack_to_stdout || !rev_list_all)
return !strcmp(head_name, ref);
}
-static char *refuse_unconfigured_deny_msg[] = {
- "By default, updating the current branch in a non-bare repository",
- "is denied, because it will make the index and work tree inconsistent",
- "with what you pushed, and will require 'git reset --hard' to match",
- "the work tree to HEAD.",
- "",
- "You can set 'receive.denyCurrentBranch' configuration variable to",
- "'ignore' or 'warn' in the remote repository to allow pushing into",
- "its current branch; however, this is not recommended unless you",
- "arranged to update its work tree to match what you pushed in some",
- "other way.",
- "",
- "To squelch this message and still keep the default behaviour, set",
- "'receive.denyCurrentBranch' configuration variable to 'refuse'."
-};
+static char *refuse_unconfigured_deny_msg =
+ N_("By default, updating the current branch in a non-bare repository\n"
+ "is denied, because it will make the index and work tree inconsistent\n"
+ "with what you pushed, and will require 'git reset --hard' to match\n"
+ "the work tree to HEAD.\n"
+ "\n"
+ "You can set 'receive.denyCurrentBranch' configuration variable to\n"
+ "'ignore' or 'warn' in the remote repository to allow pushing into\n"
+ "its current branch; however, this is not recommended unless you\n"
+ "arranged to update its work tree to match what you pushed in some\n"
+ "other way.\n"
+ "\n"
+ "To squelch this message and still keep the default behaviour, set\n"
+ "'receive.denyCurrentBranch' configuration variable to 'refuse'.");
static void refuse_unconfigured_deny(void)
{
- int i;
- for (i = 0; i < ARRAY_SIZE(refuse_unconfigured_deny_msg); i++)
- rp_error("%s", refuse_unconfigured_deny_msg[i]);
+ rp_error("%s", _(refuse_unconfigured_deny_msg));
}
-static char *refuse_unconfigured_deny_delete_current_msg[] = {
- "By default, deleting the current branch is denied, because the next",
- "'git clone' won't result in any file checked out, causing confusion.",
- "",
- "You can set 'receive.denyDeleteCurrent' configuration variable to",
- "'warn' or 'ignore' in the remote repository to allow deleting the",
- "current branch, with or without a warning message.",
- "",
- "To squelch this message, you can set it to 'refuse'."
-};
+static char *refuse_unconfigured_deny_delete_current_msg =
+ N_("By default, deleting the current branch is denied, because the next\n"
+ "'git clone' won't result in any file checked out, causing confusion.\n"
+ "\n"
+ "You can set 'receive.denyDeleteCurrent' configuration variable to\n"
+ "'warn' or 'ignore' in the remote repository to allow deleting the\n"
+ "current branch, with or without a warning message.\n"
+ "\n"
+ "To squelch this message, you can set it to 'refuse'.");
static void refuse_unconfigured_deny_delete_current(void)
{
- int i;
- for (i = 0;
- i < ARRAY_SIZE(refuse_unconfigured_deny_delete_current_msg);
- i++)
- rp_error("%s", refuse_unconfigured_deny_delete_current_msg[i]);
+ rp_error("%s", _(refuse_unconfigured_deny_delete_current_msg));
}
static int command_singleton_iterator(void *cb_data, unsigned char sha1[20]);
return 0;
}
if (MAX_REVS <= ref_name_cnt) {
- warning("ignoring %s; cannot handle more than %d refs",
- refname, MAX_REVS);
+ warning(Q_("ignoring %s; cannot handle more than %d ref",
+ "ignoring %s; cannot handle more than %d refs",
+ MAX_REVS), refname, MAX_REVS);
return 0;
}
ref_name[ref_name_cnt++] = xstrdup(refname);
for_each_ref(append_matching_ref, NULL);
if (saved_matches == ref_name_cnt &&
ref_name_cnt < MAX_REVS)
- error("no matching refs with %s", av);
+ error(_("no matching refs with %s"), av);
if (saved_matches + 1 < ref_name_cnt)
sort_ref_range(saved_matches, ref_name_cnt);
return;
*
* Also --all and --remotes do not make sense either.
*/
- die("--reflog is incompatible with --all, --remotes, "
- "--independent or --merge-base");
+ die(_("--reflog is incompatible with --all, --remotes, "
+ "--independent or --merge-base"));
}
/* If nothing is specified, show all branches by default */
av = fake_av;
ac = 1;
if (!*av)
- die("no branches given, and HEAD is not valid");
+ die(_("no branches given, and HEAD is not valid"));
}
if (ac != 1)
- die("--reflog option needs one branch name");
+ die(_("--reflog option needs one branch name"));
if (MAX_REVS < reflog)
- die("Only %d entries can be shown at one time.",
- MAX_REVS);
+ die(Q_("only %d entry can be shown at one time.",
+ "only %d entries can be shown at one time.",
+ MAX_REVS), MAX_REVS);
if (!dwim_ref(*av, strlen(*av), oid.hash, &ref))
- die("No such ref %s", *av);
+ die(_("no such ref %s"), *av);
/* Has the base been specified? */
if (reflog_base) {
unsigned int flag = 1u << (num_rev + REV_SHIFT);
if (MAX_REVS <= num_rev)
- die("cannot handle more than %d revs.", MAX_REVS);
+ die(Q_("cannot handle more than %d rev.",
+ "cannot handle more than %d revs.",
+ MAX_REVS), MAX_REVS);
if (get_sha1(ref_name[num_rev], revkey.hash))
- die("'%s' is not a valid ref.", ref_name[num_rev]);
+ die(_("'%s' is not a valid ref."), ref_name[num_rev]);
commit = lookup_commit_reference(revkey.hash);
if (!commit)
- die("cannot find commit %s (%s)",
+ die(_("cannot find commit %s (%s)"),
ref_name[num_rev], oid_to_hex(&revkey));
parse_commit(commit);
mark_seen(commit, &seen);
}
static int clone_submodule(const char *path, const char *gitdir, const char *url,
- const char *depth, struct string_list *reference, int quiet)
+ const char *depth, struct string_list *reference,
+ int quiet, int progress)
{
struct child_process cp = CHILD_PROCESS_INIT;
argv_array_push(&cp.args, "--no-checkout");
if (quiet)
argv_array_push(&cp.args, "--quiet");
+ if (progress)
+ argv_array_push(&cp.args, "--progress");
if (depth && *depth)
argv_array_pushl(&cp.args, "--depth", depth, NULL);
if (reference->nr) {
{
const char *name = NULL, *url = NULL, *depth = NULL;
int quiet = 0;
+ int progress = 0;
FILE *submodule_dot_git;
char *p, *path = NULL, *sm_gitdir;
struct strbuf rel_path = STRBUF_INIT;
N_("string"),
N_("depth for shallow clones")),
OPT__QUIET(&quiet, "Suppress output for cloning a submodule"),
+ OPT_BOOL(0, "progress", &progress,
+ N_("force cloning progress")),
OPT_END()
};
prepare_possible_alternates(name, &reference);
- if (clone_submodule(path, sm_gitdir, url, depth, &reference, quiet))
+ if (clone_submodule(path, sm_gitdir, url, depth, &reference,
+ quiet, progress))
die(_("clone of '%s' into submodule path '%s' failed"),
url, path);
} else {
struct submodule_update_strategy update;
/* configuration parameters which are passed on to the children */
+ int progress;
int quiet;
int recommend_shallow;
struct string_list references;
int failed_clones_nr, failed_clones_alloc;
};
#define SUBMODULE_UPDATE_CLONE_INIT {0, MODULE_LIST_INIT, 0, \
- SUBMODULE_UPDATE_STRATEGY_INIT, 0, -1, STRING_LIST_INIT_DUP, \
+ SUBMODULE_UPDATE_STRATEGY_INIT, 0, 0, -1, STRING_LIST_INIT_DUP, \
NULL, NULL, NULL, \
STRING_LIST_INIT_DUP, 0, NULL, 0, 0}
child->err = -1;
argv_array_push(&child->args, "submodule--helper");
argv_array_push(&child->args, "clone");
+ if (suc->progress)
+ argv_array_push(&child->args, "--progress");
if (suc->quiet)
argv_array_push(&child->args, "--quiet");
if (suc->prefix)
ce = suc->failed_clones[index];
if (!prepare_to_clone_next_submodule(ce, child, suc, err)) {
suc->current ++;
- strbuf_addf(err, "BUG: submodule considered for cloning,"
- "doesn't need cloning any more?\n");
+ strbuf_addstr(err, "BUG: submodule considered for "
+ "cloning, doesn't need cloning "
+ "any more?\n");
return 0;
}
p = xmalloc(sizeof(*p));
OPT_BOOL(0, "recommend-shallow", &suc.recommend_shallow,
N_("whether the initial clone should follow the shallow recommendation")),
OPT__QUIET(&suc.quiet, N_("don't print cloning progress")),
+ OPT_BOOL(0, "progress", &suc.progress,
+ N_("force cloning progress")),
OPT_END()
};
return 0;
}
-static void chmod_path(int flip, const char *path)
+static void chmod_path(char flip, const char *path)
{
int pos;
struct cache_entry *ce;
- unsigned int mode;
pos = cache_name_pos(path, strlen(path));
if (pos < 0)
goto fail;
ce = active_cache[pos];
- mode = ce->ce_mode;
- if (!S_ISREG(mode))
- goto fail;
- switch (flip) {
- case '+':
- ce->ce_mode |= 0111; break;
- case '-':
- ce->ce_mode &= ~0111; break;
- default:
+ if (chmod_cache_entry(ce, flip) < 0)
goto fail;
- }
- cache_tree_invalidate_path(&the_index, path);
- ce->ce_flags |= CE_UPDATE_IN_BASE;
- active_cache_changed |= CE_ENTRY_CHANGED;
+
report("chmod %cx '%s'", flip, path);
return;
fail:
break;
case UC_DISABLE:
if (git_config_get_untracked_cache() == 1)
- warning("core.untrackedCache is set to true; "
- "remove or change it, if you really want to "
- "disable the untracked cache");
+ warning(_("core.untrackedCache is set to true; "
+ "remove or change it, if you really want to "
+ "disable the untracked cache"));
remove_untracked_cache(&the_index);
report(_("Untracked cache disabled"));
break;
case UC_ENABLE:
case UC_FORCE:
if (git_config_get_untracked_cache() == 0)
- warning("core.untrackedCache is set to false; "
- "remove or change it, if you really want to "
- "enable the untracked cache");
+ warning(_("core.untrackedCache is set to false; "
+ "remove or change it, if you really want to "
+ "enable the untracked cache"));
add_untracked_cache(&the_index);
report(_("Untracked cache enabled for '%s'"), get_git_work_tree());
break;
#define rename_cache_entry_at(pos, new_name) rename_index_entry_at(&the_index, (pos), (new_name))
#define remove_cache_entry_at(pos) remove_index_entry_at(&the_index, (pos))
#define remove_file_from_cache(path) remove_file_from_index(&the_index, (path))
-#define add_to_cache(path, st, flags) add_to_index(&the_index, (path), (st), (flags), 0)
-#define add_file_to_cache(path, flags) add_file_to_index(&the_index, (path), (flags), 0)
+#define add_to_cache(path, st, flags) add_to_index(&the_index, (path), (st), (flags))
+#define add_file_to_cache(path, flags) add_file_to_index(&the_index, (path), (flags))
+#define chmod_cache_entry(ce, flip) chmod_index_entry(&the_index, (ce), (flip))
#define refresh_cache(flags) refresh_index(&the_index, (flags), NULL, NULL, NULL)
#define ce_match_stat(ce, st, options) ie_match_stat(&the_index, (ce), (st), (options))
#define ce_modified(ce, st, options) ie_modified(&the_index, (ce), (st), (options))
*/
extern const char * const local_repo_env[];
+/*
+ * Returns true iff we have a configured git repository (either via
+ * setup_git_directory, or in the environment via $GIT_DIR).
+ */
+int have_git_dir(void);
+
extern int is_bare_repository_cfg;
extern int is_bare_repository(void);
extern int is_inside_git_dir(void);
#define ADD_CACHE_IGNORE_ERRORS 4
#define ADD_CACHE_IGNORE_REMOVAL 8
#define ADD_CACHE_INTENT 16
-extern int add_to_index(struct index_state *, const char *path, struct stat *, int flags, int force_mode);
-extern int add_file_to_index(struct index_state *, const char *path, int flags, int force_mode);
+extern int add_to_index(struct index_state *, const char *path, struct stat *, int flags);
+extern int add_file_to_index(struct index_state *, const char *path, int flags);
extern struct cache_entry *make_cache_entry(unsigned int mode, const unsigned char *sha1, const char *path, int stage, unsigned int refresh_options);
+extern int chmod_index_entry(struct index_state *, struct cache_entry *ce, char flip);
extern int ce_same_name(const struct cache_entry *a, const struct cache_entry *b);
extern void set_object_name_for_intent_to_add_entry(struct cache_entry *ce);
extern int index_name_is_other(const struct index_state *, const char *, int);
extern unsigned long big_file_threshold;
extern unsigned long pack_size_limit_cfg;
+/*
+ * Accessors for the core.sharedrepository config which lazy-load the value
+ * from the config (if not already set). The "reset" function can be
+ * used to unset "set" or cached value, meaning that the value will be loaded
+ * fresh from the config file on the next call to get_shared_repository().
+ */
void set_shared_repository(int value);
int get_shared_repository(void);
+void reset_shared_repository(void);
/*
* Do replace refs need to be checked this run? This variable is
not_new:1,
refresh_cache:1;
};
+#define CHECKOUT_INIT { NULL, "" }
#define TEMPORARY_FILENAME_LENGTH 25
extern int checkout_entry(struct cache_entry *ce, const struct checkout *state, char *topath);
/* pager.c */
extern void setup_pager(void);
-extern const char *pager_program;
extern int pager_in_use(void);
extern int pager_use_color;
extern int term_columns(void);
* return 0 if success, 1 - if addition of a file failed and
* ADD_FILES_IGNORE_ERRORS was specified in flags
*/
-int add_files_to_cache(const char *prefix, const struct pathspec *pathspec, int flags, int force_mode);
+int add_files_to_cache(const char *prefix, const struct pathspec *pathspec, int flags);
/* diff.c */
extern int diff_auto_refresh_index;
}
static const char commit_utf8_warn[] =
-"Warning: commit message did not conform to UTF-8.\n"
-"You may want to amend it after fixing the message, or set the config\n"
-"variable i18n.commitencoding to the encoding your project uses.\n";
+N_("Warning: commit message did not conform to UTF-8.\n"
+ "You may want to amend it after fixing the message, or set the config\n"
+ "variable i18n.commitencoding to the encoding your project uses.\n");
int commit_tree_extended(const char *msg, size_t msg_len,
const unsigned char *tree,
/* And check the encoding */
if (encoding_is_utf8 && !verify_utf8(&buffer))
- fprintf(stderr, commit_utf8_warn);
+ fprintf(stderr, _(commit_utf8_warn));
if (sign_commit && do_sign_commit(&buffer, sign_commit))
return -1;
return 0;
}
- if (!strcmp(var, "core.pager"))
- return git_config_string(&pager_program, var, value);
-
if (!strcmp(var, "core.editor"))
return git_config_string(&editor_program, var, value);
int ret = 0;
char *xdg_config = xdg_config_home("config");
char *user_config = expand_user_path("~/.gitconfig");
- char *repo_config = git_pathdup("config");
+ char *repo_config = have_git_dir() ? git_pathdup("config") : NULL;
current_parsing_scope = CONFIG_SCOPE_SYSTEM;
if (git_config_system() && !access_or_die(git_etc_gitconfig(), R_OK, 0))
return check_ref(ref->name, flags);
}
-static void die_initial_contact(int got_at_least_one_head)
+static void die_initial_contact(int unexpected)
{
- if (got_at_least_one_head)
- die("The remote end hung up upon initial contact");
+ if (unexpected)
+ die(_("The remote end hung up upon initial contact"));
else
- die("Could not read from remote repository.\n\n"
- "Please make sure you have the correct access rights\n"
- "and the repository exists.");
+ die(_("Could not read from remote repository.\n\n"
+ "Please make sure you have the correct access rights\n"
+ "and the repository exists."));
}
static void parse_one_symref_info(struct string_list *symref, const char *val, int len)
struct sha1_array *shallow_points)
{
struct ref **orig_list = list;
- int got_at_least_one_head = 0;
+
+ /*
+ * A hang-up after seeing some response from the other end
+ * means that it is unexpected, as we know the other end is
+ * willing to talk to us. A hang-up before seeing any
+ * response does not necessarily mean an ACL problem, though.
+ */
+ int saw_response;
+ int got_dummy_ref_with_capabilities_declaration = 0;
*list = NULL;
- for (;;) {
+ for (saw_response = 0; ; saw_response = 1) {
struct ref *ref;
struct object_id old_oid;
char *name;
PACKET_READ_GENTLE_ON_EOF |
PACKET_READ_CHOMP_NEWLINE);
if (len < 0)
- die_initial_contact(got_at_least_one_head);
+ die_initial_contact(saw_response);
if (!len)
break;
continue;
}
+ if (!strcmp(name, "capabilities^{}")) {
+ if (saw_response)
+ die("protocol error: unexpected capabilities^{}");
+ if (got_dummy_ref_with_capabilities_declaration)
+ die("protocol error: multiple capabilities^{}");
+ got_dummy_ref_with_capabilities_declaration = 1;
+ continue;
+ }
+
if (!check_ref(name, flags))
continue;
+
+ if (got_dummy_ref_with_capabilities_declaration)
+ die("protocol error: unexpected ref after capabilities^{}");
+
ref = alloc_ref(buffer + GIT_SHA1_HEXSZ + 1);
oidcpy(&ref->old_oid, &old_oid);
*list = ref;
list = &ref->next;
- got_at_least_one_head = 1;
}
annotate_refs_with_symref_info(*orig_list);
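For context, the dummy advertisement that the code above now tolerates is the single ref line a server sends when it has nothing else to advertise: a null object name, the fake ref name "capabilities^{}", and the capability list after a NUL byte. The line below is only an illustration (the capability list varies from server to server), not a transcript from this series:

0000000000000000000000000000000000000000 capabilities^{}\0multi_ack thin-pack side-band-64k shallow no-progress include-tag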
+ oid_to_hex(E1)
@@
-expression E1;
+expression E1, E2;
@@
-- sha1_to_hex_r(E1.hash)
-+ oid_to_hex_r(&E1)
+- sha1_to_hex_r(E1, E2.hash)
++ oid_to_hex_r(E1, &E2)
@@
-expression E1;
+expression E1, E2;
@@
-- sha1_to_hex_r(E1->hash)
-+ oid_to_hex_r(E1)
+- sha1_to_hex_r(E1, E2->hash)
++ oid_to_hex_r(E1, E2)
@@
expression E1;
--- /dev/null
+@@
+expression E1, E2;
+@@
+- strbuf_addf(E1, E2);
++ strbuf_addstr(E1, E2);
DIFF_OPT_SET(&revs->diffopt, NO_INDEX);
+ DIFF_OPT_SET(&revs->diffopt, RELATIVE_NAME);
+ revs->diffopt.prefix = prefix;
+
revs->max_count = -2;
diff_setup_done(&revs->diffopt);
#endif
static int diff_detect_rename_default;
+static int diff_indent_heuristic; /* experimental */
static int diff_compaction_heuristic; /* experimental */
static int diff_rename_limit_default = 400;
static int diff_suppress_blank_empty;
GIT_COLOR_NORMAL, /* FUNCINFO */
};
+static NORETURN void die_want_option(const char *option_name)
+{
+ die(_("option '%s' requires a value"), option_name);
+}
+
static int parse_diff_color_slot(const char *var)
{
if (!strcasecmp(var, "context") || !strcasecmp(var, "plain"))
diff_detect_rename_default = 1;
}
+int git_diff_heuristic_config(const char *var, const char *value, void *cb)
+{
+ if (!strcmp(var, "diff.indentheuristic")) {
+ diff_indent_heuristic = git_config_bool(var, value);
+ if (diff_indent_heuristic)
+ diff_compaction_heuristic = 0;
+ }
+ if (!strcmp(var, "diff.compactionheuristic")) {
+ diff_compaction_heuristic = git_config_bool(var, value);
+ if (diff_compaction_heuristic)
+ diff_indent_heuristic = 0;
+ }
+ return 0;
+}
+
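To make the precedence concrete: whichever of the two keys git reads last (and parses as true) wins, and the other heuristic is cleared. A hypothetical illustration using git config, where the key added second sits later in the file and is therefore read last:

git config diff.compactionHeuristic true
git config diff.indentHeuristic true
git diff    # the indent heuristic applies; the compaction heuristic was cleared when the later key was read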
int git_diff_ui_config(const char *var, const char *value, void *cb)
{
if (!strcmp(var, "diff.color") || !strcmp(var, "color.diff")) {
diff_detect_rename_default = git_config_rename(var, value);
return 0;
}
- if (!strcmp(var, "diff.compactionheuristic")) {
- diff_compaction_heuristic = git_config_bool(var, value);
- return 0;
- }
if (!strcmp(var, "diff.autorefreshindex")) {
diff_auto_refresh_index = git_config_bool(var, value);
return 0;
return 0;
}
+ if (git_diff_heuristic_config(var, value, cb) < 0)
+ return -1;
if (git_color_config(var, value, cb) < 0)
return -1;
{
if (word_regex && *begin < buffer->size) {
regmatch_t match[1];
- if (!regexec(word_regex, buffer->ptr + *begin, 1, match, 0)) {
+ if (!regexec_buf(word_regex, buffer->ptr + *begin,
+ buffer->size - *begin, 1, match, 0)) {
char *p = memchr(buffer->ptr + *begin + match[0].rm_so,
'\n', match[0].rm_eo - match[0].rm_so);
*end = p ? p - buffer->ptr : match[0].rm_eo + *begin;
options->use_color = diff_use_color_default;
options->detect_rename = diff_detect_rename_default;
options->xdl_opts |= diff_algorithm;
- if (diff_compaction_heuristic)
+ if (diff_indent_heuristic)
+ DIFF_XDL_SET(options, INDENT_HEURISTIC);
+ else if (diff_compaction_heuristic)
DIFF_XDL_SET(options, COMPACTION_HEURISTIC);
options->orderfile = diff_order_file_cfg;
if (options->output_format & DIFF_FORMAT_NO_OUTPUT)
count++;
if (count > 1)
- die("--name-only, --name-status, --check and -s are mutually exclusive");
+ die(_("--name-only, --name-status, --check and -s are mutually exclusive"));
/*
* Most of the time we can say "there are changes"
if (*arg == '=')
width = strtoul(arg + 1, &end, 10);
else if (!*arg && !av[1])
- die("Option '--stat-width' requires a value");
+ die_want_option("--stat-width");
else if (!*arg) {
width = strtoul(av[1], &end, 10);
argcount = 2;
if (*arg == '=')
name_width = strtoul(arg + 1, &end, 10);
else if (!*arg && !av[1])
- die("Option '--stat-name-width' requires a value");
+ die_want_option("--stat-name-width");
else if (!*arg) {
name_width = strtoul(av[1], &end, 10);
argcount = 2;
if (*arg == '=')
graph_width = strtoul(arg + 1, &end, 10);
else if (!*arg && !av[1])
- die("Option '--stat-graph-width' requires a value");
+ die_want_option("--stat-graph-width");
else if (!*arg) {
graph_width = strtoul(av[1], &end, 10);
argcount = 2;
if (*arg == '=')
count = strtoul(arg + 1, &end, 10);
else if (!*arg && !av[1])
- die("Option '--stat-count' requires a value");
+ die_want_option("--stat-count");
else if (!*arg) {
count = strtoul(av[1], &end, 10);
argcount = 2;
DIFF_XDL_SET(options, IGNORE_WHITESPACE_AT_EOL);
else if (!strcmp(arg, "--ignore-blank-lines"))
DIFF_XDL_SET(options, IGNORE_BLANK_LINES);
- else if (!strcmp(arg, "--compaction-heuristic"))
+ else if (!strcmp(arg, "--indent-heuristic")) {
+ DIFF_XDL_SET(options, INDENT_HEURISTIC);
+ DIFF_XDL_CLR(options, COMPACTION_HEURISTIC);
+ } else if (!strcmp(arg, "--no-indent-heuristic"))
+ DIFF_XDL_CLR(options, INDENT_HEURISTIC);
+ else if (!strcmp(arg, "--compaction-heuristic")) {
DIFF_XDL_SET(options, COMPACTION_HEURISTIC);
- else if (!strcmp(arg, "--no-compaction-heuristic"))
+ DIFF_XDL_CLR(options, INDENT_HEURISTIC);
+ } else if (!strcmp(arg, "--no-compaction-heuristic"))
DIFF_XDL_CLR(options, COMPACTION_HEURISTIC);
else if (!strcmp(arg, "--patience"))
options->xdl_opts = DIFF_WITH_ALG(options, PATIENCE_DIFF);
const char **optarg);
extern int git_diff_basic_config(const char *var, const char *value, void *cb);
+extern int git_diff_heuristic_config(const char *var, const char *value, void *cb);
extern void init_diff_ui_defaults(void);
extern int git_diff_ui_config(const char *var, const char *value, void *cb);
extern void diff_setup(struct diff_options *);
{
struct diffgrep_cb *data = priv;
regmatch_t regmatch;
- int hold;
if (line[0] != '+' && line[0] != '-')
return;
* caller early.
*/
return;
- /* Yuck -- line ought to be "const char *"! */
- hold = line[len];
- line[len] = '\0';
- data->hit = !regexec(data->regexp, line + 1, 1, ®match, 0);
- line[len] = hold;
+ data->hit = !regexec_buf(data->regexp, line + 1, len - 1, 1,
+ &regmatch, 0);
}
static int diff_grep(mmfile_t *one, mmfile_t *two,
xdemitconf_t xecfg;
if (!one)
- return !regexec(regexp, two->ptr, 1, ®match, 0);
+ return !regexec_buf(regexp, two->ptr, two->size,
+ 1, &regmatch, 0);
if (!two)
- return !regexec(regexp, one->ptr, 1, ®match, 0);
+ return !regexec_buf(regexp, one->ptr, one->size,
+ 1, &regmatch, 0);
/*
* We have both sides; need to run textual diff and see if
regmatch_t regmatch;
int flags = 0;
- assert(data[sz] == '\0');
- while (*data && !regexec(regexp, data, 1, ®match, flags)) {
+ while (*data &&
+ !regexec_buf(regexp, data, sz, 1, &regmatch, flags)) {
flags |= REG_NOTBOL;
data += regmatch.rm_eo;
if (*data && regmatch.rm_so == regmatch.rm_eo)
size_t packed_git_limit = DEFAULT_PACKED_GIT_LIMIT;
size_t delta_base_cache_limit = 96 * 1024 * 1024;
unsigned long big_file_threshold = 512 * 1024 * 1024;
-const char *pager_program;
int pager_use_color = 1;
const char *editor_program;
const char *askpass_program;
return is_bare_repository_cfg && !get_git_work_tree();
}
+int have_git_dir(void)
+{
+ return startup_info->have_repository
+ || git_dir
+ || getenv(GIT_DIR_ENVIRONMENT);
+}
+
const char *get_git_dir(void)
{
if (!git_dir)
}
return the_shared_repository;
}
+
+void reset_shared_repository(void)
+{
+ need_shared_repository_from_config = 1;
+}
my $normal_color = $repo->get_color("", "reset");
my $diff_algorithm = $repo->config('diff.algorithm');
+my $diff_indent_heuristic = $repo->config_bool('diff.indentheuristic');
my $diff_compaction_heuristic = $repo->config_bool('diff.compactionheuristic');
my $diff_filter = $repo->config('interactive.difffilter');
if (defined $diff_algorithm) {
splice @diff_cmd, 1, 0, "--diff-algorithm=${diff_algorithm}";
}
- if ($diff_compaction_heuristic) {
+ if ($diff_indent_heuristic) {
+ splice @diff_cmd, 1, 0, "--indent-heuristic";
+ } elsif ($diff_compaction_heuristic) {
splice @diff_cmd, 1, 0, "--compaction-heuristic";
}
if (defined $patch_mode_revision) {
#define qsort git_qsort
#endif
+#ifndef REG_STARTEND
+#error "Git requires REG_STARTEND support. Compile with NO_REGEX=NeedsStartEnd"
+#endif
+
+static inline int regexec_buf(const regex_t *preg, const char *buf, size_t size,
+ size_t nmatch, regmatch_t pmatch[], int eflags)
+{
+ assert(nmatch > 0 && pmatch);
+ pmatch[0].rm_so = 0;
+ pmatch[0].rm_eo = size;
+ return regexec(preg, buf, nmatch, pmatch, eflags | REG_STARTEND);
+}
+
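As a rough sketch of the calling convention (a hypothetical caller, not code from this series), the helper lets regex matching work on a length-bounded buffer that need not be NUL-terminated, which is what the diffcore-pickaxe and grep call sites in this series rely on; it assumes the regexec_buf() definition above is in scope via git-compat-util.h.

/* Hypothetical caller: does a non-NUL-terminated buffer contain "TODO"? */
static int buffer_has_todo(const char *buf, size_t len)
{
	regex_t re;
	regmatch_t m[1];
	int hit;

	if (regcomp(&re, "TODO", REG_EXTENDED))
		return 0;
	hit = !regexec_buf(&re, buf, len, 1, m, 0);
	regfree(&re);
	return hit;
}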
#ifndef DIR_HAS_BSD_GROUP_SEMANTICS
# define FORCE_DIR_SET_GID S_ISGID
#else
# placed before the commit of the next action
checkout_onto
- warn "$(gettext "You can fix this with 'git rebase --edit-todo'.")"
+ warn "$(gettext "You can fix this with 'git rebase --edit-todo' and then run 'git rebase --continue'.")"
die "$(gettext "Or you can abort the rebase with 'git rebase --abort'.")"
fi
}
;;
esac
-git var GIT_COMMITTER_IDENT >/dev/null ||
- die "$(gettext "You need to set your committer info first")"
-
comment_for_reflog start
if test ! -z "$switch_to"
u_tree=$(git write-tree) &&
printf 'untracked files on %s\n' "$msg" | git commit-tree $u_tree &&
rm -f "$TMPindex"
- ) ) || die "Cannot save the untracked files"
+ ) ) || die "$(gettext "Cannot save the untracked files")"
untracked_commit_option="-p $u_commit";
else
if test -n "$patch_mode" && test -n "$untracked"
then
- die "Can't use --patch and --include-untracked or --all at the same time"
+ die "$(gettext "Can't use --patch and --include-untracked or --all at the same time")"
fi
stash_msg="$*"
GIT_INDEX_FILE="$TMPindex" git-read-tree "$u_tree" &&
GIT_INDEX_FILE="$TMPindex" git checkout-index --all &&
rm -f "$TMPindex" ||
- die 'Could not restore untracked files from stash'
+ die "$(gettext "Could not restore untracked files from stash")"
fi
eval "
prefix=
custom_name=
depth=
+progress=
die_if_unmatched ()
{
-q|--quiet)
GIT_QUIET=1
;;
+ --progress)
+ progress="--progress"
+ ;;
-i|--init)
init=1
;;
{
git submodule--helper update-clone ${GIT_QUIET:+--quiet} \
+ ${progress:+"$progress"} \
${wt_prefix:+--prefix "$wt_prefix"} \
${prefix:+--recursive-prefix "$prefix"} \
${update:+--update "$update"} \
{ "pack-objects", cmd_pack_objects, RUN_SETUP },
{ "pack-redundant", cmd_pack_redundant, RUN_SETUP },
{ "pack-refs", cmd_pack_refs, RUN_SETUP },
- { "patch-id", cmd_patch_id },
+ { "patch-id", cmd_patch_id, RUN_SETUP_GENTLY },
{ "pickaxe", cmd_blame, RUN_SETUP },
{ "prune", cmd_prune, RUN_SETUP },
{ "prune-packed", cmd_prune_packed, RUN_SETUP },
}
}
-static int regmatch(const regex_t *preg, char *line, char *eol,
- regmatch_t *match, int eflags)
-{
-#ifdef REG_STARTEND
- match->rm_so = 0;
- match->rm_eo = eol - line;
- eflags |= REG_STARTEND;
-#endif
- return regexec(preg, line, 1, match, eflags);
-}
-
static int patmatch(struct grep_pat *p, char *line, char *eol,
regmatch_t *match, int eflags)
{
else if (p->pcre_regexp)
hit = !pcrematch(p, line, eol, match, eflags);
else
- hit = !regmatch(&p->regexp, line, eol, match, eflags);
+ hit = !regexec_buf(&p->regexp, line, eol - line, 1, match,
+ eflags);
return hit;
}
slot->callback_func(slot->callback_data);
}
+static void xmulti_remove_handle(struct active_request_slot *slot)
+{
+#ifdef USE_CURL_MULTI
+ curl_multi_remove_handle(curlm, slot->curl);
+#endif
+}
+
#ifdef USE_CURL_MULTI
static void process_curl_messages(void)
{
slot->curl != curl_message->easy_handle)
slot = slot->next;
if (slot != NULL) {
- curl_multi_remove_handle(curlm, slot->curl);
+ xmulti_remove_handle(slot);
slot->curl_result = curl_result;
finish_active_slot(slot);
} else {
while (slot != NULL) {
struct active_request_slot *next = slot->next;
if (slot->curl != NULL) {
-#ifdef USE_CURL_MULTI
- curl_multi_remove_handle(curlm, slot->curl);
-#endif
+ xmulti_remove_handle(slot);
curl_easy_cleanup(slot->curl);
}
free(slot);
if (curlm_result != CURLM_OK &&
curlm_result != CURLM_CALL_MULTI_PERFORM) {
+ warning("curl_multi_add_handle failed: %s",
+ curl_multi_strerror(curlm_result));
active_requests--;
slot->in_use = 0;
return 0;
static void release_active_slot(struct active_request_slot *slot)
{
closedown_active_slot(slot);
- if (slot->curl && curl_session_count > min_curl_sessions) {
-#ifdef USE_CURL_MULTI
- curl_multi_remove_handle(curlm, slot->curl);
-#endif
- curl_easy_cleanup(slot->curl);
- slot->curl = NULL;
- curl_session_count--;
+ if (slot->curl) {
+ xmulti_remove_handle(slot);
+ if (curl_session_count > min_curl_sessions) {
+ curl_easy_cleanup(slot->curl);
+ slot->curl = NULL;
+ curl_session_count--;
+ }
}
#ifdef USE_CURL_MULTI
fill_active_slots();
}
static const char *env_hint =
-"\n"
-"*** Please tell me who you are.\n"
-"\n"
-"Run\n"
-"\n"
-" git config --global user.email \"you@example.com\"\n"
-" git config --global user.name \"Your Name\"\n"
-"\n"
-"to set your account\'s default identity.\n"
-"Omit --global to set the identity only in this repository.\n"
-"\n";
+N_("\n"
+ "*** Please tell me who you are.\n"
+ "\n"
+ "Run\n"
+ "\n"
+ " git config --global user.email \"you@example.com\"\n"
+ " git config --global user.name \"Your Name\"\n"
+ "\n"
+ "to set your account\'s default identity.\n"
+ "Omit --global to set the identity only in this repository.\n"
+ "\n");
const char *fmt_ident(const char *name, const char *email,
const char *date_str, int flag)
if (!name) {
if (strict && ident_use_config_only
&& !(ident_config_given & IDENT_NAME_GIVEN)) {
- fputs(env_hint, stderr);
+ fputs(_(env_hint), stderr);
die("no name was given and auto-detection is disabled");
}
name = ident_default_name();
using_default = 1;
if (strict && default_name_is_bogus) {
- fputs(env_hint, stderr);
+ fputs(_(env_hint), stderr);
die("unable to auto-detect name (got '%s')", name);
}
}
struct passwd *pw;
if (strict) {
if (using_default)
- fputs(env_hint, stderr);
+ fputs(_(env_hint), stderr);
die("empty ident name (for <%s>) not allowed", email);
}
pw = xgetpwuid_self(NULL);
if (!email) {
if (strict && ident_use_config_only
&& !(ident_config_given & IDENT_MAIL_GIVEN)) {
- fputs(env_hint, stderr);
+ fputs(_(env_hint), stderr);
die("no email was given and auto-detection is disabled");
}
email = ident_default_email();
if (strict && default_email_is_bogus) {
- fputs(env_hint, stderr);
+ fputs(_(env_hint), stderr);
die("unable to auto-detect email address (got '%s')", email);
}
}
find_unique_abbrev(commit->object.oid.hash,
DEFAULT_ABBREV));
if (parse_commit(commit) != 0)
- strbuf_addf(&o->obuf, _("(bad commit)\n"));
+ strbuf_addstr(&o->obuf, _("(bad commit)\n"));
else {
const char *title;
const char *msg = get_commit_buffer(commit, NULL);
if (file_exists(git_path(NOTES_MERGE_WORKTREE)) &&
!is_empty_dir(git_path(NOTES_MERGE_WORKTREE))) {
if (advice_resolve_conflict)
- die("You have not concluded your previous "
+ die(_("You have not concluded your previous "
"notes merge (%s exists).\nPlease, use "
"'git notes merge --commit' or 'git notes "
"merge --abort' to commit/abort the "
"previous merge before you start a new "
- "notes merge.", git_path("NOTES_MERGE_*"));
+ "notes merge."), git_path("NOTES_MERGE_*"));
else
- die("You have not concluded your notes merge "
- "(%s exists).", git_path("NOTES_MERGE_*"));
+ die(_("You have not concluded your notes merge "
+ "(%s exists)."), git_path("NOTES_MERGE_*"));
}
if (safe_create_leading_directories_const(git_path(
#define DEFAULT_PAGER "less"
#endif
-/*
- * This is split up from the rest of git so that we can do
- * something different on Windows.
- */
-
static struct child_process pager_process = CHILD_PROCESS_INIT;
+static const char *pager_program;
static void wait_for_pager(int in_signal)
{
raise(signo);
}
+static int core_pager_config(const char *var, const char *value, void *data)
+{
+ if (!strcmp(var, "core.pager"))
+ return git_config_string(&pager_program, var, value);
+ return 0;
+}
+
+static void read_early_config(config_fn_t cb, void *data)
+{
+ git_config_with_options(cb, data, NULL, 1);
+
+ /*
+ * Note that this is a really dirty hack that does the wrong thing in
+ * many cases. The crux of the problem is that we cannot run
+ * setup_git_directory() early on in git's setup, so we have no idea if
+ * we are in a repository or not, and therefore are not sure whether
+ * and how to read repository-local config.
+ *
+ * So if we _aren't_ in a repository (or we are but we would reject its
+ * core.repositoryformatversion), we'll read whatever is in .git/config
+ * blindly. Similarly, if we _are_ in a repository, but not at the
+ * root, we'll fail to find .git/config (because it's really
+ * ../.git/config, etc). See t7006 for a complete set of failures.
+ *
+ * However, we have historically provided this hack because it does
+ * work some of the time (namely when you are at the top-level of a
+ * valid repository), and would rarely make things worse (i.e., you do
+ * not generally have a .git/config file sitting around).
+ */
+ if (!startup_info->have_repository) {
+ struct git_config_source repo_config;
+
+ memset(&repo_config, 0, sizeof(repo_config));
+ repo_config.file = ".git/config";
+ git_config_with_options(cb, data, &repo_config, 1);
+ }
+}
+
const char *git_pager(int stdout_is_tty)
{
const char *pager;
pager = getenv("GIT_PAGER");
if (!pager) {
if (!pager_program)
- git_config(git_default_config, NULL);
+ read_early_config(core_pager_config, NULL);
pager = pager_program;
}
if (!pager)
return width;
}
-/* returns 0 for "no pager", 1 for "use pager", and -1 for "not specified" */
-int check_pager_config(const char *cmd)
+struct pager_command_config_data {
+ const char *cmd;
+ int want;
+ char *value;
+};
+
+static int pager_command_config(const char *var, const char *value, void *vdata)
{
- int want = -1;
- struct strbuf key = STRBUF_INIT;
- const char *value = NULL;
- strbuf_addf(&key, "pager.%s", cmd);
- if (git_config_key_is_valid(key.buf) &&
- !git_config_get_value(key.buf, &value)) {
- int b = git_config_maybe_bool(key.buf, value);
+ struct pager_command_config_data *data = vdata;
+ const char *cmd;
+
+ if (skip_prefix(var, "pager.", &cmd) && !strcmp(cmd, data->cmd)) {
+ int b = git_config_maybe_bool(var, value);
if (b >= 0)
- want = b;
+ data->want = b;
else {
- want = 1;
- pager_program = xstrdup(value);
+ data->want = 1;
+ data->value = xstrdup(value);
}
}
- strbuf_release(&key);
- return want;
+
+ return 0;
+}
+
+/* returns 0 for "no pager", 1 for "use pager", and -1 for "not specified" */
+int check_pager_config(const char *cmd)
+{
+ struct pager_command_config_data data;
+
+ data.cmd = cmd;
+ data.want = -1;
+ data.value = NULL;
+
+ read_early_config(pager_command_config, &data);
+
+ if (data.value)
+ pager_program = data.value;
+ return data.want;
}
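To make the tri-state return value concrete, hypothetical configuration such as the following maps onto it as described in the comment above: a boolean value yields 0 or 1 directly, while a command string stores the pager and yields 1.

git config pager.log false        # check_pager_config("log")  returns 0
git config pager.diff true        # check_pager_config("diff") returns 1
git config pager.show "less -S"   # check_pager_config("show") returns 1 and pager_program becomes "less -S"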
return 0;
}
+/**
+ * Report that the option is unknown, so that other code can handle
+ * it. This can be used as a callback together with
+ * OPTION_LOWLEVEL_CALLBACK to allow an option to be documented in the
+ * "-h" output even if it's not being handled directly by
+ * parse_options().
+ */
+int parse_opt_unknown_cb(const struct option *opt, const char *arg, int unset)
+{
+ return -2;
+}
+
/**
* Recreates the command-line option in the strbuf.
*/
extern int parse_opt_tertiary(const struct option *, const char *, int);
extern int parse_opt_string_list(const struct option *, const char *, int);
extern int parse_opt_noop_cb(const struct option *, const char *, int);
+extern int parse_opt_unknown_cb(const struct option *, const char *, int);
extern int parse_opt_passthru(const struct option *, const char *, int);
extern int parse_opt_passthru_argv(const struct option *, const char *, int);
#include "sha1-lookup.h"
#include "patch-ids.h"
+static int patch_id_defined(struct commit *commit)
+{
+ /* must be 0 or 1 parents */
+ return !commit->parents || !commit->parents->next;
+}
+
int commit_patch_id(struct commit *commit, struct diff_options *options,
unsigned char *sha1, int diff_header_only)
{
+ if (!patch_id_defined(commit))
+ return -1;
+
if (commit->parents)
diff_tree_sha1(commit->parents->item->object.oid.hash,
commit->object.oid.hash, "", options);
{
memset(ids, 0, sizeof(*ids));
diff_setup(&ids->diffopts);
+ ids->diffopts.detect_rename = 0;
DIFF_OPT_SET(&ids->diffopts, RECURSIVE);
diff_setup_done(&ids->diffopts);
hashmap_init(&ids->patches, (hashmap_cmp_fn)patch_id_cmp, 256);
{
struct patch_id patch;
+ if (!patch_id_defined(commit))
+ return NULL;
+
memset(&patch, 0, sizeof(patch));
if (init_patch_id_entry(&patch, commit, ids))
return NULL;
{
struct patch_id *key = xcalloc(1, sizeof(*key));
+ if (!patch_id_defined(commit))
+ return NULL;
+
if (init_patch_id_entry(key, commit, ids)) {
free(key);
return NULL;
extern char *find_pathspecs_matching_against_index(const struct pathspec *pathspec);
extern void add_pathspec_matches_against_index(const struct pathspec *pathspec, char *seen);
-extern const char *check_path_for_gitlink(const char *path);
-extern void die_if_path_beyond_symlink(const char *path, const char *prefix);
#endif /* PATHSPEC_H */
case 'C':
if (starts_with(placeholder + 1, "(auto)")) {
c->auto_color = want_color(c->pretty_ctx->color);
+ if (c->auto_color)
+ strbuf_addstr(sb, GIT_COLOR_RESET);
return 7; /* consumed 7 bytes, "C(auto)" */
} else {
int ret = parse_color(sb, placeholder, c);
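One visible effect of the extra reset (an illustration, not a test from this series): with a format such as the one below, any color left active by an earlier placeholder no longer bleeds into the %C(auto) portion, because a reset is emitted before the auto-coloring starts.

git log -1 --format="%C(auto)%h%d %s"    # auto-colored hash/decoration now starts from a clean, reset state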
hashcpy(ce->oid.hash, sha1);
}
-int add_to_index(struct index_state *istate, const char *path, struct stat *st, int flags, int force_mode)
+int add_to_index(struct index_state *istate, const char *path, struct stat *st, int flags)
{
int size, namelen, was_same;
mode_t st_mode = st->st_mode;
else
ce->ce_flags |= CE_INTENT_TO_ADD;
- if (S_ISREG(st_mode) && force_mode)
- ce->ce_mode = create_ce_mode(force_mode);
- else if (trust_executable_bit && has_symlinks)
+
+ if (trust_executable_bit && has_symlinks) {
ce->ce_mode = create_ce_mode(st_mode);
- else {
+ } else {
/* If there is an existing entry, pick the mode bits and type
* from it, otherwise assume unexecutable regular file.
*/
return 0;
}
-int add_file_to_index(struct index_state *istate, const char *path,
- int flags, int force_mode)
+int add_file_to_index(struct index_state *istate, const char *path, int flags)
{
struct stat st;
if (lstat(path, &st))
die_errno("unable to stat '%s'", path);
- return add_to_index(istate, path, &st, flags, force_mode);
+ return add_to_index(istate, path, &st, flags);
}
struct cache_entry *make_cache_entry(unsigned int mode,
return ret;
}
+/*
+ * Chmod an index entry with either +x or -x.
+ *
+ * Returns -1 if the chmod for the particular cache entry failed (if it's
+ * not a regular file), -2 if an invalid flip argument is passed in, 0
+ * otherwise.
+ */
+int chmod_index_entry(struct index_state *istate, struct cache_entry *ce,
+ char flip)
+{
+ if (!S_ISREG(ce->ce_mode))
+ return -1;
+ switch (flip) {
+ case '+':
+ ce->ce_mode |= 0111;
+ break;
+ case '-':
+ ce->ce_mode &= ~0111;
+ break;
+ default:
+ return -2;
+ }
+ cache_tree_invalidate_path(istate, ce->name);
+ ce->ce_flags |= CE_UPDATE_IN_BASE;
+ istate->cache_changed |= CE_ENTRY_CHANGED;
+
+ return 0;
+}
+
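A minimal sketch of a caller (hypothetical, separate from the update-index path shown earlier) that maps the return codes documented in the comment above:

/* Hypothetical caller: mark a tracked regular file executable in the index. */
static void make_executable(struct index_state *istate, struct cache_entry *ce)
{
	switch (chmod_index_entry(istate, ce, '+')) {
	case 0:
		break;			/* mode flipped, index marked as changed */
	case -1:
		die("%s is not a regular file", ce->name);
	default:			/* -2: invalid flip character */
		die("BUG: bad chmod argument");
	}
}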
int ce_same_name(const struct cache_entry *a, const struct cache_entry *b)
{
int len = ce_namelen(a);
_("Your branch is based on '%s', but the upstream is gone.\n"),
base);
if (advice_status_hints)
- strbuf_addf(sb,
+ strbuf_addstr(sb,
_(" (use \"git branch --unset-upstream\" to fixup)\n"));
} else if (!ours && !theirs) {
strbuf_addf(sb,
ours),
base, ours);
if (advice_status_hints)
- strbuf_addf(sb,
+ strbuf_addstr(sb,
_(" (use \"git push\" to publish your local commits)\n"));
} else if (!ours) {
strbuf_addf(sb,
theirs),
base, theirs);
if (advice_status_hints)
- strbuf_addf(sb,
+ strbuf_addstr(sb,
_(" (use \"git pull\" to update your local branch)\n"));
} else {
strbuf_addf(sb,
ours + theirs),
base, ours, theirs);
if (advice_status_hints)
- strbuf_addf(sb,
+ strbuf_addstr(sb,
_(" (use \"git pull\" to merge the remote branch into yours)\n"));
}
free(base);
#include "dir.h"
#include "mru.h"
#include "list.h"
+#include "mergesort.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
strbuf_release(&path);
}
+static void *get_next_packed_git(const void *p)
+{
+ return ((const struct packed_git *)p)->next;
+}
+
+static void set_next_packed_git(void *p, void *next)
+{
+ ((struct packed_git *)p)->next = next;
+}
+
static int sort_pack(const void *a_, const void *b_)
{
- struct packed_git *a = *((struct packed_git **)a_);
- struct packed_git *b = *((struct packed_git **)b_);
+ const struct packed_git *a = a_;
+ const struct packed_git *b = b_;
int st;
/*
static void rearrange_packed_git(void)
{
- struct packed_git **ary, *p;
- int i, n;
-
- for (n = 0, p = packed_git; p; p = p->next)
- n++;
- if (n < 2)
- return;
-
- /* prepare an array of packed_git for easier sorting */
- ary = xcalloc(n, sizeof(struct packed_git *));
- for (n = 0, p = packed_git; p; p = p->next)
- ary[n++] = p;
-
- qsort(ary, n, sizeof(struct packed_git *), sort_pack);
-
- /* link them back again */
- for (i = 0; i < n - 1; i++)
- ary[i]->next = ary[i + 1];
- ary[n - 1]->next = NULL;
- packed_git = ary[0];
-
- free(ary);
+ packed_git = llist_mergesort(packed_git, get_next_packed_git,
+ set_next_packed_git, sort_pack);
}
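For readers who have not used llist_mergesort() before, the sketch below (toy types, not part of this patch) shows the calling convention it expects: next-pointer accessor callbacks plus a comparison function, mirroring how the packed_git list is sorted above.

struct node { int value; struct node *next; };

static void *node_get_next(const void *p)
{
	return ((const struct node *)p)->next;
}

static void node_set_next(void *p, void *next)
{
	((struct node *)p)->next = next;
}

static int node_cmp(const void *a, const void *b)
{
	return ((const struct node *)a)->value - ((const struct node *)b)->value;
}

/* sorted = llist_mergesort(list, node_get_next, node_set_next, node_cmp); */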
static void prepare_packed_git_mru(void)
void *base, unsigned long base_size, enum object_type type)
{
struct delta_base_cache_entry *ent = xmalloc(sizeof(*ent));
- struct list_head *lru;
+ struct list_head *lru, *tmp;
delta_base_cached += base_size;
- list_for_each(lru, &delta_base_cache_lru) {
+ list_for_each_safe(lru, tmp, &delta_base_cache_lru) {
struct delta_base_cache_entry *f =
list_entry(lru, struct delta_base_cache_entry, lru);
if (delta_base_cached <= delta_base_cache_limit)
void strbuf_remove(struct strbuf *sb, size_t pos, size_t len)
{
- strbuf_splice(sb, pos, len, NULL, 0);
+ strbuf_splice(sb, pos, len, "", 0);
}
void strbuf_add(struct strbuf *sb, const void *data, size_t len)
const char *v;
const struct string_list *strptr;
struct config_set cs;
+
+ setup_git_directory();
+
git_configset_init(&cs);
if (argc < 2) {
} | git pack-objects --revs --stdout >/dev/null
'
+test_perf 'pack to file' '
+ git pack-objects --all pack1 </dev/null >/dev/null
+'
+
+test_perf 'pack to file (bitmap)' '
+ git pack-objects --use-bitmap-index --all pack1b </dev/null >/dev/null
+'
+
test_expect_success 'create partial bitmap state' '
# pick a commit to represent the repo tip in the past
cutoff=$(git rev-list HEAD~100 -1) &&
git update-ref HEAD $orig_tip
'
-test_perf 'partial bitmap' '
+test_perf 'clone (partial bitmap)' '
git pack-objects --stdout --all </dev/null >/dev/null
'
+test_perf 'pack to file (partial bitmap)' '
+ git pack-objects --use-bitmap-index --all pack2b </dev/null >/dev/null
+'
+
test_done
}
build_git_rev () {
rev=$1
- cp ../../config.mak build/$rev/config.mak
+ for config in config.mak config.mak.autogen config.status
+ do
+ if test -e "../../$config"
+ then
+ cp "../../$config" "build/$rev/"
+ fi
+ done
(cd build/$rev && make $GIT_PERF_MAKE_OPTS) ||
die "failed to build revision '$mydir'"
}
! is_hidden newdir
'
+test_expect_success 'remote init from does not use config from cwd' '
+ rm -rf newdir &&
+ test_config core.logallrefupdates true &&
+ git init newdir &&
+ echo true >expect &&
+ git -C newdir config --bool core.logallrefupdates >actual &&
+ test_cmp expect actual
+'
+
test_done
test "$obname1" = "$obname1new"
'
-test_expect_success 'check that appropriate filter is invoke when --path is used' '
+test_expect_success 'set up crlf tests' '
echo fooQ | tr Q "\\015" >file0 &&
cp file0 file1 &&
echo "file0 -crlf" >.gitattributes &&
git config core.autocrlf true &&
file0_sha=$(git hash-object file0) &&
file1_sha=$(git hash-object file1) &&
- test "$file0_sha" != "$file1_sha" &&
+ test "$file0_sha" != "$file1_sha"
+'
+
+test_expect_success 'check that appropriate filter is invoked when --path is used' '
path1_sha=$(git hash-object --path=file1 file0) &&
path0_sha=$(git hash-object --path=file0 file1) &&
test "$file0_sha" = "$path0_sha" &&
path1_sha=$(cat file0 | git hash-object --path=file1 --stdin) &&
path0_sha=$(cat file1 | git hash-object --path=file0 --stdin) &&
test "$file0_sha" = "$path0_sha" &&
- test "$file1_sha" = "$path1_sha" &&
- git config --unset core.autocrlf
+ test "$file1_sha" = "$path1_sha"
+'
+
+test_expect_success 'gitattributes also work in a subdirectory' '
+ mkdir subdir &&
+ (
+ cd subdir &&
+ subdir_sha0=$(git hash-object ../file0) &&
+ subdir_sha1=$(git hash-object ../file1) &&
+ test "$file0_sha" = "$subdir_sha0" &&
+ test "$file1_sha" = "$subdir_sha1"
+ )
'
test_expect_success 'check that --no-filters option works' '
- echo fooQ | tr Q "\\015" >file0 &&
- cp file0 file1 &&
- echo "file0 -crlf" >.gitattributes &&
- echo "file1 crlf" >>.gitattributes &&
- git config core.autocrlf true &&
- file0_sha=$(git hash-object file0) &&
- file1_sha=$(git hash-object file1) &&
- test "$file0_sha" != "$file1_sha" &&
nofilters_file1=$(git hash-object --no-filters file1) &&
test "$file0_sha" = "$nofilters_file1" &&
nofilters_file1=$(cat file1 | git hash-object --stdin) &&
- test "$file0_sha" = "$nofilters_file1" &&
- git config --unset core.autocrlf
+ test "$file0_sha" = "$nofilters_file1"
'
test_expect_success 'check that --no-filters option works with --stdin-paths' '
- echo fooQ | tr Q "\\015" >file0 &&
- cp file0 file1 &&
- echo "file0 -crlf" >.gitattributes &&
- echo "file1 crlf" >>.gitattributes &&
- git config core.autocrlf true &&
- file0_sha=$(git hash-object file0) &&
- file1_sha=$(git hash-object file1) &&
- test "$file0_sha" != "$file1_sha" &&
nofilters_file1=$(echo "file1" | git hash-object --stdin-paths --no-filters) &&
- test "$file0_sha" = "$nofilters_file1" &&
- git config --unset core.autocrlf
+ test "$file0_sha" = "$nofilters_file1"
'
pop_repo
}" actual)"
'
+test_expect_success POSIXPERM 'remote init does not use config from cwd' '
+ git config core.sharedrepository 0666 &&
+ umask 0022 &&
+ git init --bare child.git &&
+ echo "-rw-r--r--" >expect &&
+ modebits child.git/config >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success POSIXPERM 're-init respects core.sharedrepository (local)' '
+ git config core.sharedrepository 0666 &&
+ umask 0022 &&
+ echo whatever >templates/foo &&
+ git init --template=templates &&
+ echo "-rw-rw-rw-" >expect &&
+ modebits .git/foo >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success POSIXPERM 're-init respects core.sharedrepository (remote)' '
+ rm -rf child.git &&
+ umask 0022 &&
+ git init --bare --shared=0666 child.git &&
+ test_path_is_missing child.git/foo &&
+ git init --bare --template=../templates child.git &&
+ echo "-rw-rw-rw-" >expect &&
+ modebits child.git/foo >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success POSIXPERM 'template can set core.sharedrepository' '
+ rm -rf child.git &&
+ umask 0022 &&
+ git config core.sharedrepository 0666 &&
+ cp .git/config templates/config &&
+ git init --bare --template=../templates child.git &&
+ echo "-rw-rw-rw-" >expect &&
+ modebits child.git/HEAD >actual &&
+ test_cmp expect actual
+'
+
test_done
test_expect_success 'gitdir selection on normal repos' '
echo 0 >expect &&
git config core.repositoryformatversion >actual &&
- (
- cd test &&
- git config core.repositoryformatversion >../actual2
- ) &&
+ git -C test config core.repositoryformatversion >actual2 &&
test_cmp expect actual &&
test_cmp expect actual2
'
test_expect_success 'gitdir selection on unsupported repo' '
# Make sure it would stop at test2, not trash
- echo 99 >expect &&
- (
- cd test2 &&
- git config core.repositoryformatversion >../actual
- ) &&
- test_cmp expect actual
+ test_expect_code 1 git -C test2 config core.repositoryformatversion >actual
'
test_expect_success 'gitdir not required mode' '
git apply --stat test.patch &&
- (
- cd test &&
- git apply --stat ../test.patch
- ) &&
- (
- cd test2 &&
- git apply --stat ../test.patch
- )
+ git -C test apply --stat ../test.patch &&
+ git -C test2 apply --stat ../test.patch
'
test_expect_success 'gitdir required mode' '
git apply --check --index test.patch &&
- (
- cd test &&
- git apply --check --index ../test.patch
- ) &&
- (
- cd test2 &&
- test_must_fail git apply --check --index ../test.patch
- )
+ git -C test apply --check --index ../test.patch &&
+ test_must_fail git -C test2 apply --check --index ../test.patch
'
check_allow () {
test_must_fail git checkout world all
'
+test_expect_success 'check ambiguity in subdir' '
+ mkdir sub &&
+ # not ambiguous because sub/world does not exist
+ git -C sub checkout world ../all &&
+ echo hello >sub/world &&
+ # ambiguous because sub/world does exist
+ test_must_fail git -C sub checkout world ../all
+'
+
test_expect_success 'disambiguate checking out from a tree-ish' '
echo bye > world &&
git checkout world -- world &&
test_branch master
'
+test_expect_success 'checkout of branch with a file in subdir having the same name fails' '
+ git checkout -B master &&
+ test_might_fail git branch -D spam &&
+
+ >spam &&
+ mkdir sub &&
+ mv spam sub/spam &&
+ test_must_fail git -C sub checkout spam &&
+ test_must_fail git rev-parse --verify refs/heads/spam &&
+ test_branch master
+'
+
test_expect_success 'checkout <branch> -- succeeds, even if a file with the same name exists' '
git checkout -B master &&
test_might_fail git branch -D spam &&
)
'
+test_expect_success '--chmod=+x and chmod=-x in the same argument list' '
+ >A &&
+ >B &&
+ git add A B &&
+ git update-index --chmod=+x A --chmod=-x B &&
+ cat >expect <<-\EOF &&
+ 100755 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 A
+ 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 B
+ EOF
+ git ls-files --stage A B >actual &&
+ test_cmp expect actual
+'
+
test_done
test -d .git/NOTES_MERGE_WORKTREE &&
test_must_fail git notes merge z >output 2>&1 &&
# Output should indicate what is wrong
- grep -q "\\.git/NOTES_MERGE_\\* exists" output
+ test_i18ngrep -q "\\.git/NOTES_MERGE_\\* exists" output
'
# Setup non-conflicting merge between x and new notes ref w
cd worktree &&
git config core.notesRef refs/notes/y &&
test_must_fail git notes merge z 2>err &&
- test_i18ngrep "A notes merge into refs/notes/y is already in-progress at" err
+ test_i18ngrep "a notes merge into refs/notes/y is already in-progress at" err
) &&
test_path_is_missing .git/worktrees/worktree/NOTES_MERGE_REF
'
Use 'git config rebase.missingCommitsCheck' to change the level of warnings.
The possible behaviours are: ignore, warn, error.
-You can fix this with 'git rebase --edit-todo'.
+You can fix this with 'git rebase --edit-todo' and then run 'git rebase --continue'.
Or you can abort the rebase with 'git rebase --abort'.
EOF
Warning: the command isn't recognized in the following line:
- badcmd $(git rev-list --oneline -1 master~1)
-You can fix this with 'git rebase --edit-todo'.
+You can fix this with 'git rebase --edit-todo' and then run 'git rebase --continue'.
Or you can abort the rebase with 'git rebase --abort'.
EOF
Warning: the SHA-1 is missing or isn't a commit in the following line:
- edit XXXXXXX False commit
-You can fix this with 'git rebase --edit-todo'.
+You can fix this with 'git rebase --edit-todo' and then run 'git rebase --continue'.
Or you can abort the rebase with 'git rebase --abort'.
EOF
test_mode_in_index 100755 foo2
'
+test_expect_success 'git add --chmod=[+-]x changes index with already added file' '
+ echo foo >foo3 &&
+ git add foo3 &&
+ git add --chmod=+x foo3 &&
+ test_mode_in_index 100755 foo3 &&
+ echo foo >xfoo3 &&
+ chmod 755 xfoo3 &&
+ git add xfoo3 &&
+ git add --chmod=-x xfoo3 &&
+ test_mode_in_index 100644 xfoo3
+'
+
+test_expect_success POSIXPERM 'git add --chmod=[+-]x does not change the working tree' '
+ echo foo >foo4 &&
+ git add foo4 &&
+ git add --chmod=+x foo4 &&
+ ! test -x foo4
+'
+
+test_expect_success 'no file status change if no pathspec is given' '
+ >foo5 &&
+ >foo6 &&
+ git add foo5 foo6 &&
+ git add --chmod=+x &&
+ test_mode_in_index 100644 foo5 &&
+ test_mode_in_index 100644 foo6
+'
+
+test_expect_success 'no file status change if no pathspec is given in subdir' '
+ mkdir -p sub &&
+ (
+ cd sub &&
+ >sub-foo1 &&
+ >sub-foo2 &&
+ git add . &&
+ git add --chmod=+x &&
+ test_mode_in_index 100644 sub-foo1 &&
+ test_mode_in_index 100644 sub-foo2
+ )
+'
+
+test_expect_success 'all statuses changed in folder if . is given' '
+ git add --chmod=+x . &&
+ test $(git ls-files --stage | grep ^100644 | wc -l) -eq 0 &&
+ git add --chmod=-x . &&
+ test $(git ls-files --stage | grep ^100755 | wc -l) -eq 0
+'
+
test_done
printf "Commit message\n\nInvalid surrogate:\355\240\200\n" \
>"$HOME/invalid" &&
git commit -a -F "$HOME/invalid" 2>"$HOME"/stderr &&
- grep "did not conform" "$HOME"/stderr
+ test_i18ngrep "did not conform" "$HOME"/stderr
'
test_expect_success 'UTF-8 overlong sequences rejected' '
printf "\340\202\251ommit message\n\nThis is not a space:\300\240\n" \
>"$HOME/invalid" &&
git commit -a -F "$HOME/invalid" 2>"$HOME"/stderr &&
- grep "did not conform" "$HOME"/stderr
+ test_i18ngrep "did not conform" "$HOME"/stderr
'
test_expect_success 'UTF-8 non-characters refused' '
printf "Commit message\n\nNon-character:\364\217\277\276\n" \
>"$HOME/invalid" &&
git commit -a -F "$HOME/invalid" 2>"$HOME"/stderr &&
- grep "did not conform" "$HOME"/stderr
+ test_i18ngrep "did not conform" "$HOME"/stderr
'
test_expect_success 'UTF-8 non-characters refused' '
printf "Commit message\n\nNon-character:\357\267\220\n" \
>"$HOME/invalid" &&
git commit -a -F "$HOME/invalid" 2>"$HOME"/stderr &&
- grep "did not conform" "$HOME"/stderr
+ test_i18ngrep "did not conform" "$HOME"/stderr
'
for H in ISO8859-1 eucJP ISO-2022-JP
# commit-tree will warn that the commit message does not contain valid UTF-8
# as mailinfo did not convert it
- grep "did not conform" err &&
+ test_i18ngrep "did not conform" err &&
check_encoding 2
'
git format-patch --ignore-if-in-upstream HEAD
'
+git_version="$(git --version | sed "s/.* //")"
+
+signature() {
+ printf "%s\n%s\n\n" "-- " "${1:-$git_version}"
+}
+
+test_expect_success 'format-patch default signature' '
+ git format-patch --stdout -1 | tail -n 3 >output &&
+ signature >expect &&
+ test_cmp expect output
+'
+
test_expect_success 'format-patch --signature' '
- git format-patch --stdout --signature="my sig" -1 >output &&
- grep "my sig" output
+ git format-patch --stdout --signature="my sig" -1 | tail -n 3 >output &&
+ signature "my sig" >expect &&
+ test_cmp expect output
'
test_expect_success 'format-patch with format.signature config' '
test_cmp expect actual
'
+test_expect_success '--rfc' '
+ cat >expect <<-\EOF &&
+ Subject: [RFC PATCH 1/1] header with . in it
+ EOF
+ git format-patch -n -1 --stdout --rfc >patch &&
+ grep ^Subject: patch >actual &&
+ test_cmp expect actual
+'
+
test_expect_success '--from=ident notices bogus ident' '
test_must_fail git format-patch -1 --stdout --from=foo >patch
'
test_expect_success 'format-patch --base' '
git checkout side &&
- git format-patch --stdout --base=HEAD~3 -1 >patch &&
- grep "^base-commit:" patch >actual &&
- grep "^prerequisite-patch-id:" patch >>actual &&
- echo "base-commit: $(git rev-parse HEAD~3)" >expected &&
+ git format-patch --stdout --base=HEAD~3 -1 | tail -n 7 >actual &&
+ echo >expected &&
+ echo "base-commit: $(git rev-parse HEAD~3)" >>expected &&
echo "prerequisite-patch-id: $(git show --patch HEAD~2 | git patch-id --stable | awk "{print \$1}")" >>expected &&
echo "prerequisite-patch-id: $(git show --patch HEAD~1 | git patch-id --stable | awk "{print \$1}")" >>expected &&
+ signature >> expected &&
test_cmp expected actual
'
test_cmp expected actual
'
+test_expect_success 'format-patch --base with --attach' '
+ git format-patch --attach=mimemime --stdout --base=HEAD~ -1 >patch &&
+ sed -n -e "/^base-commit:/s/.*/1/p" -e "/^---*mimemime--$/s/.*/2/p" \
+ patch >actual &&
+ test_write_lines 1 2 >expect &&
+ test_cmp expect actual
+'
+
test_expect_success 'format-patch --pretty=mboxrd' '
sp=" " &&
cat >msg <<-INPUT_END &&
mv file.c.new file.c &&
commit_and_tag long_common_tail file.c &&
+ git checkout initial &&
+ cat "$dir/hello.c" "$dir/dummy.c" >file.c &&
+ commit_and_tag hello_dummy file.c &&
+
+ # overlap function context of 1st change and -u context of 2nd change
+ grep -v "delete me from hello" <"$dir/hello.c" >file.c &&
+ sed 2p <"$dir/dummy.c" >>file.c &&
+ commit_and_tag changed_hello_dummy file.c &&
+
git checkout initial &&
grep -v "delete me from hello" <file.c >file.c.new &&
mv file.c.new file.c &&
test $(grep -c "^[ +-].*Begin" changed_hello_appended.diff) -le 2
'
+check_diff changed_hello_dummy 'changed two consecutive functions'
+
+test_expect_success ' context includes begin' '
+ grep "^ .*Begin of hello" changed_hello_dummy.diff &&
+ grep "^ .*Begin of dummy" changed_hello_dummy.diff
+'
+
+test_expect_success ' context includes end' '
+ grep "^ .*End of hello" changed_hello_dummy.diff &&
+ grep "^ .*End of dummy" changed_hello_dummy.diff
+'
+
+test_expect_success ' overlapping hunks are merged' '
+ test $(grep -c "^@@" changed_hello_dummy.diff) -eq 1
+'
+
test_done
)
'
+test_expect_success 'diff from repo subdir shows real paths (explicit)' '
+ echo "diff --git a/../../non/git/a b/../../non/git/b" >expect &&
+ test_expect_code 1 \
+ git -C repo/sub \
+ diff --no-index ../../non/git/a ../../non/git/b >actual &&
+ head -n 1 <actual >actual.head &&
+ test_cmp expect actual.head
+'
+
+test_expect_success 'diff from repo subdir shows real paths (implicit)' '
+ echo "diff --git a/../../non/git/a b/../../non/git/b" >expect &&
+ test_expect_code 1 \
+ git -C repo/sub \
+ diff ../../non/git/a ../../non/git/b >actual &&
+ head -n 1 <actual >actual.head &&
+ test_cmp expect actual.head
+'
+
+test_expect_success 'diff --no-index from repo subdir respects config (explicit)' '
+ echo "diff --git ../../non/git/a ../../non/git/b" >expect &&
+ test_config -C repo diff.noprefix true &&
+ test_expect_code 1 \
+ git -C repo/sub \
+ diff --no-index ../../non/git/a ../../non/git/b >actual &&
+ head -n 1 <actual >actual.head &&
+ test_cmp expect actual.head
+'
+
+test_expect_success 'diff --no-index from repo subdir respects config (implicit)' '
+ echo "diff --git ../../non/git/a ../../non/git/b" >expect &&
+ test_config -C repo diff.noprefix true &&
+ test_expect_code 1 \
+ git -C repo/sub \
+ diff ../../non/git/a ../../non/git/b >actual &&
+ head -n 1 <actual >actual.head &&
+ test_cmp expect actual.head
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='Test diff indent heuristic.
+
+'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/diff-lib.sh
+
+# Compare two diff outputs. Ignore "index" lines, because we don't
+# care about SHA-1s or file modes.
+compare_diff () {
+ sed -e "/^index /d" <"$1" >.tmp-1
+ sed -e "/^index /d" <"$2" >.tmp-2
+ test_cmp .tmp-1 .tmp-2 && rm -f .tmp-1 .tmp-2
+}
+
+# Compare blame output using the expectation for a diff as reference.
+# Only look for the lines coming from non-boundary commits.
+compare_blame () {
+ sed -n -e "1,4d" -e "s/^\+//p" <"$1" >.tmp-1
+ sed -ne "s/^[^^][^)]*) *//p" <"$2" >.tmp-2
+ test_cmp .tmp-1 .tmp-2 && rm -f .tmp-1 .tmp-2
+}
+
+test_expect_success 'prepare' '
+ cat <<-\EOF >spaces.txt &&
+ 1
+ 2
+ a
+
+ b
+ 3
+ 4
+ EOF
+
+ cat <<-\EOF >functions.c &&
+ 1
+ 2
+ /* function */
+ foo() {
+ foo
+ }
+
+ 3
+ 4
+ EOF
+
+ git add spaces.txt functions.c &&
+ test_tick &&
+ git commit -m initial &&
+ git branch old &&
+
+ cat <<-\EOF >spaces.txt &&
+ 1
+ 2
+ a
+
+ b
+ a
+
+ b
+ 3
+ 4
+ EOF
+
+ cat <<-\EOF >functions.c &&
+ 1
+ 2
+ /* function */
+ bar() {
+ foo
+ }
+
+ /* function */
+ foo() {
+ foo
+ }
+
+ 3
+ 4
+ EOF
+
+ git add spaces.txt functions.c &&
+ test_tick &&
+ git commit -m initial &&
+ git branch new &&
+
+ tr "_" " " <<-\EOF >spaces-expect &&
+ diff --git a/spaces.txt b/spaces.txt
+ --- a/spaces.txt
+ +++ b/spaces.txt
+ @@ -3,5 +3,8 @@
+ a
+ _
+ b
+ +a
+ +
+ +b
+ 3
+ 4
+ EOF
+
+ tr "_" " " <<-\EOF >spaces-compacted-expect &&
+ diff --git a/spaces.txt b/spaces.txt
+ --- a/spaces.txt
+ +++ b/spaces.txt
+ @@ -2,6 +2,9 @@
+ 2
+ a
+ _
+ +b
+ +a
+ +
+ b
+ 3
+ 4
+ EOF
+
+ tr "_" " " <<-\EOF >functions-expect &&
+ diff --git a/functions.c b/functions.c
+ --- a/functions.c
+ +++ b/functions.c
+ @@ -1,6 +1,11 @@
+ 1
+ 2
+ /* function */
+ +bar() {
+ + foo
+ +}
+ +
+ +/* function */
+ foo() {
+ foo
+ }
+ EOF
+
+ tr "_" " " <<-\EOF >functions-compacted-expect
+ diff --git a/functions.c b/functions.c
+ --- a/functions.c
+ +++ b/functions.c
+ @@ -1,5 +1,10 @@
+ 1
+ 2
+ +/* function */
+ +bar() {
+ + foo
+ +}
+ +
+ /* function */
+ foo() {
+ foo
+ EOF
+'
+
+test_expect_success 'diff: ugly spaces' '
+ git diff old new -- spaces.txt >out &&
+ compare_diff spaces-expect out
+'
+
+test_expect_success 'diff: nice spaces with --indent-heuristic' '
+ git diff --indent-heuristic old new -- spaces.txt >out-compacted &&
+ compare_diff spaces-compacted-expect out-compacted
+'
+
+test_expect_success 'diff: nice spaces with diff.indentHeuristic' '
+ git -c diff.indentHeuristic=true diff old new -- spaces.txt >out-compacted2 &&
+ compare_diff spaces-compacted-expect out-compacted2
+'
+
+test_expect_success 'diff: --no-indent-heuristic overrides config' '
+ git -c diff.indentHeuristic=true diff --no-indent-heuristic old new -- spaces.txt >out2 &&
+ compare_diff spaces-expect out2
+'
+
+test_expect_success 'diff: --indent-heuristic with --patience' '
+ git diff --indent-heuristic --patience old new -- spaces.txt >out-compacted3 &&
+ compare_diff spaces-compacted-expect out-compacted3
+'
+
+test_expect_success 'diff: --indent-heuristic with --histogram' '
+ git diff --indent-heuristic --histogram old new -- spaces.txt >out-compacted4 &&
+ compare_diff spaces-compacted-expect out-compacted4
+'
+
+test_expect_success 'diff: ugly functions' '
+ git diff old new -- functions.c >out &&
+ compare_diff functions-expect out
+'
+
+test_expect_success 'diff: nice functions with --indent-heuristic' '
+ git diff --indent-heuristic old new -- functions.c >out-compacted &&
+ compare_diff functions-compacted-expect out-compacted
+'
+
+test_expect_success 'blame: ugly spaces' '
+ git blame old..new -- spaces.txt >out-blame &&
+ compare_blame spaces-expect out-blame
+'
+
+test_expect_success 'blame: nice spaces with --indent-heuristic' '
+ git blame --indent-heuristic old..new -- spaces.txt >out-blame-compacted &&
+ compare_blame spaces-compacted-expect out-blame-compacted
+'
+
+test_expect_success 'blame: nice spaces with diff.indentHeuristic' '
+ git -c diff.indentHeuristic=true blame old..new -- spaces.txt >out-blame-compacted2 &&
+ compare_blame spaces-compacted-expect out-blame-compacted2
+'
+
+test_expect_success 'blame: --no-indent-heuristic overrides config' '
+ git -c diff.indentHeuristic=true blame --no-indent-heuristic old..new -- spaces.txt >out-blame2 &&
+ git blame old..new -- spaces.txt >out-blame &&
+ compare_blame spaces-expect out-blame2
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2016 Johannes Schindelin
+#
+
+test_description='Pickaxe options'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+ test_commit initial &&
+ printf "%04096d" 0 >4096-zeroes.txt &&
+ git add 4096-zeroes.txt &&
+ test_tick &&
+ git commit -m "A 4k file"
+'
+test_expect_success '-G matches' '
+ git diff --name-only -G "^0{4096}$" HEAD^ >out &&
+ test 4096-zeroes.txt = "$(cat out)"
+'
+
+test_done
test_cmp patch-id_master patch-id_same
'
+test_expect_success 'patch-id respects config from subdir' '
+ test_config patchid.stable true &&
+ mkdir subdir &&
+
+ # copy these because test_patch_id() looks for them in
+ # the current directory
+ cp bar-then-foo foo-then-bar subdir &&
+
+ (
+ cd subdir &&
+ test_patch_id irrelevant patchid.stable=true
+ )
+'
+
cat >nonl <<\EOF
diff --git i/a w/a
index e69de29..2e65efe 100644
echo ".git/objects/$(echo "$1" | sed -e 's|\(..\)|\1/|')"
}
+# show objects present in pack ($1 should be associated *.idx)
+list_packed_objects () {
+ git show-index <"$1" | cut -d' ' -f2
+}
+
+# has_any pattern-file content-file
+# tests whether content-file has any entry from pattern-file with entries being
+# whole lines.
+has_any () {
+ grep -Ff "$1" "$2"
+}
+
test_expect_success 'setup repo with moderate-sized history' '
for i in $(test_seq 1 10); do
test_commit $i
test_commit side-$i
done &&
git checkout master &&
+ bitmaptip=$(git rev-parse master) &&
blob=$(echo tagged-blob | git hash-object -w --stdin) &&
git tag tagged-blob $blob &&
git config repack.writebitmaps true &&
git repack -d --no-write-bitmap-index
'
+test_expect_success 'pack-objects respects --local (non-local loose)' '
+ git init --bare alt.git &&
+ echo $(pwd)/alt.git/objects >.git/objects/info/alternates &&
+ echo content1 >file1 &&
+ # non-local loose object which is not present in bitmapped pack
+ altblob=$(GIT_DIR=alt.git git hash-object -w file1) &&
+ # non-local loose object which is also present in bitmapped pack
+ git cat-file blob $blob | GIT_DIR=alt.git git hash-object -w --stdin &&
+ git add file1 &&
+ test_tick &&
+ git commit -m commit_file1 &&
+ echo HEAD | git pack-objects --local --stdout --revs >1.pack &&
+ git index-pack 1.pack &&
+ list_packed_objects 1.idx >1.objects &&
+ printf "%s\n" "$altblob" "$blob" >nonlocal-loose &&
+ ! has_any nonlocal-loose 1.objects
+'
+
+test_expect_success 'pack-objects respects --honor-pack-keep (local non-bitmapped pack)' '
+ echo content2 >file2 &&
+ blob2=$(git hash-object -w file2) &&
+ git add file2 &&
+ test_tick &&
+ git commit -m commit_file2 &&
+ printf "%s\n" "$blob2" "$bitmaptip" >keepobjects &&
+ pack2=$(git pack-objects pack2 <keepobjects) &&
+ mv pack2-$pack2.* .git/objects/pack/ &&
+ >.git/objects/pack/pack2-$pack2.keep &&
+ rm $(objpath $blob2) &&
+ echo HEAD | git pack-objects --honor-pack-keep --stdout --revs >2a.pack &&
+ git index-pack 2a.pack &&
+ list_packed_objects 2a.idx >2a.objects &&
+ ! has_any keepobjects 2a.objects
+'
+
+test_expect_success 'pack-objects respects --local (non-local pack)' '
+ mv .git/objects/pack/pack2-$pack2.* alt.git/objects/pack/ &&
+ echo HEAD | git pack-objects --local --stdout --revs >2b.pack &&
+ git index-pack 2b.pack &&
+ list_packed_objects 2b.idx >2b.objects &&
+ ! has_any keepobjects 2b.objects
+'
+
+test_expect_success 'pack-objects respects --honor-pack-keep (local bitmapped pack)' '
+ ls .git/objects/pack/ | grep bitmap >output &&
+ test_line_count = 1 output &&
+ packbitmap=$(basename $(cat output) .bitmap) &&
+ list_packed_objects .git/objects/pack/$packbitmap.idx >packbitmap.objects &&
+ test_when_finished "rm -f .git/objects/pack/$packbitmap.keep" &&
+ >.git/objects/pack/$packbitmap.keep &&
+ echo HEAD | git pack-objects --honor-pack-keep --stdout --revs >3a.pack &&
+ git index-pack 3a.pack &&
+ list_packed_objects 3a.idx >3a.objects &&
+ ! has_any packbitmap.objects 3a.objects
+'
+
+test_expect_success 'pack-objects respects --local (non-local bitmapped pack)' '
+ mv .git/objects/pack/$packbitmap.* alt.git/objects/pack/ &&
+ test_when_finished "mv alt.git/objects/pack/$packbitmap.* .git/objects/pack/" &&
+ echo HEAD | git pack-objects --local --stdout --revs >3b.pack &&
+ git index-pack 3b.pack &&
+ list_packed_objects 3b.idx >3b.objects &&
+ ! has_any packbitmap.objects 3b.objects
+'
+
+test_expect_success 'pack-objects to file can use bitmap' '
+ # make sure we still have 1 bitmap index from previous tests
+ ls .git/objects/pack/ | grep bitmap >output &&
+ test_line_count = 1 output &&
+ # verify equivalent packs are generated with/without using bitmap index
+ packasha1=$(git pack-objects --no-use-bitmap-index --all packa </dev/null) &&
+ packbsha1=$(git pack-objects --use-bitmap-index --all packb </dev/null) &&
+	list_packed_objects packa-$packasha1.idx >packa.objects &&
+	list_packed_objects packb-$packbsha1.idx >packb.objects &&
+ test_cmp packa.objects packb.objects
+'
+
test_expect_success 'full repack, reusing previous bitmaps' '
git repack -ad &&
ls .git/objects/pack/ | grep bitmap >output &&
EOF
'
+test_expect_success 'pack-objects respects --incremental' '
+ cat >revs2 <<-EOF &&
+ HEAD
+ $commit
+ EOF
+ git pack-objects --incremental --stdout --revs <revs2 >4.pack &&
+ git index-pack 4.pack &&
+ list_packed_objects 4.idx >4.objects &&
+ test_line_count = 4 4.objects &&
+ git rev-list --objects $commit >revlist &&
+ cut -d" " -f1 revlist |sort >objects &&
+ test_cmp 4.objects objects
+'
+
test_expect_success 'pack with missing blob' '
rm $(objpath $blob) &&
git pack-objects --stdout --revs <revs >/dev/null
git pack-objects --stdout --revs <revs >/dev/null
'
-test_lazy_prereq JGIT '
- type jgit
-'
-
test_expect_success JGIT 'we can read jgit bitmaps' '
git clone . compat-jgit &&
(
# We could just as easily have used "master"; the "*" emphasizes its
# role as a pattern.
test_must_fail git ls-remote refs*master >actual 2>&1 &&
- test_cmp exp actual
+ test_i18ncmp exp actual
'
test_expect_success 'die with non-2 for wrong repository even with --exit-code' '
test_cmp expect actual
'
+test_lazy_prereq GIT_DAEMON '
+ test_tristate GIT_TEST_GIT_DAEMON &&
+ test "$GIT_TEST_GIT_DAEMON" != false
+'
+
+# This test spawns a daemon, so run it only if the user would be OK with
+# testing with git-daemon.
+test_expect_success PIPE,JGIT,GIT_DAEMON 'indicate no refs in standards-compliant empty remote' '
+ JGIT_DAEMON_PORT=${JGIT_DAEMON_PORT-${this_test#t}} &&
+ JGIT_DAEMON_PID= &&
+ git init --bare empty.git &&
+ >empty.git/git-daemon-export-ok &&
+ mkfifo jgit_daemon_output &&
+ {
+ jgit daemon --port="$JGIT_DAEMON_PORT" . >jgit_daemon_output &
+ JGIT_DAEMON_PID=$!
+ } &&
+ test_when_finished kill "$JGIT_DAEMON_PID" &&
+ {
+ read line &&
+ case $line in
+ Exporting*)
+ ;;
+ *)
+ echo "Expected: Exporting" &&
+ false;;
+ esac &&
+ read line &&
+ case $line in
+ "Listening on"*)
+ ;;
+ *)
+ echo "Expected: Listening on" &&
+ false;;
+ esac
+ } <jgit_daemon_output &&
+ # --exit-code asks the command to exit with 2 when no
+ # matching refs are found.
+ test_expect_code 2 git ls-remote --exit-code git://localhost:$JGIT_DAEMON_PORT/empty.git
+'
test_done
test_expect_success '%C(auto) respects --color' '
git log --color --format="%C(auto)%H" -1 >actual &&
- printf "\\033[33m%s\\033[m\\n" $(git rev-parse HEAD) >expect &&
+ printf "\\033[m\\033[33m%s\\033[m\\n" $(git rev-parse HEAD) >expect &&
test_cmp expect actual
'
git clone . clone
'
+test_expect_success 'set up rebase scenarios' '
+ # temporarily enable an actual ident for this setup
+ test_config user.email foo@example.com &&
+ test_commit new &&
+ git branch side-without-commit HEAD^ &&
+ git checkout -b side-with-commit HEAD^ &&
+ test_commit side
+'
+
+test_expect_success 'fast-forward rebase does not care about ident' '
+ git checkout -B tmp side-without-commit &&
+ git rebase master
+'
+
+test_expect_success 'non-fast-forward rebase refuses to write commits' '
+ test_when_finished "git rebase --abort || true" &&
+ git checkout -B tmp side-with-commit &&
+ test_must_fail git rebase master
+'
+
+test_expect_success 'fast-forward rebase does not care about ident (interactive)' '
+ git checkout -B tmp side-without-commit &&
+ git rebase -i master
+'
+
+test_expect_success 'non-fast-forward rebase refuses to write commits (interactive)' '
+ test_when_finished "git rebase --abort || true" &&
+ git checkout -B tmp side-with-commit &&
+ test_must_fail git rebase -i master
+'
+
+test_expect_success 'noop interactive rebase does not care about ident' '
+ git checkout -B tmp side-with-commit &&
+ git rebase -i HEAD^
+'
+
+test_expect_success 'fast-forward rebase does not care about ident (preserve)' '
+ git checkout -B tmp side-without-commit &&
+ git rebase -p master
+'
+
+test_expect_success 'non-fast-forward rebase refuses to write commits (preserve)' '
+ test_when_finished "git rebase --abort || true" &&
+ git checkout -B tmp side-with-commit &&
+ test_must_fail git rebase -p master
+'
+
test_done
test_expect_success 'blame -L with invalid start' '
test_must_fail git blame -L5 tres 2>errors &&
- grep "has only 2 lines" errors
+ test_i18ngrep "has only 2 lines" errors
'
test_expect_success 'blame -L with invalid end' '
test_must_fail git blame -L1,5 tres 2>errors &&
- grep "has only 2 lines" errors
+ test_i18ngrep "has only 2 lines" errors
'
test_expect_success 'blame parses <end> part of -L' '
--- /dev/null
+#!/bin/sh
+
+test_description='git cat-file filters support'
+. ./test-lib.sh
+
+test_expect_success 'setup ' '
+ echo "*.txt eol=crlf diff=txt" >.gitattributes &&
+ echo "hello" | append_cr >world.txt &&
+ git add .gitattributes world.txt &&
+ test_tick &&
+ git commit -m "Initial commit"
+'
+
+has_cr () {
+ tr '\015' Q <"$1" | grep Q >/dev/null
+}
+
+test_expect_success 'no filters with `git show`' '
+ git show HEAD:world.txt >actual &&
+ ! has_cr actual
+'
+
+test_expect_success 'no filters with cat-file' '
+ git cat-file blob HEAD:world.txt >actual &&
+ ! has_cr actual
+'
+
+test_expect_success 'cat-file --filters converts to worktree version' '
+ git cat-file --filters HEAD:world.txt >actual &&
+ has_cr actual
+'
+
+test_expect_success 'cat-file --filters --path=<path> works' '
+ sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
+ git cat-file --filters --path=world.txt $sha1 >actual &&
+ has_cr actual
+'
+
+test_expect_success 'cat-file --textconv --path=<path> works' '
+ sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
+ test_config diff.txt.textconv "tr A-Za-z N-ZA-Mn-za-m <" &&
+ git cat-file --textconv --path=hello.txt $sha1 >rot13 &&
+ test uryyb = "$(cat rot13 | remove_cr)"
+'
+
+test_expect_success '--path=<path> complains without --textconv/--filters' '
+ sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
+ test_must_fail git cat-file --path=hello.txt blob $sha1 >actual 2>err &&
+ test ! -s actual &&
+ grep "path.*needs.*filters" err
+'
+
+test_expect_success 'cat-file --textconv --batch works' '
+ sha1=$(git rev-parse -q --verify HEAD:world.txt) &&
+ test_config diff.txt.textconv "tr A-Za-z N-ZA-Mn-za-m <" &&
+ printf "%s hello.txt\n%s hello\n" $sha1 $sha1 |
+ git cat-file --textconv --batch >actual &&
+ printf "%s blob 6\nuryyb\r\n\n%s blob 6\nhello\n\n" \
+ $sha1 $sha1 >expect &&
+ test_cmp expect actual
+'
+
+test_done
test "$uid" != 0
'
+test_lazy_prereq JGIT '
+ type jgit
+'
+
# SANITY is about "can you correctly predict what the filesystem would
# do by only looking at the permission bits of the files and
# directories?" A typical example of !SANITY is running the test
schedule_dir_for_removal(ce->name, ce_namelen(ce));
}
-static struct checkout state;
-static int check_updates(struct unpack_trees_options *o)
+static int check_updates(struct unpack_trees_options *o,
+ const struct checkout *state)
{
unsigned cnt = 0, total = 0;
struct progress *progress = NULL;
display_progress(progress, ++cnt);
ce->ce_flags &= ~CE_UPDATE;
if (o->update && !o->dry_run) {
- errs |= checkout_entry(ce, &state, NULL);
+ errs |= checkout_entry(ce, state, NULL);
}
}
}
int i, ret;
static struct cache_entry *dfc;
struct exclude_list el;
+ struct checkout state = CHECKOUT_INIT;
if (len > MAX_UNPACK_TREES)
die("unpack_trees takes at most %d trees", MAX_UNPACK_TREES);
- memset(&state, 0, sizeof(state));
- state.base_dir = "";
state.force = 1;
state.quiet = 1;
state.refresh_cache = 1;
}
o->src_index = NULL;
- ret = check_updates(o) ? (-2) : 0;
+ ret = check_updates(o, &state) ? (-2) : 0;
if (o->dst_index) {
if (!ret) {
if (!o->result.cache_tree)
static int firstnote = 1;
size_t loglen = strlen(log);
printf("commit %s\n", note_ref);
- printf("committer %s <%s@%s> %ld +0000\n", author, author, "local", timestamp);
+ printf("committer %s <%s@%s> %lu +0000\n", author, author, "local", timestamp);
printf("data %"PRIuMAX"\n", (uintmax_t)loglen);
fwrite(log, loglen, 1, stdout);
if (firstnote) {
}
printf("commit %s\n", local_ref);
printf("mark :%"PRIu32"\n", revision);
- printf("committer %s <%s@%s> %ld +0000\n",
+ printf("committer %s <%s@%s> %lu +0000\n",
*author ? author : "nobody",
*author ? author : "nobody",
*uuid ? uuid : "local", timestamp);
if (d->new_submodule_commits || d->dirty_submodule) {
strbuf_addstr(&extra, " (");
if (d->new_submodule_commits)
- strbuf_addf(&extra, _("new commits, "));
+ strbuf_addstr(&extra, _("new commits, "));
if (d->dirty_submodule & DIRTY_SUBMODULE_MODIFIED)
- strbuf_addf(&extra, _("modified content, "));
+ strbuf_addstr(&extra, _("modified content, "));
if (d->dirty_submodule & DIRTY_SUBMODULE_UNTRACKED)
- strbuf_addf(&extra, _("untracked content, "));
+ strbuf_addstr(&extra, _("untracked content, "));
strbuf_setlen(&extra, extra.len - 2);
strbuf_addch(&extra, ')');
}
static long ff_regexp(const char *line, long len,
char *buffer, long buffer_size, void *priv)
{
- char *line_buffer;
struct ff_regs *regs = priv;
regmatch_t pmatch[2];
int i;
- int result = -1;
+ int result;
/* Exclude terminating newline (and cr) from matching */
if (len > 0 && line[len-1] == '\n') {
len--;
}
- line_buffer = xstrndup(line, len); /* make NUL terminated */
-
for (i = 0; i < regs->nr; i++) {
struct ff_reg *reg = regs->array + i;
-	if (!regexec(&reg->re, line_buffer, 2, pmatch, 0)) {
+	if (!regexec_buf(&reg->re, line, len, 2, pmatch, 0)) {
if (reg->negate)
- goto fail;
+ return -1;
break;
}
}
if (regs->nr <= i)
- goto fail;
+ return -1;
i = pmatch[1].rm_so >= 0 ? 1 : 0;
line += pmatch[i].rm_so;
result = pmatch[i].rm_eo - pmatch[i].rm_so;
while (result > 0 && (isspace(line[result - 1])))
result--;
memcpy(buffer, line, result);
- fail:
- free(line_buffer);
return result;
}
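
The new regexec_buf() call matches against the length-delimited line directly
instead of against a NUL-terminated copy, which is why the xstrndup()/free()
pair could be dropped. The helper is built on the REG_STARTEND extension,
where pmatch[0] on input limits the region to be searched. A minimal
stand-alone sketch of that idea (illustrative only; it assumes a libc that
supports REG_STARTEND, and the buffer and pattern are made up):

#include <regex.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	regex_t re;
	regmatch_t pmatch[1];
	const char buf[] = "static int foo(void)\nnot part of the line";
	size_t len = strcspn(buf, "\n");	/* search only the first line */

	if (regcomp(&re, "foo", REG_EXTENDED))
		return 1;

	/* With REG_STARTEND, pmatch[0] on input bounds the searched region. */
	pmatch[0].rm_so = 0;
	pmatch[0].rm_eo = len;
	if (!regexec(&re, buf, 1, pmatch, REG_STARTEND))
		printf("match at %d..%d\n",
		       (int)pmatch[0].rm_so, (int)pmatch[0].rm_eo);

	regfree(&re);
	return 0;
}
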
#define XDF_IGNORE_BLANK_LINES (1 << 7)
#define XDF_COMPACTION_HEURISTIC (1 << 8)
+#define XDF_INDENT_HEURISTIC (1 << 9)
#define XDL_EMIT_FUNCNAMES (1 << 0)
#define XDL_EMIT_FUNCCONTEXT (1 << 2)
}
-static int is_blank_line(xrecord_t **recs, long ix, long flags)
+static int is_blank_line(xrecord_t *rec, long flags)
{
- return xdl_blankline(recs[ix]->ptr, recs[ix]->size, flags);
+ return xdl_blankline(rec->ptr, rec->size, flags);
}
-static int recs_match(xrecord_t **recs, long ixs, long ix, long flags)
+static int recs_match(xrecord_t *rec1, xrecord_t *rec2, long flags)
{
- return (recs[ixs]->ha == recs[ix]->ha &&
- xdl_recmatch(recs[ixs]->ptr, recs[ixs]->size,
- recs[ix]->ptr, recs[ix]->size,
+ return (rec1->ha == rec2->ha &&
+ xdl_recmatch(rec1->ptr, rec1->size,
+ rec2->ptr, rec2->size,
flags));
}
-int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
- long ix, ixo, ixs, ixref, grpsiz, nrec = xdf->nrec;
- char *rchg = xdf->rchg, *rchgo = xdfo->rchg;
- unsigned int blank_lines;
- xrecord_t **recs = xdf->recs;
+/*
+ * If a line is indented more than this, get_indent() just returns this value.
+ * This avoids having to do absurd amounts of work for data that are not
+ * human-readable text, and also ensures that the output of get_indent fits within
+ * an int.
+ */
+#define MAX_INDENT 200
+/*
+ * Return the amount of indentation of the specified line, treating TAB as 8
+ * columns. Return -1 if line is empty or contains only whitespace. Clamp the
+ * output value at MAX_INDENT.
+ */
+static int get_indent(xrecord_t *rec)
+{
+ long i;
+ int ret = 0;
+
+ for (i = 0; i < rec->size; i++) {
+ char c = rec->ptr[i];
+
+ if (!XDL_ISSPACE(c))
+ return ret;
+ else if (c == ' ')
+ ret += 1;
+ else if (c == '\t')
+ ret += 8 - ret % 8;
+ /* ignore other whitespace characters */
+
+ if (ret >= MAX_INDENT)
+ return MAX_INDENT;
+ }
+
+ /* The line contains only whitespace. */
+ return -1;
+}
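
As a quick check of the rule described above, a stand-alone reimplementation
(illustrative only; indent_of() is a made-up name and the MAX_INDENT clamp is
omitted) yields 10 for a line starting with one tab and two spaces, 4 for four
leading spaces, and -1 for a whitespace-only line:

#include <stdio.h>
#include <string.h>

static int indent_of(const char *line)
{
	int ret = 0;
	size_t i, len = strlen(line);

	for (i = 0; i < len; i++) {
		char c = line[i];

		if (c == ' ')
			ret += 1;
		else if (c == '\t')
			ret += 8 - ret % 8;	/* TAB advances to the next multiple of 8 */
		else if (!strchr("\f\v\r", c))
			return ret;		/* first non-whitespace column */
		/* other whitespace is ignored, as in get_indent() */
	}
	return -1;				/* line is empty or all whitespace */
}

int main(void)
{
	printf("%d %d %d\n",
	       indent_of("\t  foo"),	/* 8 + 2 = 10 */
	       indent_of("    bar"),	/* 4 */
	       indent_of("   "));	/* -1 */
	return 0;
}
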
+
+/*
+ * If more than this number of consecutive blank rows are found, just return this
+ * value. This avoids requiring O(N^2) work for pathological cases, and also
+ * ensures that the output of score_split fits in an int.
+ */
+#define MAX_BLANKS 20
+
+/* Characteristics measured about a hypothetical split position. */
+struct split_measurement {
/*
- * This is the same of what GNU diff does. Move back and forward
- * change groups for a consistent and pretty diff output. This also
- * helps in finding joinable change groups and reduce the diff size.
+ * Is the split at the end of the file (aside from any blank lines)?
*/
- for (ix = ixo = 0;;) {
- /*
- * Find the first changed line in the to-be-compacted file.
- * We need to keep track of both indexes, so if we find a
- * changed lines group on the other file, while scanning the
- * to-be-compacted file, we need to skip it properly. Note
- * that loops that are testing for changed lines on rchg* do
- * not need index bounding since the array is prepared with
- * a zero at position -1 and N.
- */
- for (; ix < nrec && !rchg[ix]; ix++)
- while (rchgo[ixo++]);
- if (ix == nrec)
+ int end_of_file;
+
+ /*
+ * How much is the line immediately following the split indented (or -1 if
+ * the line is blank):
+ */
+ int indent;
+
+ /*
+ * How many consecutive lines above the split are blank?
+ */
+ int pre_blank;
+
+ /*
+ * How much is the nearest non-blank line above the split indented (or -1
+ * if there is no such line)?
+ */
+ int pre_indent;
+
+ /*
+ * How many lines after the line following the split are blank?
+ */
+ int post_blank;
+
+ /*
+ * How much is the nearest non-blank line after the line following the
+ * split indented (or -1 if there is no such line)?
+ */
+ int post_indent;
+};
+
+struct split_score {
+ /* The effective indent of this split (smaller is preferred). */
+ int effective_indent;
+
+ /* Penalty for this split (smaller is preferred). */
+ int penalty;
+};
+
+/*
+ * Fill m with information about a hypothetical split of xdf above line split.
+ */
+static void measure_split(const xdfile_t *xdf, long split,
+ struct split_measurement *m)
+{
+ long i;
+
+ if (split >= xdf->nrec) {
+ m->end_of_file = 1;
+ m->indent = -1;
+ } else {
+ m->end_of_file = 0;
+ m->indent = get_indent(xdf->recs[split]);
+ }
+
+ m->pre_blank = 0;
+ m->pre_indent = -1;
+ for (i = split - 1; i >= 0; i--) {
+ m->pre_indent = get_indent(xdf->recs[i]);
+ if (m->pre_indent != -1)
+ break;
+ m->pre_blank += 1;
+ if (m->pre_blank == MAX_BLANKS) {
+ m->pre_indent = 0;
+ break;
+ }
+ }
+
+ m->post_blank = 0;
+ m->post_indent = -1;
+ for (i = split + 1; i < xdf->nrec; i++) {
+ m->post_indent = get_indent(xdf->recs[i]);
+ if (m->post_indent != -1)
break;
+ m->post_blank += 1;
+ if (m->post_blank == MAX_BLANKS) {
+ m->post_indent = 0;
+ break;
+ }
+ }
+}
+
+/*
+ * The empirically-determined weight factors used by score_split() below.
+ * Larger values mean that the position is a less favorable place to split.
+ *
+ * Note that scores are only ever compared against each other, so multiplying
+ * all of these weight/penalty values by the same factor wouldn't change the
+ * heuristic's behavior. Still, we need to set that arbitrary scale *somehow*.
+ * In practice, these numbers are chosen to be large enough that they can be
+ * adjusted relative to each other with sufficient precision despite using
+ * integer math.
+ */
+
+/* Penalty if there are no non-blank lines before the split */
+#define START_OF_FILE_PENALTY 1
+
+/* Penalty if there are no non-blank lines after the split */
+#define END_OF_FILE_PENALTY 21
+/* Multiplier for the number of blank lines around the split */
+#define TOTAL_BLANK_WEIGHT (-30)
+
+/* Multiplier for the number of blank lines after the split */
+#define POST_BLANK_WEIGHT 6
+
+/*
+ * Penalties applied if the line is indented more than its predecessor
+ */
+#define RELATIVE_INDENT_PENALTY (-4)
+#define RELATIVE_INDENT_WITH_BLANK_PENALTY 10
+
+/*
+ * Penalties applied if the line is indented less than both its predecessor and
+ * its successor
+ */
+#define RELATIVE_OUTDENT_PENALTY 24
+#define RELATIVE_OUTDENT_WITH_BLANK_PENALTY 17
+
+/*
+ * Penalties applied if the line is indented less than its predecessor but not
+ * less than its successor
+ */
+#define RELATIVE_DEDENT_PENALTY 23
+#define RELATIVE_DEDENT_WITH_BLANK_PENALTY 17
+
+/*
+ * We only consider whether the sum of the effective indents of the two splits
+ * is less than (-1), equal to (0), or greater than (+1) that of the other
+ * candidate. The resulting
+ * value is multiplied by the following weight and combined with the penalty to
+ * determine the better of two scores.
+ */
+#define INDENT_WEIGHT 60
+
+/*
+ * Compute a badness score for the hypothetical split whose measurements are
+ * stored in m. The weight factors were determined empirically using the tools and
+ * corpus described in
+ *
+ * https://github.com/mhagger/diff-slider-tools
+ *
+ * Also see that project if you want to improve the weights based on, for example,
+ * a larger or more diverse corpus.
+ */
+static void score_add_split(const struct split_measurement *m, struct split_score *s)
+{
+ /*
+ * A place to accumulate penalty factors (positive makes this index more
+ * favored):
+ */
+ int post_blank, total_blank, indent, any_blanks;
+
+ if (m->pre_indent == -1 && m->pre_blank == 0)
+ s->penalty += START_OF_FILE_PENALTY;
+
+ if (m->end_of_file)
+ s->penalty += END_OF_FILE_PENALTY;
+
+ /*
+ * Set post_blank to the number of blank lines following the split,
+ * including the line immediately after the split:
+ */
+ post_blank = (m->indent == -1) ? 1 + m->post_blank : 0;
+ total_blank = m->pre_blank + post_blank;
+
+ /* Penalties based on nearby blank lines: */
+ s->penalty += TOTAL_BLANK_WEIGHT * total_blank;
+ s->penalty += POST_BLANK_WEIGHT * post_blank;
+
+ if (m->indent != -1)
+ indent = m->indent;
+ else
+ indent = m->post_indent;
+
+ any_blanks = (total_blank != 0);
+
+ /* Note that the effective indent is -1 at the end of the file: */
+ s->effective_indent += indent;
+
+ if (indent == -1) {
+ /* No additional adjustments needed. */
+ } else if (m->pre_indent == -1) {
+ /* No additional adjustments needed. */
+ } else if (indent > m->pre_indent) {
+ /*
+ * The line is indented more than its predecessor.
+ */
+ s->penalty += any_blanks ?
+ RELATIVE_INDENT_WITH_BLANK_PENALTY :
+ RELATIVE_INDENT_PENALTY;
+ } else if (indent == m->pre_indent) {
+ /*
+ * The line has the same indentation level as its predecessor.
+ * No additional adjustments needed.
+ */
+ } else {
/*
- * Record the start of a changed-group in the to-be-compacted file
- * and find the end of it, on both to-be-compacted and other file
- * indexes (ix and ixo).
+ * The line is indented less than its predecessor. It could be
+ * the block terminator of the previous block, but it could
+ * also be the start of a new block (e.g., an "else" block, or
+ * maybe the previous block didn't have a block terminator).
+ * Try to distinguish those cases based on what comes next:
*/
- ixs = ix;
- for (ix++; rchg[ix]; ix++);
- for (; rchgo[ixo]; ixo++);
+ if (m->post_indent != -1 && m->post_indent > indent) {
+ /*
+ * The following line is indented more. So it is likely
+ * that this line is the start of a block.
+ */
+ s->penalty += any_blanks ?
+ RELATIVE_OUTDENT_WITH_BLANK_PENALTY :
+ RELATIVE_OUTDENT_PENALTY;
+ } else {
+ /*
+ * That was probably the end of a block.
+ */
+ s->penalty += any_blanks ?
+ RELATIVE_DEDENT_WITH_BLANK_PENALTY :
+ RELATIVE_DEDENT_PENALTY;
+ }
+ }
+}
+
+static int score_cmp(struct split_score *s1, struct split_score *s2)
+{
+	/* -1 if s1->effective_indent < s2->effective_indent, etc. */
+ int cmp_indents = ((s1->effective_indent > s2->effective_indent) -
+ (s1->effective_indent < s2->effective_indent));
+
+ return INDENT_WEIGHT * cmp_indents + (s1->penalty - s2->penalty);
+}
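
To make the -1/0/+1 comparison and INDENT_WEIGHT concrete, here is a
stand-alone restatement of score_cmp() applied to two hypothetical candidate
shifts (the penalty numbers are invented for illustration; a result <= 0 means
the first candidate is kept, matching the comparison done in
xdl_change_compact() further down):

#include <stdio.h>

#define INDENT_WEIGHT 60

struct split_score { int effective_indent; int penalty; };

static int cmp_scores(const struct split_score *s1, const struct split_score *s2)
{
	/* -1 if s1->effective_indent < s2->effective_indent, etc. */
	int cmp_indents = ((s1->effective_indent > s2->effective_indent) -
			   (s1->effective_indent < s2->effective_indent));

	return INDENT_WEIGHT * cmp_indents + (s1->penalty - s2->penalty);
}

int main(void)
{
	/* a: both splits land on column 0 next to blank lines, so the
	 * TOTAL_BLANK_WEIGHT term has pushed its penalty well below zero */
	struct split_score a = { 0, -60 };
	/* b: both splits land inside an indented block, no blank lines */
	struct split_score b = { 8, 0 };

	/* 60 * (-1) + (-60 - 0) = -120, so candidate "a" is preferred */
	printf("%d\n", cmp_scores(&a, &b));
	return 0;
}
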
+
+/*
+ * Represent a group of changed lines in an xdfile_t (i.e., a contiguous group
+ * of lines that was inserted or deleted from the corresponding version of the
+ * file). We consider there to be such a group at the beginning of the file, at
+ * the end of the file, and between any two unchanged lines, though most such
+ * groups will usually be empty.
+ *
+ * If the first line in a group is equal to the line following the group, then
+ * the group can be slid down. Similarly, if the last line in a group is equal
+ * to the line preceding the group, then the group can be slid up. See
+ * group_slide_down() and group_slide_up().
+ *
+ * Note that loops that are testing for changed lines in xdf->rchg do not need
+ * index bounding since the array is prepared with a zero at position -1 and N.
+ */
+struct group {
+ /*
+ * The index of the first changed line in the group, or the index of
+ * the unchanged line above which the (empty) group is located.
+ */
+ long start;
+
+ /*
+ * The index of the first unchanged line after the group. For an empty
+ * group, end is equal to start.
+ */
+ long end;
+};
+
+/*
+ * Initialize g to point at the first group in xdf.
+ */
+static void group_init(xdfile_t *xdf, struct group *g)
+{
+ g->start = g->end = 0;
+ while (xdf->rchg[g->end])
+ g->end++;
+}
+
+/*
+ * Move g to describe the next (possibly empty) group in xdf and return 0. If g
+ * is already at the end of the file, do nothing and return -1.
+ */
+static inline int group_next(xdfile_t *xdf, struct group *g)
+{
+ if (g->end == xdf->nrec)
+ return -1;
+ g->start = g->end + 1;
+ for (g->end = g->start; xdf->rchg[g->end]; g->end++)
+ ;
+
+ return 0;
+}
+
+/*
+ * Move g to describe the previous (possibly empty) group in xdf and return 0.
+ * If g is already at the beginning of the file, do nothing and return -1.
+ */
+static inline int group_previous(xdfile_t *xdf, struct group *g)
+{
+ if (g->start == 0)
+ return -1;
+
+ g->end = g->start - 1;
+ for (g->start = g->end; xdf->rchg[g->start - 1]; g->start--)
+ ;
+
+ return 0;
+}
+
+/*
+ * If g can be slid toward the end of the file, do so, and if it bumps into a
+ * following group, expand this group to include it. Return 0 on success or -1
+ * if g cannot be slid down.
+ */
+static int group_slide_down(xdfile_t *xdf, struct group *g, long flags)
+{
+ if (g->end < xdf->nrec &&
+ recs_match(xdf->recs[g->start], xdf->recs[g->end], flags)) {
+ xdf->rchg[g->start++] = 0;
+ xdf->rchg[g->end++] = 1;
+
+ while (xdf->rchg[g->end])
+ g->end++;
+
+ return 0;
+ } else {
+ return -1;
+ }
+}
+
+/*
+ * If g can be slid toward the beginning of the file, do so, and if it bumps
+ * into a previous group, expand this group to include it. Return 0 on success
+ * or -1 if g cannot be slid up.
+ */
+static int group_slide_up(xdfile_t *xdf, struct group *g, long flags)
+{
+ if (g->start > 0 &&
+ recs_match(xdf->recs[g->start - 1], xdf->recs[g->end - 1], flags)) {
+ xdf->rchg[--g->start] = 1;
+ xdf->rchg[--g->end] = 0;
+
+ while (xdf->rchg[g->start - 1])
+ g->start--;
+
+ return 0;
+ } else {
+ return -1;
+ }
+}
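
The effect of a single slide is easiest to see on a toy change map. In the
sketch below (illustrative only, not xdiff code) the new file inserts a
closing brace followed by the start of a second block; because the first line
of the group equals the first unchanged line after it, the group can slide
down one line so the hunk covers the complete "if (b) { ... }" block instead
of a dangling brace:

#include <stdio.h>
#include <string.h>

int main(void)
{
	/* new-file lines; rchg[i] == 1 marks line i as added */
	const char *lines[] = { "x", "if (a) {", "}", "if (b) {", "}", "y" };
	char rchg[6]        = {  0,   0,          1,   1,          0,   0  };
	int start = 2, end = 4;	/* group covers lines 2..3; line 4 is unchanged */

	/* the same comparison recs_match() performs, minus the hash shortcut */
	if (!strcmp(lines[start], lines[end])) {
		rchg[start++] = 0;	/* line 2 becomes context */
		rchg[end++] = 1;	/* line 4 joins the group */
	}

	printf("group now covers lines %d..%d\n", start, end - 1);	/* 3..4 */
	return 0;
}
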
+
+static void xdl_bug(const char *msg)
+{
+ fprintf(stderr, "BUG: %s\n", msg);
+ exit(1);
+}
+
+/*
+ * Move back and forward change groups for a consistent and pretty diff output.
+ * This also helps in finding joinable change groups and reducing the diff
+ * size.
+ */
+int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
+ struct group g, go;
+ long earliest_end, end_matching_other;
+ long groupsize;
+ unsigned int blank_lines;
+
+ group_init(xdf, &g);
+ group_init(xdfo, &go);
+
+ while (1) {
+ /* If the group is empty in the to-be-compacted file, skip it: */
+ if (g.end == g.start)
+ goto next;
+
+ /*
+ * Now shift the change up and then down as far as possible in
+ * each direction. If it bumps into any other changes, merge them.
+ */
do {
- grpsiz = ix - ixs;
- blank_lines = 0;
+ groupsize = g.end - g.start;
/*
- * If the line before the current change group, is equal to
- * the last line of the current change group, shift backward
- * the group.
+ * Keep track of the last "end" index that causes this
+ * group to align with a group of changed lines in the
+ * other file. -1 indicates that we haven't found such
+ * a match yet:
*/
- while (ixs > 0 && recs_match(recs, ixs - 1, ix - 1, flags)) {
- rchg[--ixs] = 1;
- rchg[--ix] = 0;
-
- /*
- * This change might have joined two change groups,
- * so we try to take this scenario in account by moving
- * the start index accordingly (and so the other-file
- * end-of-group index).
- */
- for (; rchg[ixs - 1]; ixs--);
- while (rchgo[--ixo]);
- }
+ end_matching_other = -1;
/*
- * Record the end-of-group position in case we are matched
- * with a group of changes in the other file (that is, the
- * change record before the end-of-group index in the other
- * file is set).
+ * Boolean value that records whether there are any blank
+ * lines that could be made to be the last line of this
+ * group.
*/
- ixref = rchgo[ixo - 1] ? ix: nrec;
+ blank_lines = 0;
+
+ /* Shift the group backward as much as possible: */
+ while (!group_slide_up(xdf, &g, flags))
+ if (group_previous(xdfo, &go))
+ xdl_bug("group sync broken sliding up");
/*
- * If the first line of the current change group, is equal to
- * the line next of the current change group, shift forward
- * the group.
+			 * This is the highest that this group can be shifted.
+ * Record its end index:
*/
- while (ix < nrec && recs_match(recs, ixs, ix, flags)) {
- blank_lines += is_blank_line(recs, ix, flags);
-
- rchg[ixs++] = 0;
- rchg[ix++] = 1;
-
- /*
- * This change might have joined two change groups,
- * so we try to take this scenario in account by moving
- * the start index accordingly (and so the other-file
- * end-of-group index). Keep tracking the reference
- * index in case we are shifting together with a
- * corresponding group of changes in the other file.
- */
- for (; rchg[ix]; ix++);
- while (rchgo[++ixo])
- ixref = ix;
- }
- } while (grpsiz != ix - ixs);
+ earliest_end = g.end;
- /*
- * Try to move back the possibly merged group of changes, to match
- * the recorded position in the other file.
- */
- while (ixref < ix) {
- rchg[--ixs] = 1;
- rchg[--ix] = 0;
- while (rchgo[--ixo]);
- }
+ if (go.end > go.start)
+ end_matching_other = g.end;
+
+ /* Now shift the group forward as far as possible: */
+ while (1) {
+ if (!blank_lines)
+ blank_lines = is_blank_line(
+ xdf->recs[g.end - 1],
+ flags);
+
+ if (group_slide_down(xdf, &g, flags))
+ break;
+ if (group_next(xdfo, &go))
+ xdl_bug("group sync broken sliding down");
+
+ if (go.end > go.start)
+ end_matching_other = g.end;
+ }
+ } while (groupsize != g.end - g.start);
/*
- * If a group can be moved back and forth, see if there is a
- * blank line in the moving space. If there is a blank line,
- * make sure the last blank line is the end of the group.
+ * If the group can be shifted, then we can possibly use this
+ * freedom to produce a more intuitive diff.
*
- * As we already shifted the group forward as far as possible
- * in the earlier loop, we need to shift it back only if at all.
+ * The group is currently shifted as far down as possible, so the
+ * heuristics below only have to handle upwards shifts.
*/
- if ((flags & XDF_COMPACTION_HEURISTIC) && blank_lines) {
- while (ixs > 0 &&
- !is_blank_line(recs, ix - 1, flags) &&
- recs_match(recs, ixs - 1, ix - 1, flags)) {
- rchg[--ixs] = 1;
- rchg[--ix] = 0;
+
+ if (g.end == earliest_end) {
+ /* no shifting was possible */
+ } else if (end_matching_other != -1) {
+ /*
+ * Move the possibly merged group of changes back to line
+ * up with the last group of changes from the other file
+ * that it can align with.
+ */
+ while (go.end == go.start) {
+ if (group_slide_up(xdf, &g, flags))
+ xdl_bug("match disappeared");
+ if (group_previous(xdfo, &go))
+ xdl_bug("group sync broken sliding to match");
+ }
+ } else if ((flags & XDF_COMPACTION_HEURISTIC) && blank_lines) {
+ /*
+ * Compaction heuristic: if it is possible to shift the
+ * group to make its bottom line a blank line, do so.
+ *
+ * As we already shifted the group forward as far as
+ * possible in the earlier loop, we only need to handle
+ * backward shifts, not forward ones.
+ */
+ while (!is_blank_line(xdf->recs[g.end - 1], flags)) {
+ if (group_slide_up(xdf, &g, flags))
+ xdl_bug("blank line disappeared");
+ if (group_previous(xdfo, &go))
+ xdl_bug("group sync broken sliding to blank line");
+ }
+ } else if (flags & XDF_INDENT_HEURISTIC) {
+ /*
+ * Indent heuristic: a group of pure add/delete lines
+ * implies two splits, one between the end of the "before"
+ * context and the start of the group, and another between
+ * the end of the group and the beginning of the "after"
+ * context. Some splits are aesthetically better and some
+ * are worse. We compute a badness "score" for each split,
+ * and add the scores for the two splits to define a
+ * "score" for each position that the group can be shifted
+ * to. Then we pick the shift with the lowest score.
+ */
+ long shift, best_shift = -1;
+ struct split_score best_score;
+
+ for (shift = earliest_end; shift <= g.end; shift++) {
+ struct split_measurement m;
+ struct split_score score = {0, 0};
+
+ measure_split(xdf, shift, &m);
+ score_add_split(&m, &score);
+ measure_split(xdf, shift - groupsize, &m);
+ score_add_split(&m, &score);
+ if (best_shift == -1 ||
+ score_cmp(&score, &best_score) <= 0) {
+ best_score.effective_indent = score.effective_indent;
+ best_score.penalty = score.penalty;
+ best_shift = shift;
+ }
+ }
+
+ while (g.end > best_shift) {
+ if (group_slide_up(xdf, &g, flags))
+ xdl_bug("best shift unreached");
+ if (group_previous(xdfo, &go))
+					xdl_bug("group sync broken sliding to best shift");
}
}
+
+ next:
+ /* Move past the just-processed group: */
+ if (group_next(xdf, &g))
+ break;
+ if (group_next(xdfo, &go))
+ xdl_bug("group sync broken moving to next group");
}
+ if (!group_next(xdfo, &go))
+ xdl_bug("group sync broken at end of file");
+
return 0;
}
if (xche->next) {
long l = XDL_MIN(xche->next->i1,
xe->xdf1.nrec - 1);
- if (l <= e1 ||
+ if (l - xecfg->ctxlen <= e1 ||
get_func_line(xe, xecfg, NULL, l, e1) < 0) {
xche = xche->next;
goto post_context_calculation;