info: git.info gitman.info
-install: man
+install: install-man
+
+install-man: man
$(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
$(INSTALL) -d -m 755 $(DESTDIR)$(man5dir)
$(INSTALL) -d -m 755 $(DESTDIR)$(man7dir)
install-webdoc : html
sh ./install-webdoc.sh $(WEBDOC_DEST)
-quick-install:
+quick-install: quick-install-man
+
+quick-install-man:
sh ./install-doc-quick.sh $(DOC_REF) $(DESTDIR)$(mandir)
quick-install-html:
Fixes since v1.6.0.3
--------------------
-* 'git-add -p' said "No changes" when only binary files were changed.
+* 'git add -p' said "No changes" when only binary files were changed.
-* git-archive did not work correctly in bare repositories.
+* 'git archive' did not work correctly in bare repositories.
+
+* 'git checkout -t -b newbranch' when you are on a detached HEAD was broken.
* when we refuse to detect renames because there are too many new or
- deleted files, we did not say how many there are.
+ deleted files, 'git diff' did not say how many there are.
-* 'git-push --mirror' tried and failed to push the stash; there is no
+* 'git push --mirror' tried and failed to push the stash; there is no
point in sending it to begin with.
-* 'git-send-email' had a small fd leak while scanning directory.
+* 'git push' did not update the remote tracking reference if the corresponding
+ ref on the remote end happened to be already up to date.
+
+* 'git pull $there $branch:$current_branch' did not work when you were on
+ a branch yet to be born.
+
+* when giving up resolving a conflicted merge, 'git reset --hard' failed
+ to remove new paths from the working tree.
+
+* 'git send-email' had a small fd leak while scanning directory.
-* git-svn used deprecated 'git-foo' form of subcommand invocaition.
+* 'git status' incorrectly reported a submodule directory as an untracked
+ directory.
+
+* 'git svn' used deprecated 'git-foo' form of subcommand invocation.
+
+* 'git update-ref -d' to remove a reference did not honor the --no-deref option.
* Plugged small memleaks here and there.
* Also contains many documentation updates.
-
---
-exec >/var/tmp/1
-O=v1.6.0.3-22-gc2163c6
-echo O=$(git describe maint)
-git shortlog --no-merges $O..maint
--- /dev/null
+GIT v1.6.0.5 Release Notes
+==========================
+
+Fixes since v1.6.0.4
+--------------------
+
+* 'git checkout' used to crash when your HEAD was pointing at a deleted
+ branch.
+
+* 'git checkout' from an un-checked-out state did not allow switching out
+ of the current branch.
+
+* 'git pack-objects' did not make its best effort to honor the --max-pack-size
+  option when the first object alone already busted the given limit, and
+  ended up placing many objects in a single pack.
+
+* 'make check' cannot be run without sparse; people may have meant to say
+ 'make test' instead, so suggest that.
+
+* Many unsafe calls to sprintf() style varargs functions are corrected.
+
* "git bisect" is careful about a user mistake and suggests testing of
merge base first when good is not a strict ancestor of bad.
+* "git blame" re-encodes the commit metainfo to UTF-8 from i18n.commitEncoding
+ by default.
+
* "git check-attr --stdin" can check attributes for multiple paths.
* "git checkout --track origin/hack" used to be a syntax error. It now
* "git merge -s $strategy" can use a custom built strategy if you have a
command "git-merge-$strategy" on your $PATH.
+* "git push" can be told to reject deletion of refs with receive.denyDeletes
+ configuration.
+
* "git rebase" honours pre-rebase hook; use --no-verify to bypass it.
* "git rebase -p" uses interactive rebase machinery now to preserve the merges.
* "git svn branch" can create new branches on the other end.
+* "gitweb" can use more saner PATH_INFO based URL.
+
(internal)
* "git hash-object" learned to lie about the path being hashed, so that
* various callers of git-merge-recursive avoid forking it as an external
process.
+* Git class defined in "Git.pm" can be subclassed a bit more easily.
+
+* We used to link the GNU regex library as a compatibility layer for some
+  platforms, but it turns out it is not necessary on most of them.
+
+* Some path handling routines used a fixed number of buffers alternately,
+  but depending on the call depth this arrangement led to hard-to-track
+  bugs. This issue is being addressed.
+
Fixes since v1.6.0
------------------
--
exec >/var/tmp/1
-O=v1.6.0.3-574-gaebd173
+O=v1.6.0.3-639-ga1a846a
echo O=$(git describe master)
git shortlog --no-merges $O..master ^maint
Show the result incrementally in a format designed for
machine consumption.
+--encoding=<encoding>::
+ Specifies the encoding used to output author names
+ and commit summaries. Setting it to `none` makes blame
+ output unconverted data. For more information see the
+ discussion about encoding in the linkgit:git-log[1]
+ manual page.
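
An illustrative invocation (the path is made up, and this example is not part
of the patch itself): to see the author information exactly as recorded,
without any re-encoding:

------------
$ git blame --encoding=none Documentation/git-blame.txt
------------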
+
--contents <file>::
When <rev> is not specified, the command annotates the
changes starting backwards from the working tree copy.
even if that push is forced. This configuration variable is
set when initializing a shared repository.
+receive.denyCurrentBranch::
+ If set to true or "refuse", receive-pack will deny a ref update
+ to the currently checked out branch of a non-bare repository.
+ Such a push is potentially dangerous because it brings the HEAD
+ out of sync with the index and working tree. If set to "warn",
+ print a warning of such a push to stderr, but allow the push to
+ proceed. If set to false or "ignore", allow such pushes with no
+ message. Defaults to "warn".
+
transfer.unpackLimit::
When `fetch.unpackLimit` or `receive.unpackLimit` are
not set, the value of this variable is used instead.
. path for "dst"; only exists for C or R.
. an LF or a NUL when '-z' option is used, to terminate the record.
+Possible status letters are:
+
+- A: addition of a file
+- C: copy of a file into a new one
+- D: deletion of a file
+- M: modification of the contents or mode of a file
+- R: renaming of a file
+- T: change in the type of the file
+- U: file is unmerged (you must complete the merge before it can
+be committed)
+- X: "unknown" change type (most probably a bug, please report it)
+
+Status letters C and R are always followed by a score (denoting the
+percentage of similarity between the source and target of the move or
+copy), and are the only ones to be so.
+
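A made-up sample of such raw output lines (illustrative values only), showing
a modification, a copy and a rename, the latter two with similarity scores:

------------
:100644 100644 bcd1234... 0123456... M file0
:100644 100644 abcd123... 1234567... C68 file1 file2
:100644 100644 abcd123... 1234567... R86 file1 file3
------------
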
<sha1> is shown as all 0's if a file is new on the filesystem
and it is out of sync with the index.
git bisect log
git bisect run <cmd>...
-This command uses 'git-rev-list --bisect' to help drive the
+This command uses 'git rev-list --bisect' to help drive the
binary search process to find which change introduced a bug, given an
old "good" commit object name and a later "bad" commit object name.
to see the currently remaining suspects in 'gitk'. `visualize` is a bit
too long to type and `view` is provided as a synonym.
-If 'DISPLAY' environment variable is not set, 'git-log' is used
+If 'DISPLAY' environment variable is not set, 'git log' is used
instead. You can even give command line options such as `-p` and
`--stat`.
work around other problem this bisection is not interested in")
applied to the revision being tested.
-To cope with such a situation, after the inner 'git-bisect' finds the
+To cope with such a situation, after the inner 'git bisect' finds the
next revision to test, with the "run" script, you can apply that tweak
before compiling, run the real test, and after the test decides if the
revision (possibly with the needed tweaks) passed the test, rewind the
NAME
----
-git-check-attr - Display gitattributes information.
+git-check-attr - Display gitattributes information
SYNOPSIS
commit relative to the named <commit>. Typically you
would want comparison with the latest commit, so if you
do not give <commit>, it defaults to HEAD.
+ --staged is a synonym of --cached.
'git diff' [--options] <commit> [--] [<path>...]::
since the beginning of the time". If you want to format
everything since project inception to one commit, say "git
format-patch \--root <commit>" to make it clear that it is the
-latter case.
+latter case. If you want to format a single commit, you can do
+this with "git format-patch -1 <commit>".
By default, each output file is numbered sequentially from 1, and uses the
first line of the commit message (massaged for pathname safety) as
--decorate::
Print out the ref names of any commits that are shown.
+--source::
+ Print out the ref name given on the command line by which each
+ commit was reached.
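
For instance (illustrative), combined with a one-line format this annotates
each commit with the ref it was reached from:

------------
$ git log --source --pretty=oneline --all
------------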
+
--full-diff::
Without this flag, "git log -p <path>..." shows commits that
touch the specified paths, and diffs about the same specified
The default is unlimited, unless the config variable
`pack.packSizeLimit` is set.
+--honor-pack-keep::
+ This flag causes an object already in a local pack that
+ has a .keep file to be ignored, even if it appears in the
+ standard input.
+
--incremental::
This flag causes an object already in a pack ignored
even if it appears in the standard input.
--local::
This flag is similar to `--incremental`; instead of
ignoring all packed objects, it only ignores objects
- that are packed and not in the local object store
+ that are packed and/or not in the local object store
(i.e. borrowed from an alternate).
--non-empty::
[verse]
'git remote' [-v | --verbose]
'git remote add' [-t <branch>] [-m <master>] [-f] [--mirror] <name> <url>
+'git remote rename' <old> <new>
'git remote rm' <name>
'git remote show' [-n] <name>
'git remote prune' [-n | --dry-run] <name>
mode, furthermore, `git push` will always behave as if `\--mirror`
was passed.
+'rename'::
+
+Rename the remote named <old> to <new>. All remote tracking branches and
+configuration settings for the remote are updated.
++
+In case <old> and <new> are the same, and <old> is a file under
+`$GIT_DIR/remotes` or `$GIT_DIR/branches`, the remote is converted to
+the configuration file format.
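
For example (remote names are illustrative):

------------
$ git remote rename origin upstream
------------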
+
'rm'::
Remove the remote named <name>. All remote tracking branches and
branch of the `git.git` repository.
Documentation for older releases are available here:
-* link:v1.6.0.2/git.html[documentation for release 1.6.0.2]
+* link:v1.6.0.4/git.html[documentation for release 1.6.0.4]
* release notes for
+ link:RelNotes-1.6.0.4.txt[1.6.0.4],
+ link:RelNotes-1.6.0.3.txt[1.6.0.3],
link:RelNotes-1.6.0.2.txt[1.6.0.2],
link:RelNotes-1.6.0.1.txt[1.6.0.1],
link:RelNotes-1.6.0.txt[1.6.0].
`diff`
^^^^^^
-The attribute `diff` affects if 'git-diff' generates textual
-patch for the path or just says `Binary files differ`. It also
-can affect what line is shown on the hunk header `@@ -k,l +n,m @@`
-line.
+The attribute `diff` affects how 'git' generates diffs for particular
+files. It can tell git whether to generate a textual patch for the path
+or to treat the path as a binary file. It can also affect what line is
+shown on the hunk header `@@ -k,l +n,m @@` line, tell git to use an
+external command to generate the diff, or ask git to convert binary
+files to a text format before generating the diff.
Set::
Unset::
A path to which the `diff` attribute is unset will
- generate `Binary files differ`.
+ generate `Binary files differ` (or a binary patch, if
+ binary patches are enabled).
Unspecified::
String::
- Diff is shown using the specified custom diff driver.
- The driver program is given its input using the same
- calling convention as used for GIT_EXTERNAL_DIFF
- program. This name is also used for custom hunk header
- selection.
+ Diff is shown using the specified diff driver. Each driver may
+ specify one or more options, as described in the following
+ section. The options for the diff driver "foo" are defined
+ by the configuration variables in the "diff.foo" section of the
+ git config file.
-Defining a custom diff driver
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Defining an external diff driver
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The definition of a diff driver is done in `gitconfig`, not
`gitattributes` file, so strictly speaking this manual page is a
wrong place to talk about it. However...
-To define a custom diff driver `jcdiff`, add a section to your
+To define an external diff driver `jcdiff`, add a section to your
`$GIT_DIR/config` file (or `$HOME/.gitconfig` file) like this:
----------------------------------------------------------------
- `tex` suitable for source code for LaTeX documents.
+Performing text diffs of binary files
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Sometimes it is desirable to see the diff of a text-converted
+version of some binary files. For example, a word processor
+document can be converted to an ASCII text representation, and
+the diff of the text shown. Even though this conversion loses
+some information, the resulting diff is useful for human
+viewing (but cannot be applied directly).
+
+The `textconv` config option is used to define a program for
+performing such a conversion. The program should take a single
+argument, the name of a file to convert, and produce the
+resulting text on stdout.
+
+For example, to show the diff of the exif information of a
+file instead of the binary information (assuming you have the
+exif tool installed):
+
+------------------------
+[diff "jpg"]
+ textconv = exif
+------------------------
+
+NOTE: The text conversion is generally a one-way conversion;
+in this example, we lose the actual image contents and focus
+just on the text data. This means that diffs generated by
+textconv are _not_ suitable for applying. For this reason,
+only `git diff` and the `git log` family of commands (i.e.,
+log, whatchanged, show) will perform text conversion. `git
+format-patch` will never generate this output. If you want to
+send somebody a text-converted diff of a binary file (e.g.,
+because it quickly conveys the changes you have made), you
+should generate it separately and send it as a comment _in
+addition to_ the usual binary diff that you might send.
+
+
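A minimal sketch of such a conversion program; the `doc2txt` name and the use
of `strings` are illustrative assumptions, not part of this series:

------------
$ cat >doc2txt <<\EOF
#!/bin/sh
# crude text conversion: print the readable strings found in the file
strings "$1"
EOF
$ chmod +x doc2txt            # and put it somewhere on your $PATH
$ git config diff.doc.textconv doc2txt
$ echo '*.doc diff=doc' >>.gitattributes
------------

With that in place, `git diff` and the `git log` family show a textual diff
for `*.doc` files, while `git format-patch` still emits the usual binary
form, as noted above.
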
Performing a three-way merge
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
help other people who look at them later. Lack of this header
implies that the commit log message is encoded in UTF-8.
-. 'git-log', 'git-show' and friends looks at the `encoding`
- header of a commit object, and tries to re-code the log
- message into UTF-8 unless otherwise specified. You can
+. 'git-log', 'git-show', 'git-blame' and friends look at the
+ `encoding` header of a commit object, and try to re-code the
+ log message into UTF-8 unless otherwise specified. You can
specify the desired output encoding with
`i18n.logoutputencoding` in `.git/config` file, like this:
+
Limit the commits output to ones with log message that
matches the specified pattern (regular expression).
+--all-match::
+ Limit the commits output to ones that match all given --grep,
+ --author and --committer instead of ones that match at least one.
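
An illustrative query (the patterns are made up): show only commits whose
message mentions a leak and whose author matches the given name:

------------
$ git log --all-match --grep="leak" --author="Jeff"
------------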
+
-i::
--regexp-ignore-case::
History Simplification
~~~~~~~~~~~~~~~~~~~~~~
-When optional paths are given, 'git-rev-list' simplifies commits with
-various strategies, according to the options you have selected.
+Sometimes you are only interested in parts of the history, for example the
+commits modifying a particular <path>. 'History Simplification' has two
+parts: one is selecting the commits to show, the other is how the
+simplification is done, as there are various strategies for it.
+
+The following options select the commits to be shown:
+
+<paths>::
+
+ Commits modifying the given <paths> are selected.
+
+--simplify-by-decoration::
+
+	Commits that are referred to by some branch or tag are selected.
+
+Note that extra commits can be shown to give a meaningful history.
+
+The following options affect the way the simplification is performed:
+
+Default mode::
+
+ Simplifies the history to the simplest history explaining the
+ final state of the tree. Simplest because it prunes some side
+ branches if the end result is the same (i.e. merging branches
+	with the same content).
+
+--full-history::
+
+	Same as the default mode, but does not prune some history.
+
+--dense::
+
+ Only the selected commits are shown, plus some to have a
+ meaningful history.
+
+--sparse::
+
+ All commits in the simplified history are shown.
+
+--simplify-merges::
+
+ Additional option to '--full-history' to remove some needless
+ merges from the resulting history, as there are no selected
+	commits contributing to those merges.
+
+A more detailed explanation follows.
Suppose you specified `foo` as the <paths>. We shall call commits
that modify `foo` !TREESAME, and the rest TREESAME. (In a diff
removed completely, because it had one parent and is TREESAME.
--
+The '\--simplify-by-decoration' option allows you to view only the
+big picture of the topology of the history, by omitting commits
+that are not referenced by tags. Commits are marked as !TREESAME
+(in other words, kept after history simplification rules described
+above) if (1) they are referenced by tags, or (2) they change the
+contents of the paths given on the command line. All other
+commits are marked as TREESAME (subject to being simplified away).
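
For example (illustrative), to get such a big-picture view of a repository:

------------
$ git log --graph --simplify-by-decoration --pretty=oneline
------------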
+
ifdef::git-rev-list[]
Bisection Helpers
~~~~~~~~~~~~~~~~~
the included and excluded commits. Thus, if
-----------------------------------------------------------------------
- $ git-rev-list --bisect foo ^bar ^baz
+ $ git rev-list --bisect foo ^bar ^baz
-----------------------------------------------------------------------
outputs 'midpoint', the output of the two commands
-----------------------------------------------------------------------
- $ git-rev-list foo ^midpoint
- $ git-rev-list midpoint ^bar ^baz
+ $ git rev-list foo ^midpoint
+ $ git rev-list midpoint ^bar ^baz
-----------------------------------------------------------------------
would be of roughly the same length. Finding the change which
------------
`<url>` is required; `#<head>` is optional.
-When you do not provide a refspec on the command line,
-git will use the following refspec, where `<head>` defaults to `master`,
-and `<repository>` is the name of this file
-you provided in the command line.
+
+Depending on the operation, git will use one of the following
+refspecs, if you don't provide one on the command line.
+`<branch>` is the name of this file in `$GIT_DIR/branches` and
+`<head>` defaults to `master`.
+
+git fetch uses:
+
+------------
+ refs/heads/<head>:refs/heads/<branch>
+------------
+
+git push uses:
------------
- refs/heads/<head>:<repository>
+ HEAD:refs/heads/<head>
------------
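
As a concrete illustration (the short-hand name and URL are made up),
creating a file `jc` with such a URL:

------------
$ echo "git://example.com/git.git#next" >.git/branches/jc
------------

With that file in place, `git fetch jc` uses the refspec
`refs/heads/next:refs/heads/jc`, and `git push jc` uses `HEAD:refs/heads/next`.
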
http://www.kernel.org/pub/software/scm/git/docs/
- There are also "make quick-install-doc" and "make quick-install-html"
- which install preformatted man pages and html documentation.
+ There are also "make quick-install-doc", "make quick-install-man"
+ and "make quick-install-html" which install preformatted man pages
+ and html documentation.
This does not require asciidoc/xmlto, but it only works from within
a cloned checkout of git.git with these two extra branches, and will
not work for the maintainer for obvious chicken-and-egg reasons.
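
For example (assuming such a checkout, as described above), the preformatted
manual pages can be installed with:

------------
$ make quick-install-man
------------
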
./test-sha1.sh
check: common-cmds.h
- for i in *.c; do sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; done
+ if sparse; \
+ then \
+ for i in *.c; \
+ do \
+ sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; \
+ done; \
+ else \
+		echo >&2 "Did you mean 'make test'?"; \
+ exit 1; \
+ fi
remove-dashes:
./fixup-builtins $(BUILT_INS) $(PROGRAMS) $(SCRIPTS)
install-doc:
$(MAKE) -C Documentation install
+install-man:
+ $(MAKE) -C Documentation install-man
+
install-html:
$(MAKE) -C Documentation install-html
quick-install-doc:
$(MAKE) -C Documentation quick-install
+quick-install-man:
+ $(MAKE) -C Documentation quick-install-man
+
quick-install-html:
$(MAKE) -C Documentation quick-install-html
unsigned int nr = getpid();
for (;;) {
- const char *newpath;
- newpath = mkpath("%s~%u", path, nr);
+ char newpath[PATH_MAX];
+ mksnpath(newpath, sizeof(newpath), "%s~%u", path, nr);
if (!try_create_file(newpath, mode, buf, size)) {
if (!rename(newpath, path))
return;
return porigin;
}
-/*
- * Parsing of patch chunks...
- */
-struct chunk {
- /* line number in postimage; up to but not including this
- * line is the same as preimage
- */
- int same;
-
- /* preimage line number after this chunk */
- int p_next;
-
- /* postimage line number after this chunk */
- int t_next;
-};
-
-struct patch {
- struct chunk *chunks;
- int num;
-};
-
-struct blame_diff_state {
- struct patch *ret;
- unsigned hunk_post_context;
- unsigned hunk_in_pre_context : 1;
-};
-
-static void process_u_diff(void *state_, char *line, unsigned long len)
-{
- struct blame_diff_state *state = state_;
- struct chunk *chunk;
- int off1, off2, len1, len2, num;
-
- num = state->ret->num;
- if (len < 4 || line[0] != '@' || line[1] != '@') {
- if (state->hunk_in_pre_context && line[0] == ' ')
- state->ret->chunks[num - 1].same++;
- else {
- state->hunk_in_pre_context = 0;
- if (line[0] == ' ')
- state->hunk_post_context++;
- else
- state->hunk_post_context = 0;
- }
- return;
- }
-
- if (num && state->hunk_post_context) {
- chunk = &state->ret->chunks[num - 1];
- chunk->p_next -= state->hunk_post_context;
- chunk->t_next -= state->hunk_post_context;
- }
- state->ret->num = ++num;
- state->ret->chunks = xrealloc(state->ret->chunks,
- sizeof(struct chunk) * num);
- chunk = &state->ret->chunks[num - 1];
- if (parse_hunk_header(line, len, &off1, &len1, &off2, &len2)) {
- state->ret->num--;
- return;
- }
-
- /* Line numbers in patch output are one based. */
- off1--;
- off2--;
-
- chunk->same = len2 ? off2 : (off2 + 1);
-
- chunk->p_next = off1 + (len1 ? len1 : 1);
- chunk->t_next = chunk->same + len2;
- state->hunk_in_pre_context = 1;
- state->hunk_post_context = 0;
-}
-
-static struct patch *compare_buffer(mmfile_t *file_p, mmfile_t *file_o,
- int context)
-{
- struct blame_diff_state state;
- xpparam_t xpp;
- xdemitconf_t xecfg;
- xdemitcb_t ecb;
-
- xpp.flags = xdl_opts;
- memset(&xecfg, 0, sizeof(xecfg));
- xecfg.ctxlen = context;
- memset(&state, 0, sizeof(state));
- state.ret = xmalloc(sizeof(struct patch));
- state.ret->chunks = NULL;
- state.ret->num = 0;
-
- xdi_diff_outf(file_p, file_o, process_u_diff, &state, &xpp, &xecfg, &ecb);
-
- if (state.ret->num) {
- struct chunk *chunk;
- chunk = &state.ret->chunks[state.ret->num - 1];
- chunk->p_next -= state.hunk_post_context;
- chunk->t_next -= state.hunk_post_context;
- }
- return state.ret;
-}
-
-/*
- * Run diff between two origins and grab the patch output, so that
- * we can pass blame for lines origin is currently suspected for
- * to its parent.
- */
-static struct patch *get_patch(struct origin *parent, struct origin *origin)
-{
- mmfile_t file_p, file_o;
- struct patch *patch;
-
- fill_origin_blob(parent, &file_p);
- fill_origin_blob(origin, &file_o);
- if (!file_p.ptr || !file_o.ptr)
- return NULL;
- patch = compare_buffer(&file_p, &file_o, 0);
- num_get_patch++;
- return patch;
-}
-
-static void free_patch(struct patch *p)
-{
- free(p->chunks);
- free(p);
-}
-
/*
* Link in a new blame entry to the scoreboard. Entries that cover the
* same line range have been removed from the scoreboard previously.
}
}
+struct blame_chunk_cb_data {
+ struct scoreboard *sb;
+ struct origin *target;
+ struct origin *parent;
+ long plno;
+ long tlno;
+};
+
+static void blame_chunk_cb(void *data, long same, long p_next, long t_next)
+{
+ struct blame_chunk_cb_data *d = data;
+ blame_chunk(d->sb, d->tlno, d->plno, same, d->target, d->parent);
+ d->plno = p_next;
+ d->tlno = t_next;
+}
+
/*
* We are looking at the origin 'target' and aiming to pass blame
* for the lines it is suspected to its parent. Run diff to find
struct origin *target,
struct origin *parent)
{
- int i, last_in_target, plno, tlno;
- struct patch *patch;
+ int last_in_target;
+ mmfile_t file_p, file_o;
+ struct blame_chunk_cb_data d = { sb, target, parent, 0, 0 };
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
last_in_target = find_last_in_target(sb, target);
if (last_in_target < 0)
return 1; /* nothing remains for this target */
- patch = get_patch(parent, target);
- plno = tlno = 0;
- for (i = 0; i < patch->num; i++) {
- struct chunk *chunk = &patch->chunks[i];
+ fill_origin_blob(parent, &file_p);
+ fill_origin_blob(target, &file_o);
+ num_get_patch++;
- blame_chunk(sb, tlno, plno, chunk->same, target, parent);
- plno = chunk->p_next;
- tlno = chunk->t_next;
- }
+ memset(&xpp, 0, sizeof(xpp));
+ xpp.flags = xdl_opts;
+ memset(&xecfg, 0, sizeof(xecfg));
+ xecfg.ctxlen = 0;
+ xdi_diff_hunks(&file_p, &file_o, blame_chunk_cb, &d, &xpp, &xecfg);
/* The rest (i.e. anything after tlno) are the same as the parent */
- blame_chunk(sb, tlno, plno, last_in_target, target, parent);
+ blame_chunk(sb, d.tlno, d.plno, last_in_target, target, parent);
- free_patch(patch);
return 0;
}
}
}
+struct handle_split_cb_data {
+ struct scoreboard *sb;
+ struct blame_entry *ent;
+ struct origin *parent;
+ struct blame_entry *split;
+ long plno;
+ long tlno;
+};
+
+static void handle_split_cb(void *data, long same, long p_next, long t_next)
+{
+ struct handle_split_cb_data *d = data;
+ handle_split(d->sb, d->ent, d->tlno, d->plno, same, d->parent, d->split);
+ d->plno = p_next;
+ d->tlno = t_next;
+}
+
/*
* Find the lines from parent that are the same as ent so that
* we can pass blames to it. file_p has the blob contents for
const char *cp;
int cnt;
mmfile_t file_o;
- struct patch *patch;
- int i, plno, tlno;
+ struct handle_split_cb_data d = { sb, ent, parent, split, 0, 0 };
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
/*
* Prepare mmfile that contains only the lines in ent.
}
file_o.size = cp - file_o.ptr;
- patch = compare_buffer(file_p, &file_o, 1);
-
/*
* file_o is a part of final image we are annotating.
* file_p partially may match that image.
*/
+ memset(&xpp, 0, sizeof(xpp));
+ xpp.flags = xdl_opts;
+ memset(&xecfg, 0, sizeof(xecfg));
+ xecfg.ctxlen = 1;
memset(split, 0, sizeof(struct blame_entry [3]));
- plno = tlno = 0;
- for (i = 0; i < patch->num; i++) {
- struct chunk *chunk = &patch->chunks[i];
-
- handle_split(sb, ent, tlno, plno, chunk->same, parent, split);
- plno = chunk->p_next;
- tlno = chunk->t_next;
- }
+ xdi_diff_hunks(file_p, &file_o, handle_split_cb, &d, &xpp, &xecfg);
/* remainder, if any, all match the preimage */
- handle_split(sb, ent, tlno, plno, ent->num_lines, parent, split);
- free_patch(patch);
+ handle_split(sb, ent, d.tlno, d.plno, ent->num_lines, parent, split);
}
/*
int detailed)
{
int len;
- char *tmp, *endp;
+ char *tmp, *endp, *reencoded, *message;
static char author_buf[1024];
static char committer_buf[1024];
static char summary_buf[1024];
die("Cannot read commit %s",
sha1_to_hex(commit->object.sha1));
}
+ reencoded = reencode_commit_message(commit, NULL);
+ message = reencoded ? reencoded : commit->buffer;
ret->author = author_buf;
- get_ac_line(commit->buffer, "\nauthor ",
+ get_ac_line(message, "\nauthor ",
sizeof(author_buf), author_buf, &ret->author_mail,
&ret->author_time, &ret->author_tz);
- if (!detailed)
+ if (!detailed) {
+ free(reencoded);
return;
+ }
ret->committer = committer_buf;
- get_ac_line(commit->buffer, "\ncommitter ",
+ get_ac_line(message, "\ncommitter ",
sizeof(committer_buf), committer_buf, &ret->committer_mail,
&ret->committer_time, &ret->committer_tz);
ret->summary = summary_buf;
- tmp = strstr(commit->buffer, "\n\n");
+ tmp = strstr(message, "\n\n");
if (!tmp) {
error_out:
sprintf(summary_buf, "(%s)", sha1_to_hex(commit->object.sha1));
+ free(reencoded);
return;
}
tmp += 2;
goto error_out;
memcpy(summary_buf, tmp, len);
summary_buf[len] = 0;
+ free(reencoded);
}
/*
continue;
}
- if (delete_ref(name, sha1)) {
+ if (delete_ref(name, sha1, 0)) {
error("Error deleting %sbranch '%s'", remote,
argv[i]);
ret = 1;
else if (stdin_paths && doubledash < argc)
errstr = "Can't specify files with --stdin";
if (errstr) {
- error (errstr);
+ error("%s", errstr);
usage_with_options(check_attr_usage, check_attr_options);
}
#include "cache.h"
#include "quote.h"
#include "cache-tree.h"
+#include "parse-options.h"
#define CHECKOUT_ALL 4
static int line_termination = '\n';
exit(128);
}
-static const char checkout_cache_usage[] =
-"git checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]|all] [--prefix=<string>] [--temp] [--] <file>...";
+static const char * const builtin_checkout_index_usage[] = {
+ "git checkout-index [options] [--] <file>...",
+ NULL
+};
static struct lock_file lock_file;
+static int option_parse_u(const struct option *opt,
+ const char *arg, int unset)
+{
+ int *newfd = opt->value;
+
+ state.refresh_cache = 1;
+ if (*newfd < 0)
+ *newfd = hold_locked_index(&lock_file, 1);
+ return 0;
+}
+
+static int option_parse_z(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (unset)
+ line_termination = '\n';
+ else
+ line_termination = 0;
+ return 0;
+}
+
+static int option_parse_prefix(const struct option *opt,
+ const char *arg, int unset)
+{
+ state.base_dir = arg;
+ state.base_dir_len = strlen(arg);
+ return 0;
+}
+
+static int option_parse_stage(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (!strcmp(arg, "all")) {
+ to_tempfile = 1;
+ checkout_stage = CHECKOUT_ALL;
+ } else {
+ int ch = arg[0];
+ if ('1' <= ch && ch <= '3')
+ checkout_stage = arg[0] - '0';
+ else
+ die("stage should be between 1 and 3 or all");
+ }
+ return 0;
+}
+
int cmd_checkout_index(int argc, const char **argv, const char *prefix)
{
int i;
int all = 0;
int read_from_stdin = 0;
int prefix_length;
+ int force = 0, quiet = 0, not_new = 0;
+ struct option builtin_checkout_index_options[] = {
+ OPT_BOOLEAN('a', "all", &all,
+ "checks out all files in the index"),
+ OPT_BOOLEAN('f', "force", &force,
+ "forces overwrite of existing files"),
+ OPT__QUIET(&quiet),
+ OPT_BOOLEAN('n', "no-create", ¬_new,
+ "don't checkout new files"),
+ { OPTION_CALLBACK, 'u', "index", &newfd, NULL,
+ "update stat information in the index file",
+ PARSE_OPT_NOARG, option_parse_u },
+ { OPTION_CALLBACK, 'z', NULL, NULL, NULL,
+ "paths are separated with NUL character",
+ PARSE_OPT_NOARG, option_parse_z },
+ OPT_BOOLEAN(0, "stdin", &read_from_stdin,
+ "read list of paths from the standard input"),
+ OPT_BOOLEAN(0, "temp", &to_tempfile,
+ "write the content to temporary files"),
+ OPT_CALLBACK(0, "prefix", NULL, "string",
+ "when creating files, prepend <string>",
+ option_parse_prefix),
+ OPT_CALLBACK(0, "stage", NULL, NULL,
+ "copy out the files from named stage",
+ option_parse_stage),
+ OPT_END()
+ };
git_config(git_default_config, NULL);
state.base_dir = "";
die("invalid cache");
}
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
-
- if (!strcmp(arg, "--")) {
- i++;
- break;
- }
- if (!strcmp(arg, "-a") || !strcmp(arg, "--all")) {
- all = 1;
- continue;
- }
- if (!strcmp(arg, "-f") || !strcmp(arg, "--force")) {
- state.force = 1;
- continue;
- }
- if (!strcmp(arg, "-q") || !strcmp(arg, "--quiet")) {
- state.quiet = 1;
- continue;
- }
- if (!strcmp(arg, "-n") || !strcmp(arg, "--no-create")) {
- state.not_new = 1;
- continue;
- }
- if (!strcmp(arg, "-u") || !strcmp(arg, "--index")) {
- state.refresh_cache = 1;
- if (newfd < 0)
- newfd = hold_locked_index(&lock_file, 1);
- continue;
- }
- if (!strcmp(arg, "-z")) {
- line_termination = 0;
- continue;
- }
- if (!strcmp(arg, "--stdin")) {
- if (i != argc - 1)
- die("--stdin must be at the end");
- read_from_stdin = 1;
- i++; /* do not consider arg as a file name */
- break;
- }
- if (!strcmp(arg, "--temp")) {
- to_tempfile = 1;
- continue;
- }
- if (!prefixcmp(arg, "--prefix=")) {
- state.base_dir = arg+9;
- state.base_dir_len = strlen(state.base_dir);
- continue;
- }
- if (!prefixcmp(arg, "--stage=")) {
- if (!strcmp(arg + 8, "all")) {
- to_tempfile = 1;
- checkout_stage = CHECKOUT_ALL;
- } else {
- int ch = arg[8];
- if ('1' <= ch && ch <= '3')
- checkout_stage = arg[8] - '0';
- else
- die("stage should be between 1 and 3 or all");
- }
- continue;
- }
- if (arg[0] == '-')
- usage(checkout_cache_usage);
- break;
- }
+ argc = parse_options(argc, argv, builtin_checkout_index_options,
+ builtin_checkout_index_usage, 0);
+ state.force = force;
+ state.quiet = quiet;
+ state.not_new = not_new;
if (state.base_dir_len || to_tempfile) {
/* when --prefix is specified we do not
}
/* Check out named files first */
- for ( ; i < argc; i++) {
+ for (i = 0; i < argc; i++) {
const char *arg = argv[i];
const char *p;
memset(&proc, 0, sizeof(proc));
argv[0] = name;
- argv[1] = xstrdup(sha1_to_hex(old->object.sha1));
+ argv[1] = xstrdup(sha1_to_hex(old ? old->object.sha1 : null_sha1));
argv[2] = xstrdup(sha1_to_hex(new->object.sha1));
argv[3] = changed ? "1" : "0";
argv[4] = NULL;
}
/* 2-way merge to the new branch */
- topts.initial_checkout = (!active_nr &&
- (old->commit == new->commit));
+ topts.initial_checkout = is_cache_unborn();
topts.update = 1;
topts.merge = 1;
topts.gently = opts->merge;
}
old_desc = old->name;
- if (!old_desc)
+ if (!old_desc && old->commit)
old_desc = sha1_to_hex(old->commit->object.sha1);
strbuf_addf(&msg, "checkout: moving from %s to %s",
- old_desc, new->name);
+ old_desc ? old_desc : "(invalid)", new->name);
if (new->path) {
create_symref("HEAD", new->path, msg.buf);
* a new commit, we want to mention the old commit once more
* to remind the user that it might be lost.
*/
- if (!opts->quiet && !old.path && new->commit != old.commit)
+ if (!opts->quiet && !old.path && old.commit && new->commit != old.commit)
describe_detached_head("Previous HEAD position was", old.commit);
if (!old.commit) {
local = config_exclusive_filename;
if (!local) {
const char *home = getenv("HOME");
- local = repo_config = xstrdup(git_path("config"));
+ local = repo_config = git_pathdup("config");
if (git_config_global() && home)
global = xstrdup(mkpath("%s/.gitconfig", home));
if (git_config_system())
int cached = 0;
while (1 < argc) {
const char *arg = argv[1];
- if (!strcmp(arg, "--cached"))
+ if (!strcmp(arg, "--cached") || !strcmp(arg, "--staged"))
cached = 1;
else
usage(builtin_diff_usage);
}
DIFF_OPT_SET(&rev.diffopt, ALLOW_EXTERNAL);
DIFF_OPT_SET(&rev.diffopt, RECURSIVE);
+ DIFF_OPT_SET(&rev.diffopt, ALLOW_TEXTCONV);
/*
* If the user asked for our exit code then don't start a
const char *arg = argv[i];
if (!strcmp(arg, "--"))
break;
- else if (!strcmp(arg, "--cached")) {
+ else if (!strcmp(arg, "--cached") ||
+ !strcmp(arg, "--staged")) {
add_head_to_pending(&rev);
if (!rev.pending.nr)
die("No HEAD commit to compare with (yet)");
for (j = 0; j < i; j++) {
const char *rule = ref_rev_parse_rules[j];
unsigned char short_objectname[20];
+ char refname[PATH_MAX];
/*
* the short name is ambiguous, if it resolves
* (with this previous rule) to a valid ref
* read_ref() returns 0 on success
*/
- if (!read_ref(mkpath(rule, short_name_len, short_name),
- short_objectname))
+ mksnpath(refname, sizeof(refname),
+ rule, short_name_len, short_name);
+ if (!read_ref(refname, short_objectname))
break;
}
prepare_packed_git();
for (cnt = 0, p = packed_git; p; p = p->next) {
- char path[PATH_MAX];
- size_t len;
- int keep;
-
if (!p->pack_local)
continue;
- len = strlen(p->pack_name);
- if (PATH_MAX <= len + 1)
- continue; /* oops, give up */
- memcpy(path, p->pack_name, len-5);
- memcpy(path + len - 5, ".keep", 6);
- keep = access(p->pack_name, F_OK) && (errno == ENOENT);
- if (keep)
+ if (p->pack_keep)
continue;
/*
* Perhaps check the size of the pack and count only
struct rev_info *rev)
{
int i;
- int decorate = 0;
rev->abbrev = DEFAULT_ABBREV;
rev->commit_format = CMIT_FMT_DEFAULT;
const char *arg = argv[i];
if (!strcmp(arg, "--decorate")) {
load_ref_decorations();
- decorate = 1;
+ rev->show_decorations = 1;
+ } else if (!strcmp(arg, "--source")) {
+ rev->show_source = 1;
} else
die("unrecognized argument: %s", arg);
}
+ DIFF_OPT_SET(&rev->diffopt, ALLOW_TEXTCONV);
}
/*
#include "remote.h"
static const char ls_remote_usage[] =
-"git ls-remote [--upload-pack=<git-upload-pack>] [<host>:]<directory>";
+"git ls-remote [--heads] [--tags] [-u <exec> | --upload-pack <exec>] <repository> <refs>...";
/*
* Is there one among the list of patterns that match the tail part
static int keep_unreachable, unpack_unreachable, include_tag;
static int local;
static int incremental;
+static int ignore_packed_keep;
static int allow_ofs_delta;
static const char *base_name;
static int progress = 1;
type = entry->type;
/* write limit if limited packsize and not first object */
- limit = pack_size_limit && nr_written ?
- pack_size_limit - write_offset : 0;
+ if (!pack_size_limit || !nr_written)
+ limit = 0;
+ else if (pack_size_limit <= write_offset)
+ /*
+ * the earlier object did not fit the limit; avoid
+		 * mistaking this for unlimited (i.e. limit = 0).
+ */
+ limit = 1;
+ else
+ limit = pack_size_limit - write_offset;
if (!entry->delta)
usable_delta = 0; /* no delta */
return 0;
}
+ if (!exclude && local && has_loose_object_nonlocal(sha1))
+ return 0;
+
for (p = packed_git; p; p = p->next) {
off_t offset = find_pack_entry_one(sha1, p);
if (offset) {
return 0;
if (local && !p->pack_local)
return 0;
+ if (ignore_packed_keep && p->pack_local && p->pack_keep)
+ return 0;
}
}
incremental = 1;
continue;
}
+ if (!strcmp("--honor-pack-keep", arg)) {
+ ignore_packed_keep = 1;
+ continue;
+ }
if (!prefixcmp(arg, "--compression=")) {
char *end;
int level = strtoul(arg+14, &end, 0);
break;
case 2:
opts.fn = twoway_merge;
- opts.initial_checkout = !active_nr;
+ opts.initial_checkout = is_cache_unborn();
break;
case 3:
default:
static const char receive_pack_usage[] = "git-receive-pack <git-dir>";
+enum deny_action {
+ DENY_IGNORE,
+ DENY_WARN,
+ DENY_REFUSE,
+};
+
static int deny_deletes = 0;
static int deny_non_fast_forwards = 0;
+static enum deny_action deny_current_branch = DENY_WARN;
static int receive_fsck_objects;
static int receive_unpack_limit = -1;
static int transfer_unpack_limit = -1;
static char capabilities[] = " report-status delete-refs ";
static int capabilities_sent;
+static enum deny_action parse_deny_action(const char *var, const char *value)
+{
+ if (value) {
+ if (!strcasecmp(value, "ignore"))
+ return DENY_IGNORE;
+ if (!strcasecmp(value, "warn"))
+ return DENY_WARN;
+ if (!strcasecmp(value, "refuse"))
+ return DENY_REFUSE;
+ }
+ if (git_config_bool(var, value))
+ return DENY_REFUSE;
+ return DENY_IGNORE;
+}
+
static int receive_pack_config(const char *var, const char *value, void *cb)
{
if (strcmp(var, "receive.denydeletes") == 0) {
return 0;
}
+ if (!strcmp(var, "receive.denycurrentbranch")) {
+ deny_current_branch = parse_deny_action(var, value);
+ return 0;
+ }
+
return git_default_config(var, value, cb);
}
return hook_status(run_command(&proc), update_hook);
}
+static int is_ref_checked_out(const char *ref)
+{
+ unsigned char sha1[20];
+ const char *head;
+
+ if (is_bare_repository())
+ return 0;
+
+ head = resolve_ref("HEAD", sha1, 0, NULL);
+ if (!head)
+ return 0;
+ return !strcmp(head, ref);
+}
+
static const char *update(struct command *cmd)
{
const char *name = cmd->ref_name;
return "funny refname";
}
+ switch (deny_current_branch) {
+ case DENY_IGNORE:
+ break;
+ case DENY_WARN:
+ if (!is_ref_checked_out(name))
+ break;
+ warning("updating the currently checked out branch; this may"
+ " cause confusion,\n"
+ "as the index and working tree do not reflect changes"
+ " that are now in HEAD.");
+ break;
+ case DENY_REFUSE:
+ if (!is_ref_checked_out(name))
+ break;
+ error("refusing to update checked out branch: %s", name);
+ return "branch is currently checked out";
+ }
+
if (!is_null_sha1(new_sha1) && !has_sha1_file(new_sha1)) {
error("unpack should have generated %s, "
"but I can't find it!", sha1_to_hex(new_sha1));
warning ("Allowing deletion of corrupt ref.");
old_sha1 = NULL;
}
- if (delete_ref(name, old_sha1)) {
+ if (delete_ref(name, old_sha1, 0)) {
error("failed to delete %s", name);
return "failed to delete";
}
lock = lock_any_ref_for_update(ref, sha1, 0);
if (!lock)
return error("cannot lock ref '%s'", ref);
- log_file = xstrdup(git_path("logs/%s", ref));
+ log_file = git_pathdup("logs/%s", ref);
if (!file_exists(log_file))
goto finish;
if (!cmd->dry_run) {
- newlog_path = xstrdup(git_path("logs/%s.lock", ref));
+ newlog_path = git_pathdup("logs/%s.lock", ref);
cb.newlog = fopen(newlog_path, "w");
}
static const char * const builtin_remote_usage[] = {
"git remote",
"git remote add <name> <url>",
+ "git remote rename <old> <new>",
"git remote rm <name>",
"git remote show <name>",
"git remote prune <name>",
/* make sure that symrefs are deleted */
if (flags & REF_ISSYMREF)
- return unlink(git_path(refname));
+ return unlink(git_path("%s", refname));
item = string_list_append(refname, branches->branches);
item->util = xmalloc(20);
return 0;
}
+struct rename_info {
+ const char *old;
+ const char *new;
+ struct string_list *remote_branches;
+};
+
+static int read_remote_branches(const char *refname,
+ const unsigned char *sha1, int flags, void *cb_data)
+{
+ struct rename_info *rename = cb_data;
+ struct strbuf buf = STRBUF_INIT;
+ struct string_list_item *item;
+ int flag;
+ unsigned char orig_sha1[20];
+ const char *symref;
+
+ strbuf_addf(&buf, "refs/remotes/%s", rename->old);
+ if(!prefixcmp(refname, buf.buf)) {
+ item = string_list_append(xstrdup(refname), rename->remote_branches);
+ symref = resolve_ref(refname, orig_sha1, 1, &flag);
+ if (flag & REF_ISSYMREF)
+ item->util = xstrdup(symref);
+ else
+ item->util = NULL;
+ }
+
+ return 0;
+}
+
+static int migrate_file(struct remote *remote)
+{
+ struct strbuf buf = STRBUF_INIT;
+ int i;
+ char *path = NULL;
+
+ strbuf_addf(&buf, "remote.%s.url", remote->name);
+ for (i = 0; i < remote->url_nr; i++)
+ if (git_config_set_multivar(buf.buf, remote->url[i], "^$", 0))
+ return error("Could not append '%s' to '%s'",
+ remote->url[i], buf.buf);
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "remote.%s.push", remote->name);
+ for (i = 0; i < remote->push_refspec_nr; i++)
+ if (git_config_set_multivar(buf.buf, remote->push_refspec[i], "^$", 0))
+ return error("Could not append '%s' to '%s'",
+ remote->push_refspec[i], buf.buf);
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "remote.%s.fetch", remote->name);
+ for (i = 0; i < remote->fetch_refspec_nr; i++)
+ if (git_config_set_multivar(buf.buf, remote->fetch_refspec[i], "^$", 0))
+ return error("Could not append '%s' to '%s'",
+ remote->fetch_refspec[i], buf.buf);
+ if (remote->origin == REMOTE_REMOTES)
+ path = git_path("remotes/%s", remote->name);
+ else if (remote->origin == REMOTE_BRANCHES)
+ path = git_path("branches/%s", remote->name);
+ if (path && unlink(path))
+ warning("failed to remove '%s'", path);
+ return 0;
+}
+
+static int mv(int argc, const char **argv)
+{
+ struct option options[] = {
+ OPT_END()
+ };
+ struct remote *oldremote, *newremote;
+ struct strbuf buf = STRBUF_INIT, buf2 = STRBUF_INIT, buf3 = STRBUF_INIT;
+ struct string_list remote_branches = { NULL, 0, 0, 0 };
+ struct rename_info rename;
+ int i;
+
+ if (argc != 3)
+ usage_with_options(builtin_remote_usage, options);
+
+ rename.old = argv[1];
+ rename.new = argv[2];
+ rename.remote_branches = &remote_branches;
+
+ oldremote = remote_get(rename.old);
+ if (!oldremote)
+ die("No such remote: %s", rename.old);
+
+ if (!strcmp(rename.old, rename.new) && oldremote->origin != REMOTE_CONFIG)
+ return migrate_file(oldremote);
+
+ newremote = remote_get(rename.new);
+ if (newremote && (newremote->url_nr > 1 || newremote->fetch_refspec_nr))
+ die("remote %s already exists.", rename.new);
+
+ strbuf_addf(&buf, "refs/heads/test:refs/remotes/%s/test", rename.new);
+ if (!valid_fetch_refspec(buf.buf))
+ die("'%s' is not a valid remote name", rename.new);
+
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "remote.%s", rename.old);
+ strbuf_addf(&buf2, "remote.%s", rename.new);
+ if (git_config_rename_section(buf.buf, buf2.buf) < 1)
+ return error("Could not rename config section '%s' to '%s'",
+ buf.buf, buf2.buf);
+
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "remote.%s.fetch", rename.new);
+ if (git_config_set_multivar(buf.buf, NULL, NULL, 1))
+ return error("Could not remove config section '%s'", buf.buf);
+ for (i = 0; i < oldremote->fetch_refspec_nr; i++) {
+ char *ptr;
+
+ strbuf_reset(&buf2);
+ strbuf_addstr(&buf2, oldremote->fetch_refspec[i]);
+ ptr = strstr(buf2.buf, rename.old);
+ if (ptr)
+ strbuf_splice(&buf2, ptr-buf2.buf, strlen(rename.old),
+ rename.new, strlen(rename.new));
+ if (git_config_set_multivar(buf.buf, buf2.buf, "^$", 0))
+ return error("Could not append '%s'", buf.buf);
+ }
+
+ read_branches();
+ for (i = 0; i < branch_list.nr; i++) {
+ struct string_list_item *item = branch_list.items + i;
+ struct branch_info *info = item->util;
+ if (info->remote && !strcmp(info->remote, rename.old)) {
+ strbuf_reset(&buf);
+ strbuf_addf(&buf, "branch.%s.remote", item->string);
+ if (git_config_set(buf.buf, rename.new)) {
+ return error("Could not set '%s'", buf.buf);
+ }
+ }
+ }
+
+ /*
+ * First remove symrefs, then rename the rest, finally create
+ * the new symrefs.
+ */
+ for_each_ref(read_remote_branches, &rename);
+ for (i = 0; i < remote_branches.nr; i++) {
+ struct string_list_item *item = remote_branches.items + i;
+ int flag = 0;
+ unsigned char sha1[20];
+ const char *symref;
+
+ symref = resolve_ref(item->string, sha1, 1, &flag);
+ if (!(flag & REF_ISSYMREF))
+ continue;
+ if (delete_ref(item->string, NULL, REF_NODEREF))
+ die("deleting '%s' failed", item->string);
+ }
+ for (i = 0; i < remote_branches.nr; i++) {
+ struct string_list_item *item = remote_branches.items + i;
+
+ if (item->util)
+ continue;
+ strbuf_reset(&buf);
+ strbuf_addstr(&buf, item->string);
+ strbuf_splice(&buf, strlen("refs/remotes/"), strlen(rename.old),
+ rename.new, strlen(rename.new));
+ strbuf_reset(&buf2);
+ strbuf_addf(&buf2, "remote: renamed %s to %s",
+ item->string, buf.buf);
+ if (rename_ref(item->string, buf.buf, buf2.buf))
+ die("renaming '%s' failed", item->string);
+ }
+ for (i = 0; i < remote_branches.nr; i++) {
+ struct string_list_item *item = remote_branches.items + i;
+
+ if (!item->util)
+ continue;
+ strbuf_reset(&buf);
+ strbuf_addstr(&buf, item->string);
+ strbuf_splice(&buf, strlen("refs/remotes/"), strlen(rename.old),
+ rename.new, strlen(rename.new));
+ strbuf_reset(&buf2);
+ strbuf_addstr(&buf2, item->util);
+ strbuf_splice(&buf2, strlen("refs/remotes/"), strlen(rename.old),
+ rename.new, strlen(rename.new));
+ strbuf_reset(&buf3);
+ strbuf_addf(&buf3, "remote: renamed %s to %s",
+ item->string, buf.buf);
+ if (create_symref(buf.buf, buf2.buf, buf3.buf))
+ die("creating '%s' failed", buf.buf);
+ }
+ return 0;
+}
+
static int remove_branches(struct string_list *branches)
{
int i, result = 0;
const char *refname = item->string;
unsigned char *sha1 = item->util;
- if (delete_ref(refname, sha1))
+ if (delete_ref(refname, sha1, 0))
result |= error("Could not remove branch %s", refname);
}
return result;
const char *refname = states.stale.items[i].util;
if (!dry_run)
- result |= delete_ref(refname, NULL);
+ result |= delete_ref(refname, NULL, 0);
printf(" * [%s] %s\n", dry_run ? "would prune" : "pruned",
abbrev_ref(refname, "refs/remotes/"));
result = show_all();
else if (!strcmp(argv[0], "add"))
result = add(argc, argv);
+ else if (!strcmp(argv[0], "rename"))
+ result = mv(argc, argv);
else if (!strcmp(argv[0], "rm"))
result = rm(argc, argv);
else if (!strcmp(argv[0], "show"))
printf("--- a/%s\n+++ b/%s\n", label1, label2);
fflush(stdout);
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 3;
update_ref(msg, "ORIG_HEAD", orig, old_orig, 0, MSG_ON_ERR);
}
else if (old_orig)
- delete_ref("ORIG_HEAD", old_orig);
+ delete_ref("ORIG_HEAD", old_orig, 0);
prepend_reflog_action("updating HEAD", msg, sizeof(msg));
update_ref_status = update_ref(msg, "HEAD", sha1, orig, 0, MSG_ON_ERR);
children = children->next;
}
}
- show_decorations(commit);
+ show_decorations(&revs, commit);
if (revs.commit_format == CMIT_FMT_ONELINE)
putchar(' ');
else
int i, index_fd, clean;
char *oneline, *reencoded_message = NULL;
const char *message, *encoding;
- char *defmsg = xstrdup(git_path("MERGE_MSG"));
+ char *defmsg = git_pathdup("MERGE_MSG");
struct merge_options o;
struct tree *result, *next_tree, *base_tree, *head_tree;
static struct lock_file index_lock;
{
struct refspec rs;
- if (ref->status != REF_STATUS_OK)
+ if (ref->status != REF_STATUS_OK && ref->status != REF_STATUS_UPTODATE)
return;
rs.src = ref->name;
if (args.verbose)
fprintf(stderr, "updating local tracking ref '%s'\n", rs.dst);
if (ref->deletion) {
- delete_ref(rs.dst, NULL);
+ delete_ref(rs.dst, NULL, 0);
} else
update_ref("update by push", rs.dst,
ref->new_sha1, NULL, 0, 0);
*/
new_refs = 0;
for (ref = remote_refs; ref; ref = ref->next) {
- const unsigned char *new_sha1;
-
- if (!ref->peer_ref) {
- if (!args.send_mirror)
- continue;
- new_sha1 = null_sha1;
- }
- else
- new_sha1 = ref->peer_ref->new_sha1;
+ if (ref->peer_ref)
+ hashcpy(ref->new_sha1, ref->peer_ref->new_sha1);
+ else if (!args.send_mirror)
+ continue;
- ref->deletion = is_null_sha1(new_sha1);
+ ref->deletion = is_null_sha1(ref->new_sha1);
if (ref->deletion && !allow_deleting_refs) {
ref->status = REF_STATUS_REJECT_NODELETE;
continue;
}
if (!ref->deletion &&
- !hashcmp(ref->old_sha1, new_sha1)) {
+ !hashcmp(ref->old_sha1, ref->new_sha1)) {
ref->status = REF_STATUS_UPTODATE;
continue;
}
!ref->deletion &&
!is_null_sha1(ref->old_sha1) &&
(!has_sha1_file(ref->old_sha1)
- || !ref_newer(new_sha1, ref->old_sha1));
+ || !ref_newer(ref->new_sha1, ref->old_sha1));
if (ref->nonfastforward && !ref->force && !args.force_update) {
ref->status = REF_STATUS_REJECT_NONFASTFORWARD;
continue;
}
- hashcpy(ref->new_sha1, new_sha1);
if (!ref->deletion)
new_refs++;
static int delete_tag(const char *name, const char *ref,
const unsigned char *sha1)
{
- if (delete_ref(ref, sha1))
+ if (delete_ref(ref, sha1, 0))
return 1;
printf("Deleted tag '%s'\n", name);
return 0;
int fd;
/* write the template message before editing: */
- path = xstrdup(git_path("TAG_EDITMSG"));
+ path = git_pathdup("TAG_EDITMSG");
fd = open(path, O_CREAT | O_TRUNC | O_WRONLY, 0600);
if (fd < 0)
die("could not create file '%s': %s",
const char *object_ref, *tag;
struct ref_lock *lock;
- int annotate = 0, sign = 0, force = 0, lines = 0,
+ int annotate = 0, sign = 0, force = 0, lines = -1,
list = 0, delete = 0, verify = 0;
const char *msgfile = NULL, *keyid = NULL;
struct msg_arg msg = { 0, STRBUF_INIT };
}
if (sign)
annotate = 1;
+ if (argc == 0 && !(delete || verify))
+ list = 1;
+ if ((annotate || msg.given || msgfile || force) &&
+ (list || delete || verify))
+ usage_with_options(git_tag_usage, options);
+
+ if (list + delete + verify > 1)
+ usage_with_options(git_tag_usage, options);
if (list)
- return list_tags(argv[0], lines);
+ return list_tags(argv[0], lines == -1 ? 0 : lines);
+ if (lines != -1)
+ die("-n option is only allowed with -l.");
if (delete)
return for_each_tag_name(argv, delete_tag);
if (verify)
}
}
- if (argc == 0) {
- if (annotate)
- usage_with_options(git_tag_usage, options);
- return list_tags(NULL, lines);
- }
tag = argv[0];
object_ref = argc == 2 ? argv[1] : "HEAD";
{
const char *refname, *oldval, *msg=NULL;
unsigned char sha1[20], oldsha1[20];
- int delete = 0, no_deref = 0;
+ int delete = 0, no_deref = 0, flags = 0;
struct option options[] = {
OPT_STRING( 'm', NULL, &msg, "reason", "reason of the update"),
OPT_BOOLEAN('d', NULL, &delete, "deletes the reference"),
if (oldval && *oldval && get_sha1(oldval, oldsha1))
die("%s: not a valid old SHA1", oldval);
+ if (no_deref)
+ flags = REF_NODEREF;
if (delete)
- return delete_ref(refname, oldval ? oldsha1 : NULL);
+ return delete_ref(refname, oldval ? oldsha1 : NULL, flags);
else
return update_ref(msg, refname, sha1, oldval ? oldsha1 : NULL,
- no_deref ? REF_NODEREF : 0, DIE_ON_ERR);
+ flags, DIE_ON_ERR);
}
continue;
}
if (++ret == 1)
- error(message);
+ error("%s", message);
error("%s %s", sha1_to_hex(e->sha1), e->name);
}
if (revs.pending.nr != p->nr)
for (i = 0; i < req_nr; i++)
if (!(refs.objects[i].item->flags & SHOWN)) {
if (++ret == 1)
- error(message);
+ error("%s", message);
error("%s %s", sha1_to_hex(refs.objects[i].item->sha1),
refs.objects[i].name);
}
#define read_cache() read_index(&the_index)
#define read_cache_from(path) read_index_from(&the_index, (path))
+#define is_cache_unborn() is_index_unborn(&the_index)
#define read_cache_unmerged() read_index_unmerged(&the_index)
#define write_cache(newfd, cache, entries) write_index(&the_index, (newfd))
#define discard_cache() discard_index(&the_index)
/* Initialize and use the cache information */
extern int read_index(struct index_state *);
extern int read_index_from(struct index_state *, const char *path);
+extern int is_index_unborn(struct index_state *);
extern int read_index_unmerged(struct index_state *);
extern int write_index(const struct index_state *, int newfd);
extern int discard_index(struct index_state *);
extern void set_alternate_index_output(const char *);
extern int close_lock_file(struct lock_file *);
extern void rollback_lock_file(struct lock_file *);
-extern int delete_ref(const char *, const unsigned char *sha1);
+extern int delete_ref(const char *, const unsigned char *sha1, int delopt);
/* Environment bits from configuration mechanism */
extern int trust_executable_bit;
#define DATA_CHANGED 0x0020
#define TYPE_CHANGED 0x0040
+extern char *mksnpath(char *buf, size_t n, const char *fmt, ...)
+ __attribute__((format (printf, 3, 4)));
+extern char *git_snpath(char *buf, size_t n, const char *fmt, ...)
+ __attribute__((format (printf, 3, 4)));
+extern char *git_pathdup(const char *fmt, ...)
+ __attribute__((format (printf, 1, 2)));
+
/* Return a statically allocated filename matching the sha1 signature */
extern char *mkpath(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
extern char *git_path(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
extern int has_sha1_pack(const unsigned char *sha1, const char **ignore);
extern int has_sha1_file(const unsigned char *sha1);
+extern int has_loose_object_nonlocal(const unsigned char *sha1);
extern int has_pack_file(const unsigned char *sha1);
extern int has_pack_index(const unsigned char *sha1);
int index_version;
time_t mtime;
int pack_fd;
- int pack_local;
+ unsigned pack_local:1,
+ pack_keep:1;
unsigned char sha1[20];
/* something like ".git/objects/pack/xxxxx.pack" */
char pack_name[FLEX_ARRAY]; /* more */
parent_file.ptr = grab_blob(parent, &sz);
parent_file.size = sz;
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
memset(&state, 0, sizeof(state));
extern int non_ascii(int);
struct rev_info; /* in revision.h, it circularly uses enum cmit_fmt */
+extern char *reencode_commit_message(const struct commit *commit,
+ const char **encoding_p);
extern void get_commit_format(const char *arg, struct rev_info *);
extern void format_commit_message(const struct commit *commit,
const void *format, struct strbuf *sb,
* would normally create a console window. But
* since we'll be redirecting std streams, we do
* not need the console.
+ * It is necessary to use DETACHED_PROCESS
+ * instead of CREATE_NO_WINDOW to make ssh
+ * recognize that it has no console.
*/
- flags = CREATE_NO_WINDOW;
+ flags = DETACHED_PROCESS;
} else {
/* There is already a console. If we specified
- * CREATE_NO_WINDOW here, too, Windows would
+ * DETACHED_PROCESS here, too, Windows would
* disassociate the child from the console.
+ * The same is true for CREATE_NO_WINDOW.
* Go figure!
*/
flags = 0;
free(user_config);
}
- repo_config = xstrdup(git_path("config"));
+ repo_config = git_pathdup("config");
ret += git_config_from_file(fn, repo_config, data);
free(repo_config);
return ret;
if (config_exclusive_filename)
config_filename = xstrdup(config_exclusive_filename);
else
- config_filename = xstrdup(git_path("config"));
+ config_filename = git_pathdup("config");
/*
* Since "key" actually contains the section name and the real
if (config_exclusive_filename)
config_filename = xstrdup(config_exclusive_filename);
else
- config_filename = xstrdup(git_path("config"));
+ config_filename = git_pathdup("config");
out_fd = hold_lock_file_for_update(lock, config_filename, 0);
if (out_fd < 0) {
ret = error("could not lock config file %s", config_filename);
fi \
])# GIT_PARSE_WITH
-
+dnl
+dnl GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
+dnl -----------------------------------------
+dnl Similar to AC_CHECK_FUNC, which does not work on systems that do not
+dnl generate warnings for missing prototypes (e.g. FreeBSD when compiling
+dnl without -Wall). By looking for the function definition in libraries
+dnl instead, this problem can be worked around.
+AC_DEFUN([GIT_CHECK_FUNC],[AC_CHECK_FUNC([$1],[
+ AC_SEARCH_LIBS([$1],,
+ [$2],[$3])
+],[$3])])
## Site configuration related to programs (before tests)
## --with-PACKAGE[=ARG] and --without-PACKAGE
#
#
# Define NO_IPV6 if you lack IPv6 support and getaddrinfo().
AC_CHECK_TYPE([struct addrinfo],[
- AC_CHECK_FUNC([getaddrinfo],
+ GIT_CHECK_FUNC([getaddrinfo],
[NO_IPV6=],
[NO_IPV6=YesPlease])
],[NO_IPV6=YesPlease],[
AC_MSG_NOTICE([CHECKS for library functions])
#
# Define NO_STRCASESTR if you don't have strcasestr.
-AC_CHECK_FUNC(strcasestr,
+GIT_CHECK_FUNC(strcasestr,
[NO_STRCASESTR=],
[NO_STRCASESTR=YesPlease])
AC_SUBST(NO_STRCASESTR)
#
# Define NO_MEMMEM if you don't have memmem.
-AC_CHECK_FUNC(memmem,
+GIT_CHECK_FUNC(memmem,
[NO_MEMMEM=],
[NO_MEMMEM=YesPlease])
AC_SUBST(NO_MEMMEM)
#
# Define NO_STRLCPY if you don't have strlcpy.
-AC_CHECK_FUNC(strlcpy,
+GIT_CHECK_FUNC(strlcpy,
[NO_STRLCPY=],
[NO_STRLCPY=YesPlease])
AC_SUBST(NO_STRLCPY)
#
# Define NO_STRTOUMAX if you don't have strtoumax in the C library.
-AC_CHECK_FUNC(strtoumax,
+GIT_CHECK_FUNC(strtoumax,
[NO_STRTOUMAX=],
[NO_STRTOUMAX=YesPlease])
AC_SUBST(NO_STRTOUMAX)
#
# Define NO_SETENV if you don't have setenv in the C library.
-AC_CHECK_FUNC(setenv,
+GIT_CHECK_FUNC(setenv,
[NO_SETENV=],
[NO_SETENV=YesPlease])
AC_SUBST(NO_SETENV)
#
# Define NO_UNSETENV if you don't have unsetenv in the C library.
-AC_CHECK_FUNC(unsetenv,
+GIT_CHECK_FUNC(unsetenv,
[NO_UNSETENV=],
[NO_UNSETENV=YesPlease])
AC_SUBST(NO_UNSETENV)
#
# Define NO_MKDTEMP if you don't have mkdtemp in the C library.
-AC_CHECK_FUNC(mkdtemp,
+GIT_CHECK_FUNC(mkdtemp,
[NO_MKDTEMP=],
[NO_MKDTEMP=YesPlease])
AC_SUBST(NO_MKDTEMP)
stderr=subprocess.PIPE, stdout=subprocess.PIPE);
return proc.wait() == 0;
+_gitConfig = {}
def gitConfig(key):
- return read_pipe("git config %s" % key, ignore_error=True).strip()
+ if not _gitConfig.has_key(key):
+ _gitConfig[key] = read_pipe("git config %s" % key, ignore_error=True).strip()
+ return _gitConfig[key]
def p4BranchesInGit(branchesAreInRemotes = True):
branches = {}
if includeFile:
filesForCommit.append(f)
- if f['action'] != 'delete':
+ if f['action'] not in ('delete', 'purge'):
filesToRead.append(f)
filedata = []
while j < len(filedata):
stat = filedata[j]
j += 1
- text = [];
+ text = ''
while j < len(filedata) and filedata[j]['code'] in ('text', 'unicode', 'binary'):
- text.append(filedata[j]['data'])
+ text += filedata[j]['data']
+ del filedata[j]['data']
j += 1
- text = ''.join(text)
if not stat.has_key('depotFile'):
sys.stderr.write("p4 print fails with: %s\n" % repr(stat))
continue
relPath = self.stripRepoPath(file['path'], branchPrefixes)
- if file["action"] == "delete":
+ if file["action"] in ("delete", "purge"):
self.gitStream.write("D %s\n" % relPath)
else:
data = file['data']
cleanedFiles = {}
for info in files:
- if info["action"] == "delete":
+ if info["action"] in ("delete", "purge"):
continue
cleanedFiles[info["depotFile"]] = info["rev"]
if change > newestRevision:
newestRevision = change
- if info["action"] == "delete":
+ if info["action"] in ("delete", "purge"):
# don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
#fileCnt = fileCnt + 1
continue
# hooks.emailprefix
# All emails have their subjects prefixed with this prefix, or "[SCM]"
# if emailprefix is unset, to aid filtering
+# hooks.showrev
+# The shell command used to format each revision in the email, with
+# "%s" replaced with the commit id. Defaults to "git rev-list -1
+# --pretty %s", displaying the commit id, author, date and log
+# message. To list full patches separated by a blank line, you
+# could set this to "git show -C %s; echo".
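For example, to have notification mails carry full patches, the suggested
value can be set per repository (a minimal sketch; it assumes the hook is
installed and reads hooks.showrev from the repository configuration, as the
script does further below):

    git config hooks.showrev "git show -C %s; echo"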
#
# Notes
# -----
echo ""
echo $LOGBEGIN
- # This shows all log entries that are not already covered by
- # another ref - i.e. commits that are now accessible from this
- # ref that were previously not accessible
- # (see generate_update_branch_email for the explanation of this
- # command)
- git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
- git rev-list --pretty --stdin $newrev
+ show_new_revisions
echo $LOGEND
}
echo ""
echo $LOGBEGIN
- git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
- git rev-list --pretty --stdin $oldrev..$newrev
+ show_new_revisions
# XXX: Need a way of detecting whether git rev-list actually
# outputted anything, so that we can issue a "no new
echo $LOGEND
}
+
+# --------------- Miscellaneous utilities
+
+#
+# Show new revisions as the user would like to see them in the email.
+#
+show_new_revisions()
+{
+ # This shows all log entries that are not already covered by
+ # another ref - i.e. commits that are now accessible from this
+ # ref that were previously not accessible
+ # (see generate_update_branch_email for the explanation of this
+ # command)
+
+ # Revision range passed to rev-list differs for new vs. updated
+ # branches.
+ if [ "$change_type" = create ]
+ then
+ # Show all revisions exclusive to this (new) branch.
+ revspec=$newrev
+ else
+ # Branch update; show revisions not part of $oldrev.
+ revspec=$oldrev..$newrev
+ fi
+
+ git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
+ if [ -z "$custom_showrev" ]
+ then
+ git rev-list --pretty --stdin $revspec
+ else
+ git rev-list --stdin $revspec |
+ while read onerev
+ do
+ eval $(printf "$custom_showrev" $onerev)
+ done
+ fi
+}
+
+
send_mail()
{
if [ -n "$envelopesender" ]; then
announcerecipients=$(git config hooks.announcelist)
envelopesender=$(git config hooks.envelopesender)
emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
+custom_showrev=$(git config hooks.showrev)
# --- Main loop
# Allow dual mode: run from the command line just like the update hook, or
if (!strcmp(var, "diff.external"))
return git_config_string(&external_diff_cmd_cfg, var, value);
- switch (userdiff_config_porcelain(var, value)) {
- case 0: break;
- case -1: return -1;
- default: return 0;
- }
-
return git_diff_basic_config(var, value, cb);
}
return 0;
}
+ switch (userdiff_config(var, value)) {
+ case 0: break;
+ case -1: return -1;
+ default: return 0;
+ }
+
if (!prefixcmp(var, "diff.color.") || !prefixcmp(var, "color.diff.")) {
int slot = parse_diff_color_slot(var, 11);
if (!value)
return 0;
}
- switch (userdiff_config_basic(var, value)) {
- case 0: break;
- case -1: return -1;
- default: return 0;
- }
-
return git_color_default_config(var, value, cb);
}
else if (diff_populate_filespec(one, 0))
return -1;
- diff_filespec_load_driver(one);
- if (one->driver->textconv) {
- size_t size;
- mf->ptr = run_textconv(one->driver->textconv, one, &size);
- if (!mf->ptr)
- return -1;
- mf->size = size;
- }
- else {
- mf->ptr = one->data;
- mf->size = one->size;
- }
+ mf->ptr = one->data;
+ mf->size = one->size;
return 0;
}
mmfile_t minus, plus;
int i;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
minus.size = diff_words->minus.text.size;
minus.ptr = xmalloc(minus.size);
emit_binary_diff_body(file, two, one);
}
-void diff_filespec_load_driver(struct diff_filespec *one)
+static void diff_filespec_load_driver(struct diff_filespec *one)
{
if (!one->driver)
one->driver = userdiff_find_by_path(one->path);
options->b_prefix = b;
}
+static const char *get_textconv(struct diff_filespec *one)
+{
+ if (!DIFF_FILE_VALID(one))
+ return NULL;
+ if (!S_ISREG(one->mode))
+ return NULL;
+ diff_filespec_load_driver(one);
+ return one->driver->textconv;
+}
+
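The driver returned by get_textconv() ultimately comes from a 'diff'
attribute combined with diff.<driver>.textconv configuration, as exercised
by the test script added later in this series. A minimal sketch of wiring
up such a filter (the 'hex' driver name, the data.bin path and the hexdump
helper are only illustrative):

    echo 'data.bin diff=hex' >>.gitattributes
    git config diff.hex.textconv hexdump
    git diff HEAD^ HEAD -- data.bin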
static void builtin_diff(const char *name_a,
const char *name_b,
struct diff_filespec *one,
const char *set = diff_get_color_opt(o, DIFF_METAINFO);
const char *reset = diff_get_color_opt(o, DIFF_RESET);
const char *a_prefix, *b_prefix;
+ const char *textconv_one = NULL, *textconv_two = NULL;
diff_set_mnemonic_prefix(o, "a/", "b/");
if (DIFF_OPT_TST(o, REVERSE_DIFF)) {
if (fill_mmfile(&mf1, one) < 0 || fill_mmfile(&mf2, two) < 0)
die("unable to read files to diff");
+ if (DIFF_OPT_TST(o, ALLOW_TEXTCONV)) {
+ textconv_one = get_textconv(one);
+ textconv_two = get_textconv(two);
+ }
+
if (!DIFF_OPT_TST(o, TEXT) &&
- (diff_filespec_is_binary(one) || diff_filespec_is_binary(two))) {
+ ( (diff_filespec_is_binary(one) && !textconv_one) ||
+ (diff_filespec_is_binary(two) && !textconv_two) )) {
/* Quite common confusing case */
if (mf1.size == mf2.size &&
!memcmp(mf1.ptr, mf2.ptr, mf1.size))
struct emit_callback ecbdata;
const struct userdiff_funcname *pe;
+ if (textconv_one) {
+ size_t size;
+ mf1.ptr = run_textconv(textconv_one, one, &size);
+ if (!mf1.ptr)
+ die("unable to read files to diff");
+ mf1.size = size;
+ }
+ if (textconv_two) {
+ size_t size;
+ mf2.ptr = run_textconv(textconv_two, two, &size);
+ if (!mf2.ptr)
+ die("unable to read files to diff");
+ mf2.size = size;
+ }
+
pe = diff_funcname_pattern(one);
if (!pe)
pe = diff_funcname_pattern(two);
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
memset(&ecbdata, 0, sizeof(ecbdata));
ecbdata.label_path = lbl;
&xpp, &xecfg, &ecb);
if (DIFF_OPT_TST(o, COLOR_DIFF_WORDS))
free_diff_words_data(&ecbdata);
+ if (textconv_one)
+ free(mf1.ptr);
+ if (textconv_two)
+ free(mf2.ptr);
}
free_ab_and_return:
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
xpp.flags = XDF_NEED_MINIMAL | o->xdl_opts;
xdi_diff_outf(&mf1, &mf2, diffstat_consume, diffstat,
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 1; /* at least one context line */
xpp.flags = XDF_NEED_MINIMAL;
struct diff_filepair *p = q->queue[i];
int len1, len2;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
if (p->status == 0)
return error("internal diff status error");
#define DIFF_OPT_IGNORE_SUBMODULES (1 << 18)
#define DIFF_OPT_DIRSTAT_CUMULATIVE (1 << 19)
#define DIFF_OPT_DIRSTAT_BY_FILE (1 << 20)
+#define DIFF_OPT_ALLOW_TEXTCONV (1 << 21)
#define DIFF_OPT_TST(opts, flag) ((opts)->flags & DIFF_OPT_##flag)
#define DIFF_OPT_SET(opts, flag) ((opts)->flags |= DIFF_OPT_##flag)
#define DIFF_OPT_CLR(opts, flag) ((opts)->flags &= ~DIFF_OPT_##flag)
struct diff_filespec *one;
struct diff_filespec *two;
unsigned short int score;
- char status; /* M C R N D U (see Documentation/diff-format.txt) */
+ char status; /* M C R A D U etc. (see Documentation/diff-format.txt or DIFF_STATUS_* in diff.h) */
unsigned broken_pair : 1;
unsigned renamed_pair : 1;
unsigned is_unmerged : 1;
}
git_graft_file = getenv(GRAFT_ENVIRONMENT);
if (!git_graft_file)
- git_graft_file = xstrdup(git_path("info/grafts"));
+ git_graft_file = git_pathdup("info/grafts");
}
int is_bare_repository(void)
work_tree = git_work_tree_cfg;
/* make_absolute_path also normalizes the path */
if (work_tree && !is_absolute_path(work_tree))
- work_tree = xstrdup(make_absolute_path(git_path(work_tree)));
+ work_tree = xstrdup(make_absolute_path(git_path("%s", work_tree)));
} else if (work_tree)
work_tree = xstrdup(make_absolute_path(work_tree));
git_work_tree_initialized = 1;
die("this should not happen, your snprintf is broken");
}
- error(sb.buf);
+ error("%s", sb.buf);
strbuf_release(&sb);
return 1;
}
good=$(git for-each-ref --format='^%(objectname)' \
"refs/bisect/good-*" | tr '\012' ' ') &&
skip=$(git for-each-ref --format='%(objectname)' \
- "refs/bisect/skip-*" | tr '\012' ' ') &&
+ "refs/bisect/skip-*" | tr '\012' ' ') || exit
# Maybe some merge bases must be tested first
check_good_are_ancestors_of_bad "$bad" "$good" "$skip"
}
}
+######################################################################
+##
+## execution environment
+
+set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}]
+
+# Suggest our implementation of askpass, if none is set
+if {![info exists env(SSH_ASKPASS)]} {
+ set env(SSH_ASKPASS) [gitexec git-gui--askpass]
+}
+
######################################################################
##
## repository setup
set nullid "0000000000000000000000000000000000000000"
set nullid2 "0000000000000000000000000000000000000001"
-set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}]
-
-######################################################################
-
-# Suggest our implementation of askpass, if none is set
-if {![info exists env(SSH_ASKPASS)]} {
- set env(SSH_ASKPASS) [gitexec git-gui--askpass]
-}
-
######################################################################
##
## task management
tk_popup $w.ctxm %X %Y
"
bind $i <Shift-Tab> "[list focus $w_cviewer];break"
- bind $i <Tab> "[list focus $w_cviewer];break"
+ bind $i <Tab> "[cb _focus_search $w_cviewer];break"
}
foreach i [concat $w_columns $w_cviewer] {
bind $i <Control-Key-f> {catch {%W yview scroll 1 pages};break}
}
- bind $w_cviewer <Shift-Tab> "[list focus $w_file];break"
+ bind $w_cviewer <Shift-Tab> "[cb _focus_search $w_file];break"
bind $w_cviewer <Tab> "[list focus $w_file];break"
- bind $w_cviewer <Button-1> [list focus $w_cviewer]
- bind $w_file <Visibility> [list focus $w_file]
+ bind $w_cviewer <Button-1> [list focus $w_cviewer]
+ bind $w_file <Visibility> [cb _focus_search $w_file]
bind $top <F7> [list searchbar::show $finder]
bind $top <Escape> [list searchbar::hide $finder]
bind $top <F3> [list searchbar::find_next $finder]
_load $this $i_jump
}
+method _focus_search {win} {
+ if {[searchbar::visible $finder]} {
+ focus [searchbar::editor $finder]
+ } else {
+ focus $win
+ }
+}
+
method _handle_destroy {win} {
if {$win eq $w} {
_kill $this
} ifdeleted { catch {close $fd} }
method _exec_blame {cur_w cur_d options cur_s} {
- lappend options --incremental
+ lappend options --incremental --encoding=utf-8
if {$commit eq {}} {
lappend options --contents $path
} else {
$w.mbar.apple add command \
-label [mc "About %s" [appname]] \
-command do_about
+ $w.mbar.apple add command \
+ -label [mc "Show SSH Key"] \
+ -command do_ssh_key
} else {
$w.mbar add cascade -label [mc Help] -menu $w.mbar.help
menu $w.mbar.help
$w.mbar.help add command \
-label [mc "About %s" [appname]] \
-command do_about
+ $w.mbar.help add command \
+ -label [mc "Show SSH Key"] \
+ -command do_ssh_key
}
wm protocol $top WM_DELETE_WINDOW exit
frame $w
label $w.l -text [mc Find:]
+ entry $w.ent -textvariable ${__this}::searchstring -background lightgreen
button $w.bn -text [mc Next] -command [cb find_next]
button $w.bp -text [mc Prev] -command [cb find_prev]
checkbutton $w.cs -text [mc Case-Sensitive] \
-variable ${__this}::casesensitive -command [cb _incrsearch]
- entry $w.ent -textvariable ${__this}::searchstring -background lightgreen
pack $w.l -side left
pack $w.cs -side right
pack $w.bp -side right
}
method show {} {
- if {![winfo ismapped $w]} {
+ if {![visible $this]} {
grid $w
}
focus -force $w.ent
}
method hide {} {
- if {[winfo ismapped $w]} {
+ if {[visible $this]} {
focus $ctext
grid remove $w
}
}
+method visible {} {
+ return [winfo ismapped $w]
+}
+
+method editor {} {
+ return $w.ent
+}
+
method _get_new_anchor {} {
# use start of selection if it is visible,
# or the bounds of the visible area
if test -f "$DOTEST"/current-commit
then
- current_commit=$(cat "$DOTEST"/current-commit) &&
- git rev-parse HEAD > "$REWRITTEN"/$current_commit &&
- rm "$DOTEST"/current-commit ||
- die "Cannot write current commit's replacement sha1"
+ if test "$fast_forward" = t
+ then
+ cat "$DOTEST"/current-commit | while read current_commit
+ do
+ git rev-parse HEAD > "$REWRITTEN"/$current_commit
+ done
+ rm "$DOTEST"/current-commit ||
+ die "Cannot write current commit's replacement sha1"
+ fi
fi
- echo $sha1 > "$DOTEST"/current-commit
+ echo $sha1 >> "$DOTEST"/current-commit
# rewrite parents; if none were rewritten, we can fast-forward.
new_parents=
if test -f "$REWRITTEN"/$p
then
new_p=$(cat "$REWRITTEN"/$p)
+
+	# If the todo reordered commits, and our parent is marked for
+	# rewriting but has not been rewritten yet, assume the user meant
+	# to drop it on top of the current HEAD
+ if test -z "$new_p"
+ then
+ new_p=$(git rev-parse HEAD)
+ fi
+
test $p != $new_p && fast_forward=f
case "$new_parents" in
*$new_p*)
die "Cannot fast forward to $sha1"
;;
f)
- test "a$1" = a-n && die "Refusing to squash a merge: $sha1"
-
first_parent=$(expr "$new_parents" : ' \([^ ]*\)')
- # detach HEAD to current parent
- output git checkout $first_parent 2> /dev/null ||
- die "Cannot move HEAD to $first_parent"
+
+ if [ "$1" != "-n" ]
+ then
+ # detach HEAD to current parent
+ output git checkout $first_parent 2> /dev/null ||
+ die "Cannot move HEAD to $first_parent"
+ fi
case "$new_parents" in
' '*' '*)
+ test "a$1" = a-n && die "Refusing to squash a merge: $sha1"
+
# redo merge
author_script=$(get_author_ident_from_commit $sha1)
eval "$author_script"
HEADNAME=$(cat "$DOTEST"/head-name) &&
OLDHEAD=$(cat "$DOTEST"/head) &&
SHORTONTO=$(git rev-parse --short $(cat "$DOTEST"/onto)) &&
- if test -d "$REWRITTEN"
- then
- test -f "$DOTEST"/current-commit &&
- current_commit=$(cat "$DOTEST"/current-commit) &&
- git rev-parse HEAD > "$REWRITTEN"/$current_commit
- if test -f "$REWRITTEN"/$OLDHEAD
- then
- NEWHEAD=$(cat "$REWRITTEN"/$OLDHEAD)
- else
- NEWHEAD=$OLDHEAD
- fi
- else
- NEWHEAD=$(git rev-parse HEAD)
- fi &&
+ NEWHEAD=$(git rev-parse HEAD) &&
case $HEADNAME in
refs/*)
message="$GIT_REFLOG_ACTION: $HEADNAME onto $SHORTONTO)" &&
echo $ONTO > "$REWRITTEN"/$c ||
die "Could not init rewritten commits"
done
+ # No cherry-pick because our first pass is to determine
+ # parents to rewrite and skipping dropped commits would
+ # prematurely end our probe
MERGES_OPTION=
+ first_after_upstream="$(git rev-list --reverse --first-parent $UPSTREAM..$HEAD | head -n 1)"
else
- MERGES_OPTION=--no-merges
+ MERGES_OPTION="--no-merges --cherry-pick"
fi
SHORTUPSTREAM=$(git rev-parse --short $UPSTREAM)
SHORTHEAD=$(git rev-parse --short $HEAD)
SHORTONTO=$(git rev-parse --short $ONTO)
git rev-list $MERGES_OPTION --pretty=oneline --abbrev-commit \
- --abbrev=7 --reverse --left-right --cherry-pick \
+ --abbrev=7 --reverse --left-right --topo-order \
$UPSTREAM...$HEAD | \
- sed -n "s/^>/pick /p" > "$TODO"
+ sed -n "s/^>//p" | while read shortsha1 rest
+ do
+ if test t != "$PRESERVE_MERGES"
+ then
+ echo "pick $shortsha1 $rest" >> "$TODO"
+ else
+ sha1=$(git rev-parse $shortsha1)
+ preserve=t
+ for p in $(git rev-list --parents -1 $sha1 | cut -d' ' -f2-)
+ do
+ if test -f "$REWRITTEN"/$p -a \( $p != $UPSTREAM -o $sha1 = $first_after_upstream \)
+ then
+ preserve=f
+ fi
+ done
+ if test f = "$preserve"
+ then
+ touch "$REWRITTEN"/$sha1
+ echo "pick $shortsha1 $rest" >> "$TODO"
+ fi
+ fi
+ done
+
+	# Watch for commits that have been dropped by --cherry-pick
+ if test t = "$PRESERVE_MERGES"
+ then
+ mkdir "$DROPPED"
+ # Save all non-cherry-picked changes
+ git rev-list $UPSTREAM...$HEAD --left-right --cherry-pick | \
+ sed -n "s/^>//p" > "$DOTEST"/not-cherry-picks
+	# Now walk all commits and note which ones are missing from
+	# not-cherry-picks and hence are being dropped
+ git rev-list $UPSTREAM..$HEAD |
+ while read rev
+ do
+ if test -f "$REWRITTEN"/$rev -a "$(grep "$rev" "$DOTEST"/not-cherry-picks)" = ""
+ then
+ # Use -f2 because if rev-list is telling us this commit is
+ # not worthwhile, we don't want to track its multiple heads,
+ # just the history of its first-parent for others that will
+ # be rebasing on top of it
+ git rev-list --parents -1 $rev | cut -d' ' -f2 > "$DROPPED"/$rev
+ short=$(git rev-list -1 --abbrev-commit --abbrev=7 $rev)
+ grep -v "^[a-z][a-z]* $short" <"$TODO" > "${TODO}2" ; mv "${TODO}2" "$TODO"
+ rm "$REWRITTEN"/$rev
+ fi
+ done
+ fi
test -s "$TODO" || echo noop >> "$TODO"
cat >> "$TODO" << EOF
#
EOF
- # Watch for commits that been dropped by --cherry-pick
- if test t = "$PRESERVE_MERGES"
- then
- mkdir "$DROPPED"
- # drop the --cherry-pick parameter this time
- git rev-list $MERGES_OPTION --abbrev-commit \
- --abbrev=7 $UPSTREAM...$HEAD --left-right | \
- sed -n "s/^>//p" | while read rev
- do
- grep --quiet "$rev" "$TODO"
- if [ $? -ne 0 ]
- then
- # Use -f2 because if rev-list is telling this commit is not
- # worthwhile, we don't want to track its multiple heads,
- # just the history of its first-parent for others that will
- # be rebasing on top of us
- full=$(git rev-parse $rev)
- git rev-list --parents -1 $rev | cut -d' ' -f2 > "$DROPPED"/$full
- fi
- done
- fi
-
has_action "$TODO" ||
die_abort "Nothing to do"
existing="$existing $e"
fi
done
- fi
- if test -z "$args"
- then
- args='--unpacked --incremental'
- elif test -n "$unpack_unreachable"
- then
- args="$args $unpack_unreachable"
+ if test -n "$args" -a -n "$unpack_unreachable"
+ then
+ args="$args $unpack_unreachable"
+ fi
fi
;;
esac
args="$args $local $quiet $no_reuse$extra"
-names=$(git pack-objects --non-empty --all --reflog $args </dev/null "$PACKTMP") ||
+names=$(git pack-objects --honor-pack-keep --non-empty --all --reflog $args </dev/null "$PACKTMP") ||
exit 1
if [ -z "$names" ]; then
if test -z "$quiet"; then
# Only mention uninitialized submodules when their
# paths have been specified
test "$#" != "0" &&
- say "Submodule path '$path' not initialized"
+ say "Submodule path '$path' not initialized" &&
say "Maybe you want to use 'update --init'?"
continue
fi
shows repositories only if this file exists in its object database
(if directory has the magic file named $export_ok).
+- Finally, it is possible to specify an arbitrary perl subroutine that
+ will be called for each project to determine if it can be exported.
+ The subroutine receives an absolute path to the project as its only
+ parameter.
+
+ For example, if you use mod_perl to run the script, and have dumb
+ http protocol authentication configured for your repositories, you
+ can use the following hook to allow access only if the user is
+ authorized to read the files:
+
+ $export_auth_hook = sub {
+ use Apache2::SubRequest ();
+ use Apache2::Const -compile => qw(HTTP_OK);
+ my $path = "$_[0]/HEAD";
+ my $r = Apache2::RequestUtil->request;
+ my $sub = $r->lookup_file($path);
+ return $sub->filename eq $path
+ && $sub->status == Apache2::Const::HTTP_OK;
+ };
+
+
Generating projects list using gitweb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# if we're called with PATH_INFO, we have to strip that
# from the URL to find our real URL
# we make $path_info global because it's also used later on
-my $path_info = $ENV{"PATH_INFO"};
+our $path_info = $ENV{"PATH_INFO"};
if ($path_info) {
$my_url =~ s,\Q$path_info\E$,,;
$my_uri =~ s,\Q$path_info\E$,,;
# (only effective if this variable evaluates to true)
our $export_ok = "++GITWEB_EXPORT_OK++";
+# show repository only if this subroutine returns true
+# when given the path to the project, for example:
+# sub { return -e "$_[0]/git-daemon-export-ok"; }
+our $export_auth_hook = undef;
+
# only allow viewing of repositories also shown on the overview page
our $strict_export = "++GITWEB_STRICT_EXPORT++";
sub check_export_ok {
my ($dir) = @_;
return (check_head_link($dir) &&
- (!$export_ok || -e "$dir/$export_ok"));
+ (!$export_ok || -e "$dir/$export_ok") &&
+ (!$export_auth_hook || $export_auth_hook->($dir)));
}
# process alternate names for backward compatibility
# together during validation: this allows subsequent uses (e.g. href()) to be
# agnostic of the parameter origin
-my %input_params = ();
+our %input_params = ();
# input parameters are stored with the long parameter name as key. This will
# also be used in the href subroutine to convert parameters to their CGI
# XXX: Warning: If you touch this, check the search form for updating,
# too.
-my @cgi_param_mapping = (
+our @cgi_param_mapping = (
project => "p",
action => "a",
file_name => "f",
extra_options => "opt",
search_use_regexp => "sr",
);
-my %cgi_param_mapping = @cgi_param_mapping;
+our %cgi_param_mapping = @cgi_param_mapping;
# we will also need to know the possible actions, for validation
-my %actions = (
+our %actions = (
"blame" => \&git_blame,
"blobdiff" => \&git_blobdiff,
"blobdiff_plain" => \&git_blobdiff_plain,
# finally, we have the hash of allowed extra_options for the commands that
# allow them
-my %allowed_options = (
+our %allowed_options = (
"--no-merges" => [ qw(rss atom log shortlog history) ],
);
return if $input_params{'action'};
$path_info =~ s,^\Q$project\E/*,,;
- my ($refname, $pathname) = split(/:/, $path_info, 2);
+ # next, check if we have an action
+ my $action = $path_info;
+ $action =~ s,/.*$,,;
+ if (exists $actions{$action}) {
+ $path_info =~ s,^$action/*,,;
+ $input_params{'action'} = $action;
+ }
+
+ # list of actions that want hash_base instead of hash, but can have no
+ # pathname (f) parameter
+ my @wants_base = (
+ 'tree',
+ 'history',
+ );
+
+ # we want to catch
+ # [$hash_parent_base[:$file_parent]..]$hash_parent[:$file_name]
+ my ($parentrefname, $parentpathname, $refname, $pathname) =
+ ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?(.+?)(?::(.+))?$/);
+
+ # first, analyze the 'current' part
if (defined $pathname) {
- # we got "project.git/branch:filename" or "project.git/branch:dir/"
- # we could use git_get_type(branch:pathname), but it needs $git_dir
+ # we got "branch:filename" or "branch:dir/"
+ # we could use git_get_type(branch:pathname), but:
+ # - it needs $git_dir
+ # - it does a git() call
+ # - the convention of terminating directories with a slash
+ # makes it superfluous
+ # - embedding the action in the PATH_INFO would make it even
+ # more superfluous
$pathname =~ s,^/+,,;
if (!$pathname || substr($pathname, -1) eq "/") {
- $input_params{'action'} = "tree";
+ $input_params{'action'} ||= "tree";
$pathname =~ s,/$,,;
} else {
- $input_params{'action'} = "blob_plain";
+ # the default action depends on whether we had parent info
+ # or not
+ if ($parentrefname) {
+ $input_params{'action'} ||= "blobdiff_plain";
+ } else {
+ $input_params{'action'} ||= "blob_plain";
+ }
}
$input_params{'hash_base'} ||= $refname;
$input_params{'file_name'} ||= $pathname;
} elsif (defined $refname) {
- # we got "project.git/branch"
- $input_params{'action'} = "shortlog";
- $input_params{'hash'} ||= $refname;
+	# we got "branch". In this case we have to choose whether to set
+	# hash or hash_base.
+ #
+ # Most of the actions without a pathname only want hash to be
+ # set, except for the ones specified in @wants_base that want
+ # hash_base instead. It should also be noted that hand-crafted
+ # links having 'history' as an action and no pathname or hash
+ # set will fail, but that happens regardless of PATH_INFO.
+ $input_params{'action'} ||= "shortlog";
+ if (grep { $_ eq $input_params{'action'} } @wants_base) {
+ $input_params{'hash_base'} ||= $refname;
+ } else {
+ $input_params{'hash'} ||= $refname;
+ }
+ }
+
+ # next, handle the 'parent' part, if present
+ if (defined $parentrefname) {
+ # a missing pathspec defaults to the 'current' filename, allowing e.g.
+ # someproject/blobdiff/oldrev..newrev:/filename
+ if ($parentpathname) {
+ $parentpathname =~ s,^/+,,;
+ $parentpathname =~ s,/$,,;
+ $input_params{'file_parent'} ||= $parentpathname;
+ } else {
+ $input_params{'file_parent'} ||= $input_params{'file_name'};
+ }
+ # we assume that hash_parent_base is wanted if a path was specified,
+ # or if the action wants hash_base instead of hash
+ if (defined $input_params{'file_parent'} ||
+ grep { $_ eq $input_params{'action'} } @wants_base) {
+ $input_params{'hash_parent_base'} ||= $parentrefname;
+ } else {
+ $input_params{'hash_parent'} ||= $parentrefname;
+ }
}
}
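Putting the pieces together, hand-written PATH_INFO URLs now map roughly as
follows (an illustrative reading of the parsing above, not an exhaustive
list; "project.git", "branch", "old", "new", "dir" and "file" are
placeholders):

    project.git/branch        -> action=shortlog, hash=branch
    project.git/tree/branch   -> action=tree, hash_base=branch
    project.git/branch:dir/   -> action=tree, hash_base=branch, file_name=dir
    project.git/branch:file   -> action=blob_plain, hash_base=branch, file_name=file
    project.git/old..new:file -> action=blobdiff_plain, hash_parent_base=old,
                                 hash_base=new, file_name=file_parent=file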
evaluate_path_info();
my ($use_pathinfo) = gitweb_check_feature('pathinfo');
if ($use_pathinfo) {
- # use PATH_INFO for project name
+ # try to put as many parameters as possible in PATH_INFO:
+ # - project name
+ # - action
+ # - hash_parent or hash_parent_base:/file_parent
+ # - hash or hash_base:/filename
+
+ # When the script is the root DirectoryIndex for the domain,
+ # $href here would be something like http://gitweb.example.com/
+ # Thus, we strip any trailing / from $href, to spare us double
+ # slashes in the final URL
+ $href =~ s,/$,,;
+
+ # Then add the project name, if present
$href .= "/".esc_url($params{'project'}) if defined $params{'project'};
delete $params{'project'};
- # Summary just uses the project path URL
- if (defined $params{'action'} && $params{'action'} eq 'summary') {
+ # Summary just uses the project path URL, any other action is
+ # added to the URL
+ if (defined $params{'action'}) {
+ $href .= "/".esc_url($params{'action'}) unless $params{'action'} eq 'summary';
delete $params{'action'};
}
+
+ # Next, we put hash_parent_base:/file_parent..hash_base:/file_name,
+ # stripping nonexistent or useless pieces
+ $href .= "/" if ($params{'hash_base'} || $params{'hash_parent_base'}
+ || $params{'hash_parent'} || $params{'hash'});
+ if (defined $params{'hash_base'}) {
+ if (defined $params{'hash_parent_base'}) {
+ $href .= esc_url($params{'hash_parent_base'});
+ # skip the file_parent if it's the same as the file_name
+ delete $params{'file_parent'} if $params{'file_parent'} eq $params{'file_name'};
+ if (defined $params{'file_parent'} && $params{'file_parent'} !~ /\.\./) {
+ $href .= ":/".esc_url($params{'file_parent'});
+ delete $params{'file_parent'};
+ }
+ $href .= "..";
+ delete $params{'hash_parent'};
+ delete $params{'hash_parent_base'};
+ } elsif (defined $params{'hash_parent'}) {
+ $href .= esc_url($params{'hash_parent'}). "..";
+ delete $params{'hash_parent'};
+ }
+
+ $href .= esc_url($params{'hash_base'});
+ if (defined $params{'file_name'} && $params{'file_name'} !~ /\.\./) {
+ $href .= ":/".esc_url($params{'file_name'});
+ delete $params{'file_name'};
+ }
+ delete $params{'hash'};
+ delete $params{'hash_base'};
+ } elsif (defined $params{'hash'}) {
+ $href .= esc_url($params{'hash'});
+ delete $params{'hash'};
+ }
}
# now encode the parameters explicitly
my $input = shift || return undef;
if (!validate_pathname($input) ||
!(-d "$projectroot/$input") ||
- !check_head_link("$projectroot/$input") ||
- ($export_ok && !(-e "$projectroot/$input/$export_ok")) ||
+ !check_export_ok("$projectroot/$input") ||
($strict_export && !project_in_list($input))) {
return undef;
} else {
my $ctags = {};
$git_dir = "$projectroot/$path";
- foreach (<$git_dir/ctags/*>) {
+ unless (opendir D, "$git_dir/ctags") {
+ return $ctags;
+ }
+ foreach (grep { -f $_ } map { "$git_dir/ctags/$_" } readdir(D)) {
open CT, $_ or next;
my $val = <CT>;
chomp $val;
my $ctag = $_; $ctag =~ s#.*/##;
$ctags->{$ctag} = $val;
}
+ closedir D;
$ctags;
}
if (from <= last_shown)
from = last_shown + 1;
if (last_shown && from != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
while (from < lno) {
pcl = &prev[lno-from-1];
show_line(opt, pcl->bol, pcl->eol,
last_shown = lno-1;
}
if (last_shown && lno != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
if (!opt->count)
show_line(opt, bol, eol, name, lno, ':');
last_shown = last_hit = lno;
* we need to show this line.
*/
if (last_shown && lno != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
show_line(opt, bol, eol, name, lno, '-');
last_shown = lno;
}
}
if (errstr) {
- error (errstr);
+ error("%s", errstr);
usage_with_options(hash_object_usage, hash_object_options);
}
die("pack has bad object at offset %lu: %s", offset, buf);
}
+static void free_base_data(struct base_data *c)
+{
+ if (c->data) {
+ free(c->data);
+ c->data = NULL;
+ base_cache_used -= c->size;
+ }
+}
+
static void prune_base_data(struct base_data *retain)
{
struct base_data *b = base_cache;
for (b = base_cache;
base_cache_used > delta_base_cache_limit && b;
b = b->child) {
- if (b->data && b != retain) {
- free(b->data);
- b->data = NULL;
- base_cache_used -= b->size;
- }
+ if (b->data && b != retain)
+ free_base_data(b);
}
}
c->base = base;
c->child = NULL;
- base_cache_used += c->size;
+ if (c->data)
+ base_cache_used += c->size;
prune_base_data(c);
}
base->child = NULL;
else
base_cache = NULL;
- if (c->data) {
- free(c->data);
- base_cache_used -= c->size;
- }
+ free_base_data(c);
}
static void *unpack_entry_data(unsigned long offset, unsigned long size)
return -first-1;
}
-static int find_delta_children(const union delta_base *base,
- int *first_index, int *last_index)
+static void find_delta_children(const union delta_base *base,
+ int *first_index, int *last_index)
{
int first = find_delta(base);
int last = first;
int end = nr_deltas - 1;
- if (first < 0)
- return -1;
+ if (first < 0) {
+ *first_index = 0;
+ *last_index = -1;
+ return;
+ }
while (first > 0 && !memcmp(&deltas[first - 1].base, base, UNION_BASE_SZ))
--first;
while (last < end && !memcmp(&deltas[last + 1].base, base, UNION_BASE_SZ))
++last;
*first_index = first;
*last_index = last;
- return 0;
}
static void sha1_object(const void *data, unsigned long size,
free(raw);
if (!c->data)
bad_object(obj->idx.offset, "failed to apply delta");
- } else
+ } else {
c->data = get_data_from_pack(obj);
+ c->size = obj->size;
+ }
base_cache_used += c->size;
prune_base_data(c);
}
static void resolve_delta(struct object_entry *delta_obj,
- struct base_data *base_obj, enum object_type type)
+ struct base_data *base, struct base_data *result)
{
- void *delta_data;
- unsigned long delta_size;
- union delta_base delta_base;
- int j, first, last;
- struct base_data result;
+ void *base_data, *delta_data;
- delta_obj->real_type = type;
+ delta_obj->real_type = base->obj->real_type;
delta_data = get_data_from_pack(delta_obj);
- delta_size = delta_obj->size;
- result.data = patch_delta(get_base_data(base_obj), base_obj->size,
- delta_data, delta_size,
- &result.size);
+ base_data = get_base_data(base);
+ result->obj = delta_obj;
+ result->data = patch_delta(base_data, base->size,
+ delta_data, delta_obj->size, &result->size);
free(delta_data);
- if (!result.data)
+ if (!result->data)
bad_object(delta_obj->idx.offset, "failed to apply delta");
- sha1_object(result.data, result.size, type, delta_obj->idx.sha1);
+ sha1_object(result->data, result->size, delta_obj->real_type,
+ delta_obj->idx.sha1);
nr_resolved_deltas++;
+}
+
+static void find_unresolved_deltas(struct base_data *base,
+ struct base_data *prev_base)
+{
+ int i, ref_first, ref_last, ofs_first, ofs_last;
+
+ /*
+	 * This is a recursive function. These braces should help reduce
+ * stack usage by limiting the scope of the delta_base union.
+ */
+ {
+ union delta_base base_spec;
+
+ hashcpy(base_spec.sha1, base->obj->idx.sha1);
+ find_delta_children(&base_spec, &ref_first, &ref_last);
+
+ memset(&base_spec, 0, sizeof(base_spec));
+ base_spec.offset = base->obj->idx.offset;
+ find_delta_children(&base_spec, &ofs_first, &ofs_last);
+ }
- result.obj = delta_obj;
- link_base_data(base_obj, &result);
+ if (ref_last == -1 && ofs_last == -1) {
+ free(base->data);
+ return;
+ }
+
+ link_base_data(prev_base, base);
- hashcpy(delta_base.sha1, delta_obj->idx.sha1);
- if (!find_delta_children(&delta_base, &first, &last)) {
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &result, type);
+ for (i = ref_first; i <= ref_last; i++) {
+ struct object_entry *child = objects + deltas[i].obj_no;
+ if (child->real_type == OBJ_REF_DELTA) {
+ struct base_data result;
+ resolve_delta(child, base, &result);
+ if (i == ref_last && ofs_last == -1)
+ free_base_data(base);
+ find_unresolved_deltas(&result, base);
}
}
- memset(&delta_base, 0, sizeof(delta_base));
- delta_base.offset = delta_obj->idx.offset;
- if (!find_delta_children(&delta_base, &first, &last)) {
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_OFS_DELTA)
- resolve_delta(child, &result, type);
+ for (i = ofs_first; i <= ofs_last; i++) {
+ struct object_entry *child = objects + deltas[i].obj_no;
+ if (child->real_type == OBJ_OFS_DELTA) {
+ struct base_data result;
+ resolve_delta(child, base, &result);
+ if (i == ofs_last)
+ free_base_data(base);
+ find_unresolved_deltas(&result, base);
}
}
- unlink_base_data(&result);
+ unlink_base_data(base);
}
static int compare_delta_entry(const void *a, const void *b)
progress = start_progress("Resolving deltas", nr_deltas);
for (i = 0; i < nr_objects; i++) {
struct object_entry *obj = &objects[i];
- union delta_base base;
- int j, ref, ref_first, ref_last, ofs, ofs_first, ofs_last;
struct base_data base_obj;
if (obj->type == OBJ_REF_DELTA || obj->type == OBJ_OFS_DELTA)
continue;
- hashcpy(base.sha1, obj->idx.sha1);
- ref = !find_delta_children(&base, &ref_first, &ref_last);
- memset(&base, 0, sizeof(base));
- base.offset = obj->idx.offset;
- ofs = !find_delta_children(&base, &ofs_first, &ofs_last);
- if (!ref && !ofs)
- continue;
- base_obj.data = get_data_from_pack(obj);
- base_obj.size = obj->size;
base_obj.obj = obj;
- link_base_data(NULL, &base_obj);
-
- if (ref)
- for (j = ref_first; j <= ref_last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &base_obj, obj->type);
- }
- if (ofs)
- for (j = ofs_first; j <= ofs_last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_OFS_DELTA)
- resolve_delta(child, &base_obj, obj->type);
- }
- unlink_base_data(&base_obj);
+ base_obj.data = NULL;
+ find_unresolved_deltas(&base_obj, NULL);
display_progress(progress, nr_resolved_deltas);
}
}
for (i = 0; i < n; i++) {
struct delta_entry *d = sorted_by_pos[i];
enum object_type type;
- int j, first, last;
struct base_data base_obj;
if (objects[d->obj_no].real_type != OBJ_REF_DELTA)
die("local object %s is corrupt", sha1_to_hex(d->base.sha1));
base_obj.obj = append_obj_to_pack(f, d->base.sha1,
base_obj.data, base_obj.size, type);
- link_base_data(NULL, &base_obj);
-
- find_delta_children(&d->base, &first, &last);
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &base_obj, type);
- }
-
- unlink_base_data(&base_obj);
+ find_unresolved_deltas(&base_obj, NULL);
display_progress(progress, nr_resolved_deltas);
}
free(sorted_by_pos);
}
}
-void show_decorations(struct commit *commit)
+void show_decorations(struct rev_info *opt, struct commit *commit)
{
const char *prefix;
struct name_decoration *decoration;
+ if (opt->show_source && commit->util)
+ printf(" %s", (char *) commit->util);
+ if (!opt->show_decorations)
+ return;
decoration = lookup_decoration(&name_decoration, &commit->object);
if (!decoration)
return;
fputs(diff_unique_abbrev(commit->object.sha1, abbrev_commit), stdout);
if (opt->print_parents)
show_parents(commit, abbrev_commit);
- show_decorations(commit);
+ show_decorations(opt, commit);
if (opt->graph && !graph_is_commit_finished(opt->graph)) {
putchar('\n');
graph_show_remainder(opt->graph);
printf(" (from %s)",
diff_unique_abbrev(parent->object.sha1,
abbrev_commit));
- show_decorations(commit);
+ show_decorations(opt, commit);
printf("%s", diff_get_color_opt(&opt->diffopt, DIFF_RESET));
if (opt->commit_format == CMIT_FMT_ONELINE) {
putchar(' ');
int log_tree_commit(struct rev_info *, struct commit *);
int log_tree_opt_parse(struct rev_info *, const char **, int);
void show_log(struct rev_info *opt);
-void show_decorations(struct commit *commit);
+void show_decorations(struct rev_info *opt, struct commit *commit);
void log_write_email_headers(struct rev_info *opt, const char *name,
const char **subject_p,
const char **extra_headers_p,
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 3;
return path;
}
+char *mksnpath(char *buf, size_t n, const char *fmt, ...)
+{
+ va_list args;
+ unsigned len;
+
+ va_start(args, fmt);
+ len = vsnprintf(buf, n, fmt, args);
+ va_end(args);
+ if (len >= n) {
+ strlcpy(buf, bad_path, n);
+ return buf;
+ }
+ return cleanup_path(buf);
+}
+
+static char *git_vsnpath(char *buf, size_t n, const char *fmt, va_list args)
+{
+ const char *git_dir = get_git_dir();
+ size_t len;
+
+ len = strlen(git_dir);
+ if (n < len + 1)
+ goto bad;
+ memcpy(buf, git_dir, len);
+ if (len && !is_dir_sep(git_dir[len-1]))
+ buf[len++] = '/';
+ len += vsnprintf(buf + len, n - len, fmt, args);
+ if (len >= n)
+ goto bad;
+ return cleanup_path(buf);
+bad:
+ strlcpy(buf, bad_path, n);
+ return buf;
+}
+
+char *git_snpath(char *buf, size_t n, const char *fmt, ...)
+{
+ va_list args;
+ va_start(args, fmt);
+ (void)git_vsnpath(buf, n, fmt, args);
+ va_end(args);
+ return buf;
+}
+
+char *git_pathdup(const char *fmt, ...)
+{
+ char path[PATH_MAX];
+ va_list args;
+ va_start(args, fmt);
+ (void)git_vsnpath(path, sizeof(path), fmt, args);
+ va_end(args);
+ return xstrdup(path);
+}
+
char *mkpath(const char *fmt, ...)
{
va_list args;
# the method was called upon an instance and (undef, @args) if
# it was called directly.
sub _maybe_self {
- # This breaks inheritance. Oh well.
- ref $_[0] eq 'Git' ? @_ : (undef, @_);
+ UNIVERSAL::isa($_[0], 'Git') ? @_ : (undef, @_);
}
# Check if the command id is something reasonable.
}
}
+char *reencode_commit_message(const struct commit *commit, const char **encoding_p)
+{
+ const char *encoding;
+
+ encoding = (git_log_output_encoding
+ ? git_log_output_encoding
+ : git_commit_encoding);
+ if (!encoding)
+ encoding = "utf-8";
+ if (encoding_p)
+ *encoding_p = encoding;
+ return logmsg_reencode(commit, encoding);
+}
+
void pretty_print_commit(enum cmit_fmt fmt, const struct commit *commit,
struct strbuf *sb, int abbrev,
const char *subject, const char *after_subject,
return;
}
- encoding = (git_log_output_encoding
- ? git_log_output_encoding
- : git_commit_encoding);
- if (!encoding)
- encoding = "utf-8";
- reencoded = logmsg_reencode(commit, encoding);
+ reencoded = reencode_commit_message(commit, &encoding);
if (reencoded) {
msg = reencoded;
}
die("index file corrupt");
}
+int is_index_unborn(struct index_state *istate)
+{
+ return (!istate->cache_nr && !istate->alloc && !istate->timestamp);
+}
+
int discard_index(struct index_state *istate)
{
istate->cache_nr = 0;
*flag = 0;
for (;;) {
- const char *path = git_path("%s", ref);
+ char path[PATH_MAX];
struct stat st;
char *buf;
int fd;
if (--depth < 0)
return NULL;
+ git_snpath(path, sizeof(path), "%s", ref);
/* Special case: non-existing file. */
if (lstat(path, &st) < 0) {
struct ref_list *list = get_packed_refs();
char *ref_file;
const char *orig_ref = ref;
struct ref_lock *lock;
- struct stat st;
int last_errno = 0;
int type, lflags;
int mustexist = (old_sha1 && !is_null_sha1(old_sha1));
+ int missing = 0;
lock = xcalloc(1, sizeof(struct ref_lock));
lock->lock_fd = -1;
orig_ref, strerror(errno));
goto error_return;
}
+ missing = is_null_sha1(lock->old_sha1);
/* When the ref did not exist and we are creating it,
* make sure there is no existing ref that is packed
* whose name begins with our refname, nor a ref whose
* name is a proper prefix of our refname.
*/
- if (is_null_sha1(lock->old_sha1) &&
+ if (missing &&
!is_refname_available(ref, NULL, get_packed_refs(), 0))
goto error_return;
lock->ref_name = xstrdup(ref);
lock->orig_ref_name = xstrdup(orig_ref);
ref_file = git_path("%s", ref);
- if (lstat(ref_file, &st) && errno == ENOENT)
+ if (missing)
lock->force_write = 1;
if ((flags & REF_NODEREF) && (type & REF_ISSYMREF))
lock->force_write = 1;
return commit_lock_file(&packlock);
}
-int delete_ref(const char *refname, const unsigned char *sha1)
+int delete_ref(const char *refname, const unsigned char *sha1, int delopt)
{
struct ref_lock *lock;
- int err, i, ret = 0, flag = 0;
+ int err, i = 0, ret = 0, flag = 0;
lock = lock_ref_sha1_basic(refname, sha1, 0, &flag);
if (!lock)
return 1;
- if (!(flag & REF_ISPACKED)) {
+ if (!(flag & REF_ISPACKED) || flag & REF_ISSYMREF) {
/* loose */
- i = strlen(lock->lk->filename) - 5; /* .lock */
- lock->lk->filename[i] = 0;
- err = unlink(lock->lk->filename);
+ const char *path;
+
+ if (!(delopt & REF_NODEREF)) {
+ i = strlen(lock->lk->filename) - 5; /* .lock */
+ lock->lk->filename[i] = 0;
+ path = lock->lk->filename;
+ } else {
+ path = git_path("%s", refname);
+ }
+ err = unlink(path);
if (err && errno != ENOENT) {
ret = 1;
error("unlink(%s) failed: %s",
- lock->lk->filename, strerror(errno));
+ path, strerror(errno));
}
- lock->lk->filename[i] = '.';
+ if (!(delopt & REF_NODEREF))
+ lock->lk->filename[i] = '.';
}
/* removing the loose one could have resurrected an earlier
* packed one. Also, if it was not loose we need to repack
struct ref_lock *lock;
struct stat loginfo;
int log = !lstat(git_path("logs/%s", oldref), &loginfo);
+ const char *symref = NULL;
- if (S_ISLNK(loginfo.st_mode))
+ if (log && S_ISLNK(loginfo.st_mode))
return error("reflog for %s is a symlink", oldref);
- if (!resolve_ref(oldref, orig_sha1, 1, &flag))
+ symref = resolve_ref(oldref, orig_sha1, 1, &flag);
+ if (flag & REF_ISSYMREF)
+ return error("refname %s is a symbolic ref, renaming it is not supported",
+ oldref);
+ if (!symref)
return error("refname %s not found", oldref);
if (!is_refname_available(newref, oldref, get_packed_refs(), 0))
return error("unable to move logfile logs/%s to tmp-renamed-log: %s",
oldref, strerror(errno));
- if (delete_ref(oldref, orig_sha1)) {
+ if (delete_ref(oldref, orig_sha1, REF_NODEREF)) {
error("unable to delete old %s", oldref);
goto rollback;
}
- if (resolve_ref(newref, sha1, 1, &flag) && delete_ref(newref, sha1)) {
+ if (resolve_ref(newref, sha1, 1, &flag) && delete_ref(newref, sha1, REF_NODEREF)) {
if (errno==EISDIR) {
if (remove_empty_directories(git_path("%s", newref))) {
error("Directory not empty: %s", newref);
error("unable to lock %s for update", newref);
goto rollback;
}
-
lock->force_write = 1;
hashcpy(lock->old_sha1, orig_sha1);
if (write_ref_sha1(lock, orig_sha1, logmsg)) {
int logfd, written, oflags = O_APPEND | O_WRONLY;
unsigned maxlen, len;
int msglen;
- char *log_file, *logrec;
+ char log_file[PATH_MAX];
+ char *logrec;
const char *committer;
if (log_all_ref_updates < 0)
log_all_ref_updates = !is_bare_repository();
- log_file = git_path("logs/%s", ref_name);
+ git_snpath(log_file, sizeof(log_file), "logs/%s", ref_name);
if (log_all_ref_updates &&
(!prefixcmp(ref_name, "refs/heads/") ||
const char *lockpath;
char ref[1000];
int fd, len, written;
- char *git_HEAD = xstrdup(git_path("%s", ref_target));
+ char *git_HEAD = git_pathdup("%s", ref_target);
unsigned char old_sha1[20], new_sha1[20];
if (logmsg && read_ref(ref_target, old_sha1))
if (!f)
return;
+ remote->origin = REMOTE_REMOTES;
while (fgets(buffer, BUF_SIZE, f)) {
int value_list;
char *s, *p;
s++;
if (!*s)
return;
+ remote->origin = REMOTE_BRANCHES;
p = s + strlen(s);
while (isspace(p[-1]))
*--p = 0;
}
add_url_alias(remote, p);
add_fetch_refspec(remote, strbuf_detach(&branch, 0));
+ /*
+ * Cogito compatible push: push current HEAD to remote #branch
+ * (master if missing)
+ */
+ strbuf_init(&branch, 0);
+ strbuf_addstr(&branch, "HEAD");
+ if (frag)
+ strbuf_addf(&branch, ":refs/heads/%s", frag);
+ else
+ strbuf_addstr(&branch, ":refs/heads/master");
+ add_push_refspec(remote, strbuf_detach(&branch, 0));
remote->fetch_tags = 1; /* always auto-follow */
}
if (!subkey)
return error("Config with no key for remote %s", name);
remote = make_remote(name, subkey - name);
+ remote->origin = REMOTE_CONFIG;
if (!strcmp(subkey, ".mirror"))
remote->mirror = git_config_bool(key, value);
else if (!strcmp(subkey, ".skipdefaultupdate"))
#ifndef REMOTE_H
#define REMOTE_H
+enum {
+ REMOTE_CONFIG,
+ REMOTE_REMOTES,
+ REMOTE_BRANCHES
+};
+
struct remote {
const char *name;
+ int origin;
const char **url;
int url_nr;
if (!is_rerere_enabled())
return -1;
- merge_rr_path = xstrdup(git_path("MERGE_RR"));
+ merge_rr_path = git_pathdup("MERGE_RR");
fd = hold_lock_file_for_update(&write_lock, merge_rr_path,
LOCK_DIE_ON_ERROR);
read_rr(merge_rr);
#include "reflog-walk.h"
#include "patch-ids.h"
#include "decorate.h"
+#include "log-tree.h"
volatile show_early_output_fn_t show_early_output;
mark_parents_uninteresting(commit);
revs->limited = 1;
}
+ if (revs->show_source && !commit->util)
+ commit->util = (void *) name;
return commit;
}
DIFF_OPT_SET(options, HAS_CHANGES);
}
-static int rev_compare_tree(struct rev_info *revs, struct tree *t1, struct tree *t2)
+static int rev_compare_tree(struct rev_info *revs, struct commit *parent, struct commit *commit)
{
+ struct tree *t1 = parent->tree;
+ struct tree *t2 = commit->tree;
+
if (!t1)
return REV_TREE_NEW;
+
+ if (revs->simplify_by_decoration) {
+ /*
+ * If we are simplifying by decoration, then the commit
+ * is worth showing if it has a tag pointing at it.
+ */
+ if (lookup_decoration(&name_decoration, &commit->object))
+ return REV_TREE_DIFFERENT;
+ /*
+		 * A commit that is not pointed at by a tag is uninteresting
+ * if we are not limited by path. This means that you will
+ * see the usual "commits that touch the paths" plus any
+ * tagged commit by specifying both --simplify-by-decoration
+ * and pathspec.
+ */
+ if (!revs->prune_data)
+ return REV_TREE_SAME;
+ }
if (!t2)
return REV_TREE_DIFFERENT;
tree_difference = REV_TREE_SAME;
return tree_difference;
}
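In other words, --simplify-by-decoration keeps only the decorated commits
(plus, when a pathspec is given, the commits that touch those paths) as a
skeleton of the history. A typical invocation, assuming the option parsing
added further below, might look like:

    git log --pretty=oneline --simplify-by-decoration -- Documentation/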
-static int rev_same_tree_as_empty(struct rev_info *revs, struct tree *t1)
+static int rev_same_tree_as_empty(struct rev_info *revs, struct commit *commit)
{
int retval;
void *tree;
unsigned long size;
struct tree_desc empty, real;
+ struct tree *t1 = commit->tree;
if (!t1)
return 0;
return;
if (!commit->parents) {
- if (rev_same_tree_as_empty(revs, commit->tree))
+ if (rev_same_tree_as_empty(revs, commit))
commit->object.flags |= TREESAME;
return;
}
die("cannot simplify commit %s (because of %s)",
sha1_to_hex(commit->object.sha1),
sha1_to_hex(p->object.sha1));
- switch (rev_compare_tree(revs, p->tree, commit->tree)) {
+ switch (rev_compare_tree(revs, p, commit)) {
case REV_TREE_SAME:
tree_same = 1;
if (!revs->simplify_history || (p->object.flags & UNINTERESTING)) {
case REV_TREE_NEW:
if (revs->remove_empty_trees &&
- rev_same_tree_as_empty(revs, p->tree)) {
+ rev_same_tree_as_empty(revs, p)) {
/* We are adding all the specified
* paths from this parent, so the
* history beyond this parent is not
if (parse_commit(p) < 0)
return -1;
+ if (revs->show_source && !p->util)
+ p->util = commit->util;
p->object.flags |= left_flag;
if (!(p->object.flags & SEEN)) {
p->object.flags |= SEEN;
revs->rewrite_parents = 1;
revs->simplify_history = 0;
revs->limited = 1;
+ } else if (!strcmp(arg, "--simplify-by-decoration")) {
+ revs->simplify_merges = 1;
+ revs->rewrite_parents = 1;
+ revs->simplify_history = 0;
+ revs->simplify_by_decoration = 1;
+ revs->limited = 1;
+ revs->prune = 1;
+ load_ref_decorations();
} else if (!strcmp(arg, "--date-order")) {
revs->lifo = 0;
revs->topo_order = 1;
lifo:1,
topo_order:1,
simplify_merges:1,
+ simplify_by_decoration:1,
tag_objects:1,
tree_objects:1,
blob_objects:1,
left_right:1,
rewrite_parents:1,
print_parents:1,
+ show_source:1,
+ show_decorations:1,
reverse:1,
reverse_output_stage:1,
cherry_pick:1,
static int update_info_refs(int force)
{
- char *path0 = xstrdup(git_path("info/refs"));
+ char *path0 = git_pathdup("info/refs");
int len = strlen(path0);
char *path1 = xmalloc(len + 2);
read_info_alternates(get_object_directory(), 0);
}
-static int has_loose_object(const unsigned char *sha1)
+static int has_loose_object_local(const unsigned char *sha1)
{
char *name = sha1_file_name(sha1);
- struct alternate_object_database *alt;
+ return !access(name, F_OK);
+}
- if (!access(name, F_OK))
- return 1;
+int has_loose_object_nonlocal(const unsigned char *sha1)
+{
+ struct alternate_object_database *alt;
prepare_alt_odb();
for (alt = alt_odb_list; alt; alt = alt->next) {
- name = alt->name;
- fill_sha1_path(name, sha1);
+ fill_sha1_path(alt->name, sha1);
if (!access(alt->base, F_OK))
return 1;
}
return 0;
}
+static int has_loose_object(const unsigned char *sha1)
+{
+ return has_loose_object_local(sha1) ||
+ has_loose_object_nonlocal(sha1);
+}
+
static unsigned int pack_used_ctr;
static unsigned int pack_mmap_calls;
static unsigned int peak_pack_open_windows;
return NULL;
}
memcpy(p->pack_name, path, path_len);
+
+ strcpy(p->pack_name + path_len, ".keep");
+ if (!access(p->pack_name, F_OK))
+ p->pack_keep = 1;
+
strcpy(p->pack_name + path_len, ".pack");
if (stat(p->pack_name, &st) || !S_ISREG(st.st_mode)) {
free(p);
*ref = NULL;
for (p = ref_rev_parse_rules; *p; p++) {
+ char fullref[PATH_MAX];
unsigned char sha1_from_ref[20];
unsigned char *this_result;
this_result = refs_found ? sha1_from_ref : sha1;
- r = resolve_ref(mkpath(*p, len, str), this_result, 1, NULL);
+ mksnpath(fullref, sizeof(fullref), *p, len, str);
+ r = resolve_ref(fullref, this_result, 1, NULL);
if (r) {
if (!refs_found++)
*ref = xstrdup(r);
char path[PATH_MAX];
const char *ref, *it;
- strcpy(path, mkpath(*p, len, str));
+ mksnpath(path, sizeof(path), *p, len, str);
ref = resolve_ref(path, hash, 1, NULL);
if (!ref)
continue;
'
rm -f .git/$m
+cp -f .git/HEAD .git/HEAD.orig
+test_expect_success "delete symref without dereference" '
+ git update-ref --no-deref -d HEAD &&
+ ! test -f .git/HEAD
+'
+cp -f .git/HEAD.orig .git/HEAD
+
+test_expect_success "delete symref without dereference when the referred ref is packed" '
+ echo foo >foo.c &&
+ git add foo.c &&
+ git commit -m foo &&
+ git pack-refs --all &&
+ git update-ref --no-deref -d HEAD &&
+ ! test -f .git/HEAD
+'
+cp -f .git/HEAD.orig .git/HEAD
+git update-ref -d $m
+
test_expect_success '(not) create HEAD with old sha1' "
test_must_fail git update-ref HEAD $A $B
"
test_expect_success \
'preparation' '
git config core.symlinks false &&
-l=$(echo -n file | git hash-object -t blob -w --stdin) &&
+l=$(printf file | git hash-object -t blob -w --stdin) &&
echo "120000 $l symlink" | git update-index --index-info'
test_expect_success \
--- /dev/null
+#!/bin/sh
+
+test_description='checkout switching away from an invalid branch'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ echo hello >world &&
+ git add world &&
+ git commit -m initial
+'
+
+test_expect_success 'checkout master from invalid HEAD' '
+ echo 0000000000000000000000000000000000000000 >.git/HEAD &&
+ git checkout master --
+'
+
+test_done
test_expect_success \
'preparation' '
git config core.symlinks false &&
-l=$(echo -n file | git hash-object -t blob -w --stdin) &&
+l=$(printf file | git hash-object -t blob -w --stdin) &&
echo "120000 $l symlink" | git update-index --index-info'
test_expect_success \
'modify the symbolic link' '
-echo -n new-file > symlink &&
+printf new-file > symlink &&
git update-index symlink'
test_expect_success \
"test $(git config branch.s.dummy) = Hello &&
test_must_fail git config branch.s/s/dummy"
+test_expect_success 'renaming a symref is not allowed' \
+'
+ git symbolic-ref refs/heads/master2 refs/heads/master &&
+ test_must_fail git branch -m master2 master3 &&
+ git symbolic-ref refs/heads/master2 &&
+ test -f .git/refs/heads/master &&
+ ! test -f .git/refs/heads/master3
+'
+
test_expect_success \
'git branch -m u v should fail when the reflog for u is a symlink' '
git branch -l u &&
git branch -d n/o/p &&
git branch n'
+test_expect_success \
+ 'see if up-to-date packed refs are preserved' \
+ 'git branch q &&
+ git pack-refs --all --prune &&
+ git update-ref refs/heads/q refs/heads/q &&
+ ! test -f .git/refs/heads/q'
+
test_expect_success 'pack, prune and repack' '
git tag foo &&
git pack-refs --all --prune &&
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2008 Stephen Haberman
+#
+
+test_description='git rebase preserve merges
+
+This test runs git rebase with -p and tries to squash a commit from after a merge
+to before the merge.
+'
+. ./test-lib.sh
+
+# Copy/paste from t3404-rebase-interactive.sh
+echo "#!$SHELL_PATH" >fake-editor.sh
+cat >> fake-editor.sh <<\EOF
+case "$1" in
+*/COMMIT_EDITMSG)
+ test -z "$FAKE_COMMIT_MESSAGE" || echo "$FAKE_COMMIT_MESSAGE" > "$1"
+ test -z "$FAKE_COMMIT_AMEND" || echo "$FAKE_COMMIT_AMEND" >> "$1"
+ exit
+ ;;
+esac
+test -z "$EXPECT_COUNT" ||
+ test "$EXPECT_COUNT" = $(sed -e '/^#/d' -e '/^$/d' < "$1" | wc -l) ||
+ exit
+test -z "$FAKE_LINES" && exit
+grep -v '^#' < "$1" > "$1".tmp
+rm -f "$1"
+cat "$1".tmp
+action=pick
+for line in $FAKE_LINES; do
+ case $line in
+ squash|edit)
+ action="$line";;
+ *)
+ echo sed -n "${line}s/^pick/$action/p"
+ sed -n "${line}p" < "$1".tmp
+ sed -n "${line}s/^pick/$action/p" < "$1".tmp >> "$1"
+ action=pick;;
+ esac
+done
+EOF
+
+test_set_editor "$(pwd)/fake-editor.sh"
+chmod a+x fake-editor.sh
+
+# set up two branches like this:
+#
+# A1 - B1 - D1 - E1 - F1
+# \ /
+# -- C1 --
+
+test_expect_success 'setup' '
+ touch a &&
+ touch b &&
+ git add a &&
+ git commit -m A1 &&
+	git tag A1 &&
+ git add b &&
+ git commit -m B1 &&
+ git tag B1 &&
+ git checkout -b branch &&
+ touch c &&
+ git add c &&
+ git commit -m C1 &&
+ git checkout master &&
+ touch d &&
+ git add d &&
+ git commit -m D1 &&
+ git merge branch &&
+ touch f &&
+ git add f &&
+ git commit -m F1 &&
+ git tag F1
+'
+
+# Should result in:
+#
+# A1 - B1 - D2 - E2
+# \ /
+# -- C1 --
+#
+test_expect_success 'squash F1 into D1' '
+ FAKE_LINES="1 squash 3 2" git rebase -i -p B1 &&
+ test "$(git rev-parse HEAD^2)" = "$(git rev-parse branch)" &&
+ test "$(git rev-parse HEAD~2)" = "$(git rev-parse B1)" &&
+ git tag E2
+'
+
+# Start with:
+#
+# A1 - B1 - D2 - E2
+# \
+# G1 ---- L1 ---- M1
+# \ /
+# H1 -- J1 -- K1
+# \ /
+# -- I1 --
+#
+# And rebase G1..M1 onto E2
+
+test_expect_success 'rebase two levels of merge' '
+ git checkout -b branch2 A1 &&
+ touch g &&
+ git add g &&
+ git commit -m G1 &&
+ git checkout -b branch3 &&
+	touch h &&
+ git add h &&
+ git commit -m H1 &&
+ git checkout -b branch4 &&
+ touch i &&
+ git add i &&
+ git commit -m I1 &&
+ git tag I1 &&
+ git checkout branch3 &&
+ touch j &&
+ git add j &&
+ git commit -m J1 &&
+ git merge I1 --no-commit &&
+ git commit -m K1 &&
+ git tag K1 &&
+ git checkout branch2 &&
+ touch l &&
+ git add l &&
+ git commit -m L1 &&
+ git merge K1 --no-commit &&
+ git commit -m M1 &&
+ GIT_EDITOR=: git rebase -i -p E2 &&
+ test "$(git rev-parse HEAD~3)" = "$(git rev-parse E2)" &&
+ test "$(git rev-parse HEAD~2)" = "$(git rev-parse HEAD^2^2~2)" &&
+ test "$(git rev-parse HEAD^2^1^1)" = "$(git rev-parse HEAD^2^2^1)"
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='diff.*.textconv tests'
+. ./test-lib.sh
+
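+# Keep only the patch body of a diff: drop everything up to and
+# including the "index" header line, and drop the e-mail signature
+# trailer ("-- ") if present.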
+find_diff() {
+ sed '1,/^index /d' | sed '/^-- $/,$d'
+}
+
+cat >expect.binary <<'EOF'
+Binary files a/file and b/file differ
+EOF
+
+cat >expect.text <<'EOF'
+--- a/file
++++ b/file
+@@ -1 +1,2 @@
+ 0
++1
+EOF
+
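+# "textconv" helper used below: dump each byte of the file named in $1
+# as its decimal value (newlines are left alone), so that binary
+# content becomes comparable text.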
+cat >hexdump <<'EOF'
+#!/bin/sh
+perl -e '$/ = undef; $_ = <>; s/./ord($&)/ge; print $_' "$1"
+EOF
+chmod +x hexdump
+
+test_expect_success 'setup binary file with history' '
+ printf "\\0\\n" >file &&
+ git add file &&
+ git commit -m one &&
+ printf "\\1\\n" >>file &&
+ git add file &&
+ git commit -m two
+'
+
+test_expect_success 'file is considered binary by porcelain' '
+ git diff HEAD^ HEAD >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.binary actual
+'
+
+test_expect_success 'file is considered binary by plumbing' '
+ git diff-tree -p HEAD^ HEAD >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.binary actual
+'
+
+test_expect_success 'setup textconv filters' '
+ echo file diff=foo >.gitattributes &&
+ git config diff.foo.textconv "$PWD"/hexdump &&
+ git config diff.fail.textconv false
+'
+
+test_expect_success 'diff produces text' '
+ git diff HEAD^ HEAD >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.text actual
+'
+
+test_expect_success 'diff-tree produces binary' '
+ git diff-tree -p HEAD^ HEAD >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.binary actual
+'
+
+test_expect_success 'log produces text' '
+ git log -1 -p >log &&
+ find_diff <log >actual &&
+ test_cmp expect.text actual
+'
+
+test_expect_success 'format-patch produces binary' '
+ git format-patch --no-binary --stdout HEAD^ >patch &&
+ find_diff <patch >actual &&
+ test_cmp expect.binary actual
+'
+
+test_expect_success 'status -v produces text' '
+ git reset --soft HEAD^ &&
+ git status -v >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.text actual &&
+ git reset --soft HEAD@{1}
+'
+
+cat >expect.stat <<'EOF'
+ file | Bin 2 -> 4 bytes
+ 1 files changed, 0 insertions(+), 0 deletions(-)
+EOF
+test_expect_success 'diffstat does not run textconv' '
+ echo file diff=fail >.gitattributes &&
+ git diff --stat HEAD^ HEAD >actual &&
+ test_cmp expect.stat actual
+'
+# restore working setup
+echo file diff=foo >.gitattributes
+
+cat >expect.typechange <<'EOF'
+--- a/file
++++ /dev/null
+@@ -1,2 +0,0 @@
+-0
+-1
+diff --git a/file b/file
+new file mode 120000
+index ad8b3d2..67be421
+--- /dev/null
++++ b/file
+@@ -0,0 +1 @@
++frotz
+\ No newline at end of file
+EOF
+# make a symlink the hard way that works on symlink-challenged file systems
+test_expect_success 'textconv does not act on symlinks' '
+ printf frotz > file &&
+ git add file &&
+ git ls-files -s | sed -e s/100644/120000/ |
+ git update-index --index-info &&
+ git commit -m typechange &&
+ git show >diff &&
+ find_diff <diff >actual &&
+ test_cmp expect.typechange actual
+'
+
+test_done
)
'
+test_expect_success 'tolerate absurdly small packsizelimit' '
+ git config pack.packSizeLimit 2 &&
+ packname_9=$(git pack-objects test-9 <obj-list) &&
+ test $(wc -l <obj-list) = $(ls test-9-*.pack | wc -l)
+'
+
test_done
'rm -rf .git
git init &&
i=1 &&
- while test $i -le 100
+ while test $i -le 100
do
- i=`printf '%03i' $i`
- echo $i >file_$i &&
- test-genrandom "$i" 8192 >>file_$i &&
- git update-index --add file_$i &&
- i=`expr $i + 1` || return 1
+ iii=`printf '%03i' $i`
+ test-genrandom "bar" 200 > wide_delta_$iii &&
+ test-genrandom "baz $iii" 50 >> wide_delta_$iii &&
+ test-genrandom "foo"$i 100 > deep_delta_$iii &&
+ test-genrandom "foo"`expr $i + 1` 100 >> deep_delta_$iii &&
+ test-genrandom "foo"`expr $i + 2` 100 >> deep_delta_$iii &&
+ echo $iii >file_$iii &&
+ test-genrandom "$iii" 8192 >>file_$iii &&
+ git update-index --add file_$iii deep_delta_$iii wide_delta_$iii &&
+ i=`expr $i + 1` || return 1
done &&
{ echo 101 && test-genrandom 100 8192; } >file_101 &&
git update-index --add file_101 &&
'64-bit offsets: index-pack result should match pack-objects one' \
'cmp "test-3-${pack3}.idx" "3.idx"'
+# returns the object number for given object in given pack index
+index_obj_nr()
+{
+ idx_file=$1
+ object_sha1=$2
+ nr=0
+ git show-index < $idx_file |
+ while read offs sha1 extra
+ do
+ nr=$(($nr + 1))
+ test "$sha1" = "$object_sha1" || continue
+ echo "$(($nr - 1))"
+ break
+ done
+}
+
+# returns the pack offset for given object as found in given pack index
+index_obj_offset()
+{
+ idx_file=$1
+ object_sha1=$2
+ git show-index < $idx_file | grep $object_sha1 |
+ ( read offs extra && echo "$offs" )
+}
+
test_expect_success \
'[index v1] 1) stream pack to repository' \
'git index-pack --index-version=1 --stdin < "test-1-${pack1}.pack" &&
test_expect_success \
'[index v1] 2) create a stealth corruption in a delta base reference' \
- '# this test assumes a delta smaller than 16 bytes at the end of the pack
- git show-index <1.idx | sort -n | sed -ne \$p | (
- read delta_offs delta_sha1 &&
- git cat-file blob "$delta_sha1" > blob_1 &&
- chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
- dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($delta_offs + 1)) \
- if=".git/objects/pack/pack-${pack1}.idx" skip=$((256 * 4 + 4)) \
- bs=1 count=20 conv=notrunc &&
- git cat-file blob "$delta_sha1" > blob_2 )'
+ '# This test assumes file_101 is a delta smaller than 16 bytes.
+	# It should be a delta against file_100, but we corrupt its base
+	# reference so that it points at file_099 instead
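+	# (A v1 .idx is a 256 * 4 byte fanout table followed by 24-byte
+	# entries, each a 4-byte offset plus a 20-byte SHA-1; the skip
+	# below points dd at the SHA-1 of entry $nr_099.)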
+ sha1_101=`git hash-object file_101` &&
+ sha1_099=`git hash-object file_099` &&
+ offs_101=`index_obj_offset 1.idx $sha1_101` &&
+ nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
+ dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
+ if=".git/objects/pack/pack-${pack1}.idx" \
+ skip=$((4 + 256 * 4 + $nr_099 * 24)) \
+ bs=1 count=20 conv=notrunc &&
+ git cat-file blob $sha1_101 > file_101_foo1'
test_expect_success \
'[index v1] 3) corrupted delta happily returned wrong data' \
- '! cmp blob_1 blob_2'
+ 'test -f file_101_foo1 && ! cmp file_101 file_101_foo1'
test_expect_success \
'[index v1] 4) confirm that the pack is actually corrupted' \
test_expect_success \
'[index v2] 2) create a stealth corruption in a delta base reference' \
- '# this test assumes a delta smaller than 16 bytes at the end of the pack
- git show-index <1.idx | sort -n | sed -ne \$p | (
- read delta_offs delta_sha1 delta_crc &&
- git cat-file blob "$delta_sha1" > blob_3 &&
- chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
- dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($delta_offs + 1)) \
- if=".git/objects/pack/pack-${pack1}.idx" skip=$((8 + 256 * 4)) \
- bs=1 count=20 conv=notrunc &&
- git cat-file blob "$delta_sha1" > blob_4 )'
+ '# This test assumes file_101 is a delta smaller than 16 bytes.
+	# It should be a delta against file_100, but we corrupt its base
+	# reference so that it points at file_099 instead
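+	# (A v2 .idx has an 8-byte header and a 256 * 4 byte fanout table,
+	# followed by a table of 20-byte SHA-1 entries; the skip below
+	# points dd at the SHA-1 of entry $nr_099.)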
+ sha1_101=`git hash-object file_101` &&
+ sha1_099=`git hash-object file_099` &&
+ offs_101=`index_obj_offset 1.idx $sha1_101` &&
+ nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
+ dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
+ if=".git/objects/pack/pack-${pack1}.idx" \
+ skip=$((8 + 256 * 4 + $nr_099 * 20)) \
+ bs=1 count=20 conv=notrunc &&
+ git cat-file blob $sha1_101 > file_101_foo2'
test_expect_success \
'[index v2] 3) corrupted delta happily returned wrong data' \
- '! cmp blob_3 blob_4'
+ 'test -f file_101_foo2 && ! cmp file_101 file_101_foo2'
test_expect_success \
'[index v2] 4) confirm that the pack is actually corrupted' \
'rm -f .git/objects/pack/* &&
git index-pack --index-version=2 --stdin < "test-1-${pack1}.pack" &&
git verify-pack ".git/objects/pack/pack-${pack1}.pack" &&
+ obj=`git hash-object file_001` &&
+ nr=`index_obj_nr ".git/objects/pack/pack-${pack1}.idx" $obj` &&
chmod +w ".git/objects/pack/pack-${pack1}.idx" &&
dd if=/dev/zero of=".git/objects/pack/pack-${pack1}.idx" conv=notrunc \
- bs=1 count=4 seek=$((8 + 256 * 4 + `wc -l <obj-list` * 20 + 0)) &&
+ bs=1 count=4 seek=$((8 + 256 * 4 + `wc -l <obj-list` * 20 + $nr * 4)) &&
( while read obj
do git cat-file -p $obj >/dev/null || exit 1
done <obj-list ) &&
HOME=`pwd`/no-such-directory
export HOME ;# this way we force the victim/.git/config to be used.
-test_expect_failure \
+test_expect_success \
'pushing a delete should be denied with denyDeletes' '
cd victim &&
git config receive.denyDeletes true &&
'
+# The first three checks verify that the tracking branches are properly
+# renamed; the last two check that the configuration is updated.
+
+test_expect_success 'rename a remote' '
+
+ git clone one four &&
+ (cd four &&
+ git remote rename origin upstream &&
+ rmdir .git/refs/remotes/origin &&
+ test "$(git symbolic-ref refs/remotes/upstream/HEAD)" = "refs/remotes/upstream/master" &&
+ test "$(git rev-parse upstream/master)" = "$(git rev-parse master)" &&
+ test "$(git config remote.upstream.fetch)" = "+refs/heads/*:refs/remotes/upstream/*" &&
+ test "$(git config branch.master.remote)" = "upstream")
+
+'
+
+cat > remotes_origin << EOF
+URL: $(pwd)/one
+Push: refs/heads/master:refs/heads/upstream
+Pull: refs/heads/master:refs/heads/origin
+EOF
+
+test_expect_success 'migrate a remote from named file in $GIT_DIR/remotes' '
+ git clone one five &&
+ origin_url=$(pwd)/one &&
+ (cd five &&
+ git remote rm origin &&
+ mkdir -p .git/remotes &&
+ cat ../remotes_origin > .git/remotes/origin &&
+ git remote rename origin origin &&
+ ! test -f .git/remotes/origin &&
+ test "$(git config remote.origin.url)" = "$origin_url" &&
+ test "$(git config remote.origin.push)" = "refs/heads/master:refs/heads/upstream" &&
+ test "$(git config remote.origin.fetch)" = "refs/heads/master:refs/heads/origin")
+'
+
+test_expect_success 'migrate a remote from named file in $GIT_DIR/branches' '
+ git clone one six &&
+ origin_url=$(pwd)/one &&
+ (cd six &&
+ git remote rm origin &&
+ echo "$origin_url" > .git/branches/origin &&
+ git remote rename origin origin &&
+ ! test -f .git/branches/origin &&
+ test "$(git config remote.origin.url)" = "$origin_url" &&
+ test "$(git config remote.origin.fetch)" = "refs/heads/master:refs/heads/origin")
+'
+
test_done
)
}
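+# clone testrepo into the directory named by $1, removing any stale copy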
+mk_child() {
+ rm -rf "$1" &&
+ git clone testrepo "$1"
+}
+
check_push_result () {
(
cd testrepo &&
test_expect_success 'push updates local refs' '
- rm -rf parent child &&
- mkdir parent &&
- (cd parent && git init &&
- echo one >foo && git add foo && git commit -m one) &&
- git clone parent child &&
+ mk_test heads/master &&
+ mk_child child &&
(cd child &&
- echo two >foo && git commit -a -m two &&
+ git pull .. master &&
+ git push &&
+ test $(git rev-parse master) = $(git rev-parse remotes/origin/master))
+
+'
+
+test_expect_success 'push updates up-to-date local refs' '
+
+ mk_test heads/master &&
+ mk_child child1 &&
+ mk_child child2 &&
+ (cd child1 && git pull .. master && git push) &&
+ (cd child2 &&
+ git pull ../child1 master &&
git push &&
test $(git rev-parse master) = $(git rev-parse remotes/origin/master))
'
+test_expect_success 'push preserves up-to-date packed refs' '
+
+ mk_test heads/master &&
+ mk_child child &&
+ (cd child &&
+ git push &&
+ ! test -f .git/refs/remotes/origin/master)
+
+'
+
test_expect_success 'push does not update local refs on failure' '
- rm -rf parent child &&
- mkdir parent &&
- (cd parent && git init &&
- echo one >foo && git add foo && git commit -m one &&
- echo exit 1 >.git/hooks/pre-receive &&
- chmod +x .git/hooks/pre-receive) &&
- git clone parent child &&
+ mk_test heads/master &&
+ mk_child child &&
+ mkdir testrepo/.git/hooks &&
+ echo exit 1 >testrepo/.git/hooks/pre-receive &&
+ chmod +x testrepo/.git/hooks/pre-receive &&
(cd child &&
- echo two >foo && git commit -a -m two &&
+	git pull .. master &&
test_must_fail git push &&
test $(git rev-parse master) != \
$(git rev-parse remotes/origin/master))
test_expect_success 'allow deleting an invalid remote ref' '
- pwd &&
+ mk_test heads/master &&
rm -f testrepo/.git/objects/??/* &&
git push testrepo :refs/heads/master &&
(cd testrepo && test_must_fail git rev-parse --verify refs/heads/master)
'
+test_expect_success 'warn on push to HEAD of non-bare repository' '
+	mk_test heads/master &&
+ (cd testrepo &&
+ git checkout master &&
+ git config receive.denyCurrentBranch warn) &&
+ git push testrepo master 2>stderr &&
+ grep "warning.*this may cause confusion" stderr
+'
+
+test_expect_success 'deny push to HEAD of non-bare repository' '
+	mk_test heads/master &&
+ (cd testrepo &&
+ git checkout master &&
+ git config receive.denyCurrentBranch true) &&
+ test_must_fail git push testrepo master
+'
+
+test_expect_success 'allow push to HEAD of bare repository (bare)' '
+	mk_test heads/master &&
+ (cd testrepo &&
+ git checkout master &&
+ git config receive.denyCurrentBranch true &&
+ git config core.bare true) &&
+ git push testrepo master 2>stderr &&
+ ! grep "warning.*this may cause confusion" stderr
+'
+
+test_expect_success 'allow push to HEAD of non-bare repository (config)' '
+	mk_test heads/master &&
+ (cd testrepo &&
+ git checkout master &&
+ git config receive.denyCurrentBranch false
+ ) &&
+ git push testrepo master 2>stderr &&
+ ! grep "warning.*this may cause confusion" stderr
+'
+
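+# A file in .git/branches/<name> holds a URL, optionally followed by
+# "#" and the name of the remote branch to use, as exercised below.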
+test_expect_success 'fetch with branches' '
+ mk_empty &&
+ git branch second $the_first_commit &&
+ git checkout second &&
+ echo ".." > testrepo/.git/branches/branch1 &&
+ (cd testrepo &&
+ git fetch branch1 &&
+ r=$(git show-ref -s --verify refs/heads/branch1) &&
+ test "z$r" = "z$the_commit" &&
+ test 1 = $(git for-each-ref refs/heads | wc -l)
+ ) &&
+ git checkout master
+'
+
+test_expect_success 'fetch with branches containing #' '
+ mk_empty &&
+ echo "..#second" > testrepo/.git/branches/branch2 &&
+ (cd testrepo &&
+ git fetch branch2 &&
+ r=$(git show-ref -s --verify refs/heads/branch2) &&
+ test "z$r" = "z$the_first_commit" &&
+ test 1 = $(git for-each-ref refs/heads | wc -l)
+ ) &&
+ git checkout master
+'
+
+test_expect_success 'push with branches' '
+ mk_empty &&
+ git checkout second &&
+ echo "testrepo" > .git/branches/branch1 &&
+ git push branch1 &&
+ (cd testrepo &&
+ r=$(git show-ref -s --verify refs/heads/master) &&
+ test "z$r" = "z$the_first_commit" &&
+ test 1 = $(git for-each-ref refs/heads | wc -l)
+ )
+'
+
+test_expect_success 'push with branches containing #' '
+ mk_empty &&
+ echo "testrepo#branch3" > .git/branches/branch2 &&
+ git push branch2 &&
+ (cd testrepo &&
+ r=$(git show-ref -s --verify refs/heads/branch3) &&
+ test "z$r" = "z$the_first_commit" &&
+ test 1 = $(git for-each-ref refs/heads | wc -l)
+ ) &&
+ git checkout master
+'
+
test_done
git commit -m initial &&
git branch b-symlink &&
git branch b-file &&
-l=$(echo -n file | git hash-object -t blob -w --stdin) &&
+l=$(printf file | git hash-object -t blob -w --stdin) &&
echo "120000 $l symlink" | git update-index --index-info &&
git commit -m master &&
git checkout b-symlink &&
-l=$(echo -n file-different | git hash-object -t blob -w --stdin) &&
+l=$(printf file-different | git hash-object -t blob -w --stdin) &&
echo "120000 $l symlink" | git update-index --index-info &&
git commit -m b-symlink &&
git checkout b-file &&
git cat-file tag tag-from-subdir-2 | grep "in sub directory"
'
+# mixing modes and options:
+
+test_expect_success 'mixing incompatible modes and options is forbidden' '
+	test_must_fail git tag -a &&
+	test_must_fail git tag -l -v &&
+	test_must_fail git tag -n 100 &&
+	test_must_fail git tag -l -m msg &&
+	test_must_fail git tag -l -F some file &&
+	test_must_fail git tag -v -s
+'
+
test_done
'
+test_expect_success 'verbose respects diff config' '
+
+ git config color.diff always &&
+ git status -v >actual &&
+ grep "\[1mdiff --git" actual &&
+ git config --unset color.diff
+'
+
test_expect_success 'cleanup commit messages (verbatim,-t)' '
echo >>negative &&
--- /dev/null
+#!/bin/sh
+
+test_description='git repack works correctly'
+
+. ./test-lib.sh
+
+test_expect_success 'objects in packs marked .keep are not repacked' '
+ echo content1 > file1 &&
+ echo content2 > file2 &&
+ git add . &&
+ git commit -m initial_commit &&
+ # Create two packs
+ # The first pack will contain all of the objects except one
+ git rev-list --objects --all | grep -v file2 |
+ git pack-objects pack > /dev/null &&
+ # The second pack will contain the excluded object
+ packsha1=$(git rev-list --objects --all | grep file2 |
+ git pack-objects pack) &&
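+	# mark the second pack as kept so that repack leaves it alone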
+ touch -r pack-$packsha1.pack pack-$packsha1.keep &&
+ objsha1=$(git verify-pack -v pack-$packsha1.idx | head -n 1 |
+ sed -e "s/^\([0-9a-f]\{40\}\).*/\1/") &&
+ mv pack-* .git/objects/pack/ &&
+ git repack -A -d -l &&
+ git prune-packed &&
+ for p in .git/objects/pack/*.idx; do
+ idx=$(basename $p)
+ test "pack-$packsha1.idx" = "$idx" && continue
+ if git verify-pack -v $p | egrep "^$objsha1"; then
+ found_duplicate_object=1
+ echo "DUPLICATE OBJECT FOUND"
+ break
+ fi
+ done &&
+ test -z "$found_duplicate_object"
+'
+
+test_expect_success 'loose objects in alternate ODB are not repacked' '
+ mkdir alt_objects &&
+ echo `pwd`/alt_objects > .git/objects/info/alternates &&
+ echo content3 > file3 &&
+ objsha1=$(GIT_OBJECT_DIRECTORY=alt_objects git hash-object -w file3) &&
+ git add file3 &&
+ git commit -m commit_file3 &&
+ git repack -a -d -l &&
+ git prune-packed &&
+ for p in .git/objects/pack/*.idx; do
+ if git verify-pack -v $p | egrep "^$objsha1"; then
+ found_duplicate_object=1
+ echo "DUPLICATE OBJECT FOUND"
+ break
+ fi
+ done &&
+ test -z "$found_duplicate_object"
+'
+
+test_expect_success 'packed obs in alt ODB are repacked even when local repo is packless' '
+	mkdir alt_objects/pack &&
+ mv .git/objects/pack/* alt_objects/pack &&
+ git repack -a &&
+ myidx=$(ls -1 .git/objects/pack/*.idx) &&
+ test -f "$myidx" &&
+ for p in alt_objects/pack/*.idx; do
+ git verify-pack -v $p | sed -n -e "/^[0-9a-f]\{40\}/p"
+ done | while read sha1 rest; do
+ if ! ( git verify-pack -v $myidx | grep "^$sha1" ); then
+ echo "Missing object in local pack: $sha1"
+ return 1
+ fi
+ done
+'
+
+test_done
+
--- /dev/null
+#!/bin/sh
+
+test_description='git blame encoding conversion'
+. ./test-lib.sh
+
+. "$TEST_DIRECTORY"/t8005/utf8.txt
+. "$TEST_DIRECTORY"/t8005/cp1251.txt
+. "$TEST_DIRECTORY"/t8005/sjis.txt
+
+test_expect_success 'setup the repository' '
+ # Create the file
+ echo "UTF-8 LINE" > file &&
+ git add file &&
+ git commit --author "$UTF8_NAME <utf8@localhost>" -m "$UTF8_MSG" &&
+
+ echo "CP1251 LINE" >> file &&
+ git add file &&
+ git config i18n.commitencoding cp1251 &&
+ git commit --author "$CP1251_NAME <cp1251@localhost>" -m "$CP1251_MSG" &&
+
+ echo "SJIS LINE" >> file &&
+ git add file &&
+ git config i18n.commitencoding shift-jis &&
+ git commit --author "$SJIS_NAME <sjis@localhost>" -m "$SJIS_MSG"
+'
+
+cat >expected <<EOF
+author $SJIS_NAME
+summary $SJIS_MSG
+author $SJIS_NAME
+summary $SJIS_MSG
+author $SJIS_NAME
+summary $SJIS_MSG
+EOF
+
+test_expect_success \
+ 'blame respects i18n.commitencoding' '
+ git blame --incremental file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $CP1251_NAME
+summary $CP1251_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+EOF
+
+test_expect_success \
+ 'blame respects i18n.logoutputencoding' '
+ git config i18n.logoutputencoding cp1251 &&
+ git blame --incremental file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $UTF8_NAME
+summary $UTF8_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+EOF
+
+test_expect_success \
+ 'blame respects --encoding=utf-8' '
+ git blame --incremental --encoding=utf-8 file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $SJIS_NAME
+summary $SJIS_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+EOF
+
+test_expect_success \
+ 'blame respects --encoding=none' '
+ git blame --incremental --encoding=none file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+test_done
--- /dev/null
+CP1251_NAME="Èâàí Ïåòðîâè÷ Ñèäîðîâ"
+CP1251_MSG="Òåñòîâîå ñîîáùåíèå"
--- /dev/null
+SJIS_NAME="\84I\84r\84p\84~ \84P\84u\84\84\84\82\84\80\84r\84y\84\89 \84R\84y\84t\84\80\84\82\84\80\84r"
+SJIS_MSG="\84S\84u\84\83\84\84\84\80\84r\84\80\84u \84\83\84\80\84\80\84q\84\8b\84u\84~\84y\84u"
--- /dev/null
+UTF8_NAME="Иван Петрович Сидоров"
+UTF8_MSG="Тестовое сообщение"
test_expect_success 'cvs update (-p)' '
touch really-empty &&
echo Line 1 > no-lf &&
- echo -n Line 2 >> no-lf &&
+ printf "Line 2" >> no-lf &&
git add really-empty no-lf &&
git commit -q -m "Update -p test" &&
git push gitcvs.git >/dev/null &&
discard_index(&o->result);
if (!o->gently) {
if (message)
- return error(message);
+ return error("%s", message);
return -1;
}
return -1;
return 1;
}
-int userdiff_config_basic(const char *k, const char *v)
+int userdiff_config(const char *k, const char *v)
{
struct userdiff_driver *drv;
return parse_funcname(&drv->funcname, k, v, REG_EXTENDED);
if ((drv = parse_driver(k, v, "binary")))
return parse_tristate(&drv->binary, k, v);
-
- return 0;
-}
-
-int userdiff_config_porcelain(const char *k, const char *v)
-{
- struct userdiff_driver *drv;
-
if ((drv = parse_driver(k, v, "command")))
return parse_string(&drv->external, k, v);
if ((drv = parse_driver(k, v, "textconv")))
const char *textconv;
};
-int userdiff_config_basic(const char *k, const char *v);
-int userdiff_config_porcelain(const char *k, const char *v);
+int userdiff_config(const char *k, const char *v);
struct userdiff_driver *userdiff_find_by_name(const char *name);
struct userdiff_driver *userdiff_find_by_path(const char *path);
setup_revisions(0, NULL, &rev, s->reference);
rev.diffopt.output_format |= DIFF_FORMAT_PATCH;
rev.diffopt.detect_rename = 1;
+ DIFF_OPT_SET(&rev.diffopt, ALLOW_TEXTCONV);
rev.diffopt.file = s->fp;
rev.diffopt.close_file = 0;
+ /*
+ * If we're not going to stdout, then we definitely don't
+ * want color, since we are going to the commit message
+ * file (and even the "auto" setting won't work, since it
+ * will have checked isatty on stdout).
+ */
+ if (s->fp != stdout)
+ DIFF_OPT_CLR(&rev.diffopt, COLOR_DIFF);
run_diff_index(&rev, 1);
}
return error("Invalid untracked files mode '%s'", v);
return 0;
}
- return git_color_default_config(k, v, cb);
+ return git_diff_ui_config(k, v, cb);
}
#include "cache.h"
#include "xdiff-interface.h"
-#include "strbuf.h"
+#include "xdiff/xtypes.h"
+#include "xdiff/xdiffi.h"
+#include "xdiff/xemit.h"
+#include "xdiff/xmacros.h"
struct xdiff_emit_state {
xdiff_emit_consume_fn consume;
return ret;
}
+struct xdiff_emit_hunk_state {
+ xdiff_emit_hunk_consume_fn consume;
+ void *consume_callback_data;
+};
+
+static int process_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg)
+{
+ long s1, s2, same, p_next, t_next;
+ xdchange_t *xch, *xche;
+ struct xdiff_emit_hunk_state *state = ecb->priv;
+ xdiff_emit_hunk_consume_fn fn = state->consume;
+ void *consume_callback_data = state->consume_callback_data;
+
+ for (xch = xscr; xch; xch = xche->next) {
+ xche = xdl_get_hunk(xch, xecfg);
+
+ s1 = XDL_MAX(xch->i1 - xecfg->ctxlen, 0);
+ s2 = XDL_MAX(xch->i2 - xecfg->ctxlen, 0);
+ same = s2 + XDL_MAX(xch->i1 - s1, 0);
+ p_next = xche->i1 + xche->chg1;
+ t_next = xche->i2 + xche->chg2;
+
+ fn(consume_callback_data, same, p_next, t_next);
+ }
+ return 0;
+}
+
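+/*
+ * Diff mf1 against mf2 and invoke "fn" once per hunk with the line
+ * offsets computed in process_diff() above.  Hypothetical usage sketch
+ * (the callback name and data are illustrative only):
+ *
+ *	static void note_hunk(void *data, long same, long p_next, long t_next)
+ *	{
+ *		... record the hunk boundaries ...
+ *	}
+ *
+ *	xdi_diff_hunks(&mf1, &mf2, note_hunk, &cbdata, &xpp, &xecfg);
+ */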
+int xdi_diff_hunks(mmfile_t *mf1, mmfile_t *mf2,
+ xdiff_emit_hunk_consume_fn fn, void *consume_callback_data,
+ xpparam_t const *xpp, xdemitconf_t *xecfg)
+{
+ struct xdiff_emit_hunk_state state;
+ xdemitcb_t ecb;
+
+ memset(&state, 0, sizeof(state));
+ memset(&ecb, 0, sizeof(ecb));
+ state.consume = fn;
+ state.consume_callback_data = consume_callback_data;
+ xecfg->emit_func = (void (*)())process_diff;
+ ecb.priv = &state;
+ return xdi_diff(mf1, mf2, xpp, xecfg, &ecb);
+}
+
int read_mmfile(mmfile_t *ptr, const char *filename)
{
struct stat st;
#include "xdiff/xdiff.h"
typedef void (*xdiff_emit_consume_fn)(void *, char *, unsigned long);
+typedef void (*xdiff_emit_hunk_consume_fn)(void *, long, long, long);
int xdi_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp, xdemitconf_t const *xecfg, xdemitcb_t *ecb);
int xdi_diff_outf(mmfile_t *mf1, mmfile_t *mf2,
xdiff_emit_consume_fn fn, void *consume_callback_data,
xpparam_t const *xpp,
xdemitconf_t const *xecfg, xdemitcb_t *xecb);
+int xdi_diff_hunks(mmfile_t *mf1, mmfile_t *mf2,
+ xdiff_emit_hunk_consume_fn fn, void *consume_callback_data,
+ xpparam_t const *xpp, xdemitconf_t *xecfg);
int parse_hunk_header(char *line, int len,
int *ob, int *on,
int *nb, int *nn);
unsigned long flags;
find_func_t find_func;
void *find_func_priv;
+ void (*emit_func)();
} xdemitconf_t;
typedef struct s_bdiffparam {
xdemitconf_t const *xecfg, xdemitcb_t *ecb) {
xdchange_t *xscr;
xdfenv_t xe;
+ emit_func_t ef = xecfg->emit_func ?
+ (emit_func_t)xecfg->emit_func : xdl_emit_diff;
if (xdl_do_diff(mf1, mf2, xpp, &xe) < 0) {
return -1;
}
if (xscr) {
- if (xdl_emit_diff(&xe, xscr, ecb, xecfg) < 0) {
+ if (ef(&xe, xscr, ecb, xecfg) < 0) {
xdl_free_script(xscr);
xdl_free_env(&xe);
static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec);
static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb);
-static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
* Starting at the passed change atom, find the latest change atom to be included
* inside the differential hunk according to the specified configuration.
*/
-static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
+xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
xdchange_t *xch, *xchp;
for (xchp = xscr, xch = xscr->next; xch; xchp = xch, xch = xch->next)
#define XEMIT_H
+typedef int (*emit_func_t)(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg);
+xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg);
#include "xinclude.h"
-
#define XDL_KPDIS_RUN 4
#define XDL_MAX_EQLIMIT 1024
-
+#define XDL_SIMSCAN_WINDOW 100
typedef struct s_xdlclass {
static int xdl_clean_mmatch(char const *dis, long i, long s, long e) {
long r, rdis0, rpdis0, rdis1, rpdis1;
+ /*
+	 * Limits the window that is examined during the similar-lines
+	 * scan.  The loops below stop when dis[i - r] == 1 (a line that
+	 * has no match), but there are corner cases where the loops
+	 * proceed all the way to the extremities, causing huge
+	 * performance penalties on big files.
+ */
+ if (i - s > XDL_SIMSCAN_WINDOW)
+ s = i - XDL_SIMSCAN_WINDOW;
+ if (e - i > XDL_SIMSCAN_WINDOW)
+ e = i + XDL_SIMSCAN_WINDOW;
+
/*
* Scans the lines before 'i' to find a run of lines that either
* have no match (dis[j] == 0) or have multiple matches (dis[j] > 1).