info: git.info gitman.info
-install: man
+install: install-man
+
+install-man: man
$(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
$(INSTALL) -d -m 755 $(DESTDIR)$(man5dir)
$(INSTALL) -d -m 755 $(DESTDIR)$(man7dir)
install-webdoc : html
sh ./install-webdoc.sh $(WEBDOC_DEST)
-quick-install:
+quick-install: quick-install-man
+
+quick-install-man:
sh ./install-doc-quick.sh $(DOC_REF) $(DESTDIR)$(mandir)
quick-install-html:
--- /dev/null
+GIT v1.6.0.4 Release Notes
+==========================
+
+Fixes since v1.6.0.3
+--------------------
+
+* 'git add -p' said "No changes" when only binary files were changed.
+
+* 'git archive' did not work correctly in bare repositories.
+
+* 'git checkout -t -b newbranch' when you are on detached HEAD was broken.
+
+* when we refuse to detect renames because there are too many new or
+ deleted files, 'git diff' did not say how many there are.
+
+* 'git push --mirror' tried and failed to push the stash; there is no
+ point in sending it to begin with.
+
+* 'git push' did not update the remote tracking reference if the corresponding
+ ref on the remote end happened to be already up to date.
+
+* 'git pull $there $branch:$current_branch' did not work when you were on
+ a branch yet to be born.
+
+* when giving up resolving a conflicted merge, 'git reset --hard' failed
+ to remove new paths from the working tree.
+
+* 'git send-email' had a small fd leak while scanning a directory.
+
+* 'git status' incorrectly reported a submodule directory as an untracked
+ directory.
+
+* 'git svn' used the deprecated 'git-foo' form of subcommand invocation.
+
+* 'git update-ref -d' to remove a reference did not honor --no-deref option.
+
+* Plugged small memleaks here and there.
+
+* Also contains many documentation updates.
--- /dev/null
+GIT v1.6.0.5 Release Notes
+==========================
+
+Fixes since v1.6.0.4
+--------------------
+
+* 'git checkout' used to crash when your HEAD was pointing at a deleted
+ branch.
+
+* 'git checkout' from an un-checked-out state did not allow switching out
+ of the current branch.
+
+* 'git pack-objects' did not make its best effort to honor the
+ --max-pack-size option when the very first object already busted the
+ given limit, and ended up placing many objects in a single pack.
+
+* 'make check' cannot be run without sparse; people may have meant
+ 'make test' instead, so suggest that.
+
+* Many unsafe calls to sprintf() style varargs functions have been
+ corrected.
+
* "git bisect" is careful about a user mistake and suggests testing of
merge base first when good is not a strict ancestor of bad.
+* "git blame" re-encodes the commit metainfo to UTF-8 from i18n.commitEncoding
+ by default.
+
+* "git check-attr --stdin" can check attributes for multiple paths.
+
* "git checkout --track origin/hack" used to be a syntax error. It now
DWIMs to create a corresponding local branch "hack", i.e. acts as if you
said "git checkout --track -b hack origin/hack".
* "git cherry-pick" can also utilize rerere for conflict resolution.
+* "git clone" learned to be verbose with -v
+
* "git commit --author=$name" can look up author name from existing
commits.
* "git daemon" learned --max-connections=<count> option.
+* "git daemon" exports REMOTE_ADDR to record client address, so that
+ spawned programs can act differently on it.
+
+* "git describe --tags" favours closer lightweight tags than farther
+ annotated tags now.
+
* "git diff" learned to mimic --suppress-blank-empty from GNU diff via a
configuration option.
* "git diff" hunk header pattern for ObjC has been added.
+* a "textconv" filter that makes binary files textual form for human
+ consumption can be specified as an attribute for paths; "git diff"
+ learnt to make use of it.
+
* "git for-each-ref" learned "refname:short" token that gives an
unambiguously abbreviated refname.
+* Auto-numbering of the subject lines is the default for "git
+ format-patch" now.
+
* "git grep" learned to accept -z similar to GNU grep.
* "git help" learned to use GIT_MAN_VIEWER environment variable before
* "git merge -s $strategy" can use a custom built strategy if you have a
command "git-merge-$strategy" on your $PATH.
+* "git push" can be told to reject deletion of refs with receive.denyDeletes
+ configuration.
+
+* "git rebase" honours pre-rebase hook; use --no-verify to bypass it.
+
+* "git rebase -p" uses interactive rebase machinery now to preserve the merges.
+
* "git reflog expire branch" can be used in place of "git reflog expire
refs/heads/branch".
+* "git remote show $remote" lists remote branches one-per-line now.
+
+* when giving up resolving a conflicted merge, "git reset --hard" failed
+ to remove new paths from the working tree. [cherry-pick to 'maint'?]
+
* "git submodule foreach" subcommand allows you to iterate over checked
out submodules.
* "git svn branch" can create new branches on the other end.
+* "gitweb" can use more saner PATH_INFO based URL.
+
(internal)
* "git hash-object" learned to lie about the path being hashed, so that
* various callers of git-merge-recursive avoid forking it as an external
process.
+* The Git class defined in "Git.pm" can be subclassed a bit more easily.
+
+* We used to link GNU regex library as a compatibility layer for some
+ platforms, but it turns out it is not necessary on most of them.
+
+* Some path handling routines used a fixed number of buffers alternately,
+ but depending on the call depth this arrangement led to hard-to-track
+ bugs. This issue is being addressed.
+
Fixes since v1.6.0
------------------
--
exec >/var/tmp/1
-O=v1.6.0.2-553-g58e0fa5
+O=v1.6.0.3-639-ga1a846a
echo O=$(git describe master)
git shortlog --no-merges $O..master ^maint
5) Back in the compose window: add whatever other text you wish to the
message, complete the addressing and subject fields, and press send.
+
+
+Gmail
+-----
+
+Submitting properly formatted patches via Gmail is simple now that
+IMAP support is available. First, edit your ~/.gitconfig to specify your
+account settings:
+
+[imap]
+ folder = "[Gmail]/Drafts"
+ host = imaps://imap.gmail.com
+ user = user@gmail.com
+ pass = p4ssw0rd
+ port = 993
+ sslverify = false
+
+Next, ensure that your Gmail settings are correct. In "Settings", the
+"Use Unicode (UTF-8) encoding for outgoing messages" option should be checked.
+
+Once your commits are ready to send to the mailing list, run the following
+command to send the patch emails to your Gmail Drafts folder.
+
+ $ git format-patch -M --stdout origin/master | git imap-send
+
+Go to your Gmail account, open the Drafts folder, find the patch email, fill
+in the To: and CC: fields and send away!
</literallayout>
{title#}</example>
endif::docbook-xsl-172[]
+
+ifdef::docbook-xsl-172[]
+ifdef::doctype-manpage[]
+# The following two small workarounds insert a simple paragraph after screen
+[listingblock]
+<example><title>{title}</title>
+<screen>
+|
+</screen><simpara></simpara>
+{title#}</example>
+
+[verseblock]
+<formalpara{id? id="{id}"}><title>{title}</title><para>
+{title%}<literallayout{id? id="{id}"}>
+{title#}<literallayout>
+|
+</literallayout><simpara></simpara>
+{title#}</para></formalpara>
+endif::doctype-manpage[]
+endif::docbook-xsl-172[]
endif::backend-docbook[]
ifdef::doctype-manpage[]
Show the result incrementally in a format designed for
machine consumption.
+--encoding=<encoding>::
+ Specifies the encoding used to output author names
+ and commit summaries. Setting it to `none` makes blame
+ output unconverted data. For more information see the
+ discussion about encoding in the linkgit:git-log[1]
+ manual page.
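+ For example (path name illustrative), `git blame --encoding=none README`
+ shows the author names and commit summaries without any re-encoding.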
+
--contents <file>::
When <rev> is not specified, the command annotates the
changes starting backwards from the working tree copy.
especially on slow filesystems. If not set, the value of
`transfer.unpackLimit` is used instead.
+receive.denyDeletes::
+ If set to true, git-receive-pack will deny a ref update that deletes
+ the ref. Use this to prevent such a ref deletion via a push.
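+ An administrator could enable this on the receiving repository with,
+ for example, `git config receive.denyDeletes true`.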
+
receive.denyNonFastForwards::
If set to true, git-receive-pack will deny a ref update which is
not a fast forward. Use this to prevent such an update via a push,
. path for "dst"; only exists for C or R.
. an LF or a NUL when '-z' option is used, to terminate the record.
+Possible status letters are:
+
+- A: addition of a file
+- C: copy of a file into a new one
+- D: deletion of a file
+- M: modification of the contents or mode of a file
+- R: renaming of a file
+- T: change in the type of the file
+- U: file is unmerged (you must complete the merge before it can
+  be committed)
+- X: "unknown" change type (most probably a bug, please report it)
+
+Status letters C and R are always followed by a score (denoting the
+percentage of similarity between the source and target of the copy or
+rename), and are the only ones to be so.
+
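+For illustration, a rename detected with 86% similarity might appear in the
+raw output like this (abbreviated, made-up object names):
+
+	:100644 100644 1234567... 89abcde... R086	old-name.c	new-name.c
+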
<sha1> is shown as all 0's if a file is new on the filesystem
and it is out of sync with the index.
git bisect log
git bisect run <cmd>...
-This command uses 'git-rev-list --bisect' to help drive the
+This command uses 'git rev-list --bisect' to help drive the
binary search process to find which change introduced a bug, given an
old "good" commit object name and a later "bad" commit object name.
to see the currently remaining suspects in 'gitk'. `visualize` is a bit
too long to type and `view` is provided as a synonym.
-If 'DISPLAY' environment variable is not set, 'git-log' is used
+If 'DISPLAY' environment variable is not set, 'git log' is used
instead. You can even give command line options such as `-p` and
`--stat`.
work around other problem this bisection is not interested in")
applied to the revision being tested.
-To cope with such a situation, after the inner 'git-bisect' finds the
+To cope with such a situation, after the inner 'git bisect' finds the
next revision to test, with the "run" script, you can apply that tweak
before compiling, run the real test, and after the test decides if the
revision (possibly with the needed tweaks) passed the test, rewind the
NAME
----
-git-check-attr - Display gitattributes information.
+git-check-attr - Display gitattributes information
SYNOPSIS
----------------------------------------------------------------
+ENVIRONMENT
+-----------
+'git-daemon' will set REMOTE_ADDR to the IP address of the client
+that connected to it, if the IP address is available. REMOTE_ADDR will
+be available in the environment of hooks called when
+services are performed.
+
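+For example, a hypothetical hook in the served repository could log the
+address of the client that triggered it (a minimal sketch):
+
+	#!/bin/sh
+	# e.g. hooks/post-update: record where the request came from
+	echo "$(date): request from ${REMOTE_ADDR:-unknown}" >>client-addresses.log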
+
+
Author
------
Written by Linus Torvalds <torvalds@osdl.org>, YOSHIFUJI Hideaki
commit relative to the named <commit>. Typically you
would want comparison with the latest commit, so if you
do not give <commit>, it defaults to HEAD.
+ --staged is a synonym of --cached.
'git diff' [--options] <commit> [--] [<path>...]::
since the beginning of the time". If you want to format
everything since project inception to one commit, say "git
format-patch \--root <commit>" to make it clear that it is the
-latter case.
+latter case. If you want to format a single commit, you can do
+this with "git format-patch -1 <commit>".
By default, each output file is numbered sequentially from 1, and uses the
first line of the commit message (massaged for pathname safety) as
branch of the `git.git` repository.
Documentation for older releases are available here:
-* link:v1.6.0.2/git.html[documentation for release 1.6.0.2]
+* link:v1.6.0.4/git.html[documentation for release 1.6.0.4]
* release notes for
+ link:RelNotes-1.6.0.4.txt[1.6.0.4],
+ link:RelNotes-1.6.0.3.txt[1.6.0.3],
link:RelNotes-1.6.0.2.txt[1.6.0.2],
link:RelNotes-1.6.0.1.txt[1.6.0.1],
link:RelNotes-1.6.0.txt[1.6.0].
`ident`
^^^^^^^
-When the attribute `ident` is set to a path, git replaces
-`$Id$` in the blob object with `$Id:`, followed by
+When the attribute `ident` is set for a path, git replaces
+`$Id$` in the blob object with `$Id:`, followed by the
40-character hexadecimal blob object name, followed by a dollar
sign `$` upon checkout. Any byte sequence that begins with
`$Id:` and ends with `$` in the worktree file is replaced
Generating diff text
~~~~~~~~~~~~~~~~~~~~
+`diff`
+^^^^^^
+
The attribute `diff` affects how 'git' generates diffs for particular
files. It can tell git whether to generate a textual patch for the path
or to treat the path as a binary file. It can also affect what line is
Performing a three-way merge
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+`merge`
+^^^^^^^
+
The attribute `merge` affects how three versions of a file are
merged when a file-level merge is necessary during `git merge`,
and other programs such as `git revert` and `git cherry-pick`.
help other people who look at them later. Lack of this header
implies that the commit log message is encoded in UTF-8.
-. 'git-log', 'git-show' and friends looks at the `encoding`
- header of a commit object, and tries to re-code the log
- message into UTF-8 unless otherwise specified. You can
+. 'git-log', 'git-show', 'git-blame' and friends look at the
+ `encoding` header of a commit object, and try to re-code the
+ log message into UTF-8 unless otherwise specified. You can
specify the desired output encoding with
`i18n.logoutputencoding` in `.git/config` file, like this:
+
Limit the commits output to ones with log message that
matches the specified pattern (regular expression).
+--all-match::
+ Limit the commits output to ones that match all given --grep,
+ --author and --committer instead of ones that match at least one.
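+ For example, `git log --all-match --author=Linus --grep=fix` shows only
+ commits by Linus whose log messages also mention "fix".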
+
-i::
--regexp-ignore-case::
History Simplification
~~~~~~~~~~~~~~~~~~~~~~
-When optional paths are given, 'git-rev-list' simplifies commits with
+When optional paths are given, 'git rev-list' simplifies commits with
various strategies, according to the options you have selected.
Suppose you specified `foo` as the <paths>. We shall call commits
the included and excluded commits. Thus, if
-----------------------------------------------------------------------
- $ git-rev-list --bisect foo ^bar ^baz
+ $ git rev-list --bisect foo ^bar ^baz
-----------------------------------------------------------------------
outputs 'midpoint', the output of the two commands
-----------------------------------------------------------------------
- $ git-rev-list foo ^midpoint
- $ git-rev-list midpoint ^bar ^baz
+ $ git rev-list foo ^midpoint
+ $ git rev-list midpoint ^bar ^baz
-----------------------------------------------------------------------
would be of roughly the same length. Finding the change which
http://www.kernel.org/pub/software/scm/git/docs/
- There are also "make quick-install-doc" and "make quick-install-html"
- which install preformatted man pages and html documentation.
+ There are also "make quick-install-doc", "make quick-install-man"
+ and "make quick-install-html" which install preformatted man pages
+ and html documentation.
This does not require asciidoc/xmlto, but it only works from within
a cloned checkout of git.git with these two extra branches, and will
not work for the maintainer for obvious chicken-and-egg reasons.
endif
NO_STRLCPY = YesPlease
NO_MEMMEM = YesPlease
- COMPAT_CFLAGS += -Icompat/regex
- COMPAT_OBJS += compat/regex/regex.o
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
BASIC_LDFLAGS += -L/usr/local/lib
DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease
THREADED_DELTA_SEARCH = YesPlease
- COMPAT_CFLAGS += -Icompat/regex
- COMPAT_OBJS += compat/regex/regex.o
endif
ifeq ($(uname_S),OpenBSD)
NO_STRCASESTR = YesPlease
INTERNAL_QSORT = UnfortunatelyYes
NEEDS_LIBICONV=YesPlease
BASIC_CFLAGS += -D_LARGE_FILES
- COMPAT_CFLAGS += -Icompat/regex
- COMPAT_OBJS += compat/regex/regex.o
endif
ifeq ($(uname_S),GNU)
# GNU/Hurd
./test-sha1.sh
check: common-cmds.h
- for i in *.c; do sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; done
+ if sparse; \
+ then \
+ for i in *.c; \
+ do \
+ sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; \
+ done; \
+ else \
+		echo >&2 "Did you mean 'make test'?"; \
+ exit 1; \
+ fi
remove-dashes:
./fixup-builtins $(BUILT_INS) $(PROGRAMS) $(SCRIPTS)
install-doc:
$(MAKE) -C Documentation install
+install-man:
+ $(MAKE) -C Documentation install-man
+
install-html:
$(MAKE) -C Documentation install-html
quick-install-doc:
$(MAKE) -C Documentation quick-install
+quick-install-man:
+ $(MAKE) -C Documentation quick-install-man
+
quick-install-html:
$(MAKE) -C Documentation quick-install-html
parse_treeish_arg(argv, &args, prefix);
parse_pathspec_arg(argv + 1, &args);
+ git_config(git_default_config, NULL);
+
return ar->write_archive(&args);
}
unsigned int nr = getpid();
for (;;) {
- const char *newpath;
- newpath = mkpath("%s~%u", path, nr);
+ char newpath[PATH_MAX];
+ mksnpath(newpath, sizeof(newpath), "%s~%u", path, nr);
if (!try_create_file(newpath, mode, buf, size)) {
if (!rename(newpath, path))
return;
{
const char *remote = NULL;
- git_config(git_default_config, NULL);
-
remote = extract_remote_arg(&argc, argv);
if (remote)
return run_remote_archiver(remote, argc, argv);
return porigin;
}
-/*
- * Parsing of patch chunks...
- */
-struct chunk {
- /* line number in postimage; up to but not including this
- * line is the same as preimage
- */
- int same;
-
- /* preimage line number after this chunk */
- int p_next;
-
- /* postimage line number after this chunk */
- int t_next;
-};
-
-struct patch {
- struct chunk *chunks;
- int num;
-};
-
-struct blame_diff_state {
- struct patch *ret;
- unsigned hunk_post_context;
- unsigned hunk_in_pre_context : 1;
-};
-
-static void process_u_diff(void *state_, char *line, unsigned long len)
-{
- struct blame_diff_state *state = state_;
- struct chunk *chunk;
- int off1, off2, len1, len2, num;
-
- num = state->ret->num;
- if (len < 4 || line[0] != '@' || line[1] != '@') {
- if (state->hunk_in_pre_context && line[0] == ' ')
- state->ret->chunks[num - 1].same++;
- else {
- state->hunk_in_pre_context = 0;
- if (line[0] == ' ')
- state->hunk_post_context++;
- else
- state->hunk_post_context = 0;
- }
- return;
- }
-
- if (num && state->hunk_post_context) {
- chunk = &state->ret->chunks[num - 1];
- chunk->p_next -= state->hunk_post_context;
- chunk->t_next -= state->hunk_post_context;
- }
- state->ret->num = ++num;
- state->ret->chunks = xrealloc(state->ret->chunks,
- sizeof(struct chunk) * num);
- chunk = &state->ret->chunks[num - 1];
- if (parse_hunk_header(line, len, &off1, &len1, &off2, &len2)) {
- state->ret->num--;
- return;
- }
-
- /* Line numbers in patch output are one based. */
- off1--;
- off2--;
-
- chunk->same = len2 ? off2 : (off2 + 1);
-
- chunk->p_next = off1 + (len1 ? len1 : 1);
- chunk->t_next = chunk->same + len2;
- state->hunk_in_pre_context = 1;
- state->hunk_post_context = 0;
-}
-
-static struct patch *compare_buffer(mmfile_t *file_p, mmfile_t *file_o,
- int context)
-{
- struct blame_diff_state state;
- xpparam_t xpp;
- xdemitconf_t xecfg;
- xdemitcb_t ecb;
-
- xpp.flags = xdl_opts;
- memset(&xecfg, 0, sizeof(xecfg));
- xecfg.ctxlen = context;
- memset(&state, 0, sizeof(state));
- state.ret = xmalloc(sizeof(struct patch));
- state.ret->chunks = NULL;
- state.ret->num = 0;
-
- xdi_diff_outf(file_p, file_o, process_u_diff, &state, &xpp, &xecfg, &ecb);
-
- if (state.ret->num) {
- struct chunk *chunk;
- chunk = &state.ret->chunks[state.ret->num - 1];
- chunk->p_next -= state.hunk_post_context;
- chunk->t_next -= state.hunk_post_context;
- }
- return state.ret;
-}
-
-/*
- * Run diff between two origins and grab the patch output, so that
- * we can pass blame for lines origin is currently suspected for
- * to its parent.
- */
-static struct patch *get_patch(struct origin *parent, struct origin *origin)
-{
- mmfile_t file_p, file_o;
- struct patch *patch;
-
- fill_origin_blob(parent, &file_p);
- fill_origin_blob(origin, &file_o);
- if (!file_p.ptr || !file_o.ptr)
- return NULL;
- patch = compare_buffer(&file_p, &file_o, 0);
- num_get_patch++;
- return patch;
-}
-
-static void free_patch(struct patch *p)
-{
- free(p->chunks);
- free(p);
-}
-
/*
* Link in a new blame entry to the scoreboard. Entries that cover the
* same line range have been removed from the scoreboard previously.
}
}
+struct blame_chunk_cb_data {
+ struct scoreboard *sb;
+ struct origin *target;
+ struct origin *parent;
+ long plno;
+ long tlno;
+};
+
+static void blame_chunk_cb(void *data, long same, long p_next, long t_next)
+{
+ struct blame_chunk_cb_data *d = data;
+ blame_chunk(d->sb, d->tlno, d->plno, same, d->target, d->parent);
+ d->plno = p_next;
+ d->tlno = t_next;
+}
+
/*
* We are looking at the origin 'target' and aiming to pass blame
* for the lines it is suspected to its parent. Run diff to find
struct origin *target,
struct origin *parent)
{
- int i, last_in_target, plno, tlno;
- struct patch *patch;
+ int last_in_target;
+ mmfile_t file_p, file_o;
+ struct blame_chunk_cb_data d = { sb, target, parent, 0, 0 };
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
last_in_target = find_last_in_target(sb, target);
if (last_in_target < 0)
return 1; /* nothing remains for this target */
- patch = get_patch(parent, target);
- plno = tlno = 0;
- for (i = 0; i < patch->num; i++) {
- struct chunk *chunk = &patch->chunks[i];
+ fill_origin_blob(parent, &file_p);
+ fill_origin_blob(target, &file_o);
+ num_get_patch++;
- blame_chunk(sb, tlno, plno, chunk->same, target, parent);
- plno = chunk->p_next;
- tlno = chunk->t_next;
- }
+ memset(&xpp, 0, sizeof(xpp));
+ xpp.flags = xdl_opts;
+ memset(&xecfg, 0, sizeof(xecfg));
+ xecfg.ctxlen = 0;
+ xdi_diff_hunks(&file_p, &file_o, blame_chunk_cb, &d, &xpp, &xecfg);
/* The rest (i.e. anything after tlno) are the same as the parent */
- blame_chunk(sb, tlno, plno, last_in_target, target, parent);
+ blame_chunk(sb, d.tlno, d.plno, last_in_target, target, parent);
- free_patch(patch);
return 0;
}
}
}
+struct handle_split_cb_data {
+ struct scoreboard *sb;
+ struct blame_entry *ent;
+ struct origin *parent;
+ struct blame_entry *split;
+ long plno;
+ long tlno;
+};
+
+static void handle_split_cb(void *data, long same, long p_next, long t_next)
+{
+ struct handle_split_cb_data *d = data;
+ handle_split(d->sb, d->ent, d->tlno, d->plno, same, d->parent, d->split);
+ d->plno = p_next;
+ d->tlno = t_next;
+}
+
/*
* Find the lines from parent that are the same as ent so that
* we can pass blames to it. file_p has the blob contents for
const char *cp;
int cnt;
mmfile_t file_o;
- struct patch *patch;
- int i, plno, tlno;
+ struct handle_split_cb_data d = { sb, ent, parent, split, 0, 0 };
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
/*
* Prepare mmfile that contains only the lines in ent.
}
file_o.size = cp - file_o.ptr;
- patch = compare_buffer(file_p, &file_o, 1);
-
/*
* file_o is a part of final image we are annotating.
* file_p partially may match that image.
*/
+ memset(&xpp, 0, sizeof(xpp));
+ xpp.flags = xdl_opts;
+ memset(&xecfg, 0, sizeof(xecfg));
+ xecfg.ctxlen = 1;
memset(split, 0, sizeof(struct blame_entry [3]));
- plno = tlno = 0;
- for (i = 0; i < patch->num; i++) {
- struct chunk *chunk = &patch->chunks[i];
-
- handle_split(sb, ent, tlno, plno, chunk->same, parent, split);
- plno = chunk->p_next;
- tlno = chunk->t_next;
- }
+ xdi_diff_hunks(file_p, &file_o, handle_split_cb, &d, &xpp, &xecfg);
/* remainder, if any, all match the preimage */
- handle_split(sb, ent, tlno, plno, ent->num_lines, parent, split);
- free_patch(patch);
+ handle_split(sb, ent, d.tlno, d.plno, ent->num_lines, parent, split);
}
/*
int detailed)
{
int len;
- char *tmp, *endp;
+ char *tmp, *endp, *reencoded, *message;
static char author_buf[1024];
static char committer_buf[1024];
static char summary_buf[1024];
die("Cannot read commit %s",
sha1_to_hex(commit->object.sha1));
}
+ reencoded = reencode_commit_message(commit, NULL);
+ message = reencoded ? reencoded : commit->buffer;
ret->author = author_buf;
- get_ac_line(commit->buffer, "\nauthor ",
+ get_ac_line(message, "\nauthor ",
sizeof(author_buf), author_buf, &ret->author_mail,
&ret->author_time, &ret->author_tz);
- if (!detailed)
+ if (!detailed) {
+ free(reencoded);
return;
+ }
ret->committer = committer_buf;
- get_ac_line(commit->buffer, "\ncommitter ",
+ get_ac_line(message, "\ncommitter ",
sizeof(committer_buf), committer_buf, &ret->committer_mail,
&ret->committer_time, &ret->committer_tz);
ret->summary = summary_buf;
- tmp = strstr(commit->buffer, "\n\n");
+ tmp = strstr(message, "\n\n");
if (!tmp) {
error_out:
sprintf(summary_buf, "(%s)", sha1_to_hex(commit->object.sha1));
+ free(reencoded);
return;
}
tmp += 2;
goto error_out;
memcpy(summary_buf, tmp, len);
summary_buf[len] = 0;
+ free(reencoded);
}
/*
continue;
}
- if (delete_ref(name, sha1)) {
+ if (delete_ref(name, sha1, 0)) {
error("Error deleting %sbranch '%s'", remote,
argv[i]);
ret = 1;
else if (stdin_paths && doubledash < argc)
errstr = "Can't specify files with --stdin";
if (errstr) {
- error (errstr);
+ error("%s", errstr);
usage_with_options(check_attr_usage, check_attr_options);
}
#include "cache.h"
#include "quote.h"
#include "cache-tree.h"
+#include "parse-options.h"
#define CHECKOUT_ALL 4
static int line_termination = '\n';
exit(128);
}
-static const char checkout_cache_usage[] =
-"git checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]|all] [--prefix=<string>] [--temp] [--] <file>...";
+static const char * const builtin_checkout_index_usage[] = {
+ "git checkout-index [options] [--] <file>...",
+ NULL
+};
static struct lock_file lock_file;
+static int option_parse_u(const struct option *opt,
+ const char *arg, int unset)
+{
+ int *newfd = opt->value;
+
+ state.refresh_cache = 1;
+ if (*newfd < 0)
+ *newfd = hold_locked_index(&lock_file, 1);
+ return 0;
+}
+
+static int option_parse_z(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (unset)
+ line_termination = '\n';
+ else
+ line_termination = 0;
+ return 0;
+}
+
+static int option_parse_prefix(const struct option *opt,
+ const char *arg, int unset)
+{
+ state.base_dir = arg;
+ state.base_dir_len = strlen(arg);
+ return 0;
+}
+
+static int option_parse_stage(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (!strcmp(arg, "all")) {
+ to_tempfile = 1;
+ checkout_stage = CHECKOUT_ALL;
+ } else {
+ int ch = arg[0];
+ if ('1' <= ch && ch <= '3')
+ checkout_stage = arg[0] - '0';
+ else
+ die("stage should be between 1 and 3 or all");
+ }
+ return 0;
+}
+
int cmd_checkout_index(int argc, const char **argv, const char *prefix)
{
int i;
int all = 0;
int read_from_stdin = 0;
int prefix_length;
+ int force = 0, quiet = 0, not_new = 0;
+ struct option builtin_checkout_index_options[] = {
+ OPT_BOOLEAN('a', "all", &all,
+ "checks out all files in the index"),
+ OPT_BOOLEAN('f', "force", &force,
+ "forces overwrite of existing files"),
+ OPT__QUIET(&quiet),
+		OPT_BOOLEAN('n', "no-create", &not_new,
+ "don't checkout new files"),
+ { OPTION_CALLBACK, 'u', "index", &newfd, NULL,
+ "update stat information in the index file",
+ PARSE_OPT_NOARG, option_parse_u },
+ { OPTION_CALLBACK, 'z', NULL, NULL, NULL,
+ "paths are separated with NUL character",
+ PARSE_OPT_NOARG, option_parse_z },
+ OPT_BOOLEAN(0, "stdin", &read_from_stdin,
+ "read list of paths from the standard input"),
+ OPT_BOOLEAN(0, "temp", &to_tempfile,
+ "write the content to temporary files"),
+ OPT_CALLBACK(0, "prefix", NULL, "string",
+ "when creating files, prepend <string>",
+ option_parse_prefix),
+ OPT_CALLBACK(0, "stage", NULL, NULL,
+ "copy out the files from named stage",
+ option_parse_stage),
+ OPT_END()
+ };
git_config(git_default_config, NULL);
state.base_dir = "";
die("invalid cache");
}
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
-
- if (!strcmp(arg, "--")) {
- i++;
- break;
- }
- if (!strcmp(arg, "-a") || !strcmp(arg, "--all")) {
- all = 1;
- continue;
- }
- if (!strcmp(arg, "-f") || !strcmp(arg, "--force")) {
- state.force = 1;
- continue;
- }
- if (!strcmp(arg, "-q") || !strcmp(arg, "--quiet")) {
- state.quiet = 1;
- continue;
- }
- if (!strcmp(arg, "-n") || !strcmp(arg, "--no-create")) {
- state.not_new = 1;
- continue;
- }
- if (!strcmp(arg, "-u") || !strcmp(arg, "--index")) {
- state.refresh_cache = 1;
- if (newfd < 0)
- newfd = hold_locked_index(&lock_file, 1);
- continue;
- }
- if (!strcmp(arg, "-z")) {
- line_termination = 0;
- continue;
- }
- if (!strcmp(arg, "--stdin")) {
- if (i != argc - 1)
- die("--stdin must be at the end");
- read_from_stdin = 1;
- i++; /* do not consider arg as a file name */
- break;
- }
- if (!strcmp(arg, "--temp")) {
- to_tempfile = 1;
- continue;
- }
- if (!prefixcmp(arg, "--prefix=")) {
- state.base_dir = arg+9;
- state.base_dir_len = strlen(state.base_dir);
- continue;
- }
- if (!prefixcmp(arg, "--stage=")) {
- if (!strcmp(arg + 8, "all")) {
- to_tempfile = 1;
- checkout_stage = CHECKOUT_ALL;
- } else {
- int ch = arg[8];
- if ('1' <= ch && ch <= '3')
- checkout_stage = arg[8] - '0';
- else
- die("stage should be between 1 and 3 or all");
- }
- continue;
- }
- if (arg[0] == '-')
- usage(checkout_cache_usage);
- break;
- }
+ argc = parse_options(argc, argv, builtin_checkout_index_options,
+ builtin_checkout_index_usage, 0);
+ state.force = force;
+ state.quiet = quiet;
+ state.not_new = not_new;
if (state.base_dir_len || to_tempfile) {
/* when --prefix is specified we do not
}
/* Check out named files first */
- for ( ; i < argc; i++) {
+ for (i = 0; i < argc; i++) {
const char *arg = argv[i];
const char *p;
memset(&proc, 0, sizeof(proc));
argv[0] = name;
- argv[1] = xstrdup(sha1_to_hex(old->object.sha1));
+ argv[1] = xstrdup(sha1_to_hex(old ? old->object.sha1 : null_sha1));
argv[2] = xstrdup(sha1_to_hex(new->object.sha1));
argv[3] = changed ? "1" : "0";
argv[4] = NULL;
}
/* 2-way merge to the new branch */
- topts.initial_checkout = (!active_nr &&
- (old->commit == new->commit));
+ topts.initial_checkout = is_cache_unborn();
topts.update = 1;
topts.merge = 1;
topts.gently = opts->merge;
}
old_desc = old->name;
- if (!old_desc)
+ if (!old_desc && old->commit)
old_desc = sha1_to_hex(old->commit->object.sha1);
strbuf_addf(&msg, "checkout: moving from %s to %s",
- old_desc, new->name);
+ old_desc ? old_desc : "(invalid)", new->name);
if (new->path) {
create_symref("HEAD", new->path, msg.buf);
* a new commit, we want to mention the old commit once more
* to remind the user that it might be lost.
*/
- if (!opts->quiet && !old.path && new->commit != old.commit)
+ if (!opts->quiet && !old.path && old.commit && new->commit != old.commit)
describe_detached_head("Previous HEAD position was", old.commit);
if (!old.commit) {
local = config_exclusive_filename;
if (!local) {
const char *home = getenv("HOME");
- local = repo_config = xstrdup(git_path("config"));
+ local = repo_config = git_pathdup("config");
if (git_config_global() && home)
global = xstrdup(mkpath("%s/.gitconfig", home));
if (git_config_system())
int cached = 0;
while (1 < argc) {
const char *arg = argv[1];
- if (!strcmp(arg, "--cached"))
+ if (!strcmp(arg, "--cached") || !strcmp(arg, "--staged"))
cached = 1;
else
usage(builtin_diff_usage);
const char *arg = argv[i];
if (!strcmp(arg, "--"))
break;
- else if (!strcmp(arg, "--cached")) {
+ else if (!strcmp(arg, "--cached") ||
+ !strcmp(arg, "--staged")) {
add_head_to_pending(&rev);
if (!rev.pending.nr)
die("No HEAD commit to compare with (yet)");
for (j = 0; j < i; j++) {
const char *rule = ref_rev_parse_rules[j];
unsigned char short_objectname[20];
+ char refname[PATH_MAX];
/*
* the short name is ambiguous, if it resolves
* (with this previous rule) to a valid ref
* read_ref() returns 0 on success
*/
- if (!read_ref(mkpath(rule, short_name_len, short_name),
- short_objectname))
+ mksnpath(refname, sizeof(refname),
+ rule, short_name_len, short_name);
+ if (!read_ref(refname, short_objectname))
break;
}
#include "remote.h"
static const char ls_remote_usage[] =
-"git ls-remote [--upload-pack=<git-upload-pack>] [<host>:]<directory>";
+"git ls-remote [--heads] [--tags] [-u <exec> | --upload-pack <exec>] <repository> <refs>...";
/*
* Is there one among the list of patterns that match the tail part
type = entry->type;
/* write limit if limited packsize and not first object */
- limit = pack_size_limit && nr_written ?
- pack_size_limit - write_offset : 0;
+ if (!pack_size_limit || !nr_written)
+ limit = 0;
+ else if (pack_size_limit <= write_offset)
+ /*
+ * the earlier object did not fit the limit; avoid
+ * mistaking this with unlimited (i.e. limit = 0).
+ */
+ limit = 1;
+ else
+ limit = pack_size_limit - write_offset;
if (!entry->delta)
usable_delta = 0; /* no delta */
array = xcalloc(window, sizeof(struct unpacked));
for (;;) {
- struct object_entry *entry = *list++;
+ struct object_entry *entry;
struct unpacked *n = array + idx;
int j, max_depth, best_base = -1;
progress_unlock();
break;
}
+ entry = *list++;
(*list_size)--;
if (!entry->preferred_base) {
(*processed)++;
break;
case 2:
opts.fn = twoway_merge;
- opts.initial_checkout = !active_nr;
+ opts.initial_checkout = is_cache_unborn();
break;
case 3:
default:
static const char receive_pack_usage[] = "git-receive-pack <git-dir>";
+static int deny_deletes = 0;
static int deny_non_fast_forwards = 0;
static int receive_fsck_objects;
static int receive_unpack_limit = -1;
static int receive_pack_config(const char *var, const char *value, void *cb)
{
+ if (strcmp(var, "receive.denydeletes") == 0) {
+ deny_deletes = git_config_bool(var, value);
+ return 0;
+ }
+
if (strcmp(var, "receive.denynonfastforwards") == 0) {
deny_non_fast_forwards = git_config_bool(var, value);
return 0;
"but I can't find it!", sha1_to_hex(new_sha1));
return "bad pack";
}
+ if (deny_deletes && is_null_sha1(new_sha1) &&
+ !is_null_sha1(old_sha1) &&
+ !prefixcmp(name, "refs/heads/")) {
+ error("denying ref deletion for %s", name);
+ return "deletion prohibited";
+ }
if (deny_non_fast_forwards && !is_null_sha1(new_sha1) &&
!is_null_sha1(old_sha1) &&
!prefixcmp(name, "refs/heads/")) {
warning ("Allowing deletion of corrupt ref.");
old_sha1 = NULL;
}
- if (delete_ref(name, old_sha1)) {
+ if (delete_ref(name, old_sha1, 0)) {
error("failed to delete %s", name);
return "failed to delete";
}
static int add_refs_from_alternate(struct alternate_object_database *e, void *unused)
{
- char *other = xstrdup(make_absolute_path(e->base));
- size_t len = strlen(other);
+ char *other;
+ size_t len;
struct remote *remote;
struct transport *transport;
const struct ref *extra;
+ e->name[-1] = '\0';
+ other = xstrdup(make_absolute_path(e->base));
+ e->name[-1] = '/';
+ len = strlen(other);
+
while (other[len-1] == '/')
other[--len] = '\0';
if (len < 8 || memcmp(other + len - 8, "/objects", 8))
lock = lock_any_ref_for_update(ref, sha1, 0);
if (!lock)
return error("cannot lock ref '%s'", ref);
- log_file = xstrdup(git_path("logs/%s", ref));
+ log_file = git_pathdup("logs/%s", ref);
if (!file_exists(log_file))
goto finish;
if (!cmd->dry_run) {
- newlog_path = xstrdup(git_path("logs/%s.lock", ref));
+ newlog_path = git_pathdup("logs/%s.lock", ref);
cb.newlog = fopen(newlog_path, "w");
}
/* make sure that symrefs are deleted */
if (flags & REF_ISSYMREF)
- return unlink(git_path(refname));
+ return unlink(git_path("%s", refname));
item = string_list_append(refname, branches->branches);
item->util = xmalloc(20);
const char *refname = item->string;
unsigned char *sha1 = item->util;
- if (delete_ref(refname, sha1))
+ if (delete_ref(refname, sha1, 0))
result |= error("Could not remove branch %s", refname);
}
return result;
const char *refname = states.stale.items[i].util;
if (!dry_run)
- result |= delete_ref(refname, NULL);
+ result |= delete_ref(refname, NULL, 0);
printf(" * [%s] %s\n", dry_run ? "would prune" : "pruned",
abbrev_ref(refname, "refs/remotes/"));
printf("--- a/%s\n+++ b/%s\n", label1, label2);
fflush(stdout);
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 3;
update_ref(msg, "ORIG_HEAD", orig, old_orig, 0, MSG_ON_ERR);
}
else if (old_orig)
- delete_ref("ORIG_HEAD", old_orig);
+ delete_ref("ORIG_HEAD", old_orig, 0);
prepend_reflog_action("updating HEAD", msg, sizeof(msg));
update_ref_status = update_ref(msg, "HEAD", sha1, orig, 0, MSG_ON_ERR);
int i, index_fd, clean;
char *oneline, *reencoded_message = NULL;
const char *message, *encoding;
- const char *defmsg = xstrdup(git_path("MERGE_MSG"));
+ char *defmsg = git_pathdup("MERGE_MSG");
struct merge_options o;
struct tree *result, *next_tree, *base_tree, *head_tree;
static struct lock_file index_lock;
return execv_git_cmd(args);
}
free(reencoded_message);
+ free(defmsg);
return 0;
}
static int one_local_ref(const char *refname, const unsigned char *sha1, int flag, void *cb_data)
{
struct ref *ref;
- int len = strlen(refname) + 1;
+ int len;
+
+ /* we already know it starts with refs/ to get here */
+ if (check_ref_format(refname + 5))
+ return 0;
+
+ len = strlen(refname) + 1;
ref = xcalloc(1, sizeof(*ref) + len);
hashcpy(ref->new_sha1, sha1);
memcpy(ref->name, refname, len);
{
struct refspec rs;
- if (ref->status != REF_STATUS_OK)
+ if (ref->status != REF_STATUS_OK && ref->status != REF_STATUS_UPTODATE)
return;
rs.src = ref->name;
if (args.verbose)
fprintf(stderr, "updating local tracking ref '%s'\n", rs.dst);
if (ref->deletion) {
- delete_ref(rs.dst, NULL);
+ delete_ref(rs.dst, NULL, 0);
} else
update_ref("update by push", rs.dst,
ref->new_sha1, NULL, 0, 0);
*/
new_refs = 0;
for (ref = remote_refs; ref; ref = ref->next) {
- const unsigned char *new_sha1;
-
- if (!ref->peer_ref) {
- if (!args.send_mirror)
- continue;
- new_sha1 = null_sha1;
- }
- else
- new_sha1 = ref->peer_ref->new_sha1;
+ if (ref->peer_ref)
+ hashcpy(ref->new_sha1, ref->peer_ref->new_sha1);
+ else if (!args.send_mirror)
+ continue;
- ref->deletion = is_null_sha1(new_sha1);
+ ref->deletion = is_null_sha1(ref->new_sha1);
if (ref->deletion && !allow_deleting_refs) {
ref->status = REF_STATUS_REJECT_NODELETE;
continue;
}
if (!ref->deletion &&
- !hashcmp(ref->old_sha1, new_sha1)) {
+ !hashcmp(ref->old_sha1, ref->new_sha1)) {
ref->status = REF_STATUS_UPTODATE;
continue;
}
!ref->deletion &&
!is_null_sha1(ref->old_sha1) &&
(!has_sha1_file(ref->old_sha1)
- || !ref_newer(new_sha1, ref->old_sha1));
+ || !ref_newer(ref->new_sha1, ref->old_sha1));
if (ref->nonfastforward && !ref->force && !args.force_update) {
ref->status = REF_STATUS_REJECT_NONFASTFORWARD;
continue;
}
- hashcpy(ref->new_sha1, new_sha1);
if (!ref->deletion)
new_refs++;
static int delete_tag(const char *name, const char *ref,
const unsigned char *sha1)
{
- if (delete_ref(ref, sha1))
+ if (delete_ref(ref, sha1, 0))
return 1;
printf("Deleted tag '%s'\n", name);
return 0;
int fd;
/* write the template message before editing: */
- path = xstrdup(git_path("TAG_EDITMSG"));
+ path = git_pathdup("TAG_EDITMSG");
fd = open(path, O_CREAT | O_TRUNC | O_WRONLY, 0600);
if (fd < 0)
die("could not create file '%s': %s",
const char *object_ref, *tag;
struct ref_lock *lock;
- int annotate = 0, sign = 0, force = 0, lines = 0,
+ int annotate = 0, sign = 0, force = 0, lines = -1,
list = 0, delete = 0, verify = 0;
const char *msgfile = NULL, *keyid = NULL;
struct msg_arg msg = { 0, STRBUF_INIT };
}
if (sign)
annotate = 1;
+ if (argc == 0 && !(delete || verify))
+ list = 1;
+ if ((annotate || msg.given || msgfile || force) &&
+ (list || delete || verify))
+ usage_with_options(git_tag_usage, options);
+
+ if (list + delete + verify > 1)
+ usage_with_options(git_tag_usage, options);
if (list)
- return list_tags(argv[0], lines);
+ return list_tags(argv[0], lines == -1 ? 0 : lines);
+ if (lines != -1)
+ die("-n option is only allowed with -l.");
if (delete)
return for_each_tag_name(argv, delete_tag);
if (verify)
}
}
- if (argc == 0) {
- if (annotate)
- usage_with_options(git_tag_usage, options);
- return list_tags(NULL, lines);
- }
tag = argv[0];
object_ref = argc == 2 ? argv[1] : "HEAD";
{
const char *refname, *oldval, *msg=NULL;
unsigned char sha1[20], oldsha1[20];
- int delete = 0, no_deref = 0;
+ int delete = 0, no_deref = 0, flags = 0;
struct option options[] = {
OPT_STRING( 'm', NULL, &msg, "reason", "reason of the update"),
OPT_BOOLEAN('d', NULL, &delete, "deletes the reference"),
if (oldval && *oldval && get_sha1(oldval, oldsha1))
die("%s: not a valid old SHA1", oldval);
+ if (no_deref)
+ flags = REF_NODEREF;
if (delete)
- return delete_ref(refname, oldval ? oldsha1 : NULL);
+ return delete_ref(refname, oldval ? oldsha1 : NULL, flags);
else
return update_ref(msg, refname, sha1, oldval ? oldsha1 : NULL,
- no_deref ? REF_NODEREF : 0, DIE_ON_ERR);
+ flags, DIE_ON_ERR);
}
continue;
}
if (++ret == 1)
- error(message);
+ error("%s", message);
error("%s %s", sha1_to_hex(e->sha1), e->name);
}
if (revs.pending.nr != p->nr)
for (i = 0; i < req_nr; i++)
if (!(refs.objects[i].item->flags & SHOWN)) {
if (++ret == 1)
- error(message);
+ error("%s", message);
error("%s %s", sha1_to_hex(refs.objects[i].item->sha1),
refs.objects[i].name);
}
#define read_cache() read_index(&the_index)
#define read_cache_from(path) read_index_from(&the_index, (path))
+#define is_cache_unborn() is_index_unborn(&the_index)
#define read_cache_unmerged() read_index_unmerged(&the_index)
#define write_cache(newfd, cache, entries) write_index(&the_index, (newfd))
#define discard_cache() discard_index(&the_index)
/* Initialize and use the cache information */
extern int read_index(struct index_state *);
extern int read_index_from(struct index_state *, const char *path);
+extern int is_index_unborn(struct index_state *);
extern int read_index_unmerged(struct index_state *);
extern int write_index(const struct index_state *, int newfd);
extern int discard_index(struct index_state *);
extern void set_alternate_index_output(const char *);
extern int close_lock_file(struct lock_file *);
extern void rollback_lock_file(struct lock_file *);
-extern int delete_ref(const char *, const unsigned char *sha1);
+extern int delete_ref(const char *, const unsigned char *sha1, int delopt);
/* Environment bits from configuration mechanism */
extern int trust_executable_bit;
#define DATA_CHANGED 0x0020
#define TYPE_CHANGED 0x0040
+extern char *mksnpath(char *buf, size_t n, const char *fmt, ...)
+ __attribute__((format (printf, 3, 4)));
+extern char *git_snpath(char *buf, size_t n, const char *fmt, ...)
+ __attribute__((format (printf, 3, 4)));
+extern char *git_pathdup(const char *fmt, ...)
+ __attribute__((format (printf, 1, 2)));
+
/* Return a statically allocated filename matching the sha1 signature */
extern char *mkpath(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
extern char *git_path(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
parent_file.ptr = grab_blob(parent, &sz);
parent_file.size = sz;
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
memset(&state, 0, sizeof(state));
extern int non_ascii(int);
struct rev_info; /* in revision.h, it circularly uses enum cmit_fmt */
+extern char *reencode_commit_message(const struct commit *commit,
+ const char **encoding_p);
extern void get_commit_format(const char *arg, struct rev_info *);
extern void format_commit_message(const struct commit *commit,
const void *format, struct strbuf *sb,
* functions should be used. The choice is determined by core.ignorecygwinfstricks.
* Reading this option is not always possible immediately as git_dir may be
* not be set yet. So until it is set, use cygwin lstat/stat functions.
- * However, if the trust_executable_bit is set, we must use the Cygwin posix
+ * However, if core.filemode is set, we must use the Cygwin posix
 * stat/lstat as the Windows stat functions do not determine posix filemode.
+ *
+ * Note that git_cygwin_config() does NOT call git_default_config() and this
+ * is deliberate. Many commands read from config to establish initial
+ * values in variables and later tweak them from elsewhere (e.g. command line).
+ * init_stat() is called lazily on demand, typically much later in the program,
+ * and calling git_default_config() from here would break such variables.
*/
static int native_stat = 1;
-extern int trust_executable_bit;
+static int core_filemode;
static int git_cygwin_config(const char *var, const char *value, void *cb)
{
- if (!strcmp(var, "core.ignorecygwinfstricks")) {
+ if (!strcmp(var, "core.ignorecygwinfstricks"))
native_stat = git_config_bool(var, value);
- return 0;
- }
- return git_default_config(var, value, cb);
+ else if (!strcmp(var, "core.filemode"))
+ core_filemode = git_config_bool(var, value);
+ return 0;
}
static int init_stat(void)
{
if (have_git_dir()) {
git_config(git_cygwin_config, NULL);
- if (!trust_executable_bit && native_stat) {
+ if (!core_filemode && native_stat) {
cygwin_stat_fn = cygwin_stat;
cygwin_lstat_fn = cygwin_lstat;
} else {
* would normally create a console window. But
* since we'll be redirecting std streams, we do
* not need the console.
+ * It is necessary to use DETACHED_PROCESS
+ * instead of CREATE_NO_WINDOW to make ssh
+ * recognize that it has no console.
*/
- flags = CREATE_NO_WINDOW;
+ flags = DETACHED_PROCESS;
} else {
/* There is already a console. If we specified
- * CREATE_NO_WINDOW here, too, Windows would
+ * DETACHED_PROCESS here, too, Windows would
* disassociate the child from the console.
+ * The same is true for CREATE_NO_WINDOW.
* Go figure!
*/
flags = 0;
free(user_config);
}
- repo_config = xstrdup(git_path("config"));
+ repo_config = git_pathdup("config");
ret += git_config_from_file(fn, repo_config, data);
free(repo_config);
return ret;
if (config_exclusive_filename)
config_filename = xstrdup(config_exclusive_filename);
else
- config_filename = xstrdup(git_path("config"));
+ config_filename = git_pathdup("config");
/*
* Since "key" actually contains the section name and the real
if (config_exclusive_filename)
config_filename = xstrdup(config_exclusive_filename);
else
- config_filename = xstrdup(git_path("config"));
+ config_filename = git_pathdup("config");
out_fd = hold_lock_file_for_update(lock, config_filename, 0);
if (out_fd < 0) {
ret = error("could not lock config file %s", config_filename);
fi \
])# GIT_PARSE_WITH
-
+dnl
+dnl GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
+dnl -----------------------------------------
+dnl Similar to AC_CHECK_FUNC, but AC_CHECK_FUNC does not work on systems
+dnl that do not generate warnings for missing prototypes (e.g. FreeBSD when
+dnl compiling without -Wall). This macro works around the problem by also
+dnl looking for the function definition in libraries.
+AC_DEFUN([GIT_CHECK_FUNC],[AC_CHECK_FUNC([$1],[
+ AC_SEARCH_LIBS([$1],,
+ [$2],[$3])
+],[$3])])
## Site configuration related to programs (before tests)
## --with-PACKAGE[=ARG] and --without-PACKAGE
#
#
# Define NO_IPV6 if you lack IPv6 support and getaddrinfo().
AC_CHECK_TYPE([struct addrinfo],[
- AC_CHECK_FUNC([getaddrinfo],
+ GIT_CHECK_FUNC([getaddrinfo],
[NO_IPV6=],
[NO_IPV6=YesPlease])
],[NO_IPV6=YesPlease],[
AC_MSG_NOTICE([CHECKS for library functions])
#
# Define NO_STRCASESTR if you don't have strcasestr.
-AC_CHECK_FUNC(strcasestr,
+GIT_CHECK_FUNC(strcasestr,
[NO_STRCASESTR=],
[NO_STRCASESTR=YesPlease])
AC_SUBST(NO_STRCASESTR)
#
# Define NO_MEMMEM if you don't have memmem.
-AC_CHECK_FUNC(memmem,
+GIT_CHECK_FUNC(memmem,
[NO_MEMMEM=],
[NO_MEMMEM=YesPlease])
AC_SUBST(NO_MEMMEM)
#
# Define NO_STRLCPY if you don't have strlcpy.
-AC_CHECK_FUNC(strlcpy,
+GIT_CHECK_FUNC(strlcpy,
[NO_STRLCPY=],
[NO_STRLCPY=YesPlease])
AC_SUBST(NO_STRLCPY)
#
# Define NO_STRTOUMAX if you don't have strtoumax in the C library.
-AC_CHECK_FUNC(strtoumax,
+GIT_CHECK_FUNC(strtoumax,
[NO_STRTOUMAX=],
[NO_STRTOUMAX=YesPlease])
AC_SUBST(NO_STRTOUMAX)
#
# Define NO_SETENV if you don't have setenv in the C library.
-AC_CHECK_FUNC(setenv,
+GIT_CHECK_FUNC(setenv,
[NO_SETENV=],
[NO_SETENV=YesPlease])
AC_SUBST(NO_SETENV)
#
# Define NO_UNSETENV if you don't have unsetenv in the C library.
-AC_CHECK_FUNC(unsetenv,
+GIT_CHECK_FUNC(unsetenv,
[NO_UNSETENV=],
[NO_UNSETENV=YesPlease])
AC_SUBST(NO_UNSETENV)
#
# Define NO_MKDTEMP if you don't have mkdtemp in the C library.
-AC_CHECK_FUNC(mkdtemp,
+GIT_CHECK_FUNC(mkdtemp,
[NO_MKDTEMP=],
[NO_MKDTEMP=YesPlease])
AC_SUBST(NO_MKDTEMP)
if (buffer[len-1] == '\n')
buffer[--len] = 0;
+ if (len > 4 && !prefixcmp(buffer, "ERR "))
+ die("remote error: %s", buffer + 4);
+
if (len < 42 || get_sha1_hex(buffer, old_sha1) || buffer[40] != ' ')
die("protocol error: expected sha/ref, got '%s'", buffer);
name = buffer + 41;
_git_show ()
{
+ __git_has_doubledash && return
+
local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--pretty=*)
stderr=subprocess.PIPE, stdout=subprocess.PIPE);
return proc.wait() == 0;
+_gitConfig = {}
def gitConfig(key):
- return read_pipe("git config %s" % key, ignore_error=True).strip()
+ if not _gitConfig.has_key(key):
+ _gitConfig[key] = read_pipe("git config %s" % key, ignore_error=True).strip()
+ return _gitConfig[key]
def p4BranchesInGit(branchesAreInRemotes = True):
branches = {}
if includeFile:
filesForCommit.append(f)
- if f['action'] != 'delete':
+ if f['action'] not in ('delete', 'purge'):
filesToRead.append(f)
filedata = []
while j < len(filedata):
stat = filedata[j]
j += 1
- text = [];
+ text = ''
while j < len(filedata) and filedata[j]['code'] in ('text', 'unicode', 'binary'):
- text.append(filedata[j]['data'])
+ text += filedata[j]['data']
+ del filedata[j]['data']
j += 1
- text = ''.join(text)
if not stat.has_key('depotFile'):
sys.stderr.write("p4 print fails with: %s\n" % repr(stat))
continue
relPath = self.stripRepoPath(file['path'], branchPrefixes)
- if file["action"] == "delete":
+ if file["action"] in ("delete", "purge"):
self.gitStream.write("D %s\n" % relPath)
else:
data = file['data']
cleanedFiles = {}
for info in files:
- if info["action"] == "delete":
+ if info["action"] in ("delete", "purge"):
continue
cleanedFiles[info["depotFile"]] = info["rev"]
if change > newestRevision:
newestRevision = change
- if info["action"] == "delete":
+ if info["action"] in ("delete", "purge"):
# don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
#fileCnt = fileCnt + 1
continue
# hooks.emailprefix
# All emails have their subjects prefixed with this prefix, or "[SCM]"
# if emailprefix is unset, to aid filtering
+# hooks.showrev
+# The shell command used to format each revision in the email, with
+# "%s" replaced with the commit id. Defaults to "git rev-list -1
+# --pretty %s", displaying the commit id, author, date and log
+# message. To list full patches separated by a blank line, you
+# could set this to "git show -C %s; echo".
#
# Notes
# -----
echo ""
echo $LOGBEGIN
- # This shows all log entries that are not already covered by
- # another ref - i.e. commits that are now accessible from this
- # ref that were previously not accessible
- # (see generate_update_branch_email for the explanation of this
- # command)
- git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
- git rev-list --pretty --stdin $newrev
+ show_new_revisions
echo $LOGEND
}
echo ""
echo $LOGBEGIN
- git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
- git rev-list --pretty --stdin $oldrev..$newrev
+ show_new_revisions
# XXX: Need a way of detecting whether git rev-list actually
# outputted anything, so that we can issue a "no new
echo $LOGEND
}
+
+# --------------- Miscellaneous utilities
+
+#
+# Show new revisions as the user would like to see them in the email.
+#
+show_new_revisions()
+{
+ # This shows all log entries that are not already covered by
+ # another ref - i.e. commits that are now accessible from this
+ # ref that were previously not accessible
+ # (see generate_update_branch_email for the explanation of this
+ # command)
+
+ # Revision range passed to rev-list differs for new vs. updated
+ # branches.
+ if [ "$change_type" = create ]
+ then
+ # Show all revisions exclusive to this (new) branch.
+ revspec=$newrev
+ else
+ # Branch update; show revisions not part of $oldrev.
+ revspec=$oldrev..$newrev
+ fi
+
+ git rev-parse --not --branches | grep -v $(git rev-parse $refname) |
+ if [ -z "$custom_showrev" ]
+ then
+ git rev-list --pretty --stdin $revspec
+ else
+ git rev-list --stdin $revspec |
+ while read onerev
+ do
+ eval $(printf "$custom_showrev" $onerev)
+ done
+ fi
+}
+
+
send_mail()
{
if [ -n "$envelopesender" ]; then
announcerecipients=$(git config hooks.announcelist)
envelopesender=$(git config hooks.envelopesender)
emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
+custom_showrev=$(git config hooks.showrev)
# --- Main loop
# Allow dual mode: run from the command line just like the update hook, or
#endif
}
loginfo("Connection from %s:%d", addrbuf, port);
+ setenv("REMOTE_ADDR", addrbuf, 1);
+ }
+ else {
+ unsetenv("REMOTE_ADDR");
}
alarm(init_timeout ? init_timeout : timeout);
mmfile_t minus, plus;
int i;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
minus.size = diff_words->minus.text.size;
minus.ptr = xmalloc(minus.size);
if (!pe)
pe = diff_funcname_pattern(two);
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
memset(&ecbdata, 0, sizeof(ecbdata));
ecbdata.label_path = lbl;
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
xpp.flags = XDF_NEED_MINIMAL | o->xdl_opts;
xdi_diff_outf(&mf1, &mf2, diffstat_consume, diffstat,
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 1; /* at least one context line */
xpp.flags = XDF_NEED_MINIMAL;
struct diff_filepair *p = q->queue[i];
int len1, len2;
+ memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
if (p->status == 0)
return error("internal diff status error");
if ((num_create > rename_limit && num_src > rename_limit) ||
(num_create * num_src > rename_limit * rename_limit)) {
if (options->warn_on_too_large_rename)
- warning("too many files, skipping inexact rename detection");
+ warning("too many files (created: %d deleted: %d), skipping inexact rename detection", num_create, num_src);
goto cleanup;
}
struct diff_filespec *one;
struct diff_filespec *two;
unsigned short int score;
- char status; /* M C R N D U (see Documentation/diff-format.txt) */
+ char status; /* M C R A D U etc. (see Documentation/diff-format.txt or DIFF_STATUS_* in diff.h) */
unsigned broken_pair : 1;
unsigned renamed_pair : 1;
unsigned is_unmerged : 1;
}
git_graft_file = getenv(GRAFT_ENVIRONMENT);
if (!git_graft_file)
- git_graft_file = xstrdup(git_path("info/grafts"));
+ git_graft_file = git_pathdup("info/grafts");
}
int is_bare_repository(void)
work_tree = git_work_tree_cfg;
/* make_absolute_path also normalizes the path */
if (work_tree && !is_absolute_path(work_tree))
- work_tree = xstrdup(make_absolute_path(git_path(work_tree)));
+ work_tree = xstrdup(make_absolute_path(git_path("%s", work_tree)));
} else if (work_tree)
work_tree = xstrdup(make_absolute_path(work_tree));
git_work_tree_initialized = 1;
die("this should not happen, your snprintf is broken");
}
- error(sb.buf);
+ error("%s", sb.buf);
strbuf_release(&sb);
return 1;
}
}
sub patch_update_cmd {
- my @mods = grep { !($_->{BINARY}) } list_modified('file-only');
+ my @all_mods = list_modified('file-only');
+ my @mods = grep { !($_->{BINARY}) } @all_mods;
my @them;
if (!@mods) {
- print STDERR "No changes.\n";
+ if (@all_mods) {
+ print STDERR "Only binary files changed.\n";
+ } else {
+ print STDERR "No changes.\n";
+ }
return 0;
}
if ($patch_mode) {
good=$(git for-each-ref --format='^%(objectname)' \
"refs/bisect/good-*" | tr '\012' ' ') &&
skip=$(git for-each-ref --format='%(objectname)' \
- "refs/bisect/skip-*" | tr '\012' ' ') &&
+ "refs/bisect/skip-*" | tr '\012' ' ') || exit
# Maybe some merge bases must be tested first
check_good_are_ancestors_of_bad "$bad" "$good" "$skip"
install: all
$(QUIET)$(INSTALL_D0)'$(DESTDIR_SQ)$(gitexecdir_SQ)' $(INSTALL_D1)
$(QUIET)$(INSTALL_X0)git-gui $(INSTALL_X1) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
+ $(QUIET)$(INSTALL_X0)git-gui--askpass $(INSTALL_X1) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
$(QUIET)$(foreach p,$(GITGUI_BUILT_INS), $(INSTALL_L0)'$(DESTDIR_SQ)$(gitexecdir_SQ)/$p' $(INSTALL_L1)'$(DESTDIR_SQ)$(gitexecdir_SQ)/git-gui' $(INSTALL_L2)'$(DESTDIR_SQ)$(gitexecdir_SQ)/$p' $(INSTALL_L3) &&) true
ifdef GITGUI_WINDOWS_WRAPPER
$(QUIET)$(INSTALL_R0)git-gui.tcl $(INSTALL_R1) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
uninstall:
$(QUIET)$(CLEAN_DST) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
$(QUIET)$(REMOVE_F0)'$(DESTDIR_SQ)$(gitexecdir_SQ)'/git-gui $(REMOVE_F1)
+ $(QUIET)$(REMOVE_F0)'$(DESTDIR_SQ)$(gitexecdir_SQ)'/git-gui--askpass $(REMOVE_F1)
$(QUIET)$(foreach p,$(GITGUI_BUILT_INS), $(REMOVE_F0)'$(DESTDIR_SQ)$(gitexecdir_SQ)'/$p $(REMOVE_F1) &&) true
ifdef GITGUI_WINDOWS_WRAPPER
$(QUIET)$(REMOVE_F0)'$(DESTDIR_SQ)$(gitexecdir_SQ)'/git-gui.tcl $(REMOVE_F1)
--- /dev/null
+#!/bin/sh
+# Tcl ignores the next line -*- tcl -*- \
+exec wish "$0" -- "$@"
+
+# This is a trivial implementation of an SSH_ASKPASS handler.
+# Git-gui uses this script if none are already configured.
+
+set answer {}
+set yesno 0
+set rc 255
+
+if {$argc < 1} {
+ set prompt "Enter your OpenSSH passphrase:"
+} else {
+ set prompt [join $argv " "]
+ if {[regexp -nocase {\(yes\/no\)\?\s*$} $prompt]} {
+ set yesno 1
+ }
+}
+
+message .m -text $prompt -justify center -aspect 4000
+pack .m -side top -fill x -padx 20 -pady 20 -expand 1
+
+entry .e -textvariable answer -width 50
+pack .e -side top -fill x -padx 10 -pady 10
+
+if {!$yesno} {
+ .e configure -show "*"
+}
+
+frame .b
+button .b.ok -text OK -command finish
+button .b.cancel -text Cancel -command {destroy .}
+
+pack .b.ok -side left -expand 1
+pack .b.cancel -side right -expand 1
+pack .b -side bottom -fill x -padx 10 -pady 10
+
+bind . <Visibility> {focus -force .e}
+bind . <Key-Return> finish
+bind . <Key-Escape> {destroy .}
+bind . <Destroy> {exit $rc}
+
+proc finish {} {
+ if {$::yesno} {
+ if {$::answer ne "yes" && $::answer ne "no"} {
+ tk_messageBox -icon error -title "Error" -type ok \
+ -message "Only 'yes' or 'no' input allowed."
+ return
+ }
+ }
+
+ set ::rc 0
+ puts $::answer
+ destroy .
+}
+
+wm title . "OpenSSH"
+tk::PlaceWindow .
if {[is_Windows]} {
wm iconbitmap . -default $oguilib/git-gui.ico
+ set ::tk::AlwaysShowSelection 1
+
+ # Spoof an X11 display for SSH
+ if {![info exists env(DISPLAY)]} {
+ set env(DISPLAY) :9999
+ }
}
######################################################################
}
}
+######################################################################
+##
+## execution environment
+
+set have_tk85 [expr {[package vcompare $tk_version "8.5"] >= 0}]
+
+# Suggest our implementation of askpass, if none is set
+if {![info exists env(SSH_ASKPASS)]} {
+ set env(SSH_ASKPASS) [gitexec git-gui--askpass]
+}
+
######################################################################
##
## repository setup
+set picked 0
if {[catch {
set _gitdir $env(GIT_DIR)
set _prefix {}
load_config 1
apply_config
choose_repository::pick
+ set picked 1
}
if {![file isdirectory $_gitdir] && [is_Cygwin]} {
catch {set _gitdir [exec cygpath --windows $_gitdir]}
}
}
+proc do_explore {} {
+ set explorer {}
+ if {[is_Cygwin] || [is_Windows]} {
+ set explorer "explorer.exe"
+ } elseif {[is_MacOSX]} {
+ set explorer "open"
+ } else {
+ # freedesktop.org-conforming system is our best shot
+ set explorer "xdg-open"
+ }
+ eval exec $explorer [file dirname [gitdir]] &
+}
+
set is_quitting 0
set ret_code 1
if {$col == 0 && $y > 1} {
# Conflicts need special handling
if {[string first {U} $state] >= 0} {
- merge_stage_workdir $path $w $lno
+ # $w must always be $ui_workdir, but...
+ if {$w ne $ui_workdir} { set lno {} }
+ merge_stage_workdir $path $lno
return
}
#
menu .mbar.repository
+.mbar.repository add command \
+ -label [mc "Explore Working Copy"] \
+ -command {do_explore}
+.mbar.repository add separator
+
.mbar.repository add command \
-label [mc "Browse Current Branch's Files"] \
-command {browser::new $current_branch}
.mbar.commit add separator
- if {![is_enabled nocommit]} {
+ if {![is_enabled nocommitmsg]} {
.mbar.commit add command -label [mc "Sign Off"] \
-command do_signoff \
-accelerator $M1T-S
if {[is_enabled transport]} {
menu .mbar.remote
+ .mbar.remote add command \
+ -label [mc "Add..."] \
+ -command remote_add::dialog \
+ -accelerator $M1T-A
.mbar.remote add command \
-label [mc "Push..."] \
-command do_push_anywhere \
-accelerator $M1T-P
.mbar.remote add command \
- -label [mc "Delete..."] \
+ -label [mc "Delete Branch..."] \
-command remote_branch_delete::dialog
}
-command do_about
}
-set browser {}
-catch {set browser $repo_config(instaweb.browser)}
+
set doc_path [file dirname [gitexec]]
set doc_path [file join $doc_path Documentation index.html]
set doc_path [exec cygpath --mixed $doc_path]
}
-if {$browser eq {}} {
- if {[is_MacOSX]} {
- set browser open
- } elseif {[is_Cygwin]} {
- set program_files [file dirname [exec cygpath --windir]]
- set program_files [file join $program_files {Program Files}]
- set firefox [file join $program_files {Mozilla Firefox} firefox.exe]
- set ie [file join $program_files {Internet Explorer} IEXPLORE.EXE]
- if {[file exists $firefox]} {
- set browser $firefox
- } elseif {[file exists $ie]} {
- set browser $ie
- }
- unset program_files firefox ie
- }
-}
-
if {[file isfile $doc_path]} {
set doc_url "file:$doc_path"
} else {
set doc_url {http://www.kernel.org/pub/software/scm/git/docs/}
}
-if {$browser ne {}} {
- .mbar.help add command -label [mc "Online Documentation"] \
- -command [list exec $browser $doc_url &]
+proc start_browser {url} {
+ git "web--browse" $url
}
-unset browser doc_path doc_url
+
+.mbar.help add command -label [mc "Online Documentation"] \
+ -command [list start_browser $doc_url]
+
+.mbar.help add command -label [mc "Show SSH Key"] \
+ -command do_ssh_key
+
+unset doc_path doc_url
# -- Standard bindings
#
lappend disable_on_lock \
{.vpane.lower.commarea.buttons.incall conf -state}
-if {![is_enabled nocommit]} {
+if {![is_enabled nocommitmsg]} {
button .vpane.lower.commarea.buttons.signoff -text [mc "Sign Off"] \
-command do_signoff
pack .vpane.lower.commarea.buttons.signoff -side top -fill x
load_all_remotes
set n [.mbar.remote index end]
- populate_push_menu
- populate_fetch_menu
+ populate_remotes_menu
set n [expr {[.mbar.remote index end] - $n}]
if {$n > 0} {
if {[.mbar.remote type 0] eq "tearoff"} { incr n }
if {[is_enabled retcode]} {
bind . <Destroy> {+terminate_me %W}
}
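+# After the user has picked a repository in the chooser, optionally
+# open the working copy in the system file browser (gui.autoexplore).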
+if {$picked && [is_config_true gui.autoexplore]} {
+ do_explore
+}
field w_asim ; # text column: annotations (simple computation)
field w_file ; # text column: actual file data
field w_cviewer ; # pane showing commit message
+field finder ; # find mini-dialog frame
field status ; # status mega-widget instance
field old_height ; # last known height of $w.file_pane
+
# Tk UI colors
#
variable active_color #c0edc5
field tooltip_commit {} ; # Commit(s) in tooltip
constructor new {i_commit i_path i_jump} {
- global cursor_ptr
+ global cursor_ptr M1B M1T have_tk85
variable active_color
variable group_colors
make_toplevel top w
wm title $top [append "[appname] ([reponame]): " [mc "File Viewer"]]
+ set font_w [font measure font_diff "0"]
+
frame $w.header -background gold
label $w.header.commit_l \
-text [mc "Commit:"] \
pack $w_path -fill x -side right
pack $w.header.path_l -side right
- panedwindow $w.file_pane -orient vertical
- frame $w.file_pane.out
- frame $w.file_pane.cm
+ panedwindow $w.file_pane -orient vertical -borderwidth 0 -sashwidth 3
+ frame $w.file_pane.out -relief flat -borderwidth 1
+ frame $w.file_pane.cm -relief sunken -borderwidth 1
$w.file_pane add $w.file_pane.out \
-sticky nsew \
-minsize 100 \
-width 80 \
-xscrollcommand [list $w.file_pane.out.sbx set] \
-font font_diff
+ if {$have_tk85} {
+ $w_file configure -inactiveselectbackground darkblue
+ }
+ $w_file tag conf found \
+ -background yellow
set w_columns [list $w_amov $w_asim $w_line $w_file]
-weight 1
grid rowconfigure $w.file_pane.out 0 -weight 1
+ set finder [::searchbar::new \
+ $w.file_pane.out.ff $w_file \
+ -column [expr {[llength $w_columns] - 1}] \
+ ]
+
set w_cviewer $w.file_pane.cm.t
text $w_cviewer \
-background white \
-label [mc "Copy Commit"] \
-command [cb _copycommit]
$w.ctxm add separator
+ $w.ctxm add command \
+ -label [mc "Find Text..."] \
+ -accelerator F7 \
+ -command [list searchbar::show $finder]
menu $w.ctxm.enc
build_encoding_menu $w.ctxm.enc [cb _setencoding]
$w.ctxm add cascade \
$i tag conf color$g -background [lindex $group_colors $g]
}
+ if {$i eq $w_file} {
+ $w_file tag raise found
+ }
+ $i tag raise sel
+
$i conf -cursor $cursor_ptr
- $i conf -yscrollcommand [list many2scrollbar \
- $w_columns yview $w.file_pane.out.sby]
+ $i conf -yscrollcommand \
+ "[list ::searchbar::scrolled $finder]
+ [list many2scrollbar $w_columns yview $w.file_pane.out.sby]"
bind $i <Button-1> "
[cb _hide_tooltip]
[cb _click $i @%x,%y]
tk_popup $w.ctxm %X %Y
"
bind $i <Shift-Tab> "[list focus $w_cviewer];break"
- bind $i <Tab> "[list focus $w_cviewer];break"
+ bind $i <Tab> "[cb _focus_search $w_cviewer];break"
}
foreach i [concat $w_columns $w_cviewer] {
bind $i <Control-Key-f> {catch {%W yview scroll 1 pages};break}
}
- bind $w_cviewer <Shift-Tab> "[list focus $w_file];break"
+ bind $w_cviewer <Shift-Tab> "[cb _focus_search $w_file];break"
bind $w_cviewer <Tab> "[list focus $w_file];break"
- bind $w_cviewer <Button-1> [list focus $w_cviewer]
- bind $w_file <Visibility> [list focus $w_file]
+ bind $w_cviewer <Button-1> [list focus $w_cviewer]
+ bind $w_file <Visibility> [cb _focus_search $w_file]
+ bind $top <F7> [list searchbar::show $finder]
+ bind $top <Escape> [list searchbar::hide $finder]
+ bind $top <F3> [list searchbar::find_next $finder]
+ bind $top <Shift-F3> [list searchbar::find_prev $finder]
+ catch { bind $top <Shift-Key-XF86_Switch_VT_3> [list searchbar::find_prev $finder] }
grid configure $w.header -sticky ew
grid configure $w.file_pane -sticky nsew
set req_w [winfo reqwidth $top]
set req_h [winfo reqheight $top]
- set scr_h [expr {[winfo screenheight $top] - 100}]
- if {$req_w < 600} {set req_w 600}
+ set scr_w [expr {[winfo screenwidth $top] - 40}]
+ set scr_h [expr {[winfo screenheight $top] - 120}]
+ set opt_w [expr {$font_w * (80 + 5*3 + 3)}]
+ if {$req_w < $opt_w} {set req_w $opt_w}
+ if {$req_w > $scr_w} {set req_w $scr_w}
+ set opt_h [expr {$req_w*4/3}]
if {$req_h < $scr_h} {set req_h $scr_h}
+ if {$req_h > $opt_h} {set req_h $opt_h}
set g "${req_w}x${req_h}"
wm geometry $top $g
update
set old_height [winfo height $w.file_pane]
$w.file_pane sash place 0 \
[lindex [$w.file_pane sash coord 0] 0] \
- [expr {int($old_height * 0.70)}]
+ [expr {int($old_height * 0.80)}]
bind $w.file_pane <Configure> \
"if {{$w.file_pane} eq {%W}} {[cb _resize %h]}"
wm protocol $top WM_DELETE_WINDOW "destroy $top"
- bind $top <Destroy> [cb _kill]
+ bind $top <Destroy> [cb _handle_destroy %W]
_load $this $i_jump
}
+method _focus_search {win} {
+ if {[searchbar::visible $finder]} {
+ focus [searchbar::editor $finder]
+ } else {
+ focus $win
+ }
+}
+
+method _handle_destroy {win} {
+ if {$win eq $w} {
+ _kill $this
+ delete_this
+ }
+}
+
method _kill {} {
if {$current_fd ne {}} {
kill_file_process $current_fd
} ifdeleted { catch {close $fd} }
method _exec_blame {cur_w cur_d options cur_s} {
- lappend options --incremental
+ lappend options --incremental --encoding=utf-8
if {$commit eq {}} {
lappend options --contents $path
} else {
foreach i $w_columns {
$i tag conf g$cmit -background $active_color
$i tag raise g$cmit
+ if {$i eq $w_file} {
+ $w_file tag raise found
+ }
+ $i tag raise sel
}
set author_name {}
$w.mbar.apple add command \
-label [mc "About %s" [appname]] \
-command do_about
+ $w.mbar.apple add command \
+ -label [mc "Show SSH Key"] \
+ -command do_ssh_key
} else {
$w.mbar add cascade -label [mc Help] -menu $w.mbar.help
menu $w.mbar.help
$w.mbar.help add command \
-label [mc "About %s" [appname]] \
-command do_about
+ $w.mbar.help add command \
+ -label [mc "Show SSH Key"] \
+ -command do_ssh_key
}
wm protocol $top WM_DELETE_WINDOW exit
label $w_body.where.l -text [mc "Directory:"]
entry $w_body.where.t \
-textvariable @local_path \
- -font font_diff \
+ -borderwidth 1 \
+ -relief sunken \
-width 50
button $w_body.where.b \
-text [mc "Browse"] \
frame $w_body.args
pack $args -fill both
- label $args.origin_l -text [mc "URL:"]
+ label $args.origin_l -text [mc "Source Location:"]
entry $args.origin_t \
-textvariable @origin_url \
- -font font_diff \
+ -borderwidth 1 \
+ -relief sunken \
-width 50
button $args.origin_b \
-text [mc "Browse"] \
-command [cb _open_origin]
grid $args.origin_l $args.origin_t $args.origin_b -sticky ew
- label $args.where_l -text [mc "Directory:"]
+ label $args.where_l -text [mc "Target Directory:"]
entry $args.where_t \
-textvariable @local_path \
- -font font_diff \
+ -borderwidth 1 \
+ -relief sunken \
-width 50
button $args.where_b \
-text [mc "Browse"] \
label $w_body.where.l -text [mc "Repository:"]
entry $w_body.where.t \
-textvariable @local_path \
- -font font_diff \
+ -borderwidth 1 \
+ -relief sunken \
-width 50
button $w_body.where.b \
-text [mc "Browse"] \
if {$merge_stages(2) eq {}} {
set is_conflict_diff 1
lappend current_diff_queue \
- [list "LOCAL: deleted\nREMOTE:\n" d======= \
+ [list [mc "LOCAL: deleted\nREMOTE:\n"] d======= \
[list ":1:$current_diff_path" ":3:$current_diff_path"]]
} elseif {$merge_stages(3) eq {}} {
set is_conflict_diff 1
lappend current_diff_queue \
- [list "REMOTE: deleted\nLOCAL:\n" d======= \
+ [list [mc "REMOTE: deleted\nLOCAL:\n"] d======= \
[list ":1:$current_diff_path" ":2:$current_diff_path"]]
} elseif {[lindex $merge_stages(1) 0] eq {120000}
|| [lindex $merge_stages(2) 0] eq {120000}
|| [lindex $merge_stages(3) 0] eq {120000}} {
set is_conflict_diff 1
lappend current_diff_queue \
- [list "LOCAL:\n" d======= \
+ [list [mc "LOCAL:\n"] d======= \
[list ":1:$current_diff_path" ":2:$current_diff_path"]]
lappend current_diff_queue \
- [list "REMOTE:\n" d======= \
+ [list [mc "REMOTE:\n"] d======= \
[list ":1:$current_diff_path" ":3:$current_diff_path"]]
} else {
start_show_diff $cont_info
# - Git won't give us the diff, there's nothing to compare to!
#
if {$m eq {_O}} {
- set max_sz [expr {128 * 1024}]
+ set max_sz 100000
set type unknown
if {[catch {
set type [file type $path]
d_@
} else {
if {$sz > $max_sz} {
- $ui_diff insert end \
-"* Untracked file is $sz bytes.
-* Showing only first $max_sz bytes.
-" d_@
+ $ui_diff insert end [mc \
+"* Untracked file is %d bytes.
+* Showing only first %d bytes.
+" $sz $max_sz] d_@
}
$ui_diff insert end $content
if {$sz > $max_sz} {
- $ui_diff insert end "
-* Untracked file clipped here by [appname].
+ $ui_diff insert end [mc "
+* Untracked file clipped here by %s.
* To see the entire file, use an external editor.
-" d_@
+" [appname]] d_@
}
}
$ui_diff conf -state disabled
{+} {
if {[regexp {^\+([<>]{7} |={7})} $line _g op]} {
set is_conflict_diff 1
- set line [string replace $line 0 0 { }]
set tags d$op
} else {
set tags d_+
set after {}
foreach path $paths {
switch -glob -- [lindex $file_states($path) 0] {
+ _U -
+ U? {
+ if {$path eq $current_diff_path} {
+ unlock_index
+ merge_stage_workdir $path
+ return
+ }
+ }
_O -
?M -
?D -
- ?T -
- U? {
+ ?T {
lappend pathList $path
if {$path eq $current_diff_path} {
set after {reshow_diff;}
_O {
continue; # and pray it works!
}
+ _U -
U? {
error_popup [mc "You are in the middle of a conflicted merge.
}
}
-proc merge_stage_workdir {path w lno} {
+proc merge_stage_workdir {path {lno {}}} {
global current_diff_path diff_active
+ global current_diff_side ui_workdir
if {$diff_active} return
- if {$path ne $current_diff_path} {
- show_diff $path $w $lno {} [list do_merge_stage_workdir $path]
+ if {$path ne $current_diff_path || $ui_workdir ne $current_diff_side} {
+ show_diff $path $ui_workdir $lno {} [list do_merge_stage_workdir $path]
} else {
do_merge_stage_workdir $path
}
}
}
- # Check the modification time of the target file
- if {!$failed && [file mtime $mtool_target] eq $mtool_mtime} {
- if {[ask_popup [mc "File %s unchanged, still accept as resolved?" \
- [short_path $mtool_target]]] ne {yes}} {
- set failed 1
- }
- }
-
# Finish
if {$failed} {
file rename -force -- $backup $mtool_target
delete_temp_files $mtool_tmpfiles
- merge_add_resolution $mtool_target
+ reshow_diff
}
}
set all_remotes [lsort -unique $all_remotes]
}
-proc populate_fetch_menu {} {
- global all_remotes repo_config
-
+proc add_fetch_entry {r} {
+ global repo_config
set remote_m .mbar.remote
set fetch_m $remote_m.fetch
set prune_m $remote_m.prune
-
- foreach r $all_remotes {
- set enable 0
- if {![catch {set a $repo_config(remote.$r.url)}]} {
- if {![catch {set a $repo_config(remote.$r.fetch)}]} {
- set enable 1
- }
- } else {
- catch {
- set fd [open [gitdir remotes $r] r]
- while {[gets $fd n] >= 0} {
- if {[regexp {^Pull:[ \t]*([^:]+):} $n]} {
- set enable 1
- break
- }
+ set remove_m $remote_m.remove
+ set enable 0
+ if {![catch {set a $repo_config(remote.$r.url)}]} {
+ if {![catch {set a $repo_config(remote.$r.fetch)}]} {
+ set enable 1
+ }
+ } else {
+ catch {
+ set fd [open [gitdir remotes $r] r]
+ while {[gets $fd n] >= 0} {
+ if {[regexp {^Pull:[ \t]*([^:]+):} $n]} {
+ set enable 1
+ break
}
- close $fd
}
+ close $fd
}
+ }
- if {$enable} {
- if {![winfo exists $fetch_m]} {
- menu $prune_m
- $remote_m insert 0 cascade \
- -label [mc "Prune from"] \
- -menu $prune_m
-
- menu $fetch_m
- $remote_m insert 0 cascade \
- -label [mc "Fetch from"] \
- -menu $fetch_m
- }
+ if {$enable} {
+ if {![winfo exists $fetch_m]} {
+ menu $remove_m
+ $remote_m insert 0 cascade \
+ -label [mc "Remove Remote"] \
+ -menu $remove_m
+
+ menu $prune_m
+ $remote_m insert 0 cascade \
+ -label [mc "Prune from"] \
+ -menu $prune_m
- $fetch_m add command \
- -label $r \
- -command [list fetch_from $r]
- $prune_m add command \
- -label $r \
- -command [list prune_from $r]
+ menu $fetch_m
+ $remote_m insert 0 cascade \
+ -label [mc "Fetch from"] \
+ -menu $fetch_m
}
+
+ $fetch_m add command \
+ -label $r \
+ -command [list fetch_from $r]
+ $prune_m add command \
+ -label $r \
+ -command [list prune_from $r]
+ $remove_m add command \
+ -label $r \
+ -command [list remove_remote $r]
}
}
-proc populate_push_menu {} {
- global all_remotes repo_config
-
+proc add_push_entry {r} {
+ global repo_config
set remote_m .mbar.remote
set push_m $remote_m.push
-
- foreach r $all_remotes {
- set enable 0
- if {![catch {set a $repo_config(remote.$r.url)}]} {
- if {![catch {set a $repo_config(remote.$r.push)}]} {
- set enable 1
- }
- } else {
- catch {
- set fd [open [gitdir remotes $r] r]
- while {[gets $fd n] >= 0} {
- if {[regexp {^Push:[ \t]*([^:]+):} $n]} {
- set enable 1
- break
- }
+ set enable 0
+ if {![catch {set a $repo_config(remote.$r.url)}]} {
+ if {![catch {set a $repo_config(remote.$r.push)}]} {
+ set enable 1
+ }
+ } else {
+ catch {
+ set fd [open [gitdir remotes $r] r]
+ while {[gets $fd n] >= 0} {
+ if {[regexp {^Push:[ \t]*([^:]+):} $n]} {
+ set enable 1
+ break
}
- close $fd
}
+ close $fd
}
+ }
- if {$enable} {
- if {![winfo exists $push_m]} {
- menu $push_m
- $remote_m insert 0 cascade \
- -label [mc "Push to"] \
- -menu $push_m
- }
-
- $push_m add command \
- -label $r \
- -command [list push_to $r]
+ if {$enable} {
+ if {![winfo exists $push_m]} {
+ menu $push_m
+ $remote_m insert 0 cascade \
+ -label [mc "Push to"] \
+ -menu $push_m
}
+
+ $push_m add command \
+ -label $r \
+ -command [list push_to $r]
+ }
+}
+
+proc populate_remotes_menu {} {
+ global all_remotes
+
+ foreach r $all_remotes {
+ add_fetch_entry $r
+ add_push_entry $r
+ }
+}
+
+proc add_single_remote {name location} {
+ global all_remotes repo_config
+ lappend all_remotes $name
+
+ git remote add $name $location
+
+ # XXX: Better re-read the config so that we will never get out
+ # of sync with git remote implementation?
+ set repo_config(remote.$name.url) $location
+ set repo_config(remote.$name.fetch) "+refs/heads/*:refs/remotes/$name/*"
+
+ add_fetch_entry $name
+ add_push_entry $name
+}
+
+proc delete_from_menu {menu name} {
+ if {[winfo exists $menu]} {
+ $menu delete $name
}
}
+
+proc remove_remote {name} {
+ global all_remotes repo_config
+
+ git remote rm $name
+
+ catch {
+ # Missing values are ok
+ unset repo_config(remote.$name.url)
+ unset repo_config(remote.$name.fetch)
+ unset repo_config(remote.$name.push)
+ }
+
+ set i [lsearch -exact $all_remotes $name]
+ set all_remotes [lreplace $all_remotes $i $i]
+
+ set remote_m .mbar.remote
+ delete_from_menu $remote_m.fetch $name
+ delete_from_menu $remote_m.prune $name
+ delete_from_menu $remote_m.remove $name
+ # Not all remotes are in the push menu
+ catch { delete_from_menu $remote_m.push $name }
+}
--- /dev/null
+# git-gui remote adding support
+# Copyright (C) 2008 Petr Baudis
+
+class remote_add {
+
+field w ; # widget path
+field w_name ; # new remote name widget
+field w_loc ; # new remote location widget
+
+field name {}; # name of the remote the user has chosen
+field location {}; # location of the remote the user has chosen
+
+field opt_action fetch; # action to do after registering the remote locally
+
+constructor dialog {} {
+ global repo_config
+
+ make_toplevel top w
+ wm title $top [append "[appname] ([reponame]): " [mc "Add Remote"]]
+ if {$top ne {.}} {
+ wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
+ }
+
+ label $w.header -text [mc "Add New Remote"] -font font_uibold
+ pack $w.header -side top -fill x
+
+ frame $w.buttons
+ button $w.buttons.create -text [mc Add] \
+ -default active \
+ -command [cb _add]
+ pack $w.buttons.create -side right
+ button $w.buttons.cancel -text [mc Cancel] \
+ -command [list destroy $w]
+ pack $w.buttons.cancel -side right -padx 5
+ pack $w.buttons -side bottom -fill x -pady 10 -padx 10
+
+ labelframe $w.desc -text [mc "Remote Details"]
+
+ label $w.desc.name_l -text [mc "Name:"]
+ set w_name $w.desc.name_t
+ entry $w_name \
+ -borderwidth 1 \
+ -relief sunken \
+ -width 40 \
+ -textvariable @name \
+ -validate key \
+ -validatecommand [cb _validate_name %d %S]
+ grid $w.desc.name_l $w_name -sticky we -padx {0 5}
+
+ label $w.desc.loc_l -text [mc "Location:"]
+ set w_loc $w.desc.loc_t
+ entry $w_loc \
+ -borderwidth 1 \
+ -relief sunken \
+ -width 40 \
+ -textvariable @location
+ grid $w.desc.loc_l $w_loc -sticky we -padx {0 5}
+
+ grid columnconfigure $w.desc 1 -weight 1
+ pack $w.desc -anchor nw -fill x -pady 5 -padx 5
+
+ labelframe $w.action -text [mc "Further Action"]
+
+ radiobutton $w.action.fetch \
+ -text [mc "Fetch Immediately"] \
+ -value fetch \
+ -variable @opt_action
+ pack $w.action.fetch -anchor nw
+
+ radiobutton $w.action.push \
+ -text [mc "Initialize Remote Repository and Push"] \
+ -value push \
+ -variable @opt_action
+ pack $w.action.push -anchor nw
+
+ radiobutton $w.action.none \
+ -text [mc "Do Nothing Else Now"] \
+ -value none \
+ -variable @opt_action
+ pack $w.action.none -anchor nw
+
+ grid columnconfigure $w.action 1 -weight 1
+ pack $w.action -anchor nw -fill x -pady 5 -padx 5
+
+ bind $w <Visibility> [cb _visible]
+ bind $w <Key-Escape> [list destroy $w]
+ bind $w <Key-Return> [cb _add]\;break
+ tkwait window $w
+}
+
+method _add {} {
+ global repo_config env
+ global M1B
+
+ if {$name eq {}} {
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title [wm title $w] \
+ -parent $w \
+ -message [mc "Please supply a remote name."]
+ focus $w_name
+ return
+ }
+
+ # XXX: We abuse check-ref-format here, but
+ # that should be ok.
+ if {[catch {git check-ref-format "remotes/$name"}]} {
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title [wm title $w] \
+ -parent $w \
+ -message [mc "'%s' is not an acceptable remote name." $name]
+ focus $w_name
+ return
+ }
+
+ if {[catch {add_single_remote $name $location}]} {
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title [wm title $w] \
+ -parent $w \
+ -message [mc "Failed to add remote '%s' of location '%s'." $name $location]
+ focus $w_name
+ return
+ }
+
+ switch -- $opt_action {
+ fetch {
+ set c [console::new \
+ [mc "fetch %s" $name] \
+ [mc "Fetching the %s" $name]]
+ console::exec $c [list git fetch $name]
+ }
+ push {
+ set cmds [list]
+
+ # Parse the location
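+ # Locations such as "ssh://example.com/repo.git" or the scp-like
+ # "example.com:repo.git" (host and path purely illustrative) are
+ # initialized over ssh; plain local paths are initialized directly
+ # in the branch below.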
+ if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
+ || [regexp {([^:][^:]+):(.+)} $location xx host path]} {
+ set ssh ssh
+ if {[info exists env(GIT_SSH)]} {
+ set ssh $env(GIT_SSH)
+ }
+ lappend cmds [list exec $ssh $host mkdir -p $location && git --git-dir=$path init --bare]
+ } elseif { ! [regexp {://} $location xx] } {
+ lappend cmds [list exec mkdir -p $location]
+ lappend cmds [list exec git --git-dir=$location init --bare]
+ } else {
+ tk_messageBox \
+ -icon error \
+ -type ok \
+ -title [wm title $w] \
+ -parent $w \
+ -message [mc "Do not know how to initialize repository at location '%s'." $location]
+ destroy $w
+ return
+ }
+
+ set c [console::new \
+ [mc "push %s" $name] \
+ [mc "Setting up the %s (at %s)" $name $location]]
+
+ lappend cmds [list exec git push -v --all $name]
+ console::chain $c $cmds
+ }
+ none {
+ }
+ }
+
+ destroy $w
+}
+
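+# Entry widget -validatecommand callback: %d is the edit type (1 on
+# insert) and %S is the text being inserted; reject characters such
+# as ~ ^ : ? * [ and whitespace/control characters, which would not
+# be accepted in a remote name.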
+method _validate_name {d S} {
+ if {$d == 1} {
+ if {[regexp {[~^:?*\[\0- ]} $S]} {
+ return 0
+ }
+ }
+ return 1
+}
+
+method _visible {} {
+ grab $w
+ $w_name icursor end
+ focus $w_name
+}
+
+}
global all_remotes M1B
make_toplevel top w
- wm title $top [append "[appname] ([reponame]): " [mc "Delete Remote Branch"]]
+ wm title $top [append "[appname] ([reponame]): " [mc "Delete Branch Remotely"]]
if {$top ne {.}} {
wm geometry $top "+[winfo rootx .]+[winfo rooty .]"
}
- label $w.header -text [mc "Delete Remote Branch"] -font font_uibold
+ label $w.header -text [mc "Delete Branch Remotely"] -font font_uibold
pack $w.header -side top -fill x
frame $w.buttons
set urltype url
}
radiobutton $w.dest.url_r \
- -text [mc "Arbitrary URL:"] \
+ -text [mc "Arbitrary Location:"] \
-value url \
-variable @urltype
entry $w.dest.url_t \
--- /dev/null
+# incremental search panel
+# based on code from gitk, Copyright (C) Paul Mackerras
+
+class searchbar {
+
+field w
+field ctext
+
+field searchstring {}
+field casesensitive 1
+field searchdirn -forwards
+
+field smarktop
+field smarkbot
+
+constructor new {i_w i_text args} {
+ set w $i_w
+ set ctext $i_text
+
+ frame $w
+ label $w.l -text [mc Find:]
+ entry $w.ent -textvariable ${__this}::searchstring -background lightgreen
+ button $w.bn -text [mc Next] -command [cb find_next]
+ button $w.bp -text [mc Prev] -command [cb find_prev]
+ checkbutton $w.cs -text [mc Case-Sensitive] \
+ -variable ${__this}::casesensitive -command [cb _incrsearch]
+ pack $w.l -side left
+ pack $w.cs -side right
+ pack $w.bp -side right
+ pack $w.bn -side right
+ pack $w.ent -side left -expand 1 -fill x
+
+ eval grid conf $w -sticky we $args
+ grid remove $w
+
+ trace add variable searchstring write [cb _incrsearch_cb]
+
+ bind $w <Destroy> [cb delete_this]
+ return $this
+}
+
+method show {} {
+ if {![visible $this]} {
+ grid $w
+ }
+ focus -force $w.ent
+}
+
+method hide {} {
+ if {[visible $this]} {
+ focus $ctext
+ grid remove $w
+ }
+}
+
+method visible {} {
+ return [winfo ismapped $w]
+}
+
+method editor {} {
+ return $w.ent
+}
+
+method _get_new_anchor {} {
+ # use start of selection if it is visible,
+ # or the bounds of the visible area
+ set top [$ctext index @0,0]
+ set bottom [$ctext index @0,[winfo height $ctext]]
+ set sel [$ctext tag ranges sel]
+ if {$sel ne {}} {
+ set spos [lindex $sel 0]
+ if {[lindex $spos 0] >= [lindex $top 0] &&
+ [lindex $spos 0] <= [lindex $bottom 0]} {
+ return $spos
+ }
+ }
+ if {$searchdirn eq "-forwards"} {
+ return $top
+ } else {
+ return $bottom
+ }
+}
+
+method _get_wrap_anchor {dir} {
+ if {$dir eq "-forwards"} {
+ return 1.0
+ } else {
+ return end
+ }
+}
+
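+# Run one text-widget search from $start: $mlenvar (if given) receives
+# the match length, $dir overrides the remembered search direction, and
+# $endbound bounds the search; without $endbound the search wraps
+# around to the other end of the buffer.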
+method _do_search {start {mlenvar {}} {dir {}} {endbound {}}} {
+ set cmd [list $ctext search]
+ if {$mlenvar ne {}} {
+ upvar $mlenvar mlen
+ lappend cmd -count mlen
+ }
+ if {!$casesensitive} {
+ lappend cmd -nocase
+ }
+ if {$dir eq {}} {
+ set dir $searchdirn
+ }
+ lappend cmd $dir -- $searchstring
+ if {$endbound ne {}} {
+ set here [eval $cmd [list $start] [list $endbound]]
+ } else {
+ set here [eval $cmd [list $start]]
+ if {$here eq {}} {
+ set here [eval $cmd [_get_wrap_anchor $this $dir]]
+ }
+ }
+ return $here
+}
+
+method _incrsearch_cb {name ix op} {
+ after idle [cb _incrsearch]
+}
+
+method _incrsearch {} {
+ $ctext tag remove found 1.0 end
+ if {[catch {$ctext index anchor}]} {
+ $ctext mark set anchor [_get_new_anchor $this]
+ }
+ if {$searchstring ne {}} {
+ set here [_do_search $this anchor mlen]
+ if {$here ne {}} {
+ $ctext see $here
+ $ctext tag remove sel 1.0 end
+ $ctext tag add sel $here "$here + $mlen c"
+ $w.ent configure -background lightgreen
+ _set_marks $this 1
+ } else {
+ $w.ent configure -background lightpink
+ }
+ }
+}
+
+method find_prev {} {
+ find_next $this -backwards
+}
+
+method find_next {{dir -forwards}} {
+ focus $w.ent
+ $w.ent icursor end
+ set searchdirn $dir
+ $ctext mark unset anchor
+ if {$searchstring ne {}} {
+ set start [_get_new_anchor $this]
+ if {$dir eq "-forwards"} {
+ set start "$start + 1c"
+ }
+ set match [_do_search $this $start mlen]
+ $ctext tag remove sel 1.0 end
+ if {$match ne {}} {
+ $ctext see $match
+ $ctext tag add sel $match "$match + $mlen c"
+ }
+ }
+}
+
+method _mark_range {first last} {
+ set mend $first.0
+ while {1} {
+ set match [_do_search $this $mend mlen -forwards $last.end]
+ if {$match eq {}} break
+ set mend "$match + $mlen c"
+ $ctext tag add found $match $mend
+ }
+}
+
+method _set_marks {doall} {
+ set topline [lindex [split [$ctext index @0,0] .] 0]
+ set botline [lindex [split [$ctext index @0,[winfo height $ctext]] .] 0]
+ if {$doall || $botline < $smarktop || $topline > $smarkbot} {
+ # no overlap with previous
+ _mark_range $this $topline $botline
+ set smarktop $topline
+ set smarkbot $botline
+ } else {
+ if {$topline < $smarktop} {
+ _mark_range $this $topline [expr {$smarktop-1}]
+ set smarktop $topline
+ }
+ if {$botline > $smarkbot} {
+ _mark_range $this [expr {$smarkbot+1}] $botline
+ set smarkbot $botline
+ }
+ }
+}
+
+method scrolled {} {
+ if {$searchstring ne {}} {
+ after idle [cb _set_marks 0]
+ }
+}
+
+}
\ No newline at end of file
error_popup [strcat [mc "Unrecognized spell checker"] ":\n\n$s_version"]
return
}
- set s_version [string range $s_version 5 end]
+ set s_version [string range [string trim $s_version] 5 end]
regexp \
{International Ispell Version .* \(but really (Aspell .*?)\)$} \
$s_version _junk s_version
method _read {} {
while {[gets $s_fd line] >= 0} {
set lineno [lindex $s_pending 0 0]
+ set line [string trim $line]
if {$s_clear} {
$w_text tag remove misspelled "$lineno.0" "$lineno.end"
--- /dev/null
+# git-gui about git-gui dialog
+# Copyright (C) 2006, 2007 Shawn Pearce
+
+proc find_ssh_key {} {
+ foreach name {~/.ssh/id_dsa.pub ~/.ssh/id_rsa.pub ~/.ssh/identity.pub} {
+ if {[file exists $name]} {
+ set fh [open $name r]
+ set cont [read $fh]
+ close $fh
+ return [list $name $cont]
+ }
+ }
+
+ return {}
+}
+
+proc do_ssh_key {} {
+ global sshkey_title have_tk85 sshkey_fd
+
+ set w .sshkey_dialog
+ if {[winfo exists $w]} {
+ raise $w
+ return
+ }
+
+ toplevel $w
+ wm transient $w .
+
+ set finfo [find_ssh_key]
+ if {$finfo eq {}} {
+ set sshkey_title [mc "No keys found."]
+ set gen_state normal
+ } else {
+ set sshkey_title [mc "Found a public key in: %s" [lindex $finfo 0]]
+ set gen_state disabled
+ }
+
+ frame $w.header -relief flat
+ label $w.header.lbl -textvariable sshkey_title -anchor w
+ button $w.header.gen -text [mc "Generate Key"] \
+ -command [list make_ssh_key $w] -state $gen_state
+ pack $w.header.lbl -side left -expand 1 -fill x
+ pack $w.header.gen -side right
+ pack $w.header -fill x -pady 5 -padx 5
+
+ text $w.contents -width 60 -height 10 -wrap char -relief sunken
+ pack $w.contents -fill both -expand 1
+ if {$have_tk85} {
+ $w.contents configure -inactiveselectbackground darkblue
+ }
+
+ frame $w.buttons
+ button $w.buttons.close -text [mc Close] \
+ -default active -command [list destroy $w]
+ pack $w.buttons.close -side right
+ button $w.buttons.copy -text [mc "Copy To Clipboard"] \
+ -command [list tk_textCopy $w.contents]
+ pack $w.buttons.copy -side left
+ pack $w.buttons -side bottom -fill x -pady 5 -padx 5
+
+ if {$finfo ne {}} {
+ $w.contents insert end [lindex $finfo 1] sel
+ }
+ $w.contents configure -state disabled
+
+ bind $w <Visibility> "grab $w; focus $w.buttons.close"
+ bind $w <Key-Escape> "destroy $w"
+ bind $w <Key-Return> "destroy $w"
+ bind $w <Destroy> kill_sshkey
+ wm title $w [mc "Your OpenSSH Public Key"]
+ tk::PlaceWindow $w widget .
+ tkwait window $w
+}
+
+proc make_ssh_key {w} {
+ global sshkey_title sshkey_output sshkey_fd
+
+ set sshkey_title [mc "Generating..."]
+ $w.header.gen configure -state disabled
+
+ set cmdline [list sh -c {echo | ssh-keygen -q -t rsa -f ~/.ssh/id_rsa 2>&1}]
+
+ if {[catch { set sshkey_fd [_open_stdout_stderr $cmdline] } err]} {
+ error_popup [mc "Could not start ssh-keygen:\n\n%s" $err]
+ return
+ }
+
+ set sshkey_output {}
+ fconfigure $sshkey_fd -blocking 0
+ fileevent $sshkey_fd readable [list read_sshkey_output $sshkey_fd $w]
+}
+
+proc kill_sshkey {} {
+ global sshkey_fd
+ if {![info exists sshkey_fd]} return
+ catch { kill_file_process $sshkey_fd }
+ catch { close $sshkey_fd }
+}
+
+proc read_sshkey_output {fd w} {
+ global sshkey_fd sshkey_output sshkey_title
+
+ set sshkey_output "$sshkey_output[read $fd]"
+ if {![eof $fd]} return
+
+ fconfigure $fd -blocking 1
+ unset sshkey_fd
+
+ $w.contents configure -state normal
+ if {[catch {close $fd} err]} {
+ set sshkey_title [mc "Generation failed."]
+ $w.contents insert end $err
+ $w.contents insert end "\n"
+ $w.contents insert end $sshkey_output
+ } else {
+ set finfo [find_ssh_key]
+ if {$finfo eq {}} {
+ set sshkey_title [mc "Generation succeded, but no keys found."]
+ $w.contents insert end $sshkey_output
+ } else {
+ set sshkey_title [mc "Your key is in: %s" [lindex $finfo 0]]
+ $w.contents insert end [lindex $finfo 1] sel
+ }
+ }
+ $w.contents configure -state disabled
+}
set push_urltype url
}
radiobutton $w.dest.url_r \
- -text [mc "Arbitrary URL:"] \
+ -text [mc "Arbitrary Location:"] \
-value url \
-variable push_urltype
entry $w.dest.url_t \
msgstr ""
"Project-Id-Version: git-gui\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2008-09-13 10:20+0200\n"
-"PO-Revision-Date: 2008-09-13 10:24+0200\n"
+"POT-Creation-Date: 2008-10-25 13:32+0200\n"
+"PO-Revision-Date: 2008-10-25 22:47+0200\n"
"Last-Translator: Christian Stimming <stimming@tuhh.de>\n"
"Language-Team: German\n"
"MIME-Version: 1.0\n"
msgid "Scanning for modified files ..."
msgstr "Nach geänderten Dateien suchen..."
-#: git-gui.sh:1324 lib/browser.tcl:246
+#: git-gui.sh:1325
+#, fuzzy
+msgid "Calling prepare-commit-msg hook..."
+msgstr "Aufrufen der Vor-Eintragen-Kontrolle..."
+
+#: git-gui.sh:1342
+#, fuzzy
+msgid "Commit declined by prepare-commit-msg hook."
+msgstr "Eintragen abgelehnt durch Vor-Eintragen-Kontrolle (»pre-commit hook«)."
+
+#: git-gui.sh:1502 lib/browser.tcl:246
msgid "Ready."
msgstr "Bereit."
msgid "Remote"
msgstr "Andere Archive"
-#: git-gui.sh:1879
+#: git-gui.sh:2242
+msgid "Explore Working Copy"
+msgstr "Arbeitskopie im Dateimanager"
+
+#: git-gui.sh:2247
msgid "Browse Current Branch's Files"
msgstr "Aktuellen Zweig durchblättern"
msgid "Reset..."
msgstr "Zurücksetzen..."
-#: git-gui.sh:2002 git-gui.sh:2389
+#: git-gui.sh:2372
+msgid "Done"
+msgstr "Fertig"
+
+#: git-gui.sh:2374
+msgid "Commit@@verb"
+msgstr "Eintragen"
+
+#: git-gui.sh:2383 git-gui.sh:2786
msgid "New Commit"
msgstr "Neue Version"
msgid "Sign Off"
msgstr "Abzeichnen"
-#: git-gui.sh:2053 git-gui.sh:2372
-msgid "Commit@@verb"
-msgstr "Eintragen"
-
-#: git-gui.sh:2064
+#: git-gui.sh:2458
msgid "Local Merge..."
msgstr "Lokales Zusammenführen..."
msgid "Abort Merge..."
msgstr "Zusammenführen abbrechen..."
-#: git-gui.sh:2081
+#: git-gui.sh:2475
+msgid "Add..."
+msgstr "Hinzufügen..."
+
+#: git-gui.sh:2479
msgid "Push..."
msgstr "Versenden..."
-#: git-gui.sh:2197 git-gui.sh:2219 lib/about.tcl:14
+#: git-gui.sh:2483
+msgid "Delete Branch..."
+msgstr "Zweig löschen..."
+
+#: git-gui.sh:2493 git-gui.sh:2515 lib/about.tcl:14
#: lib/choose_repository.tcl:44 lib/choose_repository.tcl:50
#, tcl-format
msgid "About %s"
msgid "Increase Font Size"
msgstr "Schriftgröße vergrößern"
-#: git-gui.sh:2870
+#: git-gui.sh:3033 lib/blame.tcl:281
+msgid "Encoding"
+msgstr "Zeichenkodierung"
+
+#: git-gui.sh:3044
msgid "Apply/Reverse Hunk"
msgstr "Kontext anwenden/umkehren"
msgid "Revert To Base"
msgstr "Ursprüngliche Version benutzen"
-#: git-gui.sh:2906
-msgid "Stage Working Copy"
-msgstr "Arbeitskopie bereitstellen"
-
-#: git-gui.sh:2925
+#: git-gui.sh:3091
msgid "Unstage Hunk From Commit"
msgstr "Kontext aus Bereitstellung herausnehmen"
msgid "Original File:"
msgstr "Ursprüngliche Datei:"
-#: lib/blame.tcl:990
+#: lib/blame.tcl:1013
+#, fuzzy
+msgid "Cannot find HEAD commit:"
+msgstr "Elternversion kann nicht gefunden werden:"
+
+#: lib/blame.tcl:1068
msgid "Cannot find parent commit:"
msgstr "Elternversion kann nicht gefunden werden:"
msgid "Clone"
msgstr "Klonen"
-#: lib/choose_repository.tcl:468
-msgid "URL:"
-msgstr "URL:"
+#: lib/choose_repository.tcl:467
+msgid "Source Location:"
+msgstr ""
+
+#: lib/choose_repository.tcl:478
+msgid "Target Directory:"
+msgstr "Zielverzeichnis:"
-#: lib/choose_repository.tcl:489
+#: lib/choose_repository.tcl:490
msgid "Clone Type:"
msgstr "Art des Klonens:"
msgid "Loading diff of %s..."
msgstr "Vergleich von »%s« laden..."
-#: lib/diff.tcl:114 lib/diff.tcl:184
+#: lib/diff.tcl:120
+msgid ""
+"LOCAL: deleted\n"
+"REMOTE:\n"
+msgstr ""
+
+#: lib/diff.tcl:125
+msgid ""
+"REMOTE: deleted\n"
+"LOCAL:\n"
+msgstr ""
+
+#: lib/diff.tcl:132
+msgid "LOCAL:\n"
+msgstr ""
+
+#: lib/diff.tcl:135
+msgid "REMOTE:\n"
+msgstr ""
+
+#: lib/diff.tcl:197 lib/diff.tcl:296
#, tcl-format
msgid "Unable to display %s"
msgstr "Datei »%s« kann nicht angezeigt werden"
msgid "* Binary file (not showing content)."
msgstr "* Binärdatei (Inhalt wird nicht angezeigt)"
-#: lib/diff.tcl:313
+#: lib/diff.tcl:222
+#, tcl-format
+msgid ""
+"* Untracked file is %d bytes.\n"
+"* Showing only first %d bytes.\n"
+msgstr ""
+
+#: lib/diff.tcl:228
+#, tcl-format
+msgid ""
+"\n"
+"* Untracked file clipped here by %s.\n"
+"* To see the entire file, use an external editor.\n"
+msgstr ""
+
+#: lib/diff.tcl:437
msgid "Failed to unstage selected hunk."
msgstr ""
"Fehler beim Herausnehmen des gewählten Kontexts aus der Bereitstellung."
msgid "Failed to stage selected line."
msgstr "Fehler beim Bereitstellen der gewählten Zeile."
+#: lib/encoding.tcl:443
+msgid "Default"
+msgstr "Voreinstellung"
+
+#: lib/encoding.tcl:448
+#, tcl-format
+msgid "System (%s)"
+msgstr "Systemweit (%s)"
+
+#: lib/encoding.tcl:459 lib/encoding.tcl:465
+msgid "Other"
+msgstr "Andere"
+
#: lib/error.tcl:20 lib/error.tcl:114
msgid "error"
msgstr "Fehler"
"Diese Operation kann nur rückgängig gemacht werden, wenn die\n"
"Zusammenführung erneut gestartet wird."
-#: lib/mergetool.tcl:32
+#: lib/mergetool.tcl:45
+#, tcl-format
+msgid "File %s seems to have unresolved conflicts, still stage?"
+msgstr "Datei »%s« hat nicht aufgelöste Konflikte. Trotzdem bereitstellen?"
+
+#: lib/mergetool.tcl:60
#, tcl-format
msgid "Adding resolution for %s"
msgstr "Auflösung hinzugefügt für %s"
msgid "Merge tool failed."
msgstr "Zusammenführungswerkzeug fehlgeschlagen."
-#: lib/mergetool.tcl:353
+#: lib/option.tcl:11
+#, tcl-format
+msgid "Invalid global encoding '%s'"
+msgstr "Ungültige globale Zeichenkodierung »%s«"
+
+#: lib/option.tcl:19
#, tcl-format
-msgid "File %s unchanged, still accept as resolved?"
-msgstr "Datei »%s« unverändert. Trotzdem Konflikt als gelöst akzeptieren?"
+msgid "Invalid repo encoding '%s'"
+msgstr "Ungültige Archiv-Zeichenkodierung »%s«"
-#: lib/option.tcl:95
+#: lib/option.tcl:117
msgid "Restore Defaults"
msgstr "Voreinstellungen wiederherstellen"
msgid "New Branch Name Template"
msgstr "Namensvorschlag für neue Zweige"
-#: lib/option.tcl:192
+#: lib/option.tcl:155
+msgid "Default File Contents Encoding"
+msgstr "Vorgestellte Zeichenkodierung"
+
+#: lib/option.tcl:203
+msgid "Change"
+msgstr "Ändern"
+
+#: lib/option.tcl:230
msgid "Spelling Dictionary:"
msgstr "Wörterbuch Rechtschreibprüfung:"
msgid "Failed to completely save options:"
msgstr "Optionen konnten nicht gespeichert werden:"
+#: lib/remote_add.tcl:19
+msgid "Add Remote"
+msgstr "Anderes Archiv hinzufügen"
+
+#: lib/remote_add.tcl:24
+msgid "Add New Remote"
+msgstr "Neues anderes Archiv hinzufügen"
+
+#: lib/remote_add.tcl:28
+msgid "Add"
+msgstr "Hinzufügen"
+
+#: lib/remote_add.tcl:37
+msgid "Remote Details"
+msgstr "Einzelheiten des anderen Archivs"
+
+#: lib/remote_add.tcl:50
+msgid "Location:"
+msgstr "Adresse:"
+
+#: lib/remote_add.tcl:62
+msgid "Further Action"
+msgstr "Weitere Aktion jetzt"
+
+#: lib/remote_add.tcl:65
+msgid "Fetch Immediately"
+msgstr "Gleich anfordern"
+
+#: lib/remote_add.tcl:71
+msgid "Initialize Remote Repository and Push"
+msgstr "Anderes Archiv initialisieren und dahin versenden"
+
+#: lib/remote_add.tcl:77
+msgid "Do Nothing Else Now"
+msgstr "Nichts tun"
+
+#: lib/remote_add.tcl:101
+#, fuzzy
+msgid "Please supply a remote name."
+msgstr "Bitte geben Sie einen Zweignamen an."
+
+#: lib/remote_add.tcl:114
+#, fuzzy, tcl-format
+msgid "'%s' is not an acceptable remote name."
+msgstr "»%s« ist kein zulässiger Zweigname."
+
+#: lib/remote_add.tcl:125
+#, fuzzy, tcl-format
+msgid "Failed to add remote '%s' of location '%s'."
+msgstr "Fehler beim Umbenennen von »%s«."
+
+#: lib/remote_add.tcl:133 lib/transport.tcl:6
+#, tcl-format
+msgid "fetch %s"
+msgstr "»%s« anfordern"
+
+#: lib/remote_add.tcl:134
+#, fuzzy, tcl-format
+msgid "Fetching the %s"
+msgstr "Änderungen »%s« von »%s« anfordern"
+
+#: lib/remote_add.tcl:157
+#, tcl-format
+msgid "Do not know how to initialize repository at location '%s'."
+msgstr "Initialisieren eines anderen Archivs an Adresse »%s« ist nicht möglich."
+
+#: lib/remote_add.tcl:163 lib/transport.tcl:25 lib/transport.tcl:71
+#, tcl-format
+msgid "push %s"
+msgstr "»%s« versenden..."
+
+#: lib/remote_add.tcl:164
+#, tcl-format
+msgid "Setting up the %s (at %s)"
+msgstr "Einrichten von »%s« an »%s«"
+
#: lib/remote_branch_delete.tcl:29 lib/remote_branch_delete.tcl:34
-msgid "Delete Remote Branch"
-msgstr "Zweig in anderem Projektarchiv löschen"
+msgid "Delete Branch Remotely"
+msgstr "Zweig in anderem Archiv löschen"
#: lib/remote_branch_delete.tcl:47
msgid "From Repository"
msgstr "Anderes Archiv:"
#: lib/remote_branch_delete.tcl:66 lib/transport.tcl:138
-msgid "Arbitrary URL:"
-msgstr "Archiv-URL:"
+msgid "Arbitrary Location:"
+msgstr "Adresse:"
#: lib/remote_branch_delete.tcl:84
msgid "Branches"
msgid "Scanning %s..."
msgstr "»%s« laden..."
-#: lib/remote.tcl:165
+#: lib/remote.tcl:163
+msgid "Remove Remote"
+msgstr "Anderes Archiv entfernen"
+
+#: lib/remote.tcl:168
msgid "Prune from"
msgstr "Aufräumen von"
msgid "Push to"
msgstr "Versenden nach"
+#: lib/search.tcl:21
+msgid "Find:"
+msgstr "Suchen:"
+
+#: lib/search.tcl:22
+msgid "Next"
+msgstr "Nächster"
+
+#: lib/search.tcl:23
+msgid "Prev"
+msgstr "Voriger"
+
+#: lib/search.tcl:24
+msgid "Case-Sensitive"
+msgstr ""
+
#: lib/shortcut.tcl:20 lib/shortcut.tcl:61
msgid "Cannot write shortcut:"
msgstr "Fehler beim Schreiben der Verknüpfung:"
msgid "%s ... %*i of %*i %s (%3i%%)"
msgstr "%s ... %*i von %*i %s (%3i%%)"
-#: lib/transport.tcl:6
-#, tcl-format
-msgid "fetch %s"
-msgstr "»%s« anfordern"
-
#: lib/transport.tcl:7
#, tcl-format
msgid "Fetching new changes from %s"
msgid "Pruning tracking branches deleted from %s"
msgstr "Übernahmezweige aufräumen und entfernen, die in »%s« gelöscht wurden"
-#: lib/transport.tcl:25 lib/transport.tcl:71
-#, tcl-format
-msgid "push %s"
-msgstr "»%s« versenden..."
-
#: lib/transport.tcl:26
#, tcl-format
msgid "Pushing changes to %s"
if test -f "$DOTEST"/current-commit
then
- current_commit=$(cat "$DOTEST"/current-commit) &&
- git rev-parse HEAD > "$REWRITTEN"/$current_commit &&
- rm "$DOTEST"/current-commit ||
- die "Cannot write current commit's replacement sha1"
+ if test "$fast_forward" = t
+ then
+ cat "$DOTEST"/current-commit | while read current_commit
+ do
+ git rev-parse HEAD > "$REWRITTEN"/$current_commit
+ done
+ rm "$DOTEST"/current-commit ||
+ die "Cannot write current commit's replacement sha1"
+ fi
fi
- echo $sha1 > "$DOTEST"/current-commit
+ echo $sha1 >> "$DOTEST"/current-commit
# rewrite parents; if none were rewritten, we can fast-forward.
new_parents=
if test -f "$REWRITTEN"/$p
then
new_p=$(cat "$REWRITTEN"/$p)
+
+ # If the todo list reordered commits and our parent is marked for
+ # rewriting but has not been rewritten yet, assume the user meant to
+ # drop it on top of the current HEAD
+ if test -z "$new_p"
+ then
+ new_p=$(git rev-parse HEAD)
+ fi
+
test $p != $new_p && fast_forward=f
case "$new_parents" in
*$new_p*)
die "Cannot fast forward to $sha1"
;;
f)
- test "a$1" = a-n && die "Refusing to squash a merge: $sha1"
-
first_parent=$(expr "$new_parents" : ' \([^ ]*\)')
- # detach HEAD to current parent
- output git checkout $first_parent 2> /dev/null ||
- die "Cannot move HEAD to $first_parent"
+
+ if [ "$1" != "-n" ]
+ then
+ # detach HEAD to current parent
+ output git checkout $first_parent 2> /dev/null ||
+ die "Cannot move HEAD to $first_parent"
+ fi
case "$new_parents" in
' '*' '*)
+ test "a$1" = a-n && die "Refusing to squash a merge: $sha1"
+
# redo merge
author_script=$(get_author_ident_from_commit $sha1)
eval "$author_script"
HEADNAME=$(cat "$DOTEST"/head-name) &&
OLDHEAD=$(cat "$DOTEST"/head) &&
SHORTONTO=$(git rev-parse --short $(cat "$DOTEST"/onto)) &&
- if test -d "$REWRITTEN"
- then
- test -f "$DOTEST"/current-commit &&
- current_commit=$(cat "$DOTEST"/current-commit) &&
- git rev-parse HEAD > "$REWRITTEN"/$current_commit
- if test -f "$REWRITTEN"/$OLDHEAD
- then
- NEWHEAD=$(cat "$REWRITTEN"/$OLDHEAD)
- else
- NEWHEAD=$OLDHEAD
- fi
- else
- NEWHEAD=$(git rev-parse HEAD)
- fi &&
+ NEWHEAD=$(git rev-parse HEAD) &&
case $HEADNAME in
refs/*)
message="$GIT_REFLOG_ACTION: $HEADNAME onto $SHORTONTO)" &&
echo $ONTO > "$REWRITTEN"/$c ||
die "Could not init rewritten commits"
done
+ # No cherry-pick because our first pass is to determine
+ # parents to rewrite and skipping dropped commits would
+ # prematurely end our probe
MERGES_OPTION=
+ first_after_upstream="$(git rev-list --reverse --first-parent $UPSTREAM..$HEAD | head -n 1)"
else
- MERGES_OPTION=--no-merges
+ MERGES_OPTION="--no-merges --cherry-pick"
fi
SHORTUPSTREAM=$(git rev-parse --short $UPSTREAM)
SHORTHEAD=$(git rev-parse --short $HEAD)
SHORTONTO=$(git rev-parse --short $ONTO)
git rev-list $MERGES_OPTION --pretty=oneline --abbrev-commit \
- --abbrev=7 --reverse --left-right --cherry-pick \
+ --abbrev=7 --reverse --left-right --topo-order \
$UPSTREAM...$HEAD | \
- sed -n "s/^>/pick /p" > "$TODO"
+ sed -n "s/^>//p" | while read shortsha1 rest
+ do
+ if test t != "$PRESERVE_MERGES"
+ then
+ echo "pick $shortsha1 $rest" >> "$TODO"
+ else
+ sha1=$(git rev-parse $shortsha1)
+ preserve=t
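+ # Pick this commit only if at least one of its parents is
+ # itself marked for rewriting; the first_after_upstream check
+ # keeps the first commit whose parent is $UPSTREAM itself.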
+ for p in $(git rev-list --parents -1 $sha1 | cut -d' ' -f2-)
+ do
+ if test -f "$REWRITTEN"/$p -a \( $p != $UPSTREAM -o $sha1 = $first_after_upstream \)
+ then
+ preserve=f
+ fi
+ done
+ if test f = "$preserve"
+ then
+ touch "$REWRITTEN"/$sha1
+ echo "pick $shortsha1 $rest" >> "$TODO"
+ fi
+ fi
+ done
+
+ # Watch for commits that have been dropped by --cherry-pick
+ if test t = "$PRESERVE_MERGES"
+ then
+ mkdir "$DROPPED"
+ # Save all non-cherry-picked changes
+ git rev-list $UPSTREAM...$HEAD --left-right --cherry-pick | \
+ sed -n "s/^>//p" > "$DOTEST"/not-cherry-picks
+ # Now walk all commits and note which ones are missing in
+ # not-cherry-picks and hence have been dropped
+ git rev-list $UPSTREAM..$HEAD |
+ while read rev
+ do
+ if test -f "$REWRITTEN"/$rev -a "$(grep "$rev" "$DOTEST"/not-cherry-picks)" = ""
+ then
+ # Use -f2 because if rev-list is telling us this commit is
+ # not worthwhile, we don't want to track its multiple heads,
+ # just the history of its first-parent for others that will
+ # be rebasing on top of it
+ git rev-list --parents -1 $rev | cut -d' ' -f2 > "$DROPPED"/$rev
+ short=$(git rev-list -1 --abbrev-commit --abbrev=7 $rev)
+ grep -v "^[a-z][a-z]* $short" <"$TODO" > "${TODO}2" ; mv "${TODO}2" "$TODO"
+ rm "$REWRITTEN"/$rev
+ fi
+ done
+ fi
test -s "$TODO" || echo noop >> "$TODO"
cat >> "$TODO" << EOF
#
EOF
- # Watch for commits that been dropped by --cherry-pick
- if test t = "$PRESERVE_MERGES"
- then
- mkdir "$DROPPED"
- # drop the --cherry-pick parameter this time
- git rev-list $MERGES_OPTION --abbrev-commit \
- --abbrev=7 $UPSTREAM...$HEAD --left-right | \
- sed -n "s/^>//p" | while read rev
- do
- grep --quiet "$rev" "$TODO"
- if [ $? -ne 0 ]
- then
- # Use -f2 because if rev-list is telling this commit is not
- # worthwhile, we don't want to track its multiple heads,
- # just the history of its first-parent for others that will
- # be rebasing on top of us
- full=$(git rev-parse $rev)
- git rev-list --parents -1 $rev | cut -d' ' -f2 > "$DROPPED"/$full
- fi
- done
- fi
-
has_action "$TODO" ||
die_abort "Nothing to do"
push @files, grep { -f $_ } map { +$f . "/" . $_ }
sort readdir(DH);
-
+ closedir(DH);
} elsif (-f $f or -p $f) {
push @files, $f;
-
} else {
print STDERR "Skipping $f - not found.\n";
}
# Only mention uninitialized submodules when their
# paths have been specified
test "$#" != "0" &&
- say "Submodule path '$path' not initialized"
+ say "Submodule path '$path' not initialized" &&
say "Maybe you want to use 'update --init'?"
continue
fi
my $v = $opts->{$o};
my ($key) = ($o =~ /^([a-zA-Z\-]+)/);
$key =~ s/-//g;
- my $arg = 'git-config';
+ my $arg = 'git config';
$arg .= ' --int' if ($o =~ /[:=]i$/);
$arg .= ' --bool' if ($o !~ /[:=][sfi]$/);
if (ref $v eq 'ARRAY') {
}
die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;
- my @exec = ('git-commit-tree', $tree);
+ my @exec = ('git', 'commit-tree', $tree);
foreach ($self->get_commit_parents($log_entry)) {
push @exec, '-p', $_;
}
shows repositories only if this file exists in its object database
(if directory has the magic file named $export_ok).
+- Finally, it is possible to specify an arbitrary perl subroutine that
+ will be called for each project to determine if it can be exported.
+ The subroutine receives an absolute path to the project as its only
+ parameter.
+
+ For example, if you use mod_perl to run the script, and have dumb
+ http protocol authentication configured for your repositories, you
+ can use the following hook to allow access only if the user is
+ authorized to read the files:
+
+ $export_auth_hook = sub {
+ use Apache2::SubRequest ();
+ use Apache2::Const -compile => qw(HTTP_OK);
+ my $path = "$_[0]/HEAD";
+ my $r = Apache2::RequestUtil->request;
+ my $sub = $r->lookup_file($path);
+ return $sub->filename eq $path
+ && $sub->status == Apache2::Const::HTTP_OK;
+ };
+
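+ A simpler sketch of such a hook (the whitelist file name below is
+ only an example) would export only the projects listed, one absolute
+ path per line, in a plain text file:
+
+	$export_auth_hook = sub {
+		my ($path) = @_;
+		open my $fh, '<', '/etc/gitweb-export.allow' or return 0;
+		while (my $line = <$fh>) {
+			chomp $line;
+			return 1 if $line eq $path;
+		}
+		return 0;
+	};
+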
+
Generating projects list using gitweb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# if we're called with PATH_INFO, we have to strip that
# from the URL to find our real URL
# we make $path_info global because it's also used later on
-my $path_info = $ENV{"PATH_INFO"};
+our $path_info = $ENV{"PATH_INFO"};
if ($path_info) {
$my_url =~ s,\Q$path_info\E$,,;
$my_uri =~ s,\Q$path_info\E$,,;
# (only effective if this variable evaluates to true)
our $export_ok = "++GITWEB_EXPORT_OK++";
+# show repository only if this subroutine returns true
+# when given the path to the project, for example:
+# sub { return -e "$_[0]/git-daemon-export-ok"; }
+our $export_auth_hook = undef;
+
# only allow viewing of repositories also shown on the overview page
our $strict_export = "++GITWEB_STRICT_EXPORT++";
sub check_export_ok {
my ($dir) = @_;
return (check_head_link($dir) &&
- (!$export_ok || -e "$dir/$export_ok"));
+ (!$export_ok || -e "$dir/$export_ok") &&
+ (!$export_auth_hook || $export_auth_hook->($dir)));
}
# process alternate names for backward compatibility
# together during validation: this allows subsequent uses (e.g. href()) to be
# agnostic of the parameter origin
-my %input_params = ();
+our %input_params = ();
# input parameters are stored with the long parameter name as key. This will
# also be used in the href subroutine to convert parameters to their CGI
# XXX: Warning: If you touch this, check the search form for updating,
# too.
-my @cgi_param_mapping = (
+our @cgi_param_mapping = (
project => "p",
action => "a",
file_name => "f",
extra_options => "opt",
search_use_regexp => "sr",
);
-my %cgi_param_mapping = @cgi_param_mapping;
+our %cgi_param_mapping = @cgi_param_mapping;
# we will also need to know the possible actions, for validation
-my %actions = (
+our %actions = (
"blame" => \&git_blame,
"blobdiff" => \&git_blobdiff,
"blobdiff_plain" => \&git_blobdiff_plain,
# finally, we have the hash of allowed extra_options for the commands that
# allow them
-my %allowed_options = (
+our %allowed_options = (
"--no-merges" => [ qw(rss atom log shortlog history) ],
);
return if $input_params{'action'};
$path_info =~ s,^\Q$project\E/*,,;
- my ($refname, $pathname) = split(/:/, $path_info, 2);
+ # next, check if we have an action
+ my $action = $path_info;
+ $action =~ s,/.*$,,;
+ if (exists $actions{$action}) {
+ $path_info =~ s,^$action/*,,;
+ $input_params{'action'} = $action;
+ }
+
+ # list of actions that want hash_base instead of hash, but can have no
+ # pathname (f) parameter
+ my @wants_base = (
+ 'tree',
+ 'history',
+ );
+
+ # we want to catch
+ # [$hash_parent_base[:$file_parent]..]$hash_parent[:$file_name]
+ my ($parentrefname, $parentpathname, $refname, $pathname) =
+ ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?(.+?)(?::(.+))?$/);
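+ # e.g. "master", "master:/COPYING", or
+ # "v1.0:/Makefile..v1.1:/Makefile" (branch, tag and file names
+ # here are purely illustrative)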
+
+ # first, analyze the 'current' part
if (defined $pathname) {
- # we got "project.git/branch:filename" or "project.git/branch:dir/"
- # we could use git_get_type(branch:pathname), but it needs $git_dir
+ # we got "branch:filename" or "branch:dir/"
+ # we could use git_get_type(branch:pathname), but:
+ # - it needs $git_dir
+ # - it does a git() call
+ # - the convention of terminating directories with a slash
+ # makes it superfluous
+ # - embedding the action in the PATH_INFO would make it even
+ # more superfluous
$pathname =~ s,^/+,,;
if (!$pathname || substr($pathname, -1) eq "/") {
- $input_params{'action'} = "tree";
+ $input_params{'action'} ||= "tree";
$pathname =~ s,/$,,;
} else {
- $input_params{'action'} = "blob_plain";
+ # the default action depends on whether we had parent info
+ # or not
+ if ($parentrefname) {
+ $input_params{'action'} ||= "blobdiff_plain";
+ } else {
+ $input_params{'action'} ||= "blob_plain";
+ }
}
$input_params{'hash_base'} ||= $refname;
$input_params{'file_name'} ||= $pathname;
} elsif (defined $refname) {
- # we got "project.git/branch"
- $input_params{'action'} = "shortlog";
- $input_params{'hash'} ||= $refname;
+ # we got "branch". In this case we have to choose if we have to
+ # set hash or hash_base.
+ #
+ # Most of the actions without a pathname only want hash to be
+ # set, except for the ones specified in @wants_base that want
+ # hash_base instead. It should also be noted that hand-crafted
+ # links having 'history' as an action and no pathname or hash
+ # set will fail, but that happens regardless of PATH_INFO.
+ $input_params{'action'} ||= "shortlog";
+ if (grep { $_ eq $input_params{'action'} } @wants_base) {
+ $input_params{'hash_base'} ||= $refname;
+ } else {
+ $input_params{'hash'} ||= $refname;
+ }
+ }
+
+ # next, handle the 'parent' part, if present
+ if (defined $parentrefname) {
+ # a missing pathspec defaults to the 'current' filename, allowing e.g.
+ # someproject/blobdiff/oldrev..newrev:/filename
+ if ($parentpathname) {
+ $parentpathname =~ s,^/+,,;
+ $parentpathname =~ s,/$,,;
+ $input_params{'file_parent'} ||= $parentpathname;
+ } else {
+ $input_params{'file_parent'} ||= $input_params{'file_name'};
+ }
+ # we assume that hash_parent_base is wanted if a path was specified,
+ # or if the action wants hash_base instead of hash
+ if (defined $input_params{'file_parent'} ||
+ grep { $_ eq $input_params{'action'} } @wants_base) {
+ $input_params{'hash_parent_base'} ||= $parentrefname;
+ } else {
+ $input_params{'hash_parent'} ||= $parentrefname;
+ }
}
}
evaluate_path_info();
my ($use_pathinfo) = gitweb_check_feature('pathinfo');
if ($use_pathinfo) {
- # use PATH_INFO for project name
+ # try to put as many parameters as possible in PATH_INFO:
+ # - project name
+ # - action
+ # - hash_parent or hash_parent_base:/file_parent
+ # - hash or hash_base:/filename
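+ # e.g. someproject/blobdiff/oldrev..newrev:/filename, mirroring
+ # the forms parsed by evaluate_path_info above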
+
+ # When the script is the root DirectoryIndex for the domain,
+ # $href here would be something like http://gitweb.example.com/
+ # Thus, we strip any trailing / from $href, to spare us double
+ # slashes in the final URL
+ $href =~ s,/$,,;
+
+ # Then add the project name, if present
$href .= "/".esc_url($params{'project'}) if defined $params{'project'};
delete $params{'project'};
- # Summary just uses the project path URL
- if (defined $params{'action'} && $params{'action'} eq 'summary') {
+ # Summary just uses the project path URL, any other action is
+ # added to the URL
+ if (defined $params{'action'}) {
+ $href .= "/".esc_url($params{'action'}) unless $params{'action'} eq 'summary';
delete $params{'action'};
}
+
+ # Next, we put hash_parent_base:/file_parent..hash_base:/file_name,
+ # stripping nonexistent or useless pieces
+ $href .= "/" if ($params{'hash_base'} || $params{'hash_parent_base'}
+ || $params{'hash_parent'} || $params{'hash'});
+ if (defined $params{'hash_base'}) {
+ if (defined $params{'hash_parent_base'}) {
+ $href .= esc_url($params{'hash_parent_base'});
+ # skip the file_parent if it's the same as the file_name
+ delete $params{'file_parent'} if $params{'file_parent'} eq $params{'file_name'};
+ if (defined $params{'file_parent'} && $params{'file_parent'} !~ /\.\./) {
+ $href .= ":/".esc_url($params{'file_parent'});
+ delete $params{'file_parent'};
+ }
+ $href .= "..";
+ delete $params{'hash_parent'};
+ delete $params{'hash_parent_base'};
+ } elsif (defined $params{'hash_parent'}) {
+ $href .= esc_url($params{'hash_parent'}). "..";
+ delete $params{'hash_parent'};
+ }
+
+ $href .= esc_url($params{'hash_base'});
+ if (defined $params{'file_name'} && $params{'file_name'} !~ /\.\./) {
+ $href .= ":/".esc_url($params{'file_name'});
+ delete $params{'file_name'};
+ }
+ delete $params{'hash'};
+ delete $params{'hash_base'};
+ } elsif (defined $params{'hash'}) {
+ $href .= esc_url($params{'hash'});
+ delete $params{'hash'};
+ }
}
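For instance, with hypothetical parameters action=>'blobdiff', hash_parent_base=>'master', hash_base=>'next' and file_name=>'README', the block above would produce a path-style link ending in /project.git/blobdiff/master..next:/README instead of a long CGI query string.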
# now encode the parameters explicitly
my $input = shift || return undef;
if (!validate_pathname($input) ||
!(-d "$projectroot/$input") ||
- !check_head_link("$projectroot/$input") ||
- ($export_ok && !(-e "$projectroot/$input/$export_ok")) ||
+ !check_export_ok("$projectroot/$input") ||
($strict_export && !project_in_list($input))) {
return undef;
} else {
my $ctags = {};
$git_dir = "$projectroot/$path";
- foreach (<$git_dir/ctags/*>) {
+ unless (opendir D, "$git_dir/ctags") {
+ return $ctags;
+ }
+ foreach (grep { -f $_ } map { "$git_dir/ctags/$_" } readdir(D)) {
open CT, $_ or next;
my $val = <CT>;
chomp $val;
my $ctag = $_; $ctag =~ s#.*/##;
$ctags->{$ctag} = $val;
}
+ closedir D;
$ctags;
}
if (from <= last_shown)
from = last_shown + 1;
if (last_shown && from != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
while (from < lno) {
pcl = &prev[lno-from-1];
show_line(opt, pcl->bol, pcl->eol,
last_shown = lno-1;
}
if (last_shown && lno != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
if (!opt->count)
show_line(opt, bol, eol, name, lno, ':');
last_shown = last_hit = lno;
* we need to show this line.
*/
if (last_shown && lno != last_shown + 1)
- printf(hunk_mark);
+ fputs(hunk_mark, stdout);
show_line(opt, bol, eol, name, lno, '-');
last_shown = lno;
}
}
if (errstr) {
- error (errstr);
+ error("%s", errstr);
usage_with_options(hash_object_usage, hash_object_options);
}
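A minimal illustration of why the explicit "%s" format matters (the caller and
message below are invented, not part of the patch): a string that happens to
contain printf-style conversions must never be passed as the format itself.

    /* illustration only; the content of errstr is hypothetical */
    static void report(const char *errstr)
    {
            error(errstr);        /* unsafe: any %-conversion in errstr is interpreted */
            error("%s", errstr);  /* safe: errstr is printed verbatim */
    }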
die("pack has bad object at offset %lu: %s", offset, buf);
}
+static void free_base_data(struct base_data *c)
+{
+ if (c->data) {
+ free(c->data);
+ c->data = NULL;
+ base_cache_used -= c->size;
+ }
+}
+
static void prune_base_data(struct base_data *retain)
{
struct base_data *b = base_cache;
for (b = base_cache;
base_cache_used > delta_base_cache_limit && b;
b = b->child) {
- if (b->data && b != retain) {
- free(b->data);
- b->data = NULL;
- base_cache_used -= b->size;
- }
+ if (b->data && b != retain)
+ free_base_data(b);
}
}
c->base = base;
c->child = NULL;
- base_cache_used += c->size;
+ if (c->data)
+ base_cache_used += c->size;
prune_base_data(c);
}
base->child = NULL;
else
base_cache = NULL;
- if (c->data) {
- free(c->data);
- base_cache_used -= c->size;
- }
+ free_base_data(c);
}
static void *unpack_entry_data(unsigned long offset, unsigned long size)
return -first-1;
}
-static int find_delta_children(const union delta_base *base,
- int *first_index, int *last_index)
+static void find_delta_children(const union delta_base *base,
+ int *first_index, int *last_index)
{
int first = find_delta(base);
int last = first;
int end = nr_deltas - 1;
- if (first < 0)
- return -1;
+ if (first < 0) {
+ *first_index = 0;
+ *last_index = -1;
+ return;
+ }
while (first > 0 && !memcmp(&deltas[first - 1].base, base, UNION_BASE_SZ))
--first;
while (last < end && !memcmp(&deltas[last + 1].base, base, UNION_BASE_SZ))
++last;
*first_index = first;
*last_index = last;
- return 0;
}
static void sha1_object(const void *data, unsigned long size,
free(raw);
if (!c->data)
bad_object(obj->idx.offset, "failed to apply delta");
- } else
+ } else {
c->data = get_data_from_pack(obj);
+ c->size = obj->size;
+ }
base_cache_used += c->size;
prune_base_data(c);
}
static void resolve_delta(struct object_entry *delta_obj,
- struct base_data *base_obj, enum object_type type)
+ struct base_data *base, struct base_data *result)
{
- void *delta_data;
- unsigned long delta_size;
- union delta_base delta_base;
- int j, first, last;
- struct base_data result;
+ void *base_data, *delta_data;
- delta_obj->real_type = type;
+ delta_obj->real_type = base->obj->real_type;
delta_data = get_data_from_pack(delta_obj);
- delta_size = delta_obj->size;
- result.data = patch_delta(get_base_data(base_obj), base_obj->size,
- delta_data, delta_size,
- &result.size);
+ base_data = get_base_data(base);
+ result->obj = delta_obj;
+ result->data = patch_delta(base_data, base->size,
+ delta_data, delta_obj->size, &result->size);
free(delta_data);
- if (!result.data)
+ if (!result->data)
bad_object(delta_obj->idx.offset, "failed to apply delta");
- sha1_object(result.data, result.size, type, delta_obj->idx.sha1);
+ sha1_object(result->data, result->size, delta_obj->real_type,
+ delta_obj->idx.sha1);
nr_resolved_deltas++;
+}
+
+static void find_unresolved_deltas(struct base_data *base,
+ struct base_data *prev_base)
+{
+ int i, ref_first, ref_last, ofs_first, ofs_last;
+
+ /*
+ * This is a recursive function. The braces below help reduce
+ * stack usage by limiting the scope of the delta_base union.
+ */
+ {
+ union delta_base base_spec;
+
+ hashcpy(base_spec.sha1, base->obj->idx.sha1);
+ find_delta_children(&base_spec, &ref_first, &ref_last);
+
+ memset(&base_spec, 0, sizeof(base_spec));
+ base_spec.offset = base->obj->idx.offset;
+ find_delta_children(&base_spec, &ofs_first, &ofs_last);
+ }
- result.obj = delta_obj;
- link_base_data(base_obj, &result);
+ if (ref_last == -1 && ofs_last == -1) {
+ free(base->data);
+ return;
+ }
+
+ link_base_data(prev_base, base);
- hashcpy(delta_base.sha1, delta_obj->idx.sha1);
- if (!find_delta_children(&delta_base, &first, &last)) {
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &result, type);
+ for (i = ref_first; i <= ref_last; i++) {
+ struct object_entry *child = objects + deltas[i].obj_no;
+ if (child->real_type == OBJ_REF_DELTA) {
+ struct base_data result;
+ resolve_delta(child, base, &result);
+ if (i == ref_last && ofs_last == -1)
+ free_base_data(base);
+ find_unresolved_deltas(&result, base);
}
}
- memset(&delta_base, 0, sizeof(delta_base));
- delta_base.offset = delta_obj->idx.offset;
- if (!find_delta_children(&delta_base, &first, &last)) {
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_OFS_DELTA)
- resolve_delta(child, &result, type);
+ for (i = ofs_first; i <= ofs_last; i++) {
+ struct object_entry *child = objects + deltas[i].obj_no;
+ if (child->real_type == OBJ_OFS_DELTA) {
+ struct base_data result;
+ resolve_delta(child, base, &result);
+ if (i == ofs_last)
+ free_base_data(base);
+ find_unresolved_deltas(&result, base);
}
}
- unlink_base_data(&result);
+ unlink_base_data(base);
}
static int compare_delta_entry(const void *a, const void *b)
progress = start_progress("Resolving deltas", nr_deltas);
for (i = 0; i < nr_objects; i++) {
struct object_entry *obj = &objects[i];
- union delta_base base;
- int j, ref, ref_first, ref_last, ofs, ofs_first, ofs_last;
struct base_data base_obj;
if (obj->type == OBJ_REF_DELTA || obj->type == OBJ_OFS_DELTA)
continue;
- hashcpy(base.sha1, obj->idx.sha1);
- ref = !find_delta_children(&base, &ref_first, &ref_last);
- memset(&base, 0, sizeof(base));
- base.offset = obj->idx.offset;
- ofs = !find_delta_children(&base, &ofs_first, &ofs_last);
- if (!ref && !ofs)
- continue;
- base_obj.data = get_data_from_pack(obj);
- base_obj.size = obj->size;
base_obj.obj = obj;
- link_base_data(NULL, &base_obj);
-
- if (ref)
- for (j = ref_first; j <= ref_last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &base_obj, obj->type);
- }
- if (ofs)
- for (j = ofs_first; j <= ofs_last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_OFS_DELTA)
- resolve_delta(child, &base_obj, obj->type);
- }
- unlink_base_data(&base_obj);
+ base_obj.data = NULL;
+ find_unresolved_deltas(&base_obj, NULL);
display_progress(progress, nr_resolved_deltas);
}
}
for (i = 0; i < n; i++) {
struct delta_entry *d = sorted_by_pos[i];
enum object_type type;
- int j, first, last;
struct base_data base_obj;
if (objects[d->obj_no].real_type != OBJ_REF_DELTA)
die("local object %s is corrupt", sha1_to_hex(d->base.sha1));
base_obj.obj = append_obj_to_pack(f, d->base.sha1,
base_obj.data, base_obj.size, type);
- link_base_data(NULL, &base_obj);
-
- find_delta_children(&d->base, &first, &last);
- for (j = first; j <= last; j++) {
- struct object_entry *child = objects + deltas[j].obj_no;
- if (child->real_type == OBJ_REF_DELTA)
- resolve_delta(child, &base_obj, type);
- }
-
- unlink_base_data(&base_obj);
+ find_unresolved_deltas(&base_obj, NULL);
display_progress(progress, nr_resolved_deltas);
}
free(sorted_by_pos);
xdemitconf_t xecfg;
xdemitcb_t ecb;
+ memset(&xpp, 0, sizeof(xpp));
xpp.flags = XDF_NEED_MINIMAL;
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 3;
return path;
}
+char *mksnpath(char *buf, size_t n, const char *fmt, ...)
+{
+ va_list args;
+ unsigned len;
+
+ va_start(args, fmt);
+ len = vsnprintf(buf, n, fmt, args);
+ va_end(args);
+ if (len >= n) {
+ strlcpy(buf, bad_path, n);
+ return buf;
+ }
+ return cleanup_path(buf);
+}
+
+static char *git_vsnpath(char *buf, size_t n, const char *fmt, va_list args)
+{
+ const char *git_dir = get_git_dir();
+ size_t len;
+
+ len = strlen(git_dir);
+ if (n < len + 1)
+ goto bad;
+ memcpy(buf, git_dir, len);
+ if (len && !is_dir_sep(git_dir[len-1]))
+ buf[len++] = '/';
+ len += vsnprintf(buf + len, n - len, fmt, args);
+ if (len >= n)
+ goto bad;
+ return cleanup_path(buf);
+bad:
+ strlcpy(buf, bad_path, n);
+ return buf;
+}
+
+char *git_snpath(char *buf, size_t n, const char *fmt, ...)
+{
+ va_list args;
+ va_start(args, fmt);
+ (void)git_vsnpath(buf, n, fmt, args);
+ va_end(args);
+ return buf;
+}
+
+char *git_pathdup(const char *fmt, ...)
+{
+ char path[PATH_MAX];
+ va_list args;
+ va_start(args, fmt);
+ (void)git_vsnpath(path, sizeof(path), fmt, args);
+ va_end(args);
+ return xstrdup(path);
+}
+
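A rough usage sketch for the path helpers added above (the caller code is
invented): git_snpath() formats a $GIT_DIR-relative path into a caller-supplied
buffer, git_pathdup() returns a heap copy the caller must free, and mksnpath()
formats without prepending the git directory. Unlike mkpath()/git_path(), none
of them hand back a shared static buffer that a later call could overwrite.

    char buf[PATH_MAX];
    char *merge_rr;

    /* caller-owned buffer, safe to keep across further path calls */
    git_snpath(buf, sizeof(buf), "logs/%s", "refs/heads/master");

    /* heap-allocated copy; the caller must free() it when done */
    merge_rr = git_pathdup("MERGE_RR");
    free(merge_rr);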
char *mkpath(const char *fmt, ...)
{
va_list args;
goto next;
}
- memcpy(dst, comp_start, comp_len);
+ memmove(dst, comp_start, comp_len);
dst += comp_len;
next:
comp_start = comp_end;
# the method was called upon an instance and (undef, @args) if
# it was called directly.
sub _maybe_self {
- # This breaks inheritance. Oh well.
- ref $_[0] eq 'Git' ? @_ : (undef, @_);
+ UNIVERSAL::isa($_[0], 'Git') ? @_ : (undef, @_);
}
# Check if the command id is something reasonable.
}
}
+char *reencode_commit_message(const struct commit *commit, const char **encoding_p)
+{
+ const char *encoding;
+
+ encoding = (git_log_output_encoding
+ ? git_log_output_encoding
+ : git_commit_encoding);
+ if (!encoding)
+ encoding = "utf-8";
+ if (encoding_p)
+ *encoding_p = encoding;
+ return logmsg_reencode(commit, encoding);
+}
+
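A sketch of how a caller such as a blame-style command might use the helper
above (only reencode_commit_message() itself comes from the patch; the rest is
illustrative). logmsg_reencode() returns NULL when no conversion is needed, so
the caller falls back to the commit's raw buffer:

    const char *encoding;
    char *reencoded = reencode_commit_message(commit, &encoding);
    const char *message = reencoded ? reencoded : commit->buffer;

    /* parse author/committer/subject out of `message`, which is now in
     * `encoding` (log output encoding, commit encoding, or "utf-8") */

    free(reencoded);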
void pretty_print_commit(enum cmit_fmt fmt, const struct commit *commit,
struct strbuf *sb, int abbrev,
const char *subject, const char *after_subject,
return;
}
- encoding = (git_log_output_encoding
- ? git_log_output_encoding
- : git_commit_encoding);
- if (!encoding)
- encoding = "utf-8";
- reencoded = logmsg_reencode(commit, encoding);
+ reencoded = reencode_commit_message(commit, &encoding);
if (reencoded) {
msg = reencoded;
}
die("index file corrupt");
}
+int is_index_unborn(struct index_state *istate)
+{
+ return (!istate->cache_nr && !istate->alloc && !istate->timestamp);
+}
+
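A small, invented example of where the new predicate helps: telling a freshly
initialized repository, whose index has never been written, apart from an
ordinary empty index.

    read_cache();
    if (is_index_unborn(&the_index)) {
            /* no index file was ever written; e.g. let checkout behave
             * as if it were switching away from an empty tree */
    }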
int discard_index(struct index_state *istate)
{
istate->cache_nr = 0;
*flag = 0;
for (;;) {
- const char *path = git_path("%s", ref);
+ char path[PATH_MAX];
struct stat st;
char *buf;
int fd;
if (--depth < 0)
return NULL;
+ git_snpath(path, sizeof(path), "%s", ref);
/* Special case: non-existing file. */
if (lstat(path, &st) < 0) {
struct ref_list *list = get_packed_refs();
char *ref_file;
const char *orig_ref = ref;
struct ref_lock *lock;
- struct stat st;
int last_errno = 0;
int type, lflags;
int mustexist = (old_sha1 && !is_null_sha1(old_sha1));
+ int missing = 0;
lock = xcalloc(1, sizeof(struct ref_lock));
lock->lock_fd = -1;
orig_ref, strerror(errno));
goto error_return;
}
+ missing = is_null_sha1(lock->old_sha1);
/* When the ref did not exist and we are creating it,
* make sure there is no existing ref that is packed
* whose name begins with our refname, nor a ref whose
* name is a proper prefix of our refname.
*/
- if (is_null_sha1(lock->old_sha1) &&
+ if (missing &&
!is_refname_available(ref, NULL, get_packed_refs(), 0))
goto error_return;
lock->ref_name = xstrdup(ref);
lock->orig_ref_name = xstrdup(orig_ref);
ref_file = git_path("%s", ref);
- if (lstat(ref_file, &st) && errno == ENOENT)
+ if (missing)
lock->force_write = 1;
if ((flags & REF_NODEREF) && (type & REF_ISSYMREF))
lock->force_write = 1;
return commit_lock_file(&packlock);
}
-int delete_ref(const char *refname, const unsigned char *sha1)
+int delete_ref(const char *refname, const unsigned char *sha1, int delopt)
{
struct ref_lock *lock;
- int err, i, ret = 0, flag = 0;
+ int err, i = 0, ret = 0, flag = 0;
lock = lock_ref_sha1_basic(refname, sha1, 0, &flag);
if (!lock)
return 1;
- if (!(flag & REF_ISPACKED)) {
+ if (!(flag & REF_ISPACKED) || flag & REF_ISSYMREF) {
/* loose */
- i = strlen(lock->lk->filename) - 5; /* .lock */
- lock->lk->filename[i] = 0;
- err = unlink(lock->lk->filename);
+ const char *path;
+
+ if (!(delopt & REF_NODEREF)) {
+ i = strlen(lock->lk->filename) - 5; /* .lock */
+ lock->lk->filename[i] = 0;
+ path = lock->lk->filename;
+ } else {
+ path = git_path("%s", refname);
+ }
+ err = unlink(path);
if (err && errno != ENOENT) {
ret = 1;
error("unlink(%s) failed: %s",
- lock->lk->filename, strerror(errno));
+ path, strerror(errno));
}
- lock->lk->filename[i] = '.';
+ if (!(delopt & REF_NODEREF))
+ lock->lk->filename[i] = '.';
}
/* removing the loose one could have resurrected an earlier
* packed one. Also, if it was not loose we need to repack
struct ref_lock *lock;
struct stat loginfo;
int log = !lstat(git_path("logs/%s", oldref), &loginfo);
+ const char *symref = NULL;
- if (S_ISLNK(loginfo.st_mode))
+ if (log && S_ISLNK(loginfo.st_mode))
return error("reflog for %s is a symlink", oldref);
- if (!resolve_ref(oldref, orig_sha1, 1, &flag))
+ symref = resolve_ref(oldref, orig_sha1, 1, &flag);
+ if (flag & REF_ISSYMREF)
+ return error("refname %s is a symbolic ref, renaming it is not supported",
+ oldref);
+ if (!symref)
return error("refname %s not found", oldref);
if (!is_refname_available(newref, oldref, get_packed_refs(), 0))
return error("unable to move logfile logs/%s to tmp-renamed-log: %s",
oldref, strerror(errno));
- if (delete_ref(oldref, orig_sha1)) {
+ if (delete_ref(oldref, orig_sha1, REF_NODEREF)) {
error("unable to delete old %s", oldref);
goto rollback;
}
- if (resolve_ref(newref, sha1, 1, &flag) && delete_ref(newref, sha1)) {
+ if (resolve_ref(newref, sha1, 1, &flag) && delete_ref(newref, sha1, REF_NODEREF)) {
if (errno==EISDIR) {
if (remove_empty_directories(git_path("%s", newref))) {
error("Directory not empty: %s", newref);
error("unable to lock %s for update", newref);
goto rollback;
}
-
lock->force_write = 1;
hashcpy(lock->old_sha1, orig_sha1);
if (write_ref_sha1(lock, orig_sha1, logmsg)) {
int logfd, written, oflags = O_APPEND | O_WRONLY;
unsigned maxlen, len;
int msglen;
- char *log_file, *logrec;
+ char log_file[PATH_MAX];
+ char *logrec;
const char *committer;
if (log_all_ref_updates < 0)
log_all_ref_updates = !is_bare_repository();
- log_file = git_path("logs/%s", ref_name);
+ git_snpath(log_file, sizeof(log_file), "logs/%s", ref_name);
if (log_all_ref_updates &&
(!prefixcmp(ref_name, "refs/heads/") ||
const char *lockpath;
char ref[1000];
int fd, len, written;
- char *git_HEAD = xstrdup(git_path("%s", ref_target));
+ char *git_HEAD = git_pathdup("%s", ref_target);
unsigned char old_sha1[20], new_sha1[20];
if (logmsg && read_ref(ref_target, old_sha1))
if (!is_rerere_enabled())
return -1;
- merge_rr_path = xstrdup(git_path("MERGE_RR"));
+ merge_rr_path = git_pathdup("MERGE_RR");
fd = hold_lock_file_for_update(&write_lock, merge_rr_path,
LOCK_DIE_ON_ERROR);
read_rr(merge_rr);
static int update_info_refs(int force)
{
- char *path0 = xstrdup(git_path("info/refs"));
+ char *path0 = git_pathdup("info/refs");
int len = strlen(path0);
char *path1 = xmalloc(len + 2);
*ref = NULL;
for (p = ref_rev_parse_rules; *p; p++) {
+ char fullref[PATH_MAX];
unsigned char sha1_from_ref[20];
unsigned char *this_result;
this_result = refs_found ? sha1_from_ref : sha1;
- r = resolve_ref(mkpath(*p, len, str), this_result, 1, NULL);
+ mksnpath(fullref, sizeof(fullref), *p, len, str);
+ r = resolve_ref(fullref, this_result, 1, NULL);
if (r) {
if (!refs_found++)
*ref = xstrdup(r);
char path[PATH_MAX];
const char *ref, *it;
- strcpy(path, mkpath(*p, len, str));
+ mksnpath(path, sizeof(path), *p, len, str);
ref = resolve_ref(path, hash, 1, NULL);
if (!ref)
continue;
'
rm -f .git/$m
+cp -f .git/HEAD .git/HEAD.orig
+test_expect_success "delete symref without dereference" '
+ git update-ref --no-deref -d HEAD &&
+ ! test -f .git/HEAD
+'
+cp -f .git/HEAD.orig .git/HEAD
+
+test_expect_success "delete symref without dereference when the referred ref is packed" '
+ echo foo >foo.c &&
+ git add foo.c &&
+ git commit -m foo &&
+ git pack-refs --all &&
+ git update-ref --no-deref -d HEAD &&
+ ! test -f .git/HEAD
+'
+cp -f .git/HEAD.orig .git/HEAD
+git update-ref -d $m
+
test_expect_success '(not) create HEAD with old sha1' "
test_must_fail git update-ref HEAD $A $B
"
--- /dev/null
+#!/bin/sh
+
+test_description='checkout switching away from an invalid branch'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ echo hello >world &&
+ git add world &&
+ git commit -m initial
+'
+
+test_expect_success 'checkout master from invalid HEAD' '
+ echo 0000000000000000000000000000000000000000 >.git/HEAD &&
+ git checkout master --
+'
+
+test_done
"test $(git config branch.s.dummy) = Hello &&
test_must_fail git config branch.s/s/dummy"
+test_expect_success 'renaming a symref is not allowed' \
+'
+ git symbolic-ref refs/heads/master2 refs/heads/master &&
+ test_must_fail git branch -m master2 master3 &&
+ git symbolic-ref refs/heads/master2 &&
+ test -f .git/refs/heads/master &&
+ ! test -f .git/refs/heads/master3
+'
+
test_expect_success \
'git branch -m u v should fail when the reflog for u is a symlink' '
git branch -l u &&
git branch -d n/o/p &&
git branch n'
+test_expect_success \
+ 'see if up-to-date packed refs are preserved' \
+ 'git branch q &&
+ git pack-refs --all --prune &&
+ git update-ref refs/heads/q refs/heads/q &&
+ ! test -f .git/refs/heads/q'
+
test_expect_success 'pack, prune and repack' '
git tag foo &&
git pack-refs --all --prune &&
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2008 Stephen Haberman
+#
+
+test_description='git rebase preserve merges
+
+This test runs git rebase with -p and tries to squash a commit from after a merge
+to before the merge.
+'
+. ./test-lib.sh
+
+# Copy/paste from t3404-rebase-interactive.sh
+echo "#!$SHELL_PATH" >fake-editor.sh
+cat >> fake-editor.sh <<\EOF
+case "$1" in
+*/COMMIT_EDITMSG)
+ test -z "$FAKE_COMMIT_MESSAGE" || echo "$FAKE_COMMIT_MESSAGE" > "$1"
+ test -z "$FAKE_COMMIT_AMEND" || echo "$FAKE_COMMIT_AMEND" >> "$1"
+ exit
+ ;;
+esac
+test -z "$EXPECT_COUNT" ||
+ test "$EXPECT_COUNT" = $(sed -e '/^#/d' -e '/^$/d' < "$1" | wc -l) ||
+ exit
+test -z "$FAKE_LINES" && exit
+grep -v '^#' < "$1" > "$1".tmp
+rm -f "$1"
+cat "$1".tmp
+action=pick
+for line in $FAKE_LINES; do
+ case $line in
+ squash|edit)
+ action="$line";;
+ *)
+ echo sed -n "${line}s/^pick/$action/p"
+ sed -n "${line}p" < "$1".tmp
+ sed -n "${line}s/^pick/$action/p" < "$1".tmp >> "$1"
+ action=pick;;
+ esac
+done
+EOF
+
+test_set_editor "$(pwd)/fake-editor.sh"
+chmod a+x fake-editor.sh
+
+# set up two branches like this:
+#
+# A1 - B1 - D1 - E1 - F1
+# \ /
+# -- C1 --
+
+test_expect_success 'setup' '
+ touch a &&
+ touch b &&
+ git add a &&
+ git commit -m A1 &&
+ git tag A1 &&
+ git add b &&
+ git commit -m B1 &&
+ git tag B1 &&
+ git checkout -b branch &&
+ touch c &&
+ git add c &&
+ git commit -m C1 &&
+ git checkout master &&
+ touch d &&
+ git add d &&
+ git commit -m D1 &&
+ git merge branch &&
+ touch f &&
+ git add f &&
+ git commit -m F1 &&
+ git tag F1
+'
+
+# Should result in:
+#
+# A1 - B1 - D2 - E2
+# \ /
+# -- C1 --
+#
+test_expect_success 'squash F1 into D1' '
+ FAKE_LINES="1 squash 3 2" git rebase -i -p B1 &&
+ test "$(git rev-parse HEAD^2)" = "$(git rev-parse branch)" &&
+ test "$(git rev-parse HEAD~2)" = "$(git rev-parse B1)" &&
+ git tag E2
+'
+
+# Start with:
+#
+# A1 - B1 - D2 - E2
+# \
+# G1 ---- L1 ---- M1
+# \ /
+# H1 -- J1 -- K1
+# \ /
+# -- I1 --
+#
+# And rebase G1..M1 onto E2
+
+test_expect_success 'rebase two levels of merge' '
+ git checkout -b branch2 A1 &&
+ touch g &&
+ git add g &&
+ git commit -m G1 &&
+ git checkout -b branch3 &&
+ touch h &&
+ git add h &&
+ git commit -m H1 &&
+ git checkout -b branch4 &&
+ touch i &&
+ git add i &&
+ git commit -m I1 &&
+ git tag I1 &&
+ git checkout branch3 &&
+ touch j &&
+ git add j &&
+ git commit -m J1 &&
+ git merge I1 --no-commit &&
+ git commit -m K1 &&
+ git tag K1 &&
+ git checkout branch2 &&
+ touch l &&
+ git add l &&
+ git commit -m L1 &&
+ git merge K1 --no-commit &&
+ git commit -m M1 &&
+ GIT_EDITOR=: git rebase -i -p E2 &&
+ test "$(git rev-parse HEAD~3)" = "$(git rev-parse E2)" &&
+ test "$(git rev-parse HEAD~2)" = "$(git rev-parse HEAD^2^2~2)" &&
+ test "$(git rev-parse HEAD^2^1^1)" = "$(git rev-parse HEAD^2^2^1)"
+'
+
+test_done
git update-ref HEAD $(TZ=GMT GIT_COMMITTER_DATE="2005-05-27 22:00:00" \
git commit-tree $treeid </dev/null)'
+test_expect_success \
+ 'create bare clone' \
+ 'git clone --bare . bare.git &&
+ cp .gitattributes bare.git/info/attributes'
+
test_expect_success \
'remove ignored file' \
'rm a/ignored'
'git archive vs. git tar-tree' \
'diff b.tar b2.tar'
+test_expect_success \
+ 'git archive in a bare repo' \
+ '(cd bare.git && git archive HEAD) >b3.tar'
+
+test_expect_success \
+ 'git archive vs. the same in a bare repo' \
+ 'test_cmp b.tar b3.tar'
+
test_expect_success \
'validate file modification time' \
'mkdir extract &&
"$TAR" xf b.tar -C extract a/a &&
- perl -e '\''print((stat("extract/a/a"))[9], "\n")'\'' >b.mtime &&
+ test-chmtime -v +0 extract/a/a |cut -f 1 >b.mtime &&
echo "1117231200" >expected.mtime &&
diff expected.mtime b.mtime'
'git archive --format=zip' \
'git archive --format=zip HEAD >d.zip'
+test_expect_success \
+ 'git archive --format=zip in a bare repo' \
+ '(cd bare.git && git archive --format=zip HEAD) >d1.zip'
+
+test_expect_success \
+ 'git archive --format=zip vs. the same in a bare repo' \
+ 'test_cmp d.zip d1.zip'
+
$UNZIP -v >/dev/null 2>&1
if [ $? -eq 127 ]; then
echo "Skipping ZIP tests, because unzip was not found"
)
'
+test_expect_success 'tolerate absurdly small packsizelimit' '
+ git config pack.packSizeLimit 2 &&
+ packname_9=$(git pack-objects test-9 <obj-list) &&
+ test $(wc -l <obj-list) = $(ls test-9-*.pack | wc -l)
+'
+
test_done
'rm -rf .git
git init &&
i=1 &&
- while test $i -le 100
+ while test $i -le 100
do
- i=`printf '%03i' $i`
- echo $i >file_$i &&
- test-genrandom "$i" 8192 >>file_$i &&
- git update-index --add file_$i &&
- i=`expr $i + 1` || return 1
+ iii=`printf '%03i' $i`
+ test-genrandom "bar" 200 > wide_delta_$iii &&
+ test-genrandom "baz $iii" 50 >> wide_delta_$iii &&
+ test-genrandom "foo"$i 100 > deep_delta_$iii &&
+ test-genrandom "foo"`expr $i + 1` 100 >> deep_delta_$iii &&
+ test-genrandom "foo"`expr $i + 2` 100 >> deep_delta_$iii &&
+ echo $iii >file_$iii &&
+ test-genrandom "$iii" 8192 >>file_$iii &&
+ git update-index --add file_$iii deep_delta_$iii wide_delta_$iii &&
+ i=`expr $i + 1` || return 1
done &&
{ echo 101 && test-genrandom 100 8192; } >file_101 &&
git update-index --add file_101 &&
'64-bit offsets: index-pack result should match pack-objects one' \
'cmp "test-3-${pack3}.idx" "3.idx"'
+# returns the object number for given object in given pack index
+index_obj_nr()
+{
+ idx_file=$1
+ object_sha1=$2
+ nr=0
+ git show-index < $idx_file |
+ while read offs sha1 extra
+ do
+ nr=$(($nr + 1))
+ test "$sha1" = "$object_sha1" || continue
+ echo "$(($nr - 1))"
+ break
+ done
+}
+
+# returns the pack offset for given object as found in given pack index
+index_obj_offset()
+{
+ idx_file=$1
+ object_sha1=$2
+ git show-index < $idx_file | grep $object_sha1 |
+ ( read offs extra && echo "$offs" )
+}
+
test_expect_success \
'[index v1] 1) stream pack to repository' \
'git index-pack --index-version=1 --stdin < "test-1-${pack1}.pack" &&
test_expect_success \
'[index v1] 2) create a stealth corruption in a delta base reference' \
- '# this test assumes a delta smaller than 16 bytes at the end of the pack
- git show-index <1.idx | sort -n | sed -ne \$p | (
- read delta_offs delta_sha1 &&
- git cat-file blob "$delta_sha1" > blob_1 &&
- chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
- dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($delta_offs + 1)) \
- if=".git/objects/pack/pack-${pack1}.idx" skip=$((256 * 4 + 4)) \
- bs=1 count=20 conv=notrunc &&
- git cat-file blob "$delta_sha1" > blob_2 )'
+ '# This test assumes file_101 is a delta smaller than 16 bytes.
+ # It should be against file_100 but we substitute file_099 for its base
+ sha1_101=`git hash-object file_101` &&
+ sha1_099=`git hash-object file_099` &&
+ offs_101=`index_obj_offset 1.idx $sha1_101` &&
+ nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
+ dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
+ if=".git/objects/pack/pack-${pack1}.idx" \
+ skip=$((4 + 256 * 4 + $nr_099 * 24)) \
+ bs=1 count=20 conv=notrunc &&
+ git cat-file blob $sha1_101 > file_101_foo1'
test_expect_success \
'[index v1] 3) corrupted delta happily returned wrong data' \
- '! cmp blob_1 blob_2'
+ 'test -f file_101_foo1 && ! cmp file_101 file_101_foo1'
test_expect_success \
'[index v1] 4) confirm that the pack is actually corrupted' \
test_expect_success \
'[index v2] 2) create a stealth corruption in a delta base reference' \
- '# this test assumes a delta smaller than 16 bytes at the end of the pack
- git show-index <1.idx | sort -n | sed -ne \$p | (
- read delta_offs delta_sha1 delta_crc &&
- git cat-file blob "$delta_sha1" > blob_3 &&
- chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
- dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($delta_offs + 1)) \
- if=".git/objects/pack/pack-${pack1}.idx" skip=$((8 + 256 * 4)) \
- bs=1 count=20 conv=notrunc &&
- git cat-file blob "$delta_sha1" > blob_4 )'
+ '# This test assumes file_101 is a delta smaller than 16 bytes.
+ # It should be against file_100 but we substitute file_099 for its base
+ sha1_101=`git hash-object file_101` &&
+ sha1_099=`git hash-object file_099` &&
+ offs_101=`index_obj_offset 1.idx $sha1_101` &&
+ nr_099=`index_obj_nr 1.idx $sha1_099` &&
+ chmod +w ".git/objects/pack/pack-${pack1}.pack" &&
+ dd of=".git/objects/pack/pack-${pack1}.pack" seek=$(($offs_101 + 1)) \
+ if=".git/objects/pack/pack-${pack1}.idx" \
+ skip=$((8 + 256 * 4 + $nr_099 * 20)) \
+ bs=1 count=20 conv=notrunc &&
+ git cat-file blob $sha1_101 > file_101_foo2'
test_expect_success \
'[index v2] 3) corrupted delta happily returned wrong data' \
- '! cmp blob_3 blob_4'
+ 'test -f file_101_foo2 && ! cmp file_101 file_101_foo2'
test_expect_success \
'[index v2] 4) confirm that the pack is actually corrupted' \
'rm -f .git/objects/pack/* &&
git index-pack --index-version=2 --stdin < "test-1-${pack1}.pack" &&
git verify-pack ".git/objects/pack/pack-${pack1}.pack" &&
+ obj=`git hash-object file_001` &&
+ nr=`index_obj_nr ".git/objects/pack/pack-${pack1}.idx" $obj` &&
chmod +w ".git/objects/pack/pack-${pack1}.idx" &&
dd if=/dev/zero of=".git/objects/pack/pack-${pack1}.idx" conv=notrunc \
- bs=1 count=4 seek=$((8 + 256 * 4 + `wc -l <obj-list` * 20 + 0)) &&
+ bs=1 count=4 seek=$((8 + 256 * 4 + `wc -l <obj-list` * 20 + $nr * 4)) &&
( while read obj
do git cat-file -p $obj >/dev/null || exit 1
done <obj-list ) &&
HOME=`pwd`/no-such-directory
export HOME ;# this way we force the victim/.git/config to be used.
+test_expect_success \
+ 'pushing a delete should be denied with denyDeletes' '
+ cd victim &&
+ git config receive.denyDeletes true &&
+ git branch extra master &&
+ cd .. &&
+ test -f victim/.git/refs/heads/extra &&
+ test_must_fail git send-pack ./victim/.git/ :extra master
+'
+rm -f victim/.git/refs/heads/extra
+
test_expect_success \
'pushing with --force should be denied with denyNonFastforwards' '
cd victim &&
'
+test_expect_success 'push updates up-to-date local refs' '
+
+ rm -rf parent child &&
+ mkdir parent &&
+ (cd parent && git init &&
+ echo one >foo && git add foo && git commit -m one) &&
+ git clone parent child1 &&
+ git clone parent child2 &&
+ (cd child1 &&
+ echo two >foo && git commit -a -m two &&
+ git push) &&
+ (cd child2 &&
+ git pull ../child1 master &&
+ git push &&
+ test $(git rev-parse master) = $(git rev-parse remotes/origin/master))
+
+'
+
+test_expect_success 'push preserves up-to-date packed refs' '
+
+ rm -rf parent child &&
+ mkdir parent &&
+ (cd parent && git init &&
+ echo one >foo && git add foo && git commit -m one) &&
+ git clone parent child &&
+ (cd child &&
+ git push &&
+ ! test -f .git/refs/remotes/origin/master)
+
+'
+
test_expect_success 'push does not update local refs on failure' '
rm -rf parent child &&
git cat-file tag tag-from-subdir-2 | grep "in sub directory"
'
+# mixing modes and options:
+
+test_expect_success 'mixing incompatible modes and options is forbidden' '
+ test_must_fail git tag -a &&
+ test_must_fail git tag -l -v &&
+ test_must_fail git tag -n 100 &&
+ test_must_fail git tag -l -m msg &&
+ test_must_fail git tag -l -F some file &&
+ test_must_fail git tag -v -s
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='git blame encoding conversion'
+. ./test-lib.sh
+
+. "$TEST_DIRECTORY"/t8005/utf8.txt
+. "$TEST_DIRECTORY"/t8005/cp1251.txt
+. "$TEST_DIRECTORY"/t8005/sjis.txt
+
+test_expect_success 'setup the repository' '
+ # Create the file
+ echo "UTF-8 LINE" > file &&
+ git add file &&
+ git commit --author "$UTF8_NAME <utf8@localhost>" -m "$UTF8_MSG" &&
+
+ echo "CP1251 LINE" >> file &&
+ git add file &&
+ git config i18n.commitencoding cp1251 &&
+ git commit --author "$CP1251_NAME <cp1251@localhost>" -m "$CP1251_MSG" &&
+
+ echo "SJIS LINE" >> file &&
+ git add file &&
+ git config i18n.commitencoding shift-jis &&
+ git commit --author "$SJIS_NAME <sjis@localhost>" -m "$SJIS_MSG"
+'
+
+cat >expected <<EOF
+author $SJIS_NAME
+summary $SJIS_MSG
+author $SJIS_NAME
+summary $SJIS_MSG
+author $SJIS_NAME
+summary $SJIS_MSG
+EOF
+
+test_expect_success \
+ 'blame respects i18n.commitencoding' '
+ git blame --incremental file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $CP1251_NAME
+summary $CP1251_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+EOF
+
+test_expect_success \
+ 'blame respects i18n.logoutputencoding' '
+ git config i18n.logoutputencoding cp1251 &&
+ git blame --incremental file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $UTF8_NAME
+summary $UTF8_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+EOF
+
+test_expect_success \
+ 'blame respects --encoding=utf-8' '
+ git blame --incremental --encoding=utf-8 file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+cat >expected <<EOF
+author $SJIS_NAME
+summary $SJIS_MSG
+author $CP1251_NAME
+summary $CP1251_MSG
+author $UTF8_NAME
+summary $UTF8_MSG
+EOF
+
+test_expect_success \
+ 'blame respects --encoding=none' '
+ git blame --incremental --encoding=none file | \
+ grep "^\(author\|summary\) " > actual &&
+ test_cmp actual expected
+'
+
+test_done
--- /dev/null
+CP1251_NAME="Èâàí Ïåòðîâè÷ Ñèäîðîâ"
+CP1251_MSG="Òåñòîâîå ñîîáùåíèå"
--- /dev/null
+SJIS_NAME="\84I\84r\84p\84~ \84P\84u\84\84\84\82\84\80\84r\84y\84\89 \84R\84y\84t\84\80\84\82\84\80\84r"
+SJIS_MSG="\84S\84u\84\83\84\84\84\80\84r\84\80\84u \84\83\84\80\84\80\84q\84\8b\84u\84~\84y\84u"
--- /dev/null
+UTF8_NAME="Иван Петрович Сидоров"
+UTF8_MSG="Тестовое сообщение"
+/*
+ * This program can either change modification time of the given
+ * file(s) or just print it. The program changes neither atime nor
+ * ctime (their values are explicitly preserved).
+ *
+ * The mtime can be changed to an absolute value:
+ *
+ * test-chmtime =<seconds> file...
+ *
+ * Relative to the current time as returned by time(3):
+ *
+ * test-chmtime =+<seconds> (or =-<seconds>) file...
+ *
+ * Or relative to the current mtime of the file:
+ *
+ * test-chmtime <seconds> file...
+ * test-chmtime +<seconds> (or -<seconds>) file...
+ *
+ * Examples:
+ *
+ * To just print the mtime use --verbose and set the file mtime offset to 0:
+ *
+ * test-chmtime -v +0 file
+ *
+ * To set the mtime to current time:
+ *
+ * test-chmtime =+0 file
+ *
+ */
#include "git-compat-util.h"
#include <utime.h>
-static const char usage_str[] = "(+|=|=+|=-|-)<seconds> <file>...";
+static const char usage_str[] = "-v|--verbose (+|=|=+|=-|-)<seconds> <file>...";
-int main(int argc, const char *argv[])
+static int timespec_arg(const char *arg, long int *set_time, int *set_eq)
{
- int i;
- int set_eq;
- long int set_time;
char *test;
- const char *timespec;
-
- if (argc < 3)
- goto usage;
-
- timespec = argv[1];
- set_eq = (*timespec == '=') ? 1 : 0;
- if (set_eq) {
+ const char *timespec = arg;
+ *set_eq = (*timespec == '=') ? 1 : 0;
+ if (*set_eq) {
timespec++;
if (*timespec == '+') {
- set_eq = 2; /* relative "in the future" */
+ *set_eq = 2; /* relative "in the future" */
timespec++;
}
}
- set_time = strtol(timespec, &test, 10);
+ *set_time = strtol(timespec, &test, 10);
if (*test) {
- fprintf(stderr, "Not a base-10 integer: %s\n", argv[1] + 1);
- goto usage;
+ fprintf(stderr, "Not a base-10 integer: %s\n", arg + 1);
+ return 0;
}
- if ((set_eq && set_time < 0) || set_eq == 2) {
+ if ((*set_eq && *set_time < 0) || *set_eq == 2) {
time_t now = time(NULL);
- set_time += now;
+ *set_time += now;
}
+ return 1;
+}
+
+int main(int argc, const char *argv[])
+{
+ static int verbose;
- for (i = 2; i < argc; i++) {
+ int i = 1;
+ /* no mtime change by default */
+ int set_eq = 0;
+ long int set_time = 0;
+
+ if (argc < 3)
+ goto usage;
+
+ if (strcmp(argv[i], "--verbose") == 0 || strcmp(argv[i], "-v") == 0) {
+ verbose = 1;
+ ++i;
+ }
+ if (timespec_arg(argv[i], &set_time, &set_eq))
+ ++i;
+ else
+ goto usage;
+
+ for (; i < argc; i++) {
struct stat sb;
struct utimbuf utb;
utb.actime = sb.st_atime;
utb.modtime = set_eq ? set_time : sb.st_mtime + set_time;
- if (utime(argv[i], &utb) < 0) {
+ if (verbose) {
+ uintmax_t mtime = utb.modtime < 0 ? 0: utb.modtime;
+ printf("%"PRIuMAX"\t%s\n", mtime, argv[i]);
+ }
+
+ if (utb.modtime != sb.st_mtime && utime(argv[i], &utb) < 0) {
fprintf(stderr, "Failed to modify time on %s: %s\n",
argv[i], strerror(errno));
return -1;
discard_index(&o->result);
if (!o->gently) {
if (message)
- return error(message);
+ return error("%s", message);
return -1;
}
return -1;
o->merge_size = len;
if (!dfc)
- dfc = xcalloc(1, sizeof(struct cache_entry) + 1);
+ dfc = xcalloc(1, cache_entry_size(0));
o->df_conflict_entry = dfc;
if (len) {
#include "cache.h"
#include "xdiff-interface.h"
-#include "strbuf.h"
+#include "xdiff/xtypes.h"
+#include "xdiff/xdiffi.h"
+#include "xdiff/xemit.h"
+#include "xdiff/xmacros.h"
struct xdiff_emit_state {
xdiff_emit_consume_fn consume;
return ret;
}
+struct xdiff_emit_hunk_state {
+ xdiff_emit_hunk_consume_fn consume;
+ void *consume_callback_data;
+};
+
+static int process_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg)
+{
+ long s1, s2, same, p_next, t_next;
+ xdchange_t *xch, *xche;
+ struct xdiff_emit_hunk_state *state = ecb->priv;
+ xdiff_emit_hunk_consume_fn fn = state->consume;
+ void *consume_callback_data = state->consume_callback_data;
+
+ for (xch = xscr; xch; xch = xche->next) {
+ xche = xdl_get_hunk(xch, xecfg);
+
+ s1 = XDL_MAX(xch->i1 - xecfg->ctxlen, 0);
+ s2 = XDL_MAX(xch->i2 - xecfg->ctxlen, 0);
+ same = s2 + XDL_MAX(xch->i1 - s1, 0);
+ p_next = xche->i1 + xche->chg1;
+ t_next = xche->i2 + xche->chg2;
+
+ fn(consume_callback_data, same, p_next, t_next);
+ }
+ return 0;
+}
+
+int xdi_diff_hunks(mmfile_t *mf1, mmfile_t *mf2,
+ xdiff_emit_hunk_consume_fn fn, void *consume_callback_data,
+ xpparam_t const *xpp, xdemitconf_t *xecfg)
+{
+ struct xdiff_emit_hunk_state state;
+ xdemitcb_t ecb;
+
+ memset(&state, 0, sizeof(state));
+ memset(&ecb, 0, sizeof(ecb));
+ state.consume = fn;
+ state.consume_callback_data = consume_callback_data;
+ xecfg->emit_func = (void (*)())process_diff;
+ ecb.priv = &state;
+ return xdi_diff(mf1, mf2, xpp, xecfg, &ecb);
+}
+
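A usage sketch for the new entry point (the counting callback and wrapper are
invented for illustration): xdi_diff_hunks() walks the hunks of a diff and
passes each hunk's coordinates to the callback instead of emitting text.

    static void count_hunk(void *priv, long same, long p_next, long t_next)
    {
            (*(int *)priv)++;
    }

    static int count_hunks(mmfile_t *a, mmfile_t *b)
    {
            xpparam_t xpp;
            xdemitconf_t xecfg;
            int hunks = 0;

            memset(&xpp, 0, sizeof(xpp));
            memset(&xecfg, 0, sizeof(xecfg));
            xpp.flags = XDF_NEED_MINIMAL;
            if (xdi_diff_hunks(a, b, count_hunk, &hunks, &xpp, &xecfg) < 0)
                    return -1;
            return hunks;
    }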
int read_mmfile(mmfile_t *ptr, const char *filename)
{
struct stat st;
#include "xdiff/xdiff.h"
typedef void (*xdiff_emit_consume_fn)(void *, char *, unsigned long);
+typedef void (*xdiff_emit_hunk_consume_fn)(void *, long, long, long);
int xdi_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp, xdemitconf_t const *xecfg, xdemitcb_t *ecb);
int xdi_diff_outf(mmfile_t *mf1, mmfile_t *mf2,
xdiff_emit_consume_fn fn, void *consume_callback_data,
xpparam_t const *xpp,
xdemitconf_t const *xecfg, xdemitcb_t *xecb);
+int xdi_diff_hunks(mmfile_t *mf1, mmfile_t *mf2,
+ xdiff_emit_hunk_consume_fn fn, void *consume_callback_data,
+ xpparam_t const *xpp, xdemitconf_t *xecfg);
int parse_hunk_header(char *line, int len,
int *ob, int *on,
int *nb, int *nn);
unsigned long flags;
find_func_t find_func;
void *find_func_priv;
+ void (*emit_func)();
} xdemitconf_t;
typedef struct s_bdiffparam {
xdemitconf_t const *xecfg, xdemitcb_t *ecb) {
xdchange_t *xscr;
xdfenv_t xe;
+ emit_func_t ef = xecfg->emit_func ?
+ (emit_func_t)xecfg->emit_func : xdl_emit_diff;
if (xdl_do_diff(mf1, mf2, xpp, &xe) < 0) {
return -1;
}
if (xscr) {
- if (xdl_emit_diff(&xe, xscr, ecb, xecfg) < 0) {
+ if (ef(&xe, xscr, ecb, xecfg) < 0) {
xdl_free_script(xscr);
xdl_free_env(&xe);
static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec);
static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb);
-static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
* Starting at the passed change atom, find the latest change atom to be included
* inside the differential hunk according to the specified configuration.
*/
-static xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
+xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
xdchange_t *xch, *xchp;
for (xchp = xscr, xch = xscr->next; xch; xchp = xch, xch = xch->next)
#define XEMIT_H
+typedef int (*emit_func_t)(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg);
+xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg);
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg);