/git-name-rev
/git-mv
/git-notes
+/git-p4
/git-pack-redundant
/git-pack-objects
/git-pack-refs
/test-index-version
/test-line-buffer
/test-match-trees
+/test-mergesort
/test-mktemp
/test-parse-options
/test-path-utils
+/test-revision-walking
/test-run-command
/test-sha1
/test-sigchain
# Shell quote;
SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+ifdef DEFAULT_PAGER
+DEFAULT_PAGER_SQ = $(subst ','\'',$(DEFAULT_PAGER))
+ASCIIDOC_EXTRA += -a 'git-default-pager=$(DEFAULT_PAGER_SQ)'
+endif
+
+ifdef DEFAULT_EDITOR
+DEFAULT_EDITOR_SQ = $(subst ','\'',$(DEFAULT_EDITOR))
+ASCIIDOC_EXTRA += -a 'git-default-editor=$(DEFAULT_EDITOR_SQ)'
+endif
+
#
# Please note that there is a minor bug in asciidoc.
# The version after 6.0.3 _will_ include the patch found here:
* A third-party tool "git subtree" is distributed in contrib/
+ * Even with the "-q"uiet option, "checkout" used to report setting up
+   tracking. Also "branch" learned the "-q"uiet option to squelch
+   informational messages.
+
+ * The smart-http backend used to always override GIT_COMMITTER_*
+ variables with REMOTE_USER and REMOTE_ADDR, but these variables are
+ now preserved when set.
+
+ * "git am" learned the "--include" option, which is the opposite of
+   the existing "--exclude" option.
+
+ * When "git am -3" needs to fall back to applying the patch to a
+   synthesized preimage followed by a 3-way merge, the paths that
+   needed such treatment are now reported to the end user, so that the
+   result in them can be eyeballed with extra care.
+
+ * The "fmt-merge-msg" command learns to list the primary contributors
+ involved in the side topic you are merging.
+
+ * The cases in which "git push" fails due to a non-fast-forward update
+   can be broken into three categories; each case is given a separate
+   advice message.
+
+ * A 'snapshot' request to "gitweb" honors the If-Modified-Since: header,
+ based on the commit date.
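
For example, a periodic mirroring job can now make the snapshot request conditional and skip the download when nothing new has been committed. A sketch (the gitweb URL shape is illustrative; only the If-Modified-Since handling comes from the item above):

    $ curl -z 'Sat, 26 May 2012 00:00:00 GMT' -o snapshot.tgz \
          'https://example.org/gitweb.cgi?p=repo.git;a=snapshot;h=master;sf=tgz'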
Foreign Interface
+ * "git p4" has been moved out of contrib/ area.
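
It is now invoked like any other built-in command, so the old advice to create a "p4" alias (removed from the documentation below) is no longer needed. For instance (the depot path and resulting directory are illustrative):

    $ git p4 clone //depot/projects/widget
    $ cd widget && git p4 rebase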
Performance
+ * "git apply" had some memory leaks plugged.
+
+ * Setting up a revision traversal with many starting points was
+ inefficient as these were placed in a date-order priority queue
+ one-by-one. Now they are collected in the queue unordered first,
+ and sorted immediately before getting used.
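
Traversals that seed the walk from every ref benefit the most; a quick way to exercise that code path is something like (illustrative):

    $ git rev-list --count --branches --tags --remotes >/dev/null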
Internal Implementation (please report possible regressions)
+ * "git rev-parse --show-prefix" used to emit nothing when run at the
+ top-level of the working tree, but now it gives a blank line.
+
+ * A minor memory leak in unpack_trees() (used by "merge" and "checkout"
+   when checking out another branch) has been plugged.
+
+ * More lower-level commands learned to use the streaming API to read
+ from the object store without keeping everything in core.
+
+ * Because "sh" on the user's PATH may be utterly broken on some
+   systems, the run-command API now uses SHELL_PATH, not /bin/sh, when
+   spawning an external command (not applicable to the Windows port).
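
SHELL_PATH is the existing Makefile knob (see the Makefile hunk below), so on such a platform the shell is chosen once at build time; the path here is only an example:

    $ make SHELL_PATH=/usr/xpg4/bin/sh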
Also contains minor documentation updates and code clean-ups.
Unless otherwise noted, all the fixes since v1.7.10 in the maintenance
releases are contained in this release (see release notes to them for
details).
+
+ * "git fetch" that recurses into submodules on demand did not check
+   if it needs to go into submodules when non-branches (most notably,
+ tags) are fetched.
+ (merge a6801ad jl/maint-submodule-recurse-fetch later to maint).
+
+ * "git blame" started missing quite a few changes from the origin
+   since we stopped using the diff minimalization by default in the
+   v1.7.2 era.
+ (merge 059a500 jc/maint-blame-minimal later to maint).
+
+ * "log -p --graph" used with "--stat" had a few formatting errors.
+ (merge e2c5966 lp/maint-diff-three-dash-with-graph later to maint).
+
+ * Giving "--continue" to a conflicted "rebase -i" session skipped a
+ commit that only results in changes to submodules.
+ (merge a6754cd jk/rebase-i-submodule-conflict-only later to maint).
+
+ * When PATH contains an unreadable directory, alias expansion code
+ did not kick in, and failed with an error that said "git-subcmd"
+ was not found.
+ (merge 38f865c jk/run-command-eacces later to maint).
+
+ * The 'push to upstream' implementation was broken in some corner
+ cases. "git push $there" without refspec, when the current branch
+ is set to push to a remote different from $there, used to push to
+   $there using the upstream information to a remote unrelated to
+ $there.
+ (merge 135dade jc/push-upstream-sanity later to maint).
+
+ * "git clean -d -f" (not "-d -f -f") is supposed to protect nested
+ working trees of independent git repositories that exist in the
+ current project working tree from getting removed, but the
+ protection applied only to such working trees that are at the
+ top-level of the current project by mistake.
+ (merge ae2f203 jc/maint-clean-nested-worktree-in-subdir later to maint).
+
+ * Rename detection logic used to match two empty files as renames
+   during merge-recursive, leading to unnatural mismerges.
+ (merge 4f7cb99 jk/diff-no-rename-empty later to maint).
+
+ * An age-old corner case bug in combine diff (only triggered with -U0
+   when the hunk at the beginning of the file needs to be shown) has
+ been fixed.
+ (merge e5e9b56 rs/combine-diff-zero-context-at-the-beginning later to maint).
+
+ * When "git commit --template F" errors out because the user did not
+ touch the message, it claimed that it aborts due to "empty
+ message", which was utterly wrong.
+ (merge 1f08c2c jc/commit-unedited-template later to maint).
+
+ * "git add -p" is not designed to deal with unmerged paths but did
+ not exclude them and tried to apply funny patches only to fail.
+ (merge 4066bd6 jk/add-p-skip-conflicts later to maint).
+
+ * "git commit --author=$name" did not tell hooks the name that was
+   being recorded in the resulting commit, even though it does so when
+   the end user overrides the authorship via the "GIT_AUTHOR_NAME"
+   environment variable.
+ (merge 7dfe8ad jc/commit-hook-authorship later to maint).
+
+ * The regexp configured with diff.wordregex was incorrectly reused
+ across files.
+ (merge 6440d34 tr/maint-word-diff-regex-sticky later to maint).
+
+ * Running "notes merge --commit" failed to perform correctly when run
+ from any directory inside $GIT_DIR/. When "notes merge" stops with
+ conflicts, $GIT_DIR/NOTES_MERGE_WORKTREE is the place a user edits
+ to resolve it.
+ (merge dabba59 jh/notes-merge-in-git-dir-worktree later to maint).
+
--
pushNonFastForward::
- Advice shown when linkgit:git-push[1] refuses
- non-fast-forward refs.
+ Set this variable to 'false' if you want to disable
+ 'pushNonFFCurrent', 'pushNonFFDefault', and
+ 'pushNonFFMatching' simultaneously.
+ pushNonFFCurrent::
+ Advice shown when linkgit:git-push[1] fails due to a
+ non-fast-forward update to the current branch.
+ pushNonFFDefault::
+ Advice to set 'push.default' to 'upstream' or 'current'
+ when you ran linkgit:git-push[1] and pushed 'matching
+ refs' by default (i.e. you did not provide an explicit
+ refspec, and no 'push.default' configuration was set)
+ and it resulted in a non-fast-forward error.
+ pushNonFFMatching::
+ Advice shown when you ran linkgit:git-push[1] and pushed
+ 'matching refs' explicitly (i.e. you used ':', or
+ specified a refspec that isn't your current branch) and
+ it resulted in a non-fast-forward error.
statusHints::
Directions on how to stage/unstage/add shown in the
output of linkgit:git-status[1] and the template shown
[--3way] [--interactive] [--committer-date-is-author-date]
[--ignore-date] [--ignore-space-change | --ignore-whitespace]
[--whitespace=<option>] [-C<n>] [-p<n>] [--directory=<dir>]
- [--exclude=<path>] [--reject] [-q | --quiet]
+ [--exclude=<path>] [--include=<path>] [--reject] [-q | --quiet]
[--scissors | --no-scissors]
[(<mbox> | <Maildir>)...]
'git am' (--continue | --skip | --abort)
-p<n>::
--directory=<dir>::
--exclude=<path>::
+--include=<path>::
--reject::
These flags are passed to the 'git apply' (see linkgit:git-apply[1])
program that applies
relationship to upstream branch (if any). If given twice, print
the name of the upstream branch, as well.
+-q::
+--quiet::
+ Be more quiet when creating or deleting a branch, suppressing
+ non-error messages.
+
--abbrev=<length>::
Alter the sha1's minimum display length in the output listing.
The default value is 7 and can be overridden by the `core.abbrev`
-t <file>::
--template=<file>::
- Use the contents of the given file as the initial version
- of the commit message. The editor is invoked and you can
- make subsequent changes. If a message is specified using
- the `-m` or `-F` options, this option has no effect. This
- overrides the `commit.template` configuration variable.
+ When editing the commit message, start the editor with the
+ contents in the given file. The `commit.template` configuration
+ variable is often used to give this option implicitly to the
+ command. This mechanism can be used by projects that want to
+ guide participants with some hints on what to write in the message
+ in what order. If the user exits the editor without editing the
+ message, the commit is aborted. This has no effect when a message
+ is given by other means, e.g. with the `-m` or `-F` options.
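
For example (a sketch; the template path is hypothetical, and the abort message is the one this version prints when the template is left untouched):

------------
$ git config commit.template ~/.gitmessage.txt
$ git commit        # quit the editor without changing the template
Aborting commit; you did not edit the message.
------------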
-s::
--signoff::
options.
--cat-blob-fd=<fd>::
- Specify the file descriptor that will be written to
- when the `cat-blob` command is encountered in the stream.
- The default behaviour is to write to `stdout`.
+ Write responses to `cat-blob` and `ls` queries to the
+ file descriptor <fd> instead of `stdout`. Allows `progress`
+ output intended for the end-user to be separated from other
+ output.
--done::
Require a `done` command at the end of the stream.
accepted. In particular, the `cat-blob` command can be used in the
middle of a commit but not in the middle of a `data` command.
+See ``Responses To Commands'' below for details about how to read
+this output safely.
+
`ls`
~~~~
Prints information about the object at a path to a file descriptor
missing SP <path> LF
====
+See ``Responses To Commands'' below for details about how to read
+this output safely.
+
`feature`
~~~~~~~~~
Require that fast-import supports the specified feature, or abort if
in use, the `done` command is mandatory and marks the end of the
stream.
+Responses To Commands
+---------------------
+New objects written by fast-import are not available immediately.
+Most fast-import commands have no visible effect until the next
+checkpoint (or completion). The frontend can send commands to
+fill fast-import's input pipe without worrying about how quickly
+they will take effect, which improves performance by simplifying
+scheduling.
+
+For some frontends, though, it is useful to be able to read back
+data from the current repository as it is being updated (for
+example when the source material describes objects in terms of
+patches to be applied to previously imported objects). This can
+be accomplished by connecting the frontend and fast-import via
+bidirectional pipes:
+
+====
+ mkfifo fast-import-output
+ frontend <fast-import-output |
+ git fast-import >fast-import-output
+====
+
+A frontend set up this way can use `progress`, `ls`, and `cat-blob`
+commands to read information from the import in progress.
+
+To avoid deadlock, such frontends must completely consume any
+pending output from `progress`, `ls`, and `cat-blob` before
+performing writes to fast-import that might block.
+
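Alternatively (a sketch, not an example from the manual), a frontend can keep fast-import's stdout free for `progress` messages and route only the `cat-blob`/`ls` responses through a dedicated descriptor using `--cat-blob-fd`; the same caution about draining pending responses applies:

====
	mkfifo cat-blob-responses
	frontend <cat-blob-responses |
	git fast-import --cat-blob-fd=3 3>cat-blob-responses
====
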
Crash Reports
-------------
If fast-import is supplied invalid input it will terminate with a
--all::
Fetch all remote refs.
+--stdin::
+ Take the list of refs from stdin, one per line. If there
+ are refs specified on the command line in addition to this
+ option, then the refs from stdin are processed after those
+ on the command line.
++
+If '--stateless-rpc' is specified together with this option then
+the list of refs must be in packet format (pkt-line). Each ref must
+be in a separate packet, and the list must end with a flush packet.
+
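In the plain (non '--stateless-rpc') case the input is simply one ref per line; a sketch (host and refs are illustrative):

------------
$ printf '%s\n' refs/heads/master refs/heads/next |
	git fetch-pack --stdin git://git.example.com/repo.git
------------
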
-q::
--quiet::
Pass '-q' flag to 'git unpack-objects'; this makes the
EXAMPLE
-------
-* Create an alias for 'git p4', using the full path to the 'git-p4'
- script if needed:
-+
-------------
-$ git config --global alias.p4 '!git-p4'
-------------
-
* Clone a repository:
+
------------
work properly; the submit command looks only at the variable and does
not have a command-line option.
-The full syntax for a p4 view is documented in 'p4 help views'. Git-p4
+The full syntax for a p4 view is documented in 'p4 help views'. 'Git p4'
knows only a subset of the view syntax. It understands multi-line
mappings, overlays with '+', exclusions with '-' and double-quotes
-around whitespace. Of the possible wildcards, git-p4 only handles
-'...', and only when it is at the end of the path. Git-p4 will complain
+around whitespace. Of the possible wildcards, 'git p4' only handles
+'...', and only when it is at the end of the path. 'Git p4' will complain
if it encounters an unhandled wildcard.
Bugs in the implementation of overlap mappings exist. If multiple depot
paths map through overlays to the same location in the repository,
-git-p4 can choose the wrong one. This is hard to solve without
-dedicating a client spec just for git-p4.
+'git p4' can choose the wrong one. This is hard to solve without
+dedicating a client spec just for 'git p4'.
-The name of the client can be given to git-p4 in multiple ways. The
+The name of the client can be given to 'git p4' in multiple ways. The
variable 'git-p4.client' takes precedence if it exists. Otherwise,
normal p4 mechanisms of determining the client are used: environment
variable P4CLIENT, a file referenced by P4CONFIG, or the local host name.
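
For example, to pin the client spec explicitly for one repository (the client name is hypothetical):

------------
$ git config git-p4.client my-p4-client-spec
------------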
is specified. This flag forces progress status even if the
standard error stream is not directed to a terminal.
---recurse-submodules=check::
- Check whether all submodule commits used by the revisions to be
- pushed are available on a remote tracking branch. Otherwise the
- push will be aborted and the command will exit with non-zero status.
+--recurse-submodules=check|on-demand::
+ Make sure all submodule commits used by the revisions to be
+ pushed are available on a remote tracking branch. If 'check' is
+ used git will verify that all submodule commits that changed in
+ the revisions to be pushed are available on at least one remote
+ of the submodule. If any commits are missing the push will be
+ aborted and exit with non-zero status. If 'on-demand' is used
+ all submodules that changed in the revisions to be pushed will
+ be pushed. If on-demand was not able to push all necessary
+ revisions it will also be aborted and exit with non-zero status.
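
For example, to push the superproject and have every submodule that changed in the pushed revisions pushed as well (the remote name is illustrative):

------------
$ git push --recurse-submodules=on-demand origin
------------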
include::urls-remotes.txt[]
`$SOME_ENVIRONMENT_VARIABLE`, `"C:\Program Files\Vim\gvim.exe"
--nofork`. The order of preference is the `$GIT_EDITOR`
environment variable, then `core.editor` configuration, then
- `$VISUAL`, then `$EDITOR`, and then finally 'vi'.
+ `$VISUAL`, then `$EDITOR`, and then the default chosen at compile
+ time, which is usually 'vi'.
+ifdef::git-default-editor[]
+ The build you are using chose '{git-default-editor}' as the default.
+endif::git-default-editor[]
GIT_PAGER::
Text viewer for use by git commands (e.g., 'less'). The value
is meant to be interpreted by the shell. The order of preference
is the `$GIT_PAGER` environment variable, then `core.pager`
- configuration, then `$PAGER`, and then finally 'less'.
+ configuration, then `$PAGER`, and then the default chosen at
+ compile time (usually 'less').
+ifdef::git-default-pager[]
+ The build you are using chose '{git-default-pager}' as the default.
+endif::git-default-pager[]
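
These compile-time defaults come from the new DEFAULT_EDITOR and DEFAULT_PAGER Makefile knobs shown elsewhere in this series; a build might set them like this (the values are only examples):

------------
$ make DEFAULT_EDITOR=nano DEFAULT_PAGER=more all doc
------------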
Diagnostics
-----------
returning a `struct commit *` each time you call it. The end of the
revision list is indicated by returning a NULL pointer.
+`reset_revision_walk`::
+
+	Reset the flags used by the revision walking API. You can use
+	this to do multiple sequential revision walks.
+
Data structures
---------------
use English. Under autoconf the configure script will do this
automatically if it can't find libintl on the system.
+ - Python version 2.6 or later is needed to use the git-p4
+ interface to Perforce.
+
- Some platform specific issues are dealt with Makefile rules,
but depending on your specific installation, you may not
have all the libraries/tools needed, or you may have
SCRIPT_PERL += git-svn.perl
SCRIPT_PYTHON += git-remote-testgit.py
+SCRIPT_PYTHON += git-p4.py
SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.perl,%,$(SCRIPT_PERL)) \
TEST_PROGRAMS_NEED_X += test-index-version
TEST_PROGRAMS_NEED_X += test-line-buffer
TEST_PROGRAMS_NEED_X += test-match-trees
+TEST_PROGRAMS_NEED_X += test-mergesort
TEST_PROGRAMS_NEED_X += test-mktemp
TEST_PROGRAMS_NEED_X += test-parse-options
TEST_PROGRAMS_NEED_X += test-path-utils
+TEST_PROGRAMS_NEED_X += test-revision-walking
TEST_PROGRAMS_NEED_X += test-run-command
TEST_PROGRAMS_NEED_X += test-sha1
TEST_PROGRAMS_NEED_X += test-sigchain
LIB_H += mailmap.h
LIB_H += merge-file.h
LIB_H += merge-recursive.h
+LIB_H += mergesort.h
LIB_H += notes.h
LIB_H += notes-cache.h
LIB_H += notes-merge.h
LIB_OBJS += match-trees.o
LIB_OBJS += merge-file.o
LIB_OBJS += merge-recursive.o
+LIB_OBJS += mergesort.o
LIB_OBJS += name-hash.o
LIB_OBJS += notes.o
LIB_OBJS += notes-cache.o
BASIC_CFLAGS += -DDEFAULT_PAGER='$(DEFAULT_PAGER_CQ_SQ)'
endif
+ifdef SHELL_PATH
+SHELL_PATH_CQ = "$(subst ",\",$(subst \,\\,$(SHELL_PATH)))"
+SHELL_PATH_CQ_SQ = $(subst ','\'',$(SHELL_PATH_CQ))
+
+BASIC_CFLAGS += -DSHELL_PATH='$(SHELL_PATH_CQ_SQ)'
+endif
+
ALL_CFLAGS += $(BASIC_CFLAGS)
ALL_LDFLAGS += $(BASIC_LDFLAGS)
$(VCSSVN_LIB): $(VCSSVN_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(VCSSVN_OBJS)
+export DEFAULT_EDITOR DEFAULT_PAGER
+
doc:
$(MAKE) -C Documentation all
#include "cache.h"
int advice_push_nonfastforward = 1;
+int advice_push_non_ff_current = 1;
+int advice_push_non_ff_default = 1;
+int advice_push_non_ff_matching = 1;
int advice_status_hints = 1;
int advice_commit_before_merge = 1;
int advice_resolve_conflict = 1;
int *preference;
} advice_config[] = {
{ "pushnonfastforward", &advice_push_nonfastforward },
+ { "pushnonffcurrent", &advice_push_non_ff_current },
+ { "pushnonffdefault", &advice_push_non_ff_default },
+ { "pushnonffmatching", &advice_push_non_ff_matching },
{ "statushints", &advice_status_hints },
{ "commitbeforemerge", &advice_commit_before_merge },
{ "resolveconflict", &advice_resolve_conflict },
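
With the finer-grained keys above a user can keep non-fast-forward advice in general but silence a single variant, e.g. (a sketch):

	$ git config --global advice.pushNonFFMatching false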
#include "git-compat-util.h"
extern int advice_push_nonfastforward;
+extern int advice_push_non_ff_current;
+extern int advice_push_non_ff_default;
+extern int advice_push_non_ff_matching;
extern int advice_status_hints;
extern int advice_commit_before_merge;
extern int advice_resolve_conflict;
* config.
*/
static int setup_tracking(const char *new_ref, const char *orig_ref,
- enum branch_track track)
+ enum branch_track track, int quiet)
{
struct tracking tracking;
+ int config_flags = quiet ? 0 : BRANCH_CONFIG_VERBOSE;
if (strlen(new_ref) > 1024 - 7 - 7 - 1)
return error("Tracking not set up: name too long: %s",
return error("Not tracking: ambiguous information for ref %s",
orig_ref);
- install_branch_config(BRANCH_CONFIG_VERBOSE, new_ref, tracking.remote,
+ install_branch_config(config_flags, new_ref, tracking.remote,
tracking.src ? tracking.src : orig_ref);
free(tracking.src);
void create_branch(const char *head,
const char *name, const char *start_name,
int force, int reflog, int clobber_head,
- enum branch_track track)
+ int quiet, enum branch_track track)
{
struct ref_lock *lock = NULL;
struct commit *commit;
start_name);
if (real_ref && track)
- setup_tracking(ref.buf+11, real_ref, track);
+ setup_tracking(ref.buf+11, real_ref, track, quiet);
if (!dont_change_ref)
if (write_ref_sha1(lock, sha1, msg) < 0)
*/
void create_branch(const char *head, const char *name, const char *start_name,
int force, int reflog,
- int clobber_head, enum branch_track track);
+ int clobber_head, int quiet, enum branch_track track);
/*
* Validates that the requested branch may be created, returning the
unsigned long leading, trailing;
unsigned long oldpos, oldlines;
unsigned long newpos, newlines;
+ /*
+ * 'patch' is usually borrowed from buf in apply_patch(),
+ * but some codepaths store an allocated buffer.
+ */
const char *patch;
+ unsigned free_patch:1,
+ rejected:1;
int size;
- int rejected;
int linenr;
struct fragment *next;
};
struct patch *next;
};
+static void free_fragment_list(struct fragment *list)
+{
+ while (list) {
+ struct fragment *next = list->next;
+ if (list->free_patch)
+ free((char *)list->patch);
+ free(list);
+ list = next;
+ }
+}
+
+static void free_patch(struct patch *patch)
+{
+ free_fragment_list(patch->fragments);
+ free(patch->def_name);
+ free(patch->old_name);
+ free(patch->new_name);
+ free(patch->result);
+ free(patch);
+}
+
+static void free_patch_list(struct patch *list)
+{
+ while (list) {
+ struct patch *next = list->next;
+ free_patch(list);
+ list = next;
+ }
+}
+
/*
* A line in a file, len-bytes long (includes the terminating LF,
* except for an incomplete line at the end if the file ends with
img->nr++;
}
+/*
+ * "buf" has the file contents to be patched (read from various sources).
+ * attach it to "image" and add line-based index to it.
+ * "image" now owns the "buf".
+ */
static void prepare_image(struct image *image, char *buf, size_t len,
int prepare_linetable)
{
fputs(post, output);
}
-#define CHUNKSIZE (8192)
#define SLOP (16)
static void read_patch_file(struct strbuf *sb, int fd)
return name;
}
-static char *find_name_gnu(const char *line, char *def, int p_value)
+static char *find_name_gnu(const char *line, const char *def, int p_value)
{
struct strbuf name = STRBUF_INIT;
char *cp;
cp++;
}
- /* name can later be freed, so we need
- * to memmove, not just return cp
- */
strbuf_remove(&name, 0, cp - name.buf);
- free(def);
if (root)
strbuf_insert(&name, 0, root, root_len);
return squash_slash(strbuf_detach(&name, NULL));
return line + len - end;
}
-static char *find_name_common(const char *line, char *def, int p_value,
- const char *end, int terminate)
+static char *null_strdup(const char *s)
+{
+ return s ? xstrdup(s) : NULL;
+}
+
+static char *find_name_common(const char *line, const char *def,
+ int p_value, const char *end, int terminate)
{
int len;
const char *start = NULL;
start = line;
}
if (!start)
- return squash_slash(def);
+ return squash_slash(null_strdup(def));
len = line - start;
if (!len)
- return squash_slash(def);
+ return squash_slash(null_strdup(def));
/*
* Generally we prefer the shorter name, especially
if (def) {
int deflen = strlen(def);
if (deflen < len && !strncmp(start, def, deflen))
- return squash_slash(def);
- free(def);
+ return squash_slash(xstrdup(def));
}
if (root) {
name = find_name_traditional(first, NULL, p_value);
patch->old_name = name;
} else {
- name = find_name_traditional(first, NULL, p_value);
- name = find_name_traditional(second, name, p_value);
+ char *first_name;
+ first_name = find_name_traditional(first, NULL, p_value);
+ name = find_name_traditional(second, first_name, p_value);
+ free(first_name);
if (has_epoch_timestamp(first)) {
patch->is_new = 1;
patch->is_delete = 0;
patch->is_delete = 1;
patch->old_name = name;
} else {
- patch->old_name = patch->new_name = name;
+ patch->old_name = name;
+ patch->new_name = xstrdup(name);
}
}
if (!name)
static int gitdiff_oldname(const char *line, struct patch *patch)
{
+ char *orig = patch->old_name;
patch->old_name = gitdiff_verify_name(line, patch->is_new, patch->old_name, "old");
+ if (orig != patch->old_name)
+ free(orig);
return 0;
}
static int gitdiff_newname(const char *line, struct patch *patch)
{
+ char *orig = patch->new_name;
patch->new_name = gitdiff_verify_name(line, patch->is_delete, patch->new_name, "new");
+ if (orig != patch->new_name)
+ free(orig);
return 0;
}
static int gitdiff_delete(const char *line, struct patch *patch)
{
patch->is_delete = 1;
- patch->old_name = patch->def_name;
+ free(patch->old_name);
+ patch->old_name = null_strdup(patch->def_name);
return gitdiff_oldmode(line, patch);
}
static int gitdiff_newfile(const char *line, struct patch *patch)
{
patch->is_new = 1;
- patch->new_name = patch->def_name;
+ free(patch->new_name);
+ patch->new_name = null_strdup(patch->def_name);
return gitdiff_newmode(line, patch);
}
static int gitdiff_copysrc(const char *line, struct patch *patch)
{
patch->is_copy = 1;
+ free(patch->old_name);
patch->old_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_copydst(const char *line, struct patch *patch)
{
patch->is_copy = 1;
+ free(patch->new_name);
patch->new_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_renamesrc(const char *line, struct patch *patch)
{
patch->is_rename = 1;
+ free(patch->old_name);
patch->old_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_renamedst(const char *line, struct patch *patch)
{
patch->is_rename = 1;
+ free(patch->new_name);
patch->new_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
* creation or deletion of an empty file. In any of these cases,
* both sides are the same name under a/ and b/ respectively.
*/
-static char *git_header_name(char *line, int llen)
+static char *git_header_name(const char *line, int llen)
{
const char *name;
const char *second = NULL;
}
/* Verify that we recognize the lines following a git header */
-static int parse_git_header(char *line, int len, unsigned int size, struct patch *patch)
+static int parse_git_header(const char *line, int len, unsigned int size, struct patch *patch)
{
unsigned long offset;
return offset + ex;
}
-static void recount_diff(char *line, int size, struct fragment *fragment)
+static void recount_diff(const char *line, int size, struct fragment *fragment)
{
int oldlines = 0, newlines = 0, ret = 0;
* Parse a unified diff fragment header of the
* form "@@ -a,b +c,d @@"
*/
-static int parse_fragment_header(char *line, int len, struct fragment *fragment)
+static int parse_fragment_header(const char *line, int len, struct fragment *fragment)
{
int offset;
return offset;
}
-static int find_header(char *line, unsigned long size, int *hdrsize, struct patch *patch)
+static int find_header(const char *line, unsigned long size, int *hdrsize, struct patch *patch)
{
unsigned long offset, len;
if (!patch->def_name)
die("git diff header lacks filename information when removing "
"%d leading pathname components (line %d)" , p_value, linenr);
- patch->old_name = patch->new_name = patch->def_name;
+ patch->old_name = xstrdup(patch->def_name);
+ patch->new_name = xstrdup(patch->def_name);
}
if (!patch->is_delete && !patch->new_name)
die("git diff header lacks filename information "
* between a "---" that is part of a patch, and a "---" that starts
* the next patch is to look at the line counts..
*/
-static int parse_fragment(char *line, unsigned long size,
+static int parse_fragment(const char *line, unsigned long size,
struct patch *patch, struct fragment *fragment)
{
int added, deleted;
return offset;
}
-static int parse_single_patch(char *line, unsigned long size, struct patch *patch)
+/*
+ * We have seen "diff --git a/... b/..." header (or a traditional patch
+ * header). Read hunks that belong to this patch into fragments and hang
+ * them to the given patch structure.
+ *
+ * The (fragment->patch, fragment->size) pair points into the memory given
+ * by the caller, not a copy, when we return.
+ */
+static int parse_single_patch(const char *line, unsigned long size, struct patch *patch)
{
unsigned long offset = 0;
unsigned long oldlines = 0, newlines = 0, context = 0;
return out;
}
+/*
+ * Read a binary hunk and return a new fragment; fragment->patch
+ * points at allocated memory that the caller must free, so
+ * it is marked as "->free_patch = 1".
+ */
static struct fragment *parse_binary_hunk(char **buf_p,
unsigned long *sz_p,
int *status_p,
frag = xcalloc(1, sizeof(*frag));
frag->patch = inflate_it(data, hunk_size, origlen);
+ frag->free_patch = 1;
if (!frag->patch)
goto corrupt;
free(data);
return used;
}
+/*
+ * Read the patch text in "buffer" that extends for "size" bytes; stop
+ * reading after seeing a single patch (i.e. changes to a single file).
+ * Create fragments (i.e. patch hunks) and hang them to the given patch.
+ * Return the number of bytes consumed, so that the caller can call us
+ * again for the next patch.
+ */
static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
{
int hdrsize, patchsize;
img->len -= img->line[--img->nr].len;
}
+/*
+ * The change from "preimage" to "postimage" has been found to
+ * apply at applied_pos (counts in line numbers) in "img".
+ * Update "img" to remove "preimage" and replace it with "postimage".
+ */
static void update_image(struct image *img,
int applied_pos,
struct image *preimage,
img->nr = nr;
}
+/*
+ * Use the patch-hunk text in "frag" to prepare two images (preimage and
+ * postimage) for the hunk. Find lines that match "preimage" in "img" and
+ * replace that part of "img" with the "postimage" text.
+ */
static int apply_one_fragment(struct image *img, struct fragment *frag,
int inaccurate_eof, unsigned ws_rule,
int nth_fragment)
return -1;
}
+/*
+ * Replace "img" with the result of applying the binary patch.
+ * The binary patch data itself in patch->fragment is still kept
+ * but the preimage prepared by the caller in "img" is freed here
+ * or in the helper function apply_binary_fragment() this calls.
+ */
static int apply_binary(struct image *img, struct patch *patch)
{
const char *name = patch->old_name ? patch->old_name : patch->new_name;
return error("patch %s has been renamed/deleted",
patch->old_name);
}
- /* We have a patched copy in memory use that */
+ /* We have a patched copy in memory; use that. */
strbuf_add(&buf, tpatch->result, tpatch->resultsize);
} else if (cached) {
if (read_file_or_gitlink(ce, &buf))
/*
* There is no way to apply subproject
* patch without looking at the index.
+ * NEEDSWORK: shouldn't this be flagged
+ * as an error???
*/
+ free_fragment_list(patch->fragments);
patch->fragments = NULL;
}
} else {
is_new:
patch->is_new = 1;
patch->is_delete = 0;
+ free(patch->old_name);
patch->old_name = NULL;
return 0;
}
+/*
+ * Check and apply the patch in-core; leave the result in patch->result
+ * for the caller to write it out to the final destination.
+ */
static int check_patch(struct patch *patch)
{
struct stat st;
if (!prefix || p->is_toplevel_relative)
return;
for ( ; p; p = p->next) {
- if (p->new_name == p->old_name) {
- char *prefixed = p->new_name;
- prefix_one(&prefixed);
- p->new_name = p->old_name = prefixed;
- }
- else {
- prefix_one(&p->new_name);
- prefix_one(&p->old_name);
- }
+ prefix_one(&p->new_name);
+ prefix_one(&p->old_name);
}
}
static int apply_patch(int fd, const char *filename, int options)
{
size_t offset;
- struct strbuf buf = STRBUF_INIT;
+ struct strbuf buf = STRBUF_INIT; /* owns the patch text */
struct patch *list = NULL, **listp = &list;
int skipped_patch = 0;
- /* FIXME - memory leak when using multiple patch files as inputs */
- memset(&fn_table, 0, sizeof(struct string_list));
patch_input_file = filename;
read_patch_file(&buf, fd);
offset = 0;
listp = &patch->next;
}
else {
- /* perhaps free it a bit better? */
- free(patch);
+ free_patch(patch);
skipped_patch++;
}
offset += nr;
if (summary)
summary_patch_list(list);
+ free_patch_list(list);
strbuf_release(&buf);
+ string_list_clear(&fn_table, 0);
return 0;
}
OPT_BIT('s', NULL, &output_option, "Suppress author name and timestamp (Default: off)", OUTPUT_NO_AUTHOR),
OPT_BIT('e', "show-email", &output_option, "Show author email instead of name (Default: off)", OUTPUT_SHOW_EMAIL),
OPT_BIT('w', NULL, &xdl_opts, "Ignore whitespace differences", XDF_IGNORE_WHITESPACE),
+ OPT_BIT(0, "minimal", &xdl_opts, "Spend extra cycles to find better match", XDF_NEED_MINIMAL),
OPT_STRING('S', NULL, &revs_file, "file", "Use revisions from <file> instead of calling git-rev-list"),
OPT_STRING(0, "contents", &contents_from, "file", "Use <file>'s contents as the final image"),
{ OPTION_CALLBACK, 'C', NULL, &opt, "score", "Find line copies within and across files", PARSE_OPT_OPTARG, blame_copy_callback },
return merged;
}
-static int delete_branches(int argc, const char **argv, int force, int kinds)
+static int delete_branches(int argc, const char **argv, int force, int kinds,
+ int quiet)
{
struct commit *rev, *head_rev = NULL;
unsigned char sha1[20];
ret = 1;
} else {
struct strbuf buf = STRBUF_INIT;
- printf(_("Deleted %sbranch %s (was %s).\n"), remote,
- bname.buf,
- find_unique_abbrev(sha1, DEFAULT_ABBREV));
+ if (!quiet)
+ printf(_("Deleted %sbranch %s (was %s).\n"),
+ remote, bname.buf,
+ find_unique_abbrev(sha1, DEFAULT_ABBREV));
strbuf_addf(&buf, "branch.%s", bname.buf);
if (git_config_rename_section(buf.buf, NULL) < 0)
warning(_("Update of config-file failed"));
int delete = 0, rename = 0, force_create = 0, list = 0;
int verbose = 0, abbrev = -1, detached = 0;
int reflog = 0, edit_description = 0;
+ int quiet = 0;
enum branch_track track;
int kinds = REF_LOCAL_BRANCH;
struct commit_list *with_commit = NULL;
OPT_GROUP("Generic options"),
OPT__VERBOSE(&verbose,
"show hash and subject, give twice for upstream branch"),
+ OPT__QUIET(&quiet, "suppress informational messages"),
OPT_SET_INT('t', "track", &track, "set up tracking mode (see git-pull(1))",
BRANCH_TRACK_EXPLICIT),
OPT_SET_INT( 0, "set-upstream", &track, "change upstream info",
abbrev = DEFAULT_ABBREV;
if (delete)
- return delete_branches(argc, argv, delete > 1, kinds);
+ return delete_branches(argc, argv, delete > 1, kinds, quiet);
else if (list)
return print_ref_list(kinds, detached, verbose, abbrev,
with_commit, argv);
if (kinds != REF_LOCAL_BRANCH)
die(_("-a and -r options to 'git branch' do not make sense with a branch name"));
create_branch(head, argv[0], (argc == 2) ? argv[1] : head,
- force_create, reflog, 0, track);
+ force_create, reflog, 0, quiet, track);
} else
usage_with_options(builtin_branch_usage, options);
#include "parse-options.h"
#include "diff.h"
#include "userdiff.h"
+#include "streaming.h"
#define BATCH 1
#define BATCH_CHECK 2
return cmd_ls_tree(2, ls_args, NULL);
}
+ if (type == OBJ_BLOB)
+ return stream_blob_to_fd(1, sha1, NULL, 0);
buf = read_sha1_file(sha1, &type, &size);
if (!buf)
die("Cannot read object %s", obj_name);
break;
case 0:
+ if (type_from_string(exp_type) == OBJ_BLOB) {
+ unsigned char blob_sha1[20];
+ if (sha1_object_info(sha1, NULL) == OBJ_TAG) {
+ enum object_type type;
+ unsigned long size;
+ char *buffer = read_sha1_file(sha1, &type, &size);
+ if (memcmp(buffer, "object ", 7) ||
+ get_sha1_hex(buffer + 7, blob_sha1))
+ die("%s not a valid tag", sha1_to_hex(sha1));
+ free(buffer);
+ } else
+ hashcpy(blob_sha1, sha1);
+
+ if (sha1_object_info(blob_sha1, NULL) == OBJ_BLOB)
+ return stream_blob_to_fd(1, blob_sha1, NULL, 0);
+ /*
+ * we attempted to dereference a tag to a blob
+ * and failed; there may be new dereference
+ * mechanisms this code is not aware of.
+ * fall-back to the usual case.
+ */
+ }
buf = read_object_with_reference(sha1, exp_type, &size, NULL);
break;
opts->new_branch_force ? 1 : 0,
opts->new_branch_log,
opts->new_branch_force ? 1 : 0,
+ opts->quiet,
opts->track);
new->name = opts->new_branch;
setup_branch_path(new);
static const char sign_off_header[] = "Signed-off-by: ";
+static void export_one(const char *var, const char *s, const char *e, int hack)
+{
+ struct strbuf buf = STRBUF_INIT;
+ if (hack)
+ strbuf_addch(&buf, hack);
+ strbuf_addf(&buf, "%.*s", (int)(e - s), s);
+ setenv(var, buf.buf, 1);
+ strbuf_release(&buf);
+}
+
static void determine_author_info(struct strbuf *author_ident)
{
char *name, *email, *date;
+ struct ident_split author;
name = getenv("GIT_AUTHOR_NAME");
email = getenv("GIT_AUTHOR_EMAIL");
date = force_date;
strbuf_addstr(author_ident, fmt_ident(name, email, date,
IDENT_ERROR_ON_NO_NAME));
+ if (!split_ident_line(&author, author_ident->buf, author_ident->len)) {
+ export_one("GIT_AUTHOR_NAME", author.name_begin, author.name_end, 0);
+ export_one("GIT_AUTHOR_EMAIL", author.mail_begin, author.mail_end, 0);
+ export_one("GIT_AUTHOR_DATE", author.date_begin, author.tz_end, '@');
+ }
}
static int ends_rfc2822_footer(struct strbuf *sb)
int ident_shown = 0;
int clean_message_contents = (cleanup_mode != CLEANUP_NONE);
+ /* This checks and barfs if author is badly specified */
+ determine_author_info(author_ident);
+
if (!no_verify && run_hook(index_file, "pre-commit", NULL))
return 0;
strbuf_release(&sb);
- /* This checks and barfs if author is badly specified */
- determine_author_info(author_ident);
-
/* This checks if committer ident is explicitly given */
strbuf_addstr(&committer_ident, git_committer_info(0));
if (use_editor && include_status) {
return 1;
}
-/*
- * Find out if the message in the strbuf contains only whitespace and
- * Signed-off-by lines.
- */
-static int message_is_empty(struct strbuf *sb)
+static int rest_is_empty(struct strbuf *sb, int start)
{
- struct strbuf tmpl = STRBUF_INIT;
+ int i, eol;
const char *nl;
- int eol, i, start = 0;
-
- if (cleanup_mode == CLEANUP_NONE && sb->len)
- return 0;
-
- /* See if the template is just a prefix of the message. */
- if (template_file && strbuf_read_file(&tmpl, template_file, 0) > 0) {
- stripspace(&tmpl, cleanup_mode == CLEANUP_ALL);
- if (start + tmpl.len <= sb->len &&
- memcmp(tmpl.buf, sb->buf + start, tmpl.len) == 0)
- start += tmpl.len;
- }
- strbuf_release(&tmpl);
 	/* Check if the rest is just whitespace and Signed-off-by's. */
for (i = start; i < sb->len; i++) {
return 1;
}
+/*
+ * Find out if the message in the strbuf contains only whitespace and
+ * Signed-off-by lines.
+ */
+static int message_is_empty(struct strbuf *sb)
+{
+ if (cleanup_mode == CLEANUP_NONE && sb->len)
+ return 0;
+ return rest_is_empty(sb, 0);
+}
+
+/*
+ * See if the user edited the message in the editor or left what
+ * was in the template intact
+ */
+static int template_untouched(struct strbuf *sb)
+{
+ struct strbuf tmpl = STRBUF_INIT;
+ char *start;
+
+ if (cleanup_mode == CLEANUP_NONE && sb->len)
+ return 0;
+
+ if (!template_file || strbuf_read_file(&tmpl, template_file, 0) <= 0)
+ return 0;
+
+ stripspace(&tmpl, cleanup_mode == CLEANUP_ALL);
+ start = (char *)skip_prefix(sb->buf, tmpl.buf);
+ if (!start)
+ start = sb->buf;
+ strbuf_release(&tmpl);
+ return rest_is_empty(sb, start - sb->buf);
+}
+
static const char *find_author_by_nickname(const char *name)
{
struct rev_info revs;
die(_("Only one of -c/-C/-F/--fixup can be used."));
if (message.len && f > 0)
die((_("Option -m cannot be combined with -c/-C/-F/--fixup.")));
+ if (f || message.len)
+ template_file = NULL;
if (edit_message)
use_message = edit_message;
if (amend && !use_message && !fixup_message)
if (cleanup_mode != CLEANUP_NONE)
stripspace(&sb, cleanup_mode == CLEANUP_ALL);
+ if (template_untouched(&sb) && !allow_empty_message) {
+ rollback_index_files();
+ fprintf(stderr, _("Aborting commit; you did not edit the message.\n"));
+ exit(1);
+ }
if (message_is_empty(&sb) && !allow_empty_message) {
rollback_index_files();
fprintf(stderr, _("Aborting commit due to empty commit message.\n"));
add_head_to_pending(&rev);
if (!rev.pending.nr) {
struct tree *tree;
- tree = lookup_tree((const unsigned char*)EMPTY_TREE_SHA1_BIN);
+ tree = lookup_tree(EMPTY_TREE_SHA1_BIN);
add_pending_object(&rev, &tree->object, "HEAD");
}
break;
};
static const char fetch_pack_usage[] =
-"git fetch-pack [--all] [--quiet|-q] [--keep|-k] [--thin] [--include-tag] [--upload-pack=<git-upload-pack>] [--depth=<n>] [--no-progress] [-v] [<host>:]<directory> [<refs>...]";
+"git fetch-pack [--all] [--stdin] [--quiet|-q] [--keep|-k] [--thin] "
+"[--include-tag] [--upload-pack=<git-upload-pack>] [--depth=<n>] "
+"[--no-progress] [-v] [<host>:]<directory> [<refs>...]";
#define COMPLETE (1U << 0)
#define COMMON (1U << 1)
args.fetch_all = 1;
continue;
}
+ if (!strcmp("--stdin", arg)) {
+ args.stdin_refs = 1;
+ continue;
+ }
if (!strcmp("-v", arg)) {
args.verbose = 1;
continue;
if (!dest)
usage(fetch_pack_usage);
+ if (args.stdin_refs) {
+ /*
+ * Copy refs from cmdline to new growable list, then
+ * append the refs from the standard input.
+ */
+ int alloc_heads = nr_heads;
+ int size = nr_heads * sizeof(*heads);
+ heads = memcpy(xmalloc(size), heads, size);
+ if (args.stateless_rpc) {
+ /* in stateless RPC mode we use pkt-line to read
+ * from stdin, until we get a flush packet
+ */
+ static char line[1000];
+ for (;;) {
+ int n = packet_read_line(0, line, sizeof(line));
+ if (!n)
+ break;
+ if (line[n-1] == '\n')
+ n--;
+ ALLOC_GROW(heads, nr_heads + 1, alloc_heads);
+ heads[nr_heads++] = xmemdupz(line, n);
+ }
+ }
+ else {
+ /* read from stdin one ref per line, until EOF */
+ struct strbuf line = STRBUF_INIT;
+ while (strbuf_getline(&line, stdin, '\n') != EOF) {
+ ALLOC_GROW(heads, nr_heads + 1, alloc_heads);
+ heads[nr_heads++] = strbuf_detach(&line, NULL);
+ }
+ strbuf_release(&line);
+ }
+ }
+
if (args.stateless_rpc) {
conn = NULL;
fd[0] = 0;
else {
msg = "storing head";
what = _("[new branch]");
- if ((recurse_submodules != RECURSE_SUBMODULES_OFF) &&
- (recurse_submodules != RECURSE_SUBMODULES_ON))
- check_for_new_submodule_commits(ref->new_sha1);
}
+ if ((recurse_submodules != RECURSE_SUBMODULES_OFF) &&
+ (recurse_submodules != RECURSE_SUBMODULES_ON))
+ check_for_new_submodule_commits(ref->new_sha1);
r = s_update_ref(msg, ref, 0);
strbuf_addf(display, "%c %-*s %-*s -> %s%s",
r ? '!' : '*',
merge_log_config = DEFAULT_MERGE_LOG_LEN;
} else if (!strcmp(key, "merge.branchdesc")) {
use_branch_desc = git_config_bool(key, value);
+ } else {
+ return git_default_config(key, value, cb);
}
return 0;
}
strbuf_release(&desc);
}
+#define util_as_integral(elem) ((intptr_t)((elem)->util))
+
+static void record_person(int which, struct string_list *people,
+ struct commit *commit)
+{
+ char name_buf[MAX_GITNAME], *name, *name_end;
+ struct string_list_item *elem;
+ const char *field = (which == 'a') ? "\nauthor " : "\ncommitter ";
+
+ name = strstr(commit->buffer, field);
+ if (!name)
+ return;
+ name += strlen(field);
+ name_end = strchrnul(name, '<');
+ if (*name_end)
+ name_end--;
+	while (name <= name_end && isspace(*name_end))
+ name_end--;
+ if (name_end < name || name + MAX_GITNAME <= name_end)
+ return;
+ memcpy(name_buf, name, name_end - name + 1);
+ name_buf[name_end - name + 1] = '\0';
+
+ elem = string_list_lookup(people, name_buf);
+ if (!elem) {
+ elem = string_list_insert(people, name_buf);
+ elem->util = (void *)0;
+ }
+ elem->util = (void*)(util_as_integral(elem) + 1);
+}
+
+static int cmp_string_list_util_as_integral(const void *a_, const void *b_)
+{
+ const struct string_list_item *a = a_, *b = b_;
+ return util_as_integral(b) - util_as_integral(a);
+}
+
+static void add_people_count(struct strbuf *out, struct string_list *people)
+{
+ if (people->nr == 1)
+ strbuf_addf(out, "%s", people->items[0].string);
+ else if (people->nr == 2)
+ strbuf_addf(out, "%s (%d) and %s (%d)",
+ people->items[0].string,
+ (int)util_as_integral(&people->items[0]),
+ people->items[1].string,
+ (int)util_as_integral(&people->items[1]));
+ else if (people->nr)
+ strbuf_addf(out, "%s (%d) and others",
+ people->items[0].string,
+ (int)util_as_integral(&people->items[0]));
+}
+
+static void credit_people(struct strbuf *out,
+ struct string_list *them,
+ int kind)
+{
+ const char *label;
+ const char *me;
+
+ if (kind == 'a') {
+ label = "\nBy ";
+ me = git_author_info(IDENT_NO_DATE);
+ } else {
+ label = "\nvia ";
+ me = git_committer_info(IDENT_NO_DATE);
+ }
+
+ if (!them->nr ||
+ (them->nr == 1 &&
+ me &&
+ (me = skip_prefix(me, them->items->string)) != NULL &&
+ skip_prefix(me, " <")))
+ return;
+ strbuf_addstr(out, label);
+ add_people_count(out, them);
+}
+
+static void add_people_info(struct strbuf *out,
+ struct string_list *authors,
+ struct string_list *committers)
+{
+ if (authors->nr)
+ qsort(authors->items,
+ authors->nr, sizeof(authors->items[0]),
+ cmp_string_list_util_as_integral);
+ if (committers->nr)
+ qsort(committers->items,
+ committers->nr, sizeof(committers->items[0]),
+ cmp_string_list_util_as_integral);
+
+ credit_people(out, authors, 'a');
+ credit_people(out, committers, 'c');
+}
+
static void shortlog(const char *name,
struct origin_data *origin_data,
struct commit *head,
struct commit *commit;
struct object *branch;
struct string_list subjects = STRING_LIST_INIT_DUP;
+ struct string_list authors = STRING_LIST_INIT_DUP;
+ struct string_list committers = STRING_LIST_INIT_DUP;
int flags = UNINTERESTING | TREESAME | SEEN | SHOWN | ADDED;
struct strbuf sb = STRBUF_INIT;
const unsigned char *sha1 = origin_data->sha1;
return;
setup_revisions(0, NULL, rev, NULL);
- rev->ignore_merges = 1;
add_pending_object(rev, branch, name);
add_pending_object(rev, &head->object, "^HEAD");
head->object.flags |= UNINTERESTING;
while ((commit = get_revision(rev)) != NULL) {
struct pretty_print_context ctx = {0};
- /* ignore merges */
- if (commit->parents && commit->parents->next)
+ if (commit->parents && commit->parents->next) {
+ /* do not list a merge but count committer */
+ record_person('c', &committers, commit);
continue;
-
+ }
+ if (!count)
+ /* the 'tip' committer */
+ record_person('c', &committers, commit);
+ record_person('a', &authors, commit);
count++;
if (subjects.nr > limit)
continue;
string_list_append(&subjects, strbuf_detach(&sb, NULL));
}
+ add_people_info(out, &authors, &committers);
if (count > limit)
strbuf_addf(out, "\n* %s: (%d commits)\n", name, count);
else
rev->commits = NULL;
rev->pending.nr = 0;
+ string_list_clear(&authors, 0);
+ string_list_clear(&committers, 0);
string_list_clear(&subjects, 0);
}
#include "parse-options.h"
#include "dir.h"
#include "progress.h"
+#include "streaming.h"
#define REACHABLE 0x0001
#define SEEN 0x0002
if (!(f = fopen(filename, "w")))
die_errno("Could not open '%s'", filename);
if (obj->type == OBJ_BLOB) {
- enum object_type type;
- unsigned long size;
- char *buf = read_sha1_file(obj->sha1,
- &type, &size);
- if (buf && fwrite(buf, 1, size, f) != size)
+ if (stream_blob_to_fd(fileno(f), obj->sha1, NULL, 1))
die_errno("Could not write '%s'", filename);
- free(buf);
} else
fprintf(f, "%s\n", sha1_to_hex(obj->sha1));
if (fclose(f))
#include "string-list.h"
#include "parse-options.h"
#include "branch.h"
+#include "streaming.h"
/* Set a default date-time format for git log ("log.date" config variable) */
static const char *default_date_mode = NULL;
strbuf_release(&out);
}
-static int show_object(const unsigned char *sha1, int show_tag_object,
- struct rev_info *rev)
+static int show_blob_object(const unsigned char *sha1, struct rev_info *rev)
+{
+ fflush(stdout);
+ return stream_blob_to_fd(1, sha1, NULL, 0);
+}
+
+static int show_tag_object(const unsigned char *sha1, struct rev_info *rev)
{
unsigned long size;
enum object_type type;
if (!buf)
return error(_("Could not read object %s"), sha1_to_hex(sha1));
- if (show_tag_object)
- while (offset < size && buf[offset] != '\n') {
- int new_offset = offset + 1;
- while (new_offset < size && buf[new_offset++] != '\n')
- ; /* do nothing */
- if (!prefixcmp(buf + offset, "tagger "))
- show_tagger(buf + offset + 7,
- new_offset - offset - 7, rev);
- offset = new_offset;
- }
+ assert(type == OBJ_TAG);
+ while (offset < size && buf[offset] != '\n') {
+ int new_offset = offset + 1;
+ while (new_offset < size && buf[new_offset++] != '\n')
+ ; /* do nothing */
+ if (!prefixcmp(buf + offset, "tagger "))
+ show_tagger(buf + offset + 7,
+ new_offset - offset - 7, rev);
+ offset = new_offset;
+ }
if (offset < size)
fwrite(buf + offset, size - offset, 1, stdout);
const char *name = objects[i].name;
switch (o->type) {
case OBJ_BLOB:
- ret = show_object(o->sha1, 0, NULL);
+ ret = show_blob_object(o->sha1, NULL);
break;
case OBJ_TAG: {
struct tag *t = (struct tag *)o;
diff_get_color_opt(&rev.diffopt, DIFF_COMMIT),
t->tag,
diff_get_color_opt(&rev.diffopt, DIFF_RESET));
- ret = show_object(o->sha1, 1, &rev);
+ ret = show_tag_object(o->sha1, &rev);
rev.shown_one = 1;
if (ret)
break;
static const char **refspec;
static int refspec_nr;
static int refspec_alloc;
+static int default_matching_used;
static void add_refspec(const char *ref)
{
}
}
+static int push_url_of_remote(struct remote *remote, const char ***url_p)
+{
+ if (remote->pushurl_nr) {
+ *url_p = remote->pushurl;
+ return remote->pushurl_nr;
+ }
+ *url_p = remote->url;
+ return remote->url_nr;
+}
+
static void setup_push_upstream(struct remote *remote)
{
struct strbuf refspec = STRBUF_INIT;
"\n"
" git push %s HEAD:<name-of-remote-branch>\n"),
remote->name);
- if (!branch->merge_nr || !branch->merge)
+ if (!branch->merge_nr || !branch->merge || !branch->remote_name)
die(_("The current branch %s has no upstream branch.\n"
"To push the current branch and set the remote as upstream, use\n"
"\n"
if (branch->merge_nr != 1)
die(_("The current branch %s has multiple upstream branches, "
"refusing to push."), branch->name);
+ if (strcmp(branch->remote_name, remote->name))
+ die(_("You are pushing to remote '%s', which is not the upstream of\n"
+ "your current branch '%s', without telling me what to push\n"
+ "to update which remote branch."),
+ remote->name, branch->name);
+
strbuf_addf(&refspec, "%s:%s", branch->name, branch->merge[0]->src);
add_refspec(refspec.buf);
}
{
switch (push_default) {
default:
+ case PUSH_DEFAULT_UNSPECIFIED:
+ default_matching_used = 1;
+ /* fallthru */
case PUSH_DEFAULT_MATCHING:
add_refspec(":");
break;
}
}
+static const char message_advice_pull_before_push[] =
+ N_("Updates were rejected because the tip of your current branch is behind\n"
+ "its remote counterpart. Merge the remote changes (e.g. 'git pull')\n"
+ "before pushing again.\n"
+ "See the 'Note about fast-forwards' in 'git push --help' for details.");
+
+static const char message_advice_use_upstream[] =
+ N_("Updates were rejected because a pushed branch tip is behind its remote\n"
+ "counterpart. If you did not intend to push that branch, you may want to\n"
+ "specify branches to push or set the 'push.default' configuration\n"
+ "variable to 'current' or 'upstream' to push only the current branch.");
+
+static const char message_advice_checkout_pull_push[] =
+ N_("Updates were rejected because a pushed branch tip is behind its remote\n"
+ "counterpart. Check out this branch and merge the remote changes\n"
+ "(e.g. 'git pull') before pushing again.\n"
+ "See the 'Note about fast-forwards' in 'git push --help' for details.");
+
+static void advise_pull_before_push(void)
+{
+ if (!advice_push_non_ff_current || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_pull_before_push));
+}
+
+static void advise_use_upstream(void)
+{
+ if (!advice_push_non_ff_default || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_use_upstream));
+}
+
+static void advise_checkout_pull_push(void)
+{
+ if (!advice_push_non_ff_matching || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_checkout_pull_push));
+}
+
static int push_with_options(struct transport *transport, int flags)
{
int err;
error(_("failed to push some refs to '%s'"), transport->url);
err |= transport_disconnect(transport);
-
if (!err)
return 0;
- if (nonfastforward && advice_push_nonfastforward) {
- fprintf(stderr, _("To prevent you from losing history, non-fast-forward updates were rejected\n"
- "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"
- "'Note about fast-forwards' section of 'git push --help' for details.\n"));
+ switch (nonfastforward) {
+ default:
+ break;
+ case NON_FF_HEAD:
+ advise_pull_before_push();
+ break;
+ case NON_FF_OTHER:
+ if (default_matching_used)
+ advise_use_upstream();
+ else
+ advise_checkout_pull_push();
+ break;
}
return 1;
setup_default_push_refspecs(remote);
}
errs = 0;
- if (remote->pushurl_nr) {
- url = remote->pushurl;
- url_nr = remote->pushurl_nr;
- } else {
- url = remote->url;
- url_nr = remote->url_nr;
- }
+ url_nr = push_url_of_remote(remote, &url);
if (url_nr) {
for (i = 0; i < url_nr; i++) {
struct transport *transport =
const char *arg, int unset)
{
int *flags = opt->value;
+
+ if (*flags & (TRANSPORT_RECURSE_SUBMODULES_CHECK |
+ TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND))
+ die("%s can only be used once.", opt->long_name);
+
if (arg) {
if (!strcmp(arg, "check"))
*flags |= TRANSPORT_RECURSE_SUBMODULES_CHECK;
+ else if (!strcmp(arg, "on-demand"))
+ *flags |= TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND;
else
die("bad %s argument: %s", opt->long_name, arg);
} else
- die("option %s needs an argument (check)", opt->long_name);
+ die("option %s needs an argument (check|on-demand)",
+ opt->long_name);
return 0;
}
static const char * const builtin_remote_usage[] = {
"git remote [-v | --verbose]",
- "git remote add [-t <branch>] [-m <master>] [-f] [--mirror=<fetch|push>] <name> <url>",
+ "git remote add [-t <branch>] [-m <master>] [-f] [--tags|--no-tags] [--mirror=<fetch|push>] <name> <url>",
"git remote rename <old> <new>",
"git remote rm <name>",
"git remote set-head <name> (-a | -d | <branch>)",
"git remote prune [-n | --dry-run] <name>",
"git remote [-v | --verbose] update [-p | --prune] [(<group> | <remote>)...]",
"git remote set-branches [--add] <name> <branch>...",
- "git remote set-url <name> <newurl> [<oldurl>]",
+ "git remote set-url [--push] <name> <newurl> [<oldurl>]",
"git remote set-url --add <name> <newurl>",
"git remote set-url --delete <name> <url>",
NULL
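
The updated usage string documents the '--push' variant of set-url; a typical invocation looks like (the URL is illustrative):

	$ git remote set-url --push origin ssh://git.example.com/repo.git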
if (!strcmp(arg, "--show-prefix")) {
if (prefix)
puts(prefix);
+ else
+ putchar('\n');
continue;
}
if (!strcmp(arg, "--show-cdup")) {
break;
}
}
+ va_end(ap);
if (opt1 && opt2)
die(_("%s: %s cannot be used with %s"), me, opt1, opt2);
OPT_END()
};
+ git_config(git_default_config, NULL);
argc = parse_options(argc, argv, prefix, options,
update_server_info_usage, 0);
if (argc > 0)
PUSH_DEFAULT_NOTHING = 0,
PUSH_DEFAULT_MATCHING,
PUSH_DEFAULT_UPSTREAM,
- PUSH_DEFAULT_CURRENT
+ PUSH_DEFAULT_CURRENT,
+ PUSH_DEFAULT_UNSPECIFIED
};
extern enum branch_track git_branch_track;
#define EMPTY_TREE_SHA1_BIN \
((const unsigned char *) EMPTY_TREE_SHA1_BIN_LITERAL)
+#define EMPTY_BLOB_SHA1_HEX \
+ "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
+#define EMPTY_BLOB_SHA1_BIN_LITERAL \
+ "\xe6\x9d\xe2\x9b\xb2\xd1\xd6\x43\x4b\x8b" \
+ "\x29\xae\x77\x5a\xd8\xc2\xe4\x8c\x53\x91"
+#define EMPTY_BLOB_SHA1_BIN \
+ ((const unsigned char *) EMPTY_BLOB_SHA1_BIN_LITERAL)
+
+static inline int is_empty_blob_sha1(const unsigned char *sha1)
+{
+ return !hashcmp(sha1, EMPTY_BLOB_SHA1_BIN);
+}
+
int git_mkstemp(char *path, size_t n, const char *template);
int git_mkstemps(char *path, size_t n, const char *template, int suffix_len);
extern const char *git_editor(void);
extern const char *git_pager(int stdout_is_tty);
+struct ident_split {
+ const char *name_begin;
+ const char *name_end;
+ const char *mail_begin;
+ const char *mail_end;
+ const char *date_begin;
+ const char *date_end;
+ const char *tz_begin;
+ const char *tz_end;
+};
+/*
+ * Signals success with 0, but the time part of the result may be NULL
+ * if the input lacks timestamp and zone
+ */
+extern int split_ident_line(struct ident_split *, const char *, int);
+
struct checkout {
const char *base_dir;
int base_dir_len;
/* builtin/merge.c */
int checkout_fast_forward(const unsigned char *from, const unsigned char *to);
+int sane_execvp(const char *file, char *const argv[]);
+
#endif /* CACHE_H */
hunk_begin, j);
la = (la + context < cnt + 1) ?
(la + context) : cnt + 1;
- while (j <= --la) {
+ while (la && j <= --la) {
if (sline[la].flag & mark) {
contin = 1;
break;
git-mv mainporcelain common
git-name-rev plumbinginterrogators
git-notes mainporcelain
+git-p4 foreignscminterface
git-pack-objects plumbingmanipulators
git-pack-redundant plumbinginterrogators
git-pack-refs ancillarymanipulators
#include "revision.h"
#include "notes.h"
#include "gpg-interface.h"
+#include "mergesort.h"
int save_commit_buffer = 1;
return new_list;
}
+void commit_list_reverse(struct commit_list **list_p)
+{
+	struct commit_list *prev = NULL, *curr, *next;
+
+	if (!list_p)
+		return;
+	curr = *list_p;
+	while (curr) {
+ next = curr->next;
+ curr->next = prev;
+ prev = curr;
+ curr = next;
+ }
+ *list_p = prev;
+}
+
unsigned commit_list_count(const struct commit_list *l)
{
unsigned c = 0;
return commit_list_insert(item, pp);
}
+static int commit_list_compare_by_date(const void *a, const void *b)
+{
+ unsigned long a_date = ((const struct commit_list *)a)->item->date;
+ unsigned long b_date = ((const struct commit_list *)b)->item->date;
+ if (a_date < b_date)
+ return 1;
+ if (a_date > b_date)
+ return -1;
+ return 0;
+}
+
+static void *commit_list_get_next(const void *a)
+{
+ return ((const struct commit_list *)a)->next;
+}
+
+static void commit_list_set_next(void *a, void *next)
+{
+ ((struct commit_list *)a)->next = next;
+}
void commit_list_sort_by_date(struct commit_list **list)
{
- struct commit_list *ret = NULL;
- while (*list) {
- commit_list_insert_by_date((*list)->item, &ret);
- *list = (*list)->next;
- }
- *list = ret;
+ *list = llist_mergesort(*list, commit_list_get_next, commit_list_set_next,
+ commit_list_compare_by_date);
}
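
This rewrite drops the old quadratic behaviour (inserting each element into a date-ordered list one by one) in favour of the generic linked-list merge sort from mergesort.h, which lines up with the Performance note above about collecting starting points first and sorting once. The same three-callback pattern works for any singly linked structure; a sketch under the assumption of a made-up struct item:

    /* Illustrative sketch of the llist_mergesort() callback pattern. */
    struct item { int key; struct item *next; };

    static void *item_get_next(const void *e)
    {
        return ((const struct item *)e)->next;
    }
    static void item_set_next(void *e, void *next)
    {
        ((struct item *)e)->next = next;
    }
    static int item_cmp(const void *a, const void *b)
    {
        return ((const struct item *)a)->key - ((const struct item *)b)->key;
    }

    /* items = llist_mergesort(items, item_get_next, item_set_next, item_cmp); */
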
struct commit *pop_most_recent_commit(struct commit_list **list,
struct commit_list *commit_list_insert_by_date(struct commit *item,
struct commit_list **list);
void commit_list_sort_by_date(struct commit_list **list);
+void commit_list_reverse(struct commit_list **list_p);
void free_commit_list(struct commit_list *list);
}
}
-void mingw_execvp(const char *cmd, char *const *argv)
+int mingw_execvp(const char *cmd, char *const *argv)
{
char **path = get_path_split();
char *prog = path_lookup(cmd, path, 0);
errno = ENOENT;
free_path_split(path);
+ return -1;
}
-void mingw_execv(const char *cmd, char *const *argv)
+int mingw_execv(const char *cmd, char *const *argv)
{
mingw_execve(cmd, argv, environ);
+ return -1;
}
int mingw_kill(pid_t pid, int sig)
#define S_IWOTH 0
#define S_IXOTH 0
#define S_IRWXO (S_IROTH | S_IWOTH | S_IXOTH)
-#define S_ISUID 0
-#define S_ISGID 0
-#define S_ISVTX 0
+
+#define S_ISUID 0004000
+#define S_ISGID 0002000
+#define S_ISVTX 0001000
#define WIFEXITED(x) 1
#define WIFSIGNALED(x) 0
pid_t mingw_spawnvpe(const char *cmd, const char **argv, char **env,
const char *dir,
int fhin, int fhout, int fherr);
-void mingw_execvp(const char *cmd, char *const *argv);
+int mingw_execvp(const char *cmd, char *const *argv);
#define execvp mingw_execvp
-void mingw_execv(const char *cmd, char *const *argv);
+int mingw_execv(const char *cmd, char *const *argv);
#define execv mingw_execv
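
Changing mingw_execvp()/mingw_execv() to return int brings the Windows wrappers in line with POSIX execvp(3)/execv(3), so callers can rely on the usual "exec only returns on failure" idiom on every platform. A minimal sketch; the error message is illustrative:

    /* Illustrative sketch only. */
    execvp(cmd, argv);      /* returns only if the exec failed */
    die_errno("cannot run '%s'", cmd);
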
static inline unsigned int git_ntohl(unsigned int x)
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
-AC_PREREQ(2.59)
-AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
-
-AC_CONFIG_SRCDIR([git.c])
-
-config_file=config.mak.autogen
-config_append=config.mak.append
-config_in=config.mak.in
-
-echo "# ${config_append}. Generated by configure." > "${config_append}"
-
-## Definitions of macros
+## Definitions of private macros.
# GIT_CONF_APPEND_LINE(LINE)
# --------------------------
# Append LINE to file ${config_append}
AC_DEFUN([GIT_CONF_APPEND_LINE],
-[echo "$1" >> "${config_append}"])# GIT_CONF_APPEND_LINE
-#
+ [echo "$1" >> "${config_append}"])
+
# GIT_ARG_SET_PATH(PROGRAM)
# -------------------------
# Provide --with-PROGRAM=PATH option to set PATH to PROGRAM
# Optional second argument allows setting NO_PROGRAM=YesPlease if
# --without-PROGRAM version used.
AC_DEFUN([GIT_ARG_SET_PATH],
-[AC_ARG_WITH([$1],
- [AS_HELP_STRING([--with-$1=PATH],
- [provide PATH to $1])],
- [GIT_CONF_APPEND_PATH($1,$2)],[])
-])# GIT_ARG_SET_PATH
-#
+ [AC_ARG_WITH([$1],
+ [AS_HELP_STRING([--with-$1=PATH],
+ [provide PATH to $1])],
+ [GIT_CONF_APPEND_PATH([$1], [$2])],
+ [])])
+
# GIT_CONF_APPEND_PATH(PROGRAM)
-# ------------------------------
+# -----------------------------
# Parse --with-PROGRAM=PATH option to set PROGRAM_PATH=PATH
# Used by GIT_ARG_SET_PATH(PROGRAM)
# Optional second argument allows setting NO_PROGRAM=YesPlease if
# --without-PROGRAM is used.
AC_DEFUN([GIT_CONF_APPEND_PATH],
-[PROGRAM=m4_toupper($1); \
-if test "$withval" = "no"; then \
- if test -n "$2"; then \
- m4_toupper($1)_PATH=$withval; \
- AC_MSG_NOTICE([Disabling use of ${PROGRAM}]); \
- GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease); \
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=); \
- else \
- AC_MSG_ERROR([You cannot use git without $1]); \
- fi; \
-else \
- if test "$withval" = "yes"; then \
- AC_MSG_WARN([You should provide path for --with-$1=PATH]); \
- else \
- m4_toupper($1)_PATH=$withval; \
- AC_MSG_NOTICE([Setting m4_toupper($1)_PATH to $withval]); \
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval); \
- fi; \
-fi; \
-]) # GIT_CONF_APPEND_PATH
-#
+ [m4_pushdef([GIT_UC_PROGRAM], m4_toupper([$1]))dnl
+ PROGRAM=GIT_UC_PROGRAM
+ if test "$withval" = "no"; then
+ if test -n "$2"; then
+ GIT_UC_PROGRAM[]_PATH=$withval
+ AC_MSG_NOTICE([Disabling use of ${PROGRAM}])
+ GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease)
+ GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=)
+ else
+ AC_MSG_ERROR([You cannot use git without $1])
+ fi
+ else
+ if test "$withval" = "yes"; then
+ AC_MSG_WARN([You should provide path for --with-$1=PATH])
+ else
+ GIT_UC_PROGRAM[]_PATH=$withval
+ AC_MSG_NOTICE([Setting GIT_UC_PROGRAM[]_PATH to $withval])
+ GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval)
+ fi
+ fi
+ m4_popdef([GIT_UC_PROGRAM])])
+
# GIT_PARSE_WITH(PACKAGE)
# -----------------------
# For use in AC_ARG_WITH action-if-found, for packages default ON.
# * Set PACKAGEDIR=PATH for --with-PACKAGE=PATH
# * Unset NO_PACKAGE for --with-PACKAGE without ARG
AC_DEFUN([GIT_PARSE_WITH],
-[PACKAGE=m4_toupper($1); \
-if test "$withval" = "no"; then \
- m4_toupper(NO_$1)=YesPlease; \
-elif test "$withval" = "yes"; then \
- m4_toupper(NO_$1)=; \
-else \
- m4_toupper(NO_$1)=; \
- m4_toupper($1)DIR=$withval; \
- AC_MSG_NOTICE([Setting m4_toupper($1)DIR to $withval]); \
- GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval); \
-fi \
-])# GIT_PARSE_WITH
-#
+ [m4_pushdef([GIT_UC_PACKAGE], m4_toupper([$1]))dnl
+ PACKAGE=GIT_UC_PACKAGE
+ if test "$withval" = "no"; then
+ NO_[]GIT_UC_PACKAGE=YesPlease
+ elif test "$withval" = "yes"; then
+ NO_[]GIT_UC_PACKAGE=
+ else
+ NO_[]GIT_UC_PACKAGE=
+ GIT_UC_PACKAGE[]DIR=$withval
+ AC_MSG_NOTICE([Setting GIT_UC_PACKAGE[]DIR to $withval])
+ GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval)
+ fi
+ m4_popdef([GIT_UC_PACKAGE])])
+
# GIT_PARSE_WITH_SET_MAKE_VAR(WITHNAME, VAR, HELP_TEXT)
-# ---------------------
+# -----------------------------------------------------
# Set VAR to the value specified by --with-WITHNAME.
# No verification of arguments is performed, but warnings are issued
# if either 'yes' or 'no' is specified.
AC_DEFUN([GIT_PARSE_WITH_SET_MAKE_VAR],
[AC_ARG_WITH([$1],
[AS_HELP_STRING([--with-$1=VALUE], $3)],
- if test -n "$withval"; then \
- if test "$withval" = "yes" -o "$withval" = "no"; then \
+ if test -n "$withval"; then
+ if test "$withval" = "yes" -o "$withval" = "no"; then
AC_MSG_WARN([You likely do not want either 'yes' or 'no' as]
- [a value for $1 ($2). Maybe you do...?]); \
- fi; \
- \
- AC_MSG_NOTICE([Setting $2 to $withval]); \
- GIT_CONF_APPEND_LINE($2=$withval); \
+ [a value for $1 ($2). Maybe you do...?])
+ fi
+ AC_MSG_NOTICE([Setting $2 to $withval])
+ GIT_CONF_APPEND_LINE($2=$withval)
fi)])# GIT_PARSE_WITH_SET_MAKE_VAR
-dnl
-dnl GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
-dnl -----------------------------------------
-dnl Similar to AC_CHECK_FUNC, but on systems that do not generate
-dnl warnings for missing prototypes (e.g. FreeBSD when compiling without
-dnl -Wall), it does not work. By looking for function definition in
-dnl libraries, this problem can be worked around.
+#
+# GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
+# -----------------------------------------
+# Similar to AC_CHECK_FUNC, but on systems that do not generate
+# warnings for missing prototypes (e.g. FreeBSD when compiling without
+# -Wall), it does not work. By looking for function definition in
+# libraries, this problem can be worked around.
AC_DEFUN([GIT_CHECK_FUNC],[AC_CHECK_FUNC([$1],[
AC_SEARCH_LIBS([$1],,
[$2],[$3])
],[$3])])
-dnl
-dnl GIT_STASH_FLAGS(BASEPATH_VAR)
-dnl -----------------------------
-dnl Allow for easy stashing of LDFLAGS and CPPFLAGS before running
-dnl tests that may want to take user settings into account.
+#
+# GIT_STASH_FLAGS(BASEPATH_VAR)
+# -----------------------------
+# Allow for easy stashing of LDFLAGS and CPPFLAGS before running
+# tests that may want to take user settings into account.
AC_DEFUN([GIT_STASH_FLAGS],[
if test -n "$1"; then
old_CPPFLAGS="$CPPFLAGS"
fi
])
+## Configure body starts here.
+
+AC_PREREQ(2.59)
+AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
+
+AC_CONFIG_SRCDIR([git.c])
+
+config_file=config.mak.autogen
+config_append=config.mak.append
+config_in=config.mak.in
+
+echo "# ${config_append}. Generated by configure." > "${config_append}"
+
# Directories holding "saner" versions of common or POSIX binaries.
AC_ARG_WITH([sane-tool-path],
[AS_HELP_STRING(
AC_ARG_WITH([lib],
[AS_HELP_STRING([--with-lib=ARG],
[ARG specifies alternative name for lib directory])],
- [if test "$withval" = "no" || test "$withval" = "yes"; then \
- AC_MSG_WARN([You should provide name for --with-lib=ARG]); \
-else \
- lib=$withval; \
- AC_MSG_NOTICE([Setting lib to '$lib']); \
- GIT_CONF_APPEND_LINE(lib=$withval); \
-fi; \
-],[])
+ [if test "$withval" = "no" || test "$withval" = "yes"; then
+ AC_MSG_WARN([You should provide name for --with-lib=ARG])
+ else
+ lib=$withval
+ AC_MSG_NOTICE([Setting lib to '$lib'])
+ GIT_CONF_APPEND_LINE(lib=$withval)
+ fi])
if test -z "$lib"; then
AC_MSG_NOTICE([Setting lib to 'lib' (the default)])
# /foo/bar/include and /foo/bar/lib directories.
AC_ARG_WITH(openssl,
AS_HELP_STRING([--with-openssl],[use OpenSSL library (default is YES)])
-AS_HELP_STRING([], [ARG can be prefix for openssl library and headers]),\
-GIT_PARSE_WITH(openssl))
-#
+AS_HELP_STRING([], [ARG can be prefix for openssl library and headers]),
+GIT_PARSE_WITH([openssl]))
+
# Define USE_LIBPCRE if you have and want to use libpcre. git-grep will be
# able to use Perl-compatible regular expressions.
#
AC_ARG_WITH(libpcre,
AS_HELP_STRING([--with-libpcre],[support Perl-compatible regexes (default is NO)])
AS_HELP_STRING([], [ARG can be also prefix for libpcre library and headers]),
-if test "$withval" = "no"; then \
- USE_LIBPCRE=; \
-elif test "$withval" = "yes"; then \
- USE_LIBPCRE=YesPlease; \
-else
- USE_LIBPCRE=YesPlease; \
- LIBPCREDIR=$withval; \
- AC_MSG_NOTICE([Setting LIBPCREDIR to $withval]); \
- GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval); \
-fi \
-)
+ if test "$withval" = "no"; then
+ USE_LIBPCRE=
+ elif test "$withval" = "yes"; then
+ USE_LIBPCRE=YesPlease
+ else
+ USE_LIBPCRE=YesPlease
+ LIBPCREDIR=$withval
+ AC_MSG_NOTICE([Setting LIBPCREDIR to $withval])
+ GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval)
+ fi)
#
# Define NO_CURL if you do not have curl installed. git-http-pull and
# git-http-push are not built, and you cannot use http:// and https://
AS_HELP_STRING([--with-tcltk],[use Tcl/Tk GUI (default is YES)])
AS_HELP_STRING([],[ARG is the full path to the Tcl/Tk interpreter.])
AS_HELP_STRING([],[Bare --with-tcltk will make the GUI part only if])
-AS_HELP_STRING([],[Tcl/Tk interpreter will be found in a system.]),\
+AS_HELP_STRING([],[Tcl/Tk interpreter will be found in a system.]),
GIT_PARSE_WITH(tcltk))
#
+++ /dev/null
-#!/usr/bin/env python
-#
-# git-p4.py -- A tool for bidirectional operation between a Perforce depot and git.
-#
-# Author: Simon Hausmann <simon@lst.de>
-# Copyright: 2007 Simon Hausmann <simon@lst.de>
-# 2007 Trolltech ASA
-# License: MIT <http://www.opensource.org/licenses/mit-license.php>
-#
-
-import optparse, sys, os, marshal, subprocess, shelve
-import tempfile, getopt, os.path, time, platform
-import re, shutil
-
-verbose = False
-
-
-def p4_build_cmd(cmd):
- """Build a suitable p4 command line.
-
- This consolidates building and returning a p4 command line into one
- location. It means that hooking into the environment, or other configuration
- can be done more easily.
- """
- real_cmd = ["p4"]
-
- user = gitConfig("git-p4.user")
- if len(user) > 0:
- real_cmd += ["-u",user]
-
- password = gitConfig("git-p4.password")
- if len(password) > 0:
- real_cmd += ["-P", password]
-
- port = gitConfig("git-p4.port")
- if len(port) > 0:
- real_cmd += ["-p", port]
-
- host = gitConfig("git-p4.host")
- if len(host) > 0:
- real_cmd += ["-H", host]
-
- client = gitConfig("git-p4.client")
- if len(client) > 0:
- real_cmd += ["-c", client]
-
-
- if isinstance(cmd,basestring):
- real_cmd = ' '.join(real_cmd) + ' ' + cmd
- else:
- real_cmd += cmd
- return real_cmd
-
-def chdir(dir):
- # P4 uses the PWD environment variable rather than getcwd(). Since we're
- # not using the shell, we have to set it ourselves. This path could
- # be relative, so go there first, then figure out where we ended up.
- os.chdir(dir)
- os.environ['PWD'] = os.getcwd()
-
-def die(msg):
- if verbose:
- raise Exception(msg)
- else:
- sys.stderr.write(msg + "\n")
- sys.exit(1)
-
-def write_pipe(c, stdin):
- if verbose:
- sys.stderr.write('Writing pipe: %s\n' % str(c))
-
- expand = isinstance(c,basestring)
- p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand)
- pipe = p.stdin
- val = pipe.write(stdin)
- pipe.close()
- if p.wait():
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_write_pipe(c, stdin):
- real_cmd = p4_build_cmd(c)
- return write_pipe(real_cmd, stdin)
-
-def read_pipe(c, ignore_error=False):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % str(c))
-
- expand = isinstance(c,basestring)
- p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
- pipe = p.stdout
- val = pipe.read()
- if p.wait() and not ignore_error:
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_read_pipe(c, ignore_error=False):
- real_cmd = p4_build_cmd(c)
- return read_pipe(real_cmd, ignore_error)
-
-def read_pipe_lines(c):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % str(c))
-
- expand = isinstance(c, basestring)
- p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
- pipe = p.stdout
- val = pipe.readlines()
- if pipe.close() or p.wait():
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_read_pipe_lines(c):
- """Specifically invoke p4 on the command supplied. """
- real_cmd = p4_build_cmd(c)
- return read_pipe_lines(real_cmd)
-
-def system(cmd):
- expand = isinstance(cmd,basestring)
- if verbose:
- sys.stderr.write("executing %s\n" % str(cmd))
- subprocess.check_call(cmd, shell=expand)
-
-def p4_system(cmd):
- """Specifically invoke p4 as the system command. """
- real_cmd = p4_build_cmd(cmd)
- expand = isinstance(real_cmd, basestring)
- subprocess.check_call(real_cmd, shell=expand)
-
-def p4_integrate(src, dest):
- p4_system(["integrate", "-Dt", src, dest])
-
-def p4_sync(path):
- p4_system(["sync", path])
-
-def p4_add(f):
- p4_system(["add", f])
-
-def p4_delete(f):
- p4_system(["delete", f])
-
-def p4_edit(f):
- p4_system(["edit", f])
-
-def p4_revert(f):
- p4_system(["revert", f])
-
-def p4_reopen(type, file):
- p4_system(["reopen", "-t", type, file])
-
-#
-# Canonicalize the p4 type and return a tuple of the
-# base type, plus any modifiers. See "p4 help filetypes"
-# for a list and explanation.
-#
-def split_p4_type(p4type):
-
- p4_filetypes_historical = {
- "ctempobj": "binary+Sw",
- "ctext": "text+C",
- "cxtext": "text+Cx",
- "ktext": "text+k",
- "kxtext": "text+kx",
- "ltext": "text+F",
- "tempobj": "binary+FSw",
- "ubinary": "binary+F",
- "uresource": "resource+F",
- "uxbinary": "binary+Fx",
- "xbinary": "binary+x",
- "xltext": "text+Fx",
- "xtempobj": "binary+Swx",
- "xtext": "text+x",
- "xunicode": "unicode+x",
- "xutf16": "utf16+x",
- }
- if p4type in p4_filetypes_historical:
- p4type = p4_filetypes_historical[p4type]
- mods = ""
- s = p4type.split("+")
- base = s[0]
- mods = ""
- if len(s) > 1:
- mods = s[1]
- return (base, mods)
-
-#
-# return the raw p4 type of a file (text, text+ko, etc)
-#
-def p4_type(file):
- results = p4CmdList(["fstat", "-T", "headType", file])
- return results[0]['headType']
-
-#
-# Given a type base and modifier, return a regexp matching
-# the keywords that can be expanded in the file
-#
-def p4_keywords_regexp_for_type(base, type_mods):
- if base in ("text", "unicode", "binary"):
- kwords = None
- if "ko" in type_mods:
- kwords = 'Id|Header'
- elif "k" in type_mods:
- kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision'
- else:
- return None
- pattern = r"""
- \$ # Starts with a dollar, followed by...
- (%s) # one of the keywords, followed by...
- (:[^$]+)? # possibly an old expansion, followed by...
- \$ # another dollar
- """ % kwords
- return pattern
- else:
- return None
-
-#
-# Given a file, return a regexp matching the possible
-# RCS keywords that will be expanded, or None for files
-# with kw expansion turned off.
-#
-def p4_keywords_regexp_for_file(file):
- if not os.path.exists(file):
- return None
- else:
- (type_base, type_mods) = split_p4_type(p4_type(file))
- return p4_keywords_regexp_for_type(type_base, type_mods)
-
-def setP4ExecBit(file, mode):
- # Reopens an already open file and changes the execute bit to match
- # the execute bit setting in the passed in mode.
-
- p4Type = "+x"
-
- if not isModeExec(mode):
- p4Type = getP4OpenedType(file)
- p4Type = re.sub('^([cku]?)x(.*)', '\\1\\2', p4Type)
- p4Type = re.sub('(.*?\+.*?)x(.*?)', '\\1\\2', p4Type)
- if p4Type[-1] == "+":
- p4Type = p4Type[0:-1]
-
- p4_reopen(p4Type, file)
-
-def getP4OpenedType(file):
- # Returns the perforce file type for the given file.
-
- result = p4_read_pipe(["opened", file])
- match = re.match(".*\((.+)\)\r?$", result)
- if match:
- return match.group(1)
- else:
- die("Could not determine file type for %s (result: '%s')" % (file, result))
-
-def diffTreePattern():
- # This is a simple generator for the diff tree regex pattern. This could be
- # a class variable if this and parseDiffTreeEntry were a part of a class.
- pattern = re.compile(':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')
- while True:
- yield pattern
-
-def parseDiffTreeEntry(entry):
- """Parses a single diff tree entry into its component elements.
-
- See git-diff-tree(1) manpage for details about the format of the diff
- output. This method returns a dictionary with the following elements:
-
- src_mode - The mode of the source file
- dst_mode - The mode of the destination file
- src_sha1 - The sha1 for the source file
- dst_sha1 - The sha1 fr the destination file
- status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
- status_score - The score for the status (applicable for 'C' and 'R'
- statuses). This is None if there is no score.
- src - The path for the source file.
- dst - The path for the destination file. This is only present for
- copy or renames. If it is not present, this is None.
-
- If the pattern is not matched, None is returned."""
-
- match = diffTreePattern().next().match(entry)
- if match:
- return {
- 'src_mode': match.group(1),
- 'dst_mode': match.group(2),
- 'src_sha1': match.group(3),
- 'dst_sha1': match.group(4),
- 'status': match.group(5),
- 'status_score': match.group(6),
- 'src': match.group(7),
- 'dst': match.group(10)
- }
- return None
-
-def isModeExec(mode):
- # Returns True if the given git mode represents an executable file,
- # otherwise False.
- return mode[-3:] == "755"
-
-def isModeExecChanged(src_mode, dst_mode):
- return isModeExec(src_mode) != isModeExec(dst_mode)
-
-def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None):
-
- if isinstance(cmd,basestring):
- cmd = "-G " + cmd
- expand = True
- else:
- cmd = ["-G"] + cmd
- expand = False
-
- cmd = p4_build_cmd(cmd)
- if verbose:
- sys.stderr.write("Opening pipe: %s\n" % str(cmd))
-
- # Use a temporary file to avoid deadlocks without
- # subprocess.communicate(), which would put another copy
- # of stdout into memory.
- stdin_file = None
- if stdin is not None:
- stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode)
- if isinstance(stdin,basestring):
- stdin_file.write(stdin)
- else:
- for i in stdin:
- stdin_file.write(i + '\n')
- stdin_file.flush()
- stdin_file.seek(0)
-
- p4 = subprocess.Popen(cmd,
- shell=expand,
- stdin=stdin_file,
- stdout=subprocess.PIPE)
-
- result = []
- try:
- while True:
- entry = marshal.load(p4.stdout)
- if cb is not None:
- cb(entry)
- else:
- result.append(entry)
- except EOFError:
- pass
- exitCode = p4.wait()
- if exitCode != 0:
- entry = {}
- entry["p4ExitCode"] = exitCode
- result.append(entry)
-
- return result
-
-def p4Cmd(cmd):
- list = p4CmdList(cmd)
- result = {}
- for entry in list:
- result.update(entry)
- return result;
-
-def p4Where(depotPath):
- if not depotPath.endswith("/"):
- depotPath += "/"
- depotPath = depotPath + "..."
- outputList = p4CmdList(["where", depotPath])
- output = None
- for entry in outputList:
- if "depotFile" in entry:
- if entry["depotFile"] == depotPath:
- output = entry
- break
- elif "data" in entry:
- data = entry.get("data")
- space = data.find(" ")
- if data[:space] == depotPath:
- output = entry
- break
- if output == None:
- return ""
- if output["code"] == "error":
- return ""
- clientPath = ""
- if "path" in output:
- clientPath = output.get("path")
- elif "data" in output:
- data = output.get("data")
- lastSpace = data.rfind(" ")
- clientPath = data[lastSpace + 1:]
-
- if clientPath.endswith("..."):
- clientPath = clientPath[:-3]
- return clientPath
-
-def currentGitBranch():
- return read_pipe("git name-rev HEAD").split(" ")[1].strip()
-
-def isValidGitDir(path):
- if (os.path.exists(path + "/HEAD")
- and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
- return True;
- return False
-
-def parseRevision(ref):
- return read_pipe("git rev-parse %s" % ref).strip()
-
-def branchExists(ref):
- rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref],
- ignore_error=True)
- return len(rev) > 0
-
-def extractLogMessageFromGitCommit(commit):
- logMessage = ""
-
- ## fixme: title is first line of commit, not 1st paragraph.
- foundTitle = False
- for log in read_pipe_lines("git cat-file commit %s" % commit):
- if not foundTitle:
- if len(log) == 1:
- foundTitle = True
- continue
-
- logMessage += log
- return logMessage
-
-def extractSettingsGitLog(log):
- values = {}
- for line in log.split("\n"):
- line = line.strip()
- m = re.search (r"^ *\[git-p4: (.*)\]$", line)
- if not m:
- continue
-
- assignments = m.group(1).split (':')
- for a in assignments:
- vals = a.split ('=')
- key = vals[0].strip()
- val = ('='.join (vals[1:])).strip()
- if val.endswith ('\"') and val.startswith('"'):
- val = val[1:-1]
-
- values[key] = val
-
- paths = values.get("depot-paths")
- if not paths:
- paths = values.get("depot-path")
- if paths:
- values['depot-paths'] = paths.split(',')
- return values
-
-def gitBranchExists(branch):
- proc = subprocess.Popen(["git", "rev-parse", branch],
- stderr=subprocess.PIPE, stdout=subprocess.PIPE);
- return proc.wait() == 0;
-
-_gitConfig = {}
-def gitConfig(key, args = None): # set args to "--bool", for instance
- if not _gitConfig.has_key(key):
- argsFilter = ""
- if args != None:
- argsFilter = "%s " % args
- cmd = "git config %s%s" % (argsFilter, key)
- _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip()
- return _gitConfig[key]
-
-def gitConfigList(key):
- if not _gitConfig.has_key(key):
- _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep)
- return _gitConfig[key]
-
-def p4BranchesInGit(branchesAreInRemotes = True):
- branches = {}
-
- cmdline = "git rev-parse --symbolic "
- if branchesAreInRemotes:
- cmdline += " --remotes"
- else:
- cmdline += " --branches"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- ## only import to p4/
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- # strip off p4
- branch = re.sub ("^p4/", "", line)
-
- branches[branch] = parseRevision(line)
- return branches
-
-def findUpstreamBranchPoint(head = "HEAD"):
- branches = p4BranchesInGit()
- # map from depot-path to branch name
- branchByDepotPath = {}
- for branch in branches.keys():
- tip = branches[branch]
- log = extractLogMessageFromGitCommit(tip)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- branchByDepotPath[paths] = "remotes/p4/" + branch
-
- settings = None
- parent = 0
- while parent < 65535:
- commit = head + "~%s" % parent
- log = extractLogMessageFromGitCommit(commit)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- if branchByDepotPath.has_key(paths):
- return [branchByDepotPath[paths], settings]
-
- parent = parent + 1
-
- return ["", settings]
-
-def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
- if not silent:
- print ("Creating/updating branch(es) in %s based on origin branch(es)"
- % localRefPrefix)
-
- originPrefix = "origin/p4/"
-
- for line in read_pipe_lines("git rev-parse --symbolic --remotes"):
- line = line.strip()
- if (not line.startswith(originPrefix)) or line.endswith("HEAD"):
- continue
-
- headName = line[len(originPrefix):]
- remoteHead = localRefPrefix + headName
- originHead = line
-
- original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
- if (not original.has_key('depot-paths')
- or not original.has_key('change')):
- continue
-
- update = False
- if not gitBranchExists(remoteHead):
- if verbose:
- print "creating %s" % remoteHead
- update = True
- else:
- settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
- if settings.has_key('change') > 0:
- if settings['depot-paths'] == original['depot-paths']:
- originP4Change = int(original['change'])
- p4Change = int(settings['change'])
- if originP4Change > p4Change:
- print ("%s (%s) is newer than %s (%s). "
- "Updating p4 branch from origin."
- % (originHead, originP4Change,
- remoteHead, p4Change))
- update = True
- else:
- print ("Ignoring: %s was imported from %s while "
- "%s was imported from %s"
- % (originHead, ','.join(original['depot-paths']),
- remoteHead, ','.join(settings['depot-paths'])))
-
- if update:
- system("git update-ref %s %s" % (remoteHead, originHead))
-
-def originP4BranchesExist():
- return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
-
-def p4ChangesForPaths(depotPaths, changeRange):
- assert depotPaths
- cmd = ['changes']
- for p in depotPaths:
- cmd += ["%s...%s" % (p, changeRange)]
- output = p4_read_pipe_lines(cmd)
-
- changes = {}
- for line in output:
- changeNum = int(line.split(" ")[1])
- changes[changeNum] = True
-
- changelist = changes.keys()
- changelist.sort()
- return changelist
-
-def p4PathStartsWith(path, prefix):
- # This method tries to remedy a potential mixed-case issue:
- #
- # If UserA adds //depot/DirA/file1
- # and UserB adds //depot/dira/file2
- #
- # we may or may not have a problem. If you have core.ignorecase=true,
- # we treat DirA and dira as the same directory
- ignorecase = gitConfig("core.ignorecase", "--bool") == "true"
- if ignorecase:
- return path.lower().startswith(prefix.lower())
- return path.startswith(prefix)
-
-def getClientSpec():
- """Look at the p4 client spec, create a View() object that contains
- all the mappings, and return it."""
-
- specList = p4CmdList("client -o")
- if len(specList) != 1:
- die('Output from "client -o" is %d lines, expecting 1' %
- len(specList))
-
- # dictionary of all client parameters
- entry = specList[0]
-
- # just the keys that start with "View"
- view_keys = [ k for k in entry.keys() if k.startswith("View") ]
-
- # hold this new View
- view = View()
-
- # append the lines, in order, to the view
- for view_num in range(len(view_keys)):
- k = "View%d" % view_num
- if k not in view_keys:
- die("Expected view key %s missing" % k)
- view.append(entry[k])
-
- return view
-
-def getClientRoot():
- """Grab the client directory."""
-
- output = p4CmdList("client -o")
- if len(output) != 1:
- die('Output from "client -o" is %d lines, expecting 1' % len(output))
-
- entry = output[0]
- if "Root" not in entry:
- die('Client has no "Root"')
-
- return entry["Root"]
-
-class Command:
- def __init__(self):
- self.usage = "usage: %prog [options]"
- self.needsGit = True
-
-class P4UserMap:
- def __init__(self):
- self.userMapFromPerforceServer = False
- self.myP4UserId = None
-
- def p4UserId(self):
- if self.myP4UserId:
- return self.myP4UserId
-
- results = p4CmdList("user -o")
- for r in results:
- if r.has_key('User'):
- self.myP4UserId = r['User']
- return r['User']
- die("Could not find your p4 user id")
-
- def p4UserIsMe(self, p4User):
- # return True if the given p4 user is actually me
- me = self.p4UserId()
- if not p4User or p4User != me:
- return False
- else:
- return True
-
- def getUserCacheFilename(self):
- home = os.environ.get("HOME", os.environ.get("USERPROFILE"))
- return home + "/.gitp4-usercache.txt"
-
- def getUserMapFromPerforceServer(self):
- if self.userMapFromPerforceServer:
- return
- self.users = {}
- self.emails = {}
-
- for output in p4CmdList("users"):
- if not output.has_key("User"):
- continue
- self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
- self.emails[output["Email"]] = output["User"]
-
-
- s = ''
- for (key, val) in self.users.items():
- s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
-
- open(self.getUserCacheFilename(), "wb").write(s)
- self.userMapFromPerforceServer = True
-
- def loadUserMapFromCache(self):
- self.users = {}
- self.userMapFromPerforceServer = False
- try:
- cache = open(self.getUserCacheFilename(), "rb")
- lines = cache.readlines()
- cache.close()
- for line in lines:
- entry = line.strip().split("\t")
- self.users[entry[0]] = entry[1]
- except IOError:
- self.getUserMapFromPerforceServer()
-
-class P4Debug(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true",
- default=False),
- ]
- self.description = "A tool to debug the output of p4 -G."
- self.needsGit = False
- self.verbose = False
-
- def run(self, args):
- j = 0
- for output in p4CmdList(args):
- print 'Element: %d' % j
- j += 1
- print output
- return True
-
-class P4RollBack(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--local", dest="rollbackLocalBranches", action="store_true")
- ]
- self.description = "A tool to debug the multi-branch import. Don't use :)"
- self.verbose = False
- self.rollbackLocalBranches = False
-
- def run(self, args):
- if len(args) != 1:
- return False
- maxChange = int(args[0])
-
- if "p4ExitCode" in p4Cmd("changes -m 1"):
- die("Problems executing p4");
-
- if self.rollbackLocalBranches:
- refPrefix = "refs/heads/"
- lines = read_pipe_lines("git rev-parse --symbolic --branches")
- else:
- refPrefix = "refs/remotes/"
- lines = read_pipe_lines("git rev-parse --symbolic --remotes")
-
- for line in lines:
- if self.rollbackLocalBranches or (line.startswith("p4/") and line != "p4/HEAD\n"):
- line = line.strip()
- ref = refPrefix + line
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- changed = False
-
- if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
- for p in depotPaths]))) == 0:
- print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
- system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
- continue
-
- while change and int(change) > maxChange:
- changed = True
- if self.verbose:
- print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
- system("git update-ref %s \"%s^\"" % (ref, ref))
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- if changed:
- print "%s rewound to %s" % (ref, change)
-
- return True
-
-class P4Submit(Command, P4UserMap):
- def __init__(self):
- Command.__init__(self)
- P4UserMap.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--origin", dest="origin"),
- optparse.make_option("-M", dest="detectRenames", action="store_true"),
- # preserve the user, requires relevant p4 permissions
- optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"),
- ]
- self.description = "Submit changes from git to the perforce depot."
- self.usage += " [name of git branch to submit into perforce depot]"
- self.interactive = True
- self.origin = ""
- self.detectRenames = False
- self.verbose = False
- self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
- self.isWindows = (platform.system() == "Windows")
-
- def check(self):
- if len(p4CmdList("opened ...")) > 0:
- die("You have files opened with perforce! Close them before starting the sync.")
-
- # replaces everything between 'Description:' and the next P4 submit template field with the
- # commit message
- def prepareLogMessage(self, template, message):
- result = ""
-
- inDescriptionSection = False
-
- for line in template.split("\n"):
- if line.startswith("#"):
- result += line + "\n"
- continue
-
- if inDescriptionSection:
- if line.startswith("Files:") or line.startswith("Jobs:"):
- inDescriptionSection = False
- else:
- continue
- else:
- if line.startswith("Description:"):
- inDescriptionSection = True
- line += "\n"
- for messageLine in message.split("\n"):
- line += "\t" + messageLine + "\n"
-
- result += line + "\n"
-
- return result
-
- def patchRCSKeywords(self, file, pattern):
- # Attempt to zap the RCS keywords in a p4 controlled file matching the given pattern
- (handle, outFileName) = tempfile.mkstemp(dir='.')
- try:
- outFile = os.fdopen(handle, "w+")
- inFile = open(file, "r")
- regexp = re.compile(pattern, re.VERBOSE)
- for line in inFile.readlines():
- line = regexp.sub(r'$\1$', line)
- outFile.write(line)
- inFile.close()
- outFile.close()
- # Forcibly overwrite the original file
- os.unlink(file)
- shutil.move(outFileName, file)
- except:
- # cleanup our temporary file
- os.unlink(outFileName)
- print "Failed to strip RCS keywords in %s" % file
- raise
-
- print "Patched up RCS keywords in %s" % file
-
- def p4UserForCommit(self,id):
- # Return the tuple (perforce user,git email) for a given git commit id
- self.getUserMapFromPerforceServer()
- gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id)
- gitEmail = gitEmail.strip()
- if not self.emails.has_key(gitEmail):
- return (None,gitEmail)
- else:
- return (self.emails[gitEmail],gitEmail)
-
- def checkValidP4Users(self,commits):
- # check if any git authors cannot be mapped to p4 users
- for id in commits:
- (user,email) = self.p4UserForCommit(id)
- if not user:
- msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
- if gitConfig('git-p4.allowMissingP4Users').lower() == "true":
- print "%s" % msg
- else:
- die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
-
- def lastP4Changelist(self):
- # Get back the last changelist number submitted in this client spec. This
- # then gets used to patch up the username in the change. If the same
- # client spec is being used by multiple processes then this might go
- # wrong.
- results = p4CmdList("client -o") # find the current client
- client = None
- for r in results:
- if r.has_key('Client'):
- client = r['Client']
- break
- if not client:
- die("could not get client spec")
- results = p4CmdList(["changes", "-c", client, "-m", "1"])
- for r in results:
- if r.has_key('change'):
- return r['change']
- die("Could not get changelist number for last submit - cannot patch up user details")
-
- def modifyChangelistUser(self, changelist, newUser):
- # fixup the user field of a changelist after it has been submitted.
- changes = p4CmdList("change -o %s" % changelist)
- if len(changes) != 1:
- die("Bad output from p4 change modifying %s to user %s" %
- (changelist, newUser))
-
- c = changes[0]
- if c['User'] == newUser: return # nothing to do
- c['User'] = newUser
- input = marshal.dumps(c)
-
- result = p4CmdList("change -f -i", stdin=input)
- for r in result:
- if r.has_key('code'):
- if r['code'] == 'error':
- die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data']))
- if r.has_key('data'):
- print("Updated user field for changelist %s to %s" % (changelist, newUser))
- return
- die("Could not modify user field of changelist %s to %s" % (changelist, newUser))
-
- def canChangeChangelists(self):
- # check to see if we have p4 admin or super-user permissions, either of
- # which are required to modify changelists.
- results = p4CmdList(["protects", self.depotPath])
- for r in results:
- if r.has_key('perm'):
- if r['perm'] == 'admin':
- return 1
- if r['perm'] == 'super':
- return 1
- return 0
-
- def prepareSubmitTemplate(self):
- # remove lines in the Files section that show changes to files outside the depot path we're committing into
- template = ""
- inFilesSection = False
- for line in p4_read_pipe_lines(['change', '-o']):
- if line.endswith("\r\n"):
- line = line[:-2] + "\n"
- if inFilesSection:
- if line.startswith("\t"):
- # path starts and ends with a tab
- path = line[1:]
- lastTab = path.rfind("\t")
- if lastTab != -1:
- path = path[:lastTab]
- if not p4PathStartsWith(path, self.depotPath):
- continue
- else:
- inFilesSection = False
- else:
- if line.startswith("Files:"):
- inFilesSection = True
-
- template += line
-
- return template
-
- def edit_template(self, template_file):
- """Invoke the editor to let the user change the submission
- message. Return true if okay to continue with the submit."""
-
- # if configured to skip the editing part, just submit
- if gitConfig("git-p4.skipSubmitEdit") == "true":
- return True
-
- # look at the modification time, to check later if the user saved
- # the file
- mtime = os.stat(template_file).st_mtime
-
- # invoke the editor
- if os.environ.has_key("P4EDITOR"):
- editor = os.environ.get("P4EDITOR")
- else:
- editor = read_pipe("git var GIT_EDITOR").strip()
- system(editor + " " + template_file)
-
- # If the file was not saved, prompt to see if this patch should
- # be skipped. But skip this verification step if configured so.
- if gitConfig("git-p4.skipSubmitEditCheck") == "true":
- return True
-
- # modification time updated means user saved the file
- if os.stat(template_file).st_mtime > mtime:
- return True
-
- while True:
- response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
- if response == 'y':
- return True
- if response == 'n':
- return False
-
- def applyCommit(self, id):
- print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
-
- (p4User, gitEmail) = self.p4UserForCommit(id)
-
- if not self.detectRenames:
- # If not explicitly set check the config variable
- self.detectRenames = gitConfig("git-p4.detectRenames")
-
- if self.detectRenames.lower() == "false" or self.detectRenames == "":
- diffOpts = ""
- elif self.detectRenames.lower() == "true":
- diffOpts = "-M"
- else:
- diffOpts = "-M%s" % self.detectRenames
-
- detectCopies = gitConfig("git-p4.detectCopies")
- if detectCopies.lower() == "true":
- diffOpts += " -C"
- elif detectCopies != "" and detectCopies.lower() != "false":
- diffOpts += " -C%s" % detectCopies
-
- if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
- diffOpts += " --find-copies-harder"
-
- diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
- filesToAdd = set()
- filesToDelete = set()
- editedFiles = set()
- filesToChangeExecBit = {}
-
- for line in diff:
- diff = parseDiffTreeEntry(line)
- modifier = diff['status']
- path = diff['src']
- if modifier == "M":
- p4_edit(path)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- filesToChangeExecBit[path] = diff['dst_mode']
- editedFiles.add(path)
- elif modifier == "A":
- filesToAdd.add(path)
- filesToChangeExecBit[path] = diff['dst_mode']
- if path in filesToDelete:
- filesToDelete.remove(path)
- elif modifier == "D":
- filesToDelete.add(path)
- if path in filesToAdd:
- filesToAdd.remove(path)
- elif modifier == "C":
- src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
- filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
- editedFiles.add(dest)
- elif modifier == "R":
- src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
- filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
- editedFiles.add(dest)
- filesToDelete.add(src)
- else:
- die("unknown modifier %s for %s" % (modifier, path))
-
- diffcmd = "git format-patch -k --stdout \"%s^\"..\"%s\"" % (id, id)
- patchcmd = diffcmd + " | git apply "
- tryPatchCmd = patchcmd + "--check -"
- applyPatchCmd = patchcmd + "--check --apply -"
- patch_succeeded = True
-
- if os.system(tryPatchCmd) != 0:
- fixed_rcs_keywords = False
- patch_succeeded = False
- print "Unfortunately applying the change failed!"
-
- # Patch failed, maybe it's just RCS keyword woes. Look through
- # the patch to see if that's possible.
- if gitConfig("git-p4.attemptRCSCleanup","--bool") == "true":
- file = None
- pattern = None
- kwfiles = {}
- for file in editedFiles | filesToDelete:
- # did this file's delta contain RCS keywords?
- pattern = p4_keywords_regexp_for_file(file)
-
- if pattern:
- # this file is a possibility...look for RCS keywords.
- regexp = re.compile(pattern, re.VERBOSE)
- for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]):
- if regexp.search(line):
- if verbose:
- print "got keyword match on %s in %s in %s" % (pattern, line, file)
- kwfiles[file] = pattern
- break
-
- for file in kwfiles:
- if verbose:
- print "zapping %s with %s" % (line,pattern)
- self.patchRCSKeywords(file, kwfiles[file])
- fixed_rcs_keywords = True
-
- if fixed_rcs_keywords:
- print "Retrying the patch with RCS keywords cleaned up"
- if os.system(tryPatchCmd) == 0:
- patch_succeeded = True
-
- if not patch_succeeded:
- print "What do you want to do?"
- response = "x"
- while response != "s" and response != "a" and response != "w":
- response = raw_input("[s]kip this patch / [a]pply the patch forcibly "
- "and with .rej files / [w]rite the patch to a file (patch.txt) ")
- if response == "s":
- print "Skipping! Good luck with the next patches..."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- os.remove(f)
- return
- elif response == "a":
- os.system(applyPatchCmd)
- if len(filesToAdd) > 0:
- print "You may also want to call p4 add on the following files:"
- print " ".join(filesToAdd)
- if len(filesToDelete):
- print "The following files should be scheduled for deletion with p4 delete:"
- print " ".join(filesToDelete)
- die("Please resolve and submit the conflict manually and "
- + "continue afterwards with git-p4 submit --continue")
- elif response == "w":
- system(diffcmd + " > patch.txt")
- print "Patch saved to patch.txt in %s !" % self.clientPath
- die("Please resolve and submit the conflict manually and "
- "continue afterwards with git-p4 submit --continue")
-
- system(applyPatchCmd)
-
- for f in filesToAdd:
- p4_add(f)
- for f in filesToDelete:
- p4_revert(f)
- p4_delete(f)
-
- # Set/clear executable bits
- for f in filesToChangeExecBit.keys():
- mode = filesToChangeExecBit[f]
- setP4ExecBit(f, mode)
-
- logMessage = extractLogMessageFromGitCommit(id)
- logMessage = logMessage.strip()
-
- template = self.prepareSubmitTemplate()
-
- if self.interactive:
- submitTemplate = self.prepareLogMessage(template, logMessage)
-
- if self.preserveUser:
- submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
-
- if os.environ.has_key("P4DIFF"):
- del(os.environ["P4DIFF"])
- diff = ""
- for editedFile in editedFiles:
- diff += p4_read_pipe(['diff', '-du', editedFile])
-
- newdiff = ""
- for newFile in filesToAdd:
- newdiff += "==== new file ====\n"
- newdiff += "--- /dev/null\n"
- newdiff += "+++ %s\n" % newFile
- f = open(newFile, "r")
- for line in f.readlines():
- newdiff += "+" + line
- f.close()
-
- if self.checkAuthorship and not self.p4UserIsMe(p4User):
- submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
- submitTemplate += "######## Use git-p4 option --preserve-user to modify authorship\n"
- submitTemplate += "######## Use git-p4 config git-p4.skipUserNameCheck hides this message.\n"
-
- separatorLine = "######## everything below this line is just the diff #######\n"
-
- (handle, fileName) = tempfile.mkstemp()
- tmpFile = os.fdopen(handle, "w+")
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\n", "\r\n")
- separatorLine = separatorLine.replace("\n", "\r\n")
- newdiff = newdiff.replace("\n", "\r\n")
- tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
- tmpFile.close()
-
- if self.edit_template(fileName):
- # read the edited message and submit
- tmpFile = open(fileName, "rb")
- message = tmpFile.read()
- tmpFile.close()
- submitTemplate = message[:message.index(separatorLine)]
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\r\n", "\n")
- p4_write_pipe(['submit', '-i'], submitTemplate)
-
- if self.preserveUser:
- if p4User:
- # Get last changelist number. Cannot easily get it from
- # the submit command output as the output is
- # unmarshalled.
- changelist = self.lastP4Changelist()
- self.modifyChangelistUser(changelist, p4User)
- else:
- # skip this patch
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- p4_revert(f)
- os.remove(f)
-
- os.remove(fileName)
- else:
- fileName = "submit.txt"
- file = open(fileName, "w+")
- file.write(self.prepareLogMessage(template, logMessage))
- file.close()
- print ("Perforce submit template written as %s. "
- + "Please review/edit and then use p4 submit -i < %s to submit directly!"
- % (fileName, fileName))
-
- def run(self, args):
- if len(args) == 0:
- self.master = currentGitBranch()
- if len(self.master) == 0 or not gitBranchExists("refs/heads/%s" % self.master):
- die("Detecting current git branch failed!")
- elif len(args) == 1:
- self.master = args[0]
- if not branchExists(self.master):
- die("Branch %s does not exist" % self.master)
- else:
- return False
-
- allowSubmit = gitConfig("git-p4.allowSubmit")
- if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
- die("%s is not in git-p4.allowSubmit" % self.master)
-
- [upstream, settings] = findUpstreamBranchPoint()
- self.depotPath = settings['depot-paths'][0]
- if len(self.origin) == 0:
- self.origin = upstream
-
- if self.preserveUser:
- if not self.canChangeChangelists():
- die("Cannot preserve user names without p4 super-user or admin permissions")
-
- if self.verbose:
- print "Origin branch is " + self.origin
-
- if len(self.depotPath) == 0:
- print "Internal error: cannot locate perforce depot path from existing branches"
- sys.exit(128)
-
- self.useClientSpec = False
- if gitConfig("git-p4.useclientspec", "--bool") == "true":
- self.useClientSpec = True
- if self.useClientSpec:
- self.clientSpecDirs = getClientSpec()
-
- if self.useClientSpec:
- # all files are relative to the client spec
- self.clientPath = getClientRoot()
- else:
- self.clientPath = p4Where(self.depotPath)
-
- if self.clientPath == "":
- die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath)
-
- print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
- self.oldWorkingDirectory = os.getcwd()
-
- # ensure the clientPath exists
- if not os.path.exists(self.clientPath):
- os.makedirs(self.clientPath)
-
- chdir(self.clientPath)
- print "Synchronizing p4 checkout..."
- p4_sync("...")
- self.check()
-
- commits = []
- for line in read_pipe_lines("git rev-list --no-merges %s..%s" % (self.origin, self.master)):
- commits.append(line.strip())
- commits.reverse()
-
- if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"):
- self.checkAuthorship = False
- else:
- self.checkAuthorship = True
-
- if self.preserveUser:
- self.checkValidP4Users(commits)
-
- while len(commits) > 0:
- commit = commits[0]
- commits = commits[1:]
- self.applyCommit(commit)
- if not self.interactive:
- break
-
- if len(commits) == 0:
- print "All changes applied!"
- chdir(self.oldWorkingDirectory)
-
- sync = P4Sync()
- sync.run([])
-
- rebase = P4Rebase()
- rebase.rebase()
-
- return True
-
-class View(object):
- """Represent a p4 view ("p4 help views"), and map files in a
- repo according to the view."""
-
- class Path(object):
- """A depot or client path, possibly containing wildcards.
- The only one supported is ... at the end, currently.
- Initialize with the full path, with //depot or //client."""
-
- def __init__(self, path, is_depot):
- self.path = path
- self.is_depot = is_depot
- self.find_wildcards()
- # remember the prefix bit, useful for relative mappings
- m = re.match("(//[^/]+/)", self.path)
- if not m:
- die("Path %s does not start with //prefix/" % self.path)
- prefix = m.group(1)
- if not self.is_depot:
- # strip //client/ on client paths
- self.path = self.path[len(prefix):]
-
- def find_wildcards(self):
- """Make sure wildcards are valid, and set up internal
- variables."""
-
- self.ends_triple_dot = False
- # There are three wildcards allowed in p4 views
- # (see "p4 help views"). This code knows how to
- # handle "..." (only at the end), but cannot deal with
- # "%%n" or "*". Only check the depot_side, as p4 should
- # validate that the client_side matches too.
- if re.search(r'%%[1-9]', self.path):
- die("Can't handle %%n wildcards in view: %s" % self.path)
- if self.path.find("*") >= 0:
- die("Can't handle * wildcards in view: %s" % self.path)
- triple_dot_index = self.path.find("...")
- if triple_dot_index >= 0:
- if triple_dot_index != len(self.path) - 3:
- die("Can handle only single ... wildcard, at end: %s" %
- self.path)
- self.ends_triple_dot = True
-
- def ensure_compatible(self, other_path):
- """Make sure the wildcards agree."""
- if self.ends_triple_dot != other_path.ends_triple_dot:
- die("Both paths must end with ... if either does;\n" +
- "paths: %s %s" % (self.path, other_path.path))
-
- def match_wildcards(self, test_path):
- """See if this test_path matches us, and fill in the value
- of the wildcards if so. Returns a tuple of
- (True|False, wildcards[]). For now, only the ... at end
- is supported, so at most one wildcard."""
- if self.ends_triple_dot:
- dotless = self.path[:-3]
- if test_path.startswith(dotless):
- wildcard = test_path[len(dotless):]
- return (True, [ wildcard ])
- else:
- if test_path == self.path:
- return (True, [])
- return (False, [])
-
- def match(self, test_path):
- """Just return if it matches; don't bother with the wildcards."""
- b, _ = self.match_wildcards(test_path)
- return b
-
- def fill_in_wildcards(self, wildcards):
- """Return the relative path, with the wildcards filled in
- if there are any."""
- if self.ends_triple_dot:
- return self.path[:-3] + wildcards[0]
- else:
- return self.path
-
- class Mapping(object):
- def __init__(self, depot_side, client_side, overlay, exclude):
- # depot_side is without the trailing /... if it had one
- self.depot_side = View.Path(depot_side, is_depot=True)
- self.client_side = View.Path(client_side, is_depot=False)
- self.overlay = overlay # started with "+"
- self.exclude = exclude # started with "-"
- assert not (self.overlay and self.exclude)
- self.depot_side.ensure_compatible(self.client_side)
-
- def __str__(self):
- c = " "
- if self.overlay:
- c = "+"
- if self.exclude:
- c = "-"
- return "View.Mapping: %s%s -> %s" % \
- (c, self.depot_side.path, self.client_side.path)
-
- def map_depot_to_client(self, depot_path):
- """Calculate the client path if using this mapping on the
- given depot path; does not consider the effect of other
- mappings in a view. Even excluded mappings are returned."""
- matches, wildcards = self.depot_side.match_wildcards(depot_path)
- if not matches:
- return ""
- client_path = self.client_side.fill_in_wildcards(wildcards)
- return client_path
-
- #
- # View methods
- #
- def __init__(self):
- self.mappings = []
-
- def append(self, view_line):
- """Parse a view line, splitting it into depot and client
- sides. Append to self.mappings, preserving order."""
-
- # Split the view line into exactly two words. P4 enforces
- # structure on these lines that simplifies this quite a bit.
- #
- # Either or both words may be double-quoted.
- # Single quotes do not matter.
- # Double-quote marks cannot occur inside the words.
- # A + or - prefix is also inside the quotes.
- # There are no quotes unless they contain a space.
- # The line is already white-space stripped.
- # The two words are separated by a single space.
- #
- if view_line[0] == '"':
- # First word is double quoted. Find its end.
- close_quote_index = view_line.find('"', 1)
- if close_quote_index <= 0:
- die("No first-word closing quote found: %s" % view_line)
- depot_side = view_line[1:close_quote_index]
- # skip closing quote and space
- rhs_index = close_quote_index + 1 + 1
- else:
- space_index = view_line.find(" ")
- if space_index <= 0:
- die("No word-splitting space found: %s" % view_line)
- depot_side = view_line[0:space_index]
- rhs_index = space_index + 1
-
- if view_line[rhs_index] == '"':
- # Second word is double quoted. Make sure there is a
- # double quote at the end too.
- if not view_line.endswith('"'):
- die("View line with rhs quote should end with one: %s" %
- view_line)
- # skip the quotes
- client_side = view_line[rhs_index+1:-1]
- else:
- client_side = view_line[rhs_index:]
-
- # prefix + means overlay on previous mapping
- overlay = False
- if depot_side.startswith("+"):
- overlay = True
- depot_side = depot_side[1:]
-
- # prefix - means exclude this path
- exclude = False
- if depot_side.startswith("-"):
- exclude = True
- depot_side = depot_side[1:]
-
- m = View.Mapping(depot_side, client_side, overlay, exclude)
- self.mappings.append(m)
-
- def map_in_client(self, depot_path):
- """Return the relative location in the client where this
- depot file should live. Returns "" if the file should
- not be mapped in the client."""
-
- paths_filled = []
- client_path = ""
-
- # look at later entries first
- for m in self.mappings[::-1]:
-
- # see where will this path end up in the client
- p = m.map_depot_to_client(depot_path)
-
- if p == "":
- # Depot path does not belong in client. Must remember
- # this, as previous items should not cause files to
- # exist in this path either. Remember that the list is
- # being walked from the end, which has higher precedence.
- # Overlap mappings do not exclude previous mappings.
- if not m.overlay:
- paths_filled.append(m.client_side)
-
- else:
- # This mapping matched; no need to search any further.
- # But, the mapping could be rejected if the client path
- # has already been claimed by an earlier mapping (i.e.
- # one later in the list, which we are walking backwards).
- already_mapped_in_client = False
- for f in paths_filled:
- # this is View.Path.match
- if f.match(p):
- already_mapped_in_client = True
- break
- if not already_mapped_in_client:
- # Include this file, unless it is from a line that
- # explicitly said to exclude it.
- if not m.exclude:
- client_path = p
-
- # a match, even if rejected, always stops the search
- break
-
- return client_path
-
-class P4Sync(Command, P4UserMap):
- delete_actions = ( "delete", "move/delete", "purge" )
-
- def __init__(self):
- Command.__init__(self)
- P4UserMap.__init__(self)
- self.options = [
- optparse.make_option("--branch", dest="branch"),
- optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"),
- optparse.make_option("--changesfile", dest="changesFile"),
- optparse.make_option("--silent", dest="silent", action="store_true"),
- optparse.make_option("--detect-labels", dest="detectLabels", action="store_true"),
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--import-local", dest="importIntoRemotes", action="store_false",
- help="Import into refs/heads/ , not refs/remotes"),
- optparse.make_option("--max-changes", dest="maxChanges"),
- optparse.make_option("--keep-path", dest="keepRepoPath", action='store_true',
- help="Keep entire BRANCH/DIR/SUBDIR prefix during import"),
- optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
- help="Only sync files that are included in the Perforce Client Spec")
- ]
- self.description = """Imports from Perforce into a git repository.\n
- example:
- //depot/my/project/ -- to import the current head
- //depot/my/project/@all -- to import everything
- //depot/my/project/@1,6 -- to import only from revision 1 to 6
-
- (a ... is not needed in the path p4 specification, it's added implicitly)"""
-
- self.usage += " //depot/path[@revRange]"
- self.silent = False
- self.createdBranches = set()
- self.committedChanges = set()
- self.branch = ""
- self.detectBranches = False
- self.detectLabels = False
- self.changesFile = ""
- self.syncWithOrigin = True
- self.verbose = False
- self.importIntoRemotes = True
- self.maxChanges = ""
- self.isWindows = (platform.system() == "Windows")
- self.keepRepoPath = False
- self.depotPaths = None
- self.p4BranchesInGit = []
- self.cloneExclude = []
- self.useClientSpec = False
- self.useClientSpec_from_options = False
- self.clientSpecDirs = None
- self.tempBranches = []
- self.tempBranchLocation = "git-p4-tmp"
-
- if gitConfig("git-p4.syncFromOrigin") == "false":
- self.syncWithOrigin = False
-
- #
- # P4 wildcards are not allowed in filenames. P4 complains
- # if you simply add them, but you can force it with "-f", in
- # which case it translates them into %xx encoding internally.
- # Search for and fix just these four characters. Do % last so
- # that fixing it does not inadvertently create new %-escapes.
- #
- def wildcard_decode(self, path):
- # Cannot have * in a filename in windows; untested as to
- # what p4 would do in such a case.
- if not self.isWindows:
- path = path.replace("%2A", "*")
- path = path.replace("%23", "#") \
- .replace("%40", "@") \
- .replace("%25", "%")
- return path
-
- # Force a checkpoint in fast-import and wait for it to finish
- def checkpoint(self):
- self.gitStream.write("checkpoint\n\n")
- self.gitStream.write("progress checkpoint\n\n")
- out = self.gitOutput.readline()
- if self.verbose:
- print "checkpoint finished: " + out
-
- def extractFilesFromCommit(self, commit):
- self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
- for path in self.cloneExclude]
- files = []
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
-
- if [p for p in self.cloneExclude
- if p4PathStartsWith(path, p)]:
- found = False
- else:
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- files.append(file)
- fnum = fnum + 1
- return files
-
- def stripRepoPath(self, path, prefixes):
- if self.useClientSpec:
- return self.clientSpecDirs.map_in_client(path)
-
- if self.keepRepoPath:
- prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])]
-
- for p in prefixes:
- if p4PathStartsWith(path, p):
- path = path[len(p):]
-
- return path
-
- def splitFilesIntoBranches(self, commit):
- branches = {}
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- fnum = fnum + 1
-
- relPath = self.stripRepoPath(path, self.depotPaths)
-
- for branch in self.knownBranches.keys():
-
- # add a trailing slash so that a commit into qt/4.2foo doesn't end up in qt/4.2
- if relPath.startswith(branch + "/"):
- if branch not in branches:
- branches[branch] = []
- branches[branch].append(file)
- break
-
- return branches
-
- # output one file from the P4 stream
- # - helper for streamP4Files
-
- def streamOneP4File(self, file, contents):
- relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes)
- relPath = self.wildcard_decode(relPath)
- if verbose:
- sys.stderr.write("%s\n" % relPath)
-
- (type_base, type_mods) = split_p4_type(file["type"])
-
- git_mode = "100644"
- if "x" in type_mods:
- git_mode = "100755"
- if type_base == "symlink":
- git_mode = "120000"
- # p4 print on a symlink contains "target\n"; remove the newline
- data = ''.join(contents)
- contents = [data[:-1]]
-
- if type_base == "utf16":
- # p4 delivers different text in the python output to -G
- # than it does when using "print -o", or normal p4 client
- # operations. utf16 is converted to ascii or utf8, perhaps.
- # But ascii text saved as -t utf16 is completely mangled.
- # Invoke print -o to get the real contents.
- text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']])
- contents = [ text ]
-
- if type_base == "apple":
- # Apple filetype files will be streamed as a concatenation of
- # its appledouble header and the contents. This is useless
- # on both macs and non-macs. If using "print -q -o xx", it
- # will create "xx" with the data, and "%xx" with the header.
- # This is also not very useful.
- #
- # Ideally, someday, this script can learn how to generate
- # appledouble files directly and import those to git, but
- # non-mac machines can never find a use for apple filetype.
- print "\nIgnoring apple filetype file %s" % file['depotFile']
- return
-
- # Perhaps windows wants unicode, utf16 newlines translated too;
- # but this is not doing it.
- if self.isWindows and type_base == "text":
- mangled = []
- for data in contents:
- data = data.replace("\r\n", "\n")
- mangled.append(data)
- contents = mangled
-
- # Note that we do not try to de-mangle keywords on utf16 files,
- # even though in theory somebody may want that.
- pattern = p4_keywords_regexp_for_type(type_base, type_mods)
- if pattern:
- regexp = re.compile(pattern, re.VERBOSE)
- text = ''.join(contents)
- text = regexp.sub(r'$\1$', text)
- contents = [ text ]
-
- self.gitStream.write("M %s inline %s\n" % (git_mode, relPath))
-
- # total length...
- length = 0
- for d in contents:
- length = length + len(d)
-
- self.gitStream.write("data %d\n" % length)
- for d in contents:
- self.gitStream.write(d)
- self.gitStream.write("\n")
-
- def streamOneP4Deletion(self, file):
- relPath = self.stripRepoPath(file['path'], self.branchPrefixes)
- if verbose:
- sys.stderr.write("delete %s\n" % relPath)
- self.gitStream.write("D %s\n" % relPath)
-
- # handle another chunk of streaming data
- def streamP4FilesCb(self, marshalled):
-
- if marshalled.has_key('depotFile') and self.stream_have_file_info:
- # start of a new file - output the old one first
- self.streamOneP4File(self.stream_file, self.stream_contents)
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # pick up the new file information... for the
- # 'data' field we need to append to our array
- for k in marshalled.keys():
- if k == 'data':
- self.stream_contents.append(marshalled['data'])
- else:
- self.stream_file[k] = marshalled[k]
-
- self.stream_have_file_info = True
-
- # Stream directly from "p4 files" into "git fast-import"
- def streamP4Files(self, files):
- filesForCommit = []
- filesToRead = []
- filesToDelete = []
-
- for f in files:
- # if using a client spec, only add the files that have
- # a path in the client
- if self.clientSpecDirs:
- if self.clientSpecDirs.map_in_client(f['path']) == "":
- continue
-
- filesForCommit.append(f)
- if f['action'] in self.delete_actions:
- filesToDelete.append(f)
- else:
- filesToRead.append(f)
-
- # deleted files...
- for f in filesToDelete:
- self.streamOneP4Deletion(f)
-
- if len(filesToRead) > 0:
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # curry self argument
- def streamP4FilesCbSelf(entry):
- self.streamP4FilesCb(entry)
-
- fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead]
-
- p4CmdList(["-x", "-", "print"],
- stdin=fileArgs,
- cb=streamP4FilesCbSelf)
-
- # do the last chunk
- if self.stream_file.has_key('depotFile'):
- self.streamOneP4File(self.stream_file, self.stream_contents)
-
- def make_email(self, userid):
- if userid in self.users:
- return self.users[userid]
- else:
- return "%s <a@b>" % userid
-
- def commit(self, details, files, branch, branchPrefixes, parent = ""):
- epoch = details["time"]
- author = details["user"]
- self.branchPrefixes = branchPrefixes
-
- if self.verbose:
- print "commit into %s" % branch
-
- # start with reading files; if that fails, we should not
- # create a commit.
- new_files = []
- for f in files:
- if [p for p in branchPrefixes if p4PathStartsWith(f['path'], p)]:
- new_files.append (f)
- else:
- sys.stderr.write("Ignoring file outside of prefix: %s\n" % f['path'])
-
- self.gitStream.write("commit %s\n" % branch)
-# gitStream.write("mark :%s\n" % details["change"])
- self.committedChanges.add(int(details["change"]))
- committer = ""
- if author not in self.users:
- self.getUserMapFromPerforceServer()
- committer = "%s %s %s" % (self.make_email(author), epoch, self.tz)
-
- self.gitStream.write("committer %s\n" % committer)
-
- self.gitStream.write("data <<EOT\n")
- self.gitStream.write(details["desc"])
- self.gitStream.write("\n[git-p4: depot-paths = \"%s\": change = %s"
- % (','.join (branchPrefixes), details["change"]))
- if len(details['options']) > 0:
- self.gitStream.write(": options = %s" % details['options'])
- self.gitStream.write("]\nEOT\n\n")
-
- if len(parent) > 0:
- if self.verbose:
- print "parent %s" % parent
- self.gitStream.write("from %s\n" % parent)
-
- self.streamP4Files(new_files)
- self.gitStream.write("\n")
-
- change = int(details["change"])
-
- if self.labels.has_key(change):
- label = self.labels[change]
- labelDetails = label[0]
- labelRevisions = label[1]
- if self.verbose:
- print "Change %s is labelled %s" % (change, labelDetails)
-
- files = p4CmdList(["files"] + ["%s...@%s" % (p, change)
- for p in branchPrefixes])
-
- if len(files) == len(labelRevisions):
-
- cleanedFiles = {}
- for info in files:
- if info["action"] in self.delete_actions:
- continue
- cleanedFiles[info["depotFile"]] = info["rev"]
-
- if cleanedFiles == labelRevisions:
- self.gitStream.write("tag tag_%s\n" % labelDetails["label"])
- self.gitStream.write("from %s\n" % branch)
-
- owner = labelDetails["Owner"]
-
- # Try to use the owner of the p4 label, or failing that,
- # the current p4 user id.
- if owner:
- email = self.make_email(owner)
- else:
- email = self.make_email(self.p4UserId())
- tagger = "%s %s %s" % (email, epoch, self.tz)
-
- self.gitStream.write("tagger %s\n" % tagger)
-
- description = labelDetails["Description"]
- self.gitStream.write("data %d\n" % len(description))
- self.gitStream.write(description)
- self.gitStream.write("\n")
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: files do not match."
- % (labelDetails["label"], change))
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: file count is different."
- % (labelDetails["label"], change))
-
- def getLabels(self):
- self.labels = {}
-
- l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
- if len(l) > 0 and not self.silent:
- print "Finding files belonging to labels in %s" % `self.depotPaths`
-
- for output in l:
- label = output["label"]
- revisions = {}
- newestChange = 0
- if self.verbose:
- print "Querying files for label %s" % label
- for file in p4CmdList(["files"] +
- ["%s...@%s" % (p, label)
- for p in self.depotPaths]):
- revisions[file["depotFile"]] = file["rev"]
- change = int(file["change"])
- if change > newestChange:
- newestChange = change
-
- self.labels[newestChange] = [output, revisions]
-
- if self.verbose:
- print "Label changes: %s" % self.labels.keys()
-
- def guessProjectName(self):
- for p in self.depotPaths:
- if p.endswith("/"):
- p = p[:-1]
- p = p[p.strip().rfind("/") + 1:]
- if not p.endswith("/"):
- p += "/"
- return p
-
- def getBranchMapping(self):
- lostAndFoundBranches = set()
-
- user = gitConfig("git-p4.branchUser")
- if len(user) > 0:
- command = "branches -u %s" % user
- else:
- command = "branches"
-
- for info in p4CmdList(command):
- details = p4Cmd(["branch", "-o", info["branch"]])
- viewIdx = 0
- while details.has_key("View%s" % viewIdx):
- paths = details["View%s" % viewIdx].split(" ")
- viewIdx = viewIdx + 1
- # require standard //depot/foo/... //depot/bar/... mapping
- if len(paths) != 2 or not paths[0].endswith("/...") or not paths[1].endswith("/..."):
- continue
- source = paths[0]
- destination = paths[1]
- ## HACK
- if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]):
- source = source[len(self.depotPaths[0]):-4]
- destination = destination[len(self.depotPaths[0]):-4]
-
- if destination in self.knownBranches:
- if not self.silent:
- print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
- print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
- continue
-
- self.knownBranches[destination] = source
-
- lostAndFoundBranches.discard(destination)
-
- if source not in self.knownBranches:
- lostAndFoundBranches.add(source)
-
- # Perforce does not strictly require branches to be defined, so we also
- # check git config for a branch list.
- #
- # Example of branch definition in git config file:
- # [git-p4]
- # branchList=main:branchA
- # branchList=main:branchB
- # branchList=branchA:branchC
- configBranches = gitConfigList("git-p4.branchList")
- for branch in configBranches:
- if branch:
- (source, destination) = branch.split(":")
- self.knownBranches[destination] = source
-
- lostAndFoundBranches.discard(destination)
-
- if source not in self.knownBranches:
- lostAndFoundBranches.add(source)
-
-
- for branch in lostAndFoundBranches:
- self.knownBranches[branch] = branch
-
- def getBranchMappingFromGitBranches(self):
- branches = p4BranchesInGit(self.importIntoRemotes)
- for branch in branches.keys():
- if branch == "master":
- branch = "main"
- else:
- branch = branch[len(self.projectName):]
- self.knownBranches[branch] = branch
-
- def listExistingP4GitBranches(self):
- # branches holds mapping from name to commit
- branches = p4BranchesInGit(self.importIntoRemotes)
- self.p4BranchesInGit = branches.keys()
- for branch in branches.keys():
- self.initialParents[self.refPrefix + branch] = branches[branch]
-
- def updateOptionDict(self, d):
- option_keys = {}
- if self.keepRepoPath:
- option_keys['keepRepoPath'] = 1
-
- d["options"] = ' '.join(sorted(option_keys.keys()))
-
- def readOptions(self, d):
- self.keepRepoPath = (d.has_key('options')
- and ('keepRepoPath' in d['options']))
-
- def gitRefForBranch(self, branch):
- if branch == "main":
- return self.refPrefix + "master"
-
- if len(branch) <= 0:
- return branch
-
- return self.refPrefix + self.projectName + branch
-
- def gitCommitByP4Change(self, ref, change):
- if self.verbose:
- print "looking in ref " + ref + " for change %s using bisect..." % change
-
- earliestCommit = ""
- latestCommit = parseRevision(ref)
-
- while True:
- if self.verbose:
- print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
- next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
- if len(next) == 0:
- if self.verbose:
- print "argh"
- return ""
- log = extractLogMessageFromGitCommit(next)
- settings = extractSettingsGitLog(log)
- currentChange = int(settings['change'])
- if self.verbose:
- print "current change %s" % currentChange
-
- if currentChange == change:
- if self.verbose:
- print "found %s" % next
- return next
-
- if currentChange < change:
- earliestCommit = "^%s" % next
- else:
- latestCommit = "%s" % next
-
- return ""
-
- def importNewBranch(self, branch, maxChange):
- # make fast-import flush all changes to disk and update the refs using the checkpoint
- # command so that we can try to find the branch parent in the git history
- self.gitStream.write("checkpoint\n\n");
- self.gitStream.flush();
- branchPrefix = self.depotPaths[0] + branch + "/"
- range = "@1,%s" % maxChange
- #print "prefix" + branchPrefix
- changes = p4ChangesForPaths([branchPrefix], range)
- if len(changes) <= 0:
- return False
- firstChange = changes[0]
- #print "first change in branch: %s" % firstChange
- sourceBranch = self.knownBranches[branch]
- sourceDepotPath = self.depotPaths[0] + sourceBranch
- sourceRef = self.gitRefForBranch(sourceBranch)
- #print "source " + sourceBranch
-
- branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"])
- #print "branch parent: %s" % branchParentChange
- gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
- if len(gitParent) > 0:
- self.initialParents[self.gitRefForBranch(branch)] = gitParent
- #print "parent git commit: %s" % gitParent
-
- self.importChanges(changes)
- return True
-
- def searchParent(self, parent, branch, target):
- parentFound = False
- for blob in read_pipe_lines(["git", "rev-list", "--reverse", "--no-merges", parent]):
- blob = blob.strip()
- if len(read_pipe(["git", "diff-tree", blob, target])) == 0:
- parentFound = True
- if self.verbose:
- print "Found parent of %s in commit %s" % (branch, blob)
- break
- if parentFound:
- return blob
- else:
- return None
-
- def importChanges(self, changes):
- cnt = 1
- for change in changes:
- description = p4Cmd(["describe", str(change)])
- self.updateOptionDict(description)
-
- if not self.silent:
- sys.stdout.write("\rImporting revision %s (%s%%)" % (change, cnt * 100 / len(changes)))
- sys.stdout.flush()
- cnt = cnt + 1
-
- try:
- if self.detectBranches:
- branches = self.splitFilesIntoBranches(description)
- for branch in branches.keys():
- ## HACK --hwn
- branchPrefix = self.depotPaths[0] + branch + "/"
-
- parent = ""
-
- filesForCommit = branches[branch]
-
- if self.verbose:
- print "branch is %s" % branch
-
- self.updatedBranches.add(branch)
-
- if branch not in self.createdBranches:
- self.createdBranches.add(branch)
- parent = self.knownBranches[branch]
- if parent == branch:
- parent = ""
- else:
- fullBranch = self.projectName + branch
- if fullBranch not in self.p4BranchesInGit:
- if not self.silent:
- print("\n Importing new branch %s" % fullBranch);
- if self.importNewBranch(branch, change - 1):
- parent = ""
- self.p4BranchesInGit.append(fullBranch)
- if not self.silent:
- print("\n Resuming with change %s" % change);
-
- if self.verbose:
- print "parent determined through known branches: %s" % parent
-
- branch = self.gitRefForBranch(branch)
- parent = self.gitRefForBranch(parent)
-
- if self.verbose:
- print "looking for initial parent for %s; current parent is %s" % (branch, parent)
-
- if len(parent) == 0 and branch in self.initialParents:
- parent = self.initialParents[branch]
- del self.initialParents[branch]
-
- blob = None
- if len(parent) > 0:
- tempBranch = os.path.join(self.tempBranchLocation, "%d" % (change))
- if self.verbose:
- print "Creating temporary branch: " + tempBranch
- self.commit(description, filesForCommit, tempBranch, [branchPrefix])
- self.tempBranches.append(tempBranch)
- self.checkpoint()
- blob = self.searchParent(parent, branch, tempBranch)
- if blob:
- self.commit(description, filesForCommit, branch, [branchPrefix], blob)
- else:
- if self.verbose:
- print "Parent of %s not found. Committing into head of %s" % (branch, parent)
- self.commit(description, filesForCommit, branch, [branchPrefix], parent)
- else:
- files = self.extractFilesFromCommit(description)
- self.commit(description, files, self.branch, self.depotPaths,
- self.initialParent)
- self.initialParent = ""
- except IOError:
- print self.gitError.read()
- sys.exit(1)
-
- def importHeadRevision(self, revision):
- print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
-
- details = {}
- details["user"] = "git perforce import user"
- details["desc"] = ("Initial import of %s from the state at revision %s\n"
- % (' '.join(self.depotPaths), revision))
- details["change"] = revision
- newestRevision = 0
-
- fileCnt = 0
- fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths]
-
- for info in p4CmdList(["files"] + fileArgs):
-
- if 'code' in info and info['code'] == 'error':
- sys.stderr.write("p4 returned an error: %s\n"
- % info['data'])
- if info['data'].find("must refer to client") >= 0:
- sys.stderr.write("This particular p4 error is misleading.\n")
- sys.stderr.write("Perhaps the depot path was misspelled.\n");
- sys.stderr.write("Depot path: %s\n" % " ".join(self.depotPaths))
- sys.exit(1)
- if 'p4ExitCode' in info:
- sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode'])
- sys.exit(1)
-
-
- change = int(info["change"])
- if change > newestRevision:
- newestRevision = change
-
- if info["action"] in self.delete_actions:
- # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
- #fileCnt = fileCnt + 1
- continue
-
- for prop in ["depotFile", "rev", "action", "type" ]:
- details["%s%s" % (prop, fileCnt)] = info[prop]
-
- fileCnt = fileCnt + 1
-
- details["change"] = newestRevision
-
- # Use time from top-most change so that all git-p4 clones of
- # the same p4 repo have the same commit SHA1s.
- res = p4CmdList("describe -s %d" % newestRevision)
- newestTime = None
- for r in res:
- if r.has_key('time'):
- newestTime = int(r['time'])
- if newestTime is None:
- die("\"describe -s\" on newest change %d did not give a time")
- details["time"] = newestTime
-
- self.updateOptionDict(details)
- try:
- self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths)
- except IOError:
- print "IO error with git fast-import. Is your git version recent enough?"
- print self.gitError.read()
-
-
- def run(self, args):
- self.depotPaths = []
- self.changeRange = ""
- self.initialParent = ""
- self.previousDepotPaths = []
-
- # map from branch depot path to parent branch
- self.knownBranches = {}
- self.initialParents = {}
- self.hasOrigin = originP4BranchesExist()
- if not self.syncWithOrigin:
- self.hasOrigin = False
-
- if self.importIntoRemotes:
- self.refPrefix = "refs/remotes/p4/"
- else:
- self.refPrefix = "refs/heads/p4/"
-
- if self.syncWithOrigin and self.hasOrigin:
- if not self.silent:
- print "Syncing with origin first by calling git fetch origin"
- system("git fetch origin")
-
- if len(self.branch) == 0:
- self.branch = self.refPrefix + "master"
- if gitBranchExists("refs/heads/p4") and self.importIntoRemotes:
- system("git update-ref %s refs/heads/p4" % self.branch)
- system("git branch -D p4");
- # create it /after/ importing, when master exists
- if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch):
- system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch))
-
- # accept either the command-line option, or the configuration variable
- if self.useClientSpec:
- # will use this after clone to set the variable
- self.useClientSpec_from_options = True
- else:
- if gitConfig("git-p4.useclientspec", "--bool") == "true":
- self.useClientSpec = True
- if self.useClientSpec:
- self.clientSpecDirs = getClientSpec()
-
- # TODO: should always look at previous commits,
- # merge with previous imports, if possible.
- if args == []:
- if self.hasOrigin:
- createOrUpdateBranchesFromOrigin(self.refPrefix, self.silent)
- self.listExistingP4GitBranches()
-
- if len(self.p4BranchesInGit) > 1:
- if not self.silent:
- print "Importing from/into multiple branches"
- self.detectBranches = True
-
- if self.verbose:
- print "branches: %s" % self.p4BranchesInGit
-
- p4Change = 0
- for branch in self.p4BranchesInGit:
- logMsg = extractLogMessageFromGitCommit(self.refPrefix + branch)
-
- settings = extractSettingsGitLog(logMsg)
-
- self.readOptions(settings)
- if (settings.has_key('depot-paths')
- and settings.has_key ('change')):
- change = int(settings['change']) + 1
- p4Change = max(p4Change, change)
-
- depotPaths = sorted(settings['depot-paths'])
- if self.previousDepotPaths == []:
- self.previousDepotPaths = depotPaths
- else:
- paths = []
- for (prev, cur) in zip(self.previousDepotPaths, depotPaths):
- prev_list = prev.split("/")
- cur_list = cur.split("/")
- for i in range(0, min(len(cur_list), len(prev_list))):
- if cur_list[i] <> prev_list[i]:
- i = i - 1
- break
-
- paths.append ("/".join(cur_list[:i + 1]))
-
- self.previousDepotPaths = paths
-
- if p4Change > 0:
- self.depotPaths = sorted(self.previousDepotPaths)
- self.changeRange = "@%s,#head" % p4Change
- if not self.detectBranches:
- self.initialParent = parseRevision(self.branch)
- if not self.silent and not self.detectBranches:
- print "Performing incremental import into %s git branch" % self.branch
-
- if not self.branch.startswith("refs/"):
- self.branch = "refs/heads/" + self.branch
-
- if len(args) == 0 and self.depotPaths:
- if not self.silent:
- print "Depot paths: %s" % ' '.join(self.depotPaths)
- else:
- if self.depotPaths and self.depotPaths != args:
- print ("previous import used depot path %s and now %s was specified. "
- "This doesn't work!" % (' '.join (self.depotPaths),
- ' '.join (args)))
- sys.exit(1)
-
- self.depotPaths = sorted(args)
-
- revision = ""
- self.users = {}
-
- # Make sure no revision specifiers are used when --changesfile
- # is specified.
- bad_changesfile = False
- if len(self.changesFile) > 0:
- for p in self.depotPaths:
- if p.find("@") >= 0 or p.find("#") >= 0:
- bad_changesfile = True
- break
- if bad_changesfile:
- die("Option --changesfile is incompatible with revision specifiers")
-
- newPaths = []
- for p in self.depotPaths:
- if p.find("@") != -1:
- atIdx = p.index("@")
- self.changeRange = p[atIdx:]
- if self.changeRange == "@all":
- self.changeRange = ""
- elif ',' not in self.changeRange:
- revision = self.changeRange
- self.changeRange = ""
- p = p[:atIdx]
- elif p.find("#") != -1:
- hashIdx = p.index("#")
- revision = p[hashIdx:]
- p = p[:hashIdx]
- elif self.previousDepotPaths == []:
- # pay attention to changesfile, if given, else import
- # the entire p4 tree at the head revision
- if len(self.changesFile) == 0:
- revision = "#head"
-
- p = re.sub ("\.\.\.$", "", p)
- if not p.endswith("/"):
- p += "/"
-
- newPaths.append(p)
-
- self.depotPaths = newPaths
-
-
- self.loadUserMapFromCache()
- self.labels = {}
- if self.detectLabels:
- self.getLabels();
-
- if self.detectBranches:
- ## FIXME - what's a P4 projectName ?
- self.projectName = self.guessProjectName()
-
- if self.hasOrigin:
- self.getBranchMappingFromGitBranches()
- else:
- self.getBranchMapping()
- if self.verbose:
- print "p4-git branches: %s" % self.p4BranchesInGit
- print "initial parents: %s" % self.initialParents
- for b in self.p4BranchesInGit:
- if b != "master":
-
- ## FIXME
- b = b[len(self.projectName):]
- self.createdBranches.add(b)
-
- self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
-
- importProcess = subprocess.Popen(["git", "fast-import"],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE);
- self.gitOutput = importProcess.stdout
- self.gitStream = importProcess.stdin
- self.gitError = importProcess.stderr
-
- if revision:
- self.importHeadRevision(revision)
- else:
- changes = []
-
- if len(self.changesFile) > 0:
- output = open(self.changesFile).readlines()
- changeSet = set()
- for line in output:
- changeSet.add(int(line))
-
- for change in changeSet:
- changes.append(change)
-
- changes.sort()
- else:
- # catch "git-p4 sync" with no new branches, in a repo that
- # does not have any existing git-p4 branches
- if len(args) == 0 and not self.p4BranchesInGit:
- die("No remote p4 branches. Perhaps you never did \"git p4 clone\" in here.");
- if self.verbose:
- print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
- self.changeRange)
- changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
-
- if len(self.maxChanges) > 0:
- changes = changes[:min(int(self.maxChanges), len(changes))]
-
- if len(changes) == 0:
- if not self.silent:
- print "No changes to import!"
- return True
-
- if not self.silent and not self.detectBranches:
- print "Import destination: %s" % self.branch
-
- self.updatedBranches = set()
-
- self.importChanges(changes)
-
- if not self.silent:
- print ""
- if len(self.updatedBranches) > 0:
- sys.stdout.write("Updated branches: ")
- for b in self.updatedBranches:
- sys.stdout.write("%s " % b)
- sys.stdout.write("\n")
-
- self.gitStream.close()
- if importProcess.wait() != 0:
- die("fast-import failed: %s" % self.gitError.read())
- self.gitOutput.close()
- self.gitError.close()
-
- # Cleanup temporary branches created during import
- if self.tempBranches != []:
- for branch in self.tempBranches:
- read_pipe("git update-ref -d %s" % branch)
- os.rmdir(os.path.join(os.environ.get("GIT_DIR", ".git"), self.tempBranchLocation))
-
- return True
-
-class P4Rebase(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Fetches the latest revision from perforce and "
- + "rebases the current work (branch) against it")
- self.verbose = False
-
- def run(self, args):
- sync = P4Sync()
- sync.run([])
-
- return self.rebase()
-
- def rebase(self):
- if os.system("git update-index --refresh") != 0:
- die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
- if len(read_pipe("git diff-index HEAD --")) > 0:
- die("You have uncommited changes. Please commit them before rebasing or stash them away with git stash.");
-
- [upstream, settings] = findUpstreamBranchPoint()
- if len(upstream) == 0:
- die("Cannot find upstream branchpoint for rebase")
-
- # the branchpoint may be p4/foo~3, so strip off the parent
- upstream = re.sub("~[0-9]+$", "", upstream)
-
- print "Rebasing the current branch onto %s" % upstream
- oldHead = read_pipe("git rev-parse HEAD").strip()
- system("git rebase %s" % upstream)
- system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
- return True
-
-class P4Clone(P4Sync):
- def __init__(self):
- P4Sync.__init__(self)
- self.description = "Creates a new git repository and imports from Perforce into it"
- self.usage = "usage: %prog [options] //depot/path[@revRange]"
- self.options += [
- optparse.make_option("--destination", dest="cloneDestination",
- action='store', default=None,
- help="where to leave result of the clone"),
- optparse.make_option("-/", dest="cloneExclude",
- action="append", type="string",
- help="exclude depot path"),
- optparse.make_option("--bare", dest="cloneBare",
- action="store_true", default=False),
- ]
- self.cloneDestination = None
- self.needsGit = False
- self.cloneBare = False
-
- # This is required for the "append" cloneExclude action
- def ensure_value(self, attr, value):
- if not hasattr(self, attr) or getattr(self, attr) is None:
- setattr(self, attr, value)
- return getattr(self, attr)
-
- def defaultDestination(self, args):
- ## TODO: use common prefix of args?
- depotPath = args[0]
- depotDir = re.sub("(@[^@]*)$", "", depotPath)
- depotDir = re.sub("(#[^#]*)$", "", depotDir)
- depotDir = re.sub(r"\.\.\.$", "", depotDir)
- depotDir = re.sub(r"/$", "", depotDir)
- return os.path.split(depotDir)[1]
-
- def run(self, args):
- if len(args) < 1:
- return False
-
- if self.keepRepoPath and not self.cloneDestination:
- sys.stderr.write("Must specify destination for --keep-path\n")
- sys.exit(1)
-
- depotPaths = args
-
- if not self.cloneDestination and len(depotPaths) > 1:
- self.cloneDestination = depotPaths[-1]
- depotPaths = depotPaths[:-1]
-
- self.cloneExclude = ["/"+p for p in self.cloneExclude]
- for p in depotPaths:
- if not p.startswith("//"):
- return False
-
- if not self.cloneDestination:
- self.cloneDestination = self.defaultDestination(args)
-
- print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
-
- if not os.path.exists(self.cloneDestination):
- os.makedirs(self.cloneDestination)
- chdir(self.cloneDestination)
-
- init_cmd = [ "git", "init" ]
- if self.cloneBare:
- init_cmd.append("--bare")
- subprocess.check_call(init_cmd)
-
- if not P4Sync.run(self, depotPaths):
- return False
- if self.branch != "master":
- if self.importIntoRemotes:
- masterbranch = "refs/remotes/p4/master"
- else:
- masterbranch = "refs/heads/p4/master"
- if gitBranchExists(masterbranch):
- system("git branch master %s" % masterbranch)
- if not self.cloneBare:
- system("git checkout -f")
- else:
- print "Could not detect main branch. No checkout/master branch created."
-
- # auto-set this variable if invoked with --use-client-spec
- if self.useClientSpec_from_options:
- system("git config --bool git-p4.useclientspec true")
-
- return True
-
-class P4Branches(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Shows the git branches that hold imports and their "
- + "corresponding perforce depot paths")
- self.verbose = False
-
- def run(self, args):
- if originP4BranchesExist():
- createOrUpdateBranchesFromOrigin()
-
- cmdline = "git rev-parse --symbolic "
- cmdline += " --remotes"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
- settings = extractSettingsGitLog(log)
-
- print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
- return True
-
-class HelpFormatter(optparse.IndentedHelpFormatter):
- def __init__(self):
- optparse.IndentedHelpFormatter.__init__(self)
-
- def format_description(self, description):
- if description:
- return description + "\n"
- else:
- return ""
-
-def printUsage(commands):
- print "usage: %s <command> [options]" % sys.argv[0]
- print ""
- print "valid commands: %s" % ", ".join(commands)
- print ""
- print "Try %s <command> --help for command specific help." % sys.argv[0]
- print ""
-
-commands = {
- "debug" : P4Debug,
- "submit" : P4Submit,
- "commit" : P4Submit,
- "sync" : P4Sync,
- "rebase" : P4Rebase,
- "clone" : P4Clone,
- "rollback" : P4RollBack,
- "branches" : P4Branches
-}
-
-
-def main():
- if len(sys.argv[1:]) == 0:
- printUsage(commands.keys())
- sys.exit(2)
-
- cmd = ""
- cmdName = sys.argv[1]
- try:
- klass = commands[cmdName]
- cmd = klass()
- except KeyError:
- print "unknown command %s" % cmdName
- print ""
- printUsage(commands.keys())
- sys.exit(2)
-
- options = cmd.options
- cmd.gitdir = os.environ.get("GIT_DIR", None)
-
- args = sys.argv[2:]
-
- if len(options) > 0:
- if cmd.needsGit:
- options.append(optparse.make_option("--git-dir", dest="gitdir"))
-
- parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
- options,
- description = cmd.description,
- formatter = HelpFormatter())
-
- (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
- global verbose
- verbose = cmd.verbose
- if cmd.needsGit:
- if cmd.gitdir == None:
- cmd.gitdir = os.path.abspath(".git")
- if not isValidGitDir(cmd.gitdir):
- cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
- if os.path.exists(cmd.gitdir):
- cdup = read_pipe("git rev-parse --show-cdup").strip()
- if len(cdup) > 0:
- chdir(cdup);
-
- if not isValidGitDir(cmd.gitdir):
- if isValidGitDir(cmd.gitdir + "/.git"):
- cmd.gitdir += "/.git"
- else:
- die("fatal: cannot locate git repository at %s" % cmd.gitdir)
-
- os.environ["GIT_DIR"] = cmd.gitdir
-
- if not cmd.run(args):
- parser.print_help()
- sys.exit(2)
-
-
-if __name__ == '__main__':
- main()
--- /dev/null
+The git-p4 script has moved to the top level of the git source
+directory.
+
+Invoke it like any other git command, for instance "git p4 clone".
+
+Note that the top-level git-p4.py script is now the source.  It is
+built by make into git-p4, which is what gets installed.
+
+Windows users can copy the git-p4.py source script directly and, for
+example, invoke it through a batch file called "git-p4.bat" in the
+same folder.  The batch file should contain just one line:
+
+ @python "%~d0%~p0git-p4.py" %*
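+
+For illustration, a typical session with a placeholder depot path
+might look like this:
+
+    git p4 clone //depot/my/project@all
+    cd project
+    git p4 rebase
+    git p4 submit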
+++ /dev/null
-@python "%~d0%~p0git-p4" %*
diff_words_show(ecbdata->diff_words);
}
+static void diff_filespec_load_driver(struct diff_filespec *one)
+{
+ /* Use already-loaded driver */
+ if (one->driver)
+ return;
+
+ if (S_ISREG(one->mode))
+ one->driver = userdiff_find_by_path(one->path);
+
+ /* Fallback to default settings */
+ if (!one->driver)
+ one->driver = userdiff_find_by_name("default");
+}
+
+static const char *userdiff_word_regex(struct diff_filespec *one)
+{
+ diff_filespec_load_driver(one);
+ return one->driver->word_regex;
+}
+
+static void init_diff_words_data(struct emit_callback *ecbdata,
+ struct diff_options *orig_opts,
+ struct diff_filespec *one,
+ struct diff_filespec *two)
+{
+ int i;
+ struct diff_options *o = xmalloc(sizeof(struct diff_options));
+ memcpy(o, orig_opts, sizeof(struct diff_options));
+
+ ecbdata->diff_words =
+ xcalloc(1, sizeof(struct diff_words_data));
+ ecbdata->diff_words->type = o->word_diff;
+ ecbdata->diff_words->opt = o;
+ if (!o->word_regex)
+ o->word_regex = userdiff_word_regex(one);
+ if (!o->word_regex)
+ o->word_regex = userdiff_word_regex(two);
+ if (!o->word_regex)
+ o->word_regex = diff_word_regex_cfg;
+ if (o->word_regex) {
+ ecbdata->diff_words->word_regex = (regex_t *)
+ xmalloc(sizeof(regex_t));
+ if (regcomp(ecbdata->diff_words->word_regex,
+ o->word_regex,
+ REG_EXTENDED | REG_NEWLINE))
+ die ("Invalid regular expression: %s",
+ o->word_regex);
+ }
+ for (i = 0; i < ARRAY_SIZE(diff_words_styles); i++) {
+ if (o->word_diff == diff_words_styles[i].type) {
+ ecbdata->diff_words->style =
+ &diff_words_styles[i];
+ break;
+ }
+ }
+ if (want_color(o->use_color)) {
+ struct diff_words_style *st = ecbdata->diff_words->style;
+ st->old.color = diff_get_color_opt(o, DIFF_FILE_OLD);
+ st->new.color = diff_get_color_opt(o, DIFF_FILE_NEW);
+ st->ctx.color = diff_get_color_opt(o, DIFF_PLAIN);
+ }
+}
+
static void free_diff_words_data(struct emit_callback *ecbdata)
{
if (ecbdata->diff_words) {
diff_words_flush(ecbdata);
+ free (ecbdata->diff_words->opt);
free (ecbdata->diff_words->minus.text.ptr);
free (ecbdata->diff_words->minus.orig);
free (ecbdata->diff_words->plus.text.ptr);
emit_binary_diff_body(file, two, one, prefix);
}
-static void diff_filespec_load_driver(struct diff_filespec *one)
-{
- /* Use already-loaded driver */
- if (one->driver)
- return;
-
- if (S_ISREG(one->mode))
- one->driver = userdiff_find_by_path(one->path);
-
- /* Fallback to default settings */
- if (!one->driver)
- one->driver = userdiff_find_by_name("default");
-}
-
int diff_filespec_is_binary(struct diff_filespec *one)
{
if (one->is_binary == -1) {
return one->driver->funcname.pattern ? &one->driver->funcname : NULL;
}
-static const char *userdiff_word_regex(struct diff_filespec *one)
-{
- diff_filespec_load_driver(one);
- return one->driver->word_regex;
-}
-
void diff_set_mnemonic_prefix(struct diff_options *options, const char *a, const char *b)
{
if (!options->a_prefix)
xecfg.ctxlen = strtoul(diffopts + 10, NULL, 10);
else if (!prefixcmp(diffopts, "-u"))
xecfg.ctxlen = strtoul(diffopts + 2, NULL, 10);
- if (o->word_diff) {
- int i;
-
- ecbdata.diff_words =
- xcalloc(1, sizeof(struct diff_words_data));
- ecbdata.diff_words->type = o->word_diff;
- ecbdata.diff_words->opt = o;
- if (!o->word_regex)
- o->word_regex = userdiff_word_regex(one);
- if (!o->word_regex)
- o->word_regex = userdiff_word_regex(two);
- if (!o->word_regex)
- o->word_regex = diff_word_regex_cfg;
- if (o->word_regex) {
- ecbdata.diff_words->word_regex = (regex_t *)
- xmalloc(sizeof(regex_t));
- if (regcomp(ecbdata.diff_words->word_regex,
- o->word_regex,
- REG_EXTENDED | REG_NEWLINE))
- die ("Invalid regular expression: %s",
- o->word_regex);
- }
- for (i = 0; i < ARRAY_SIZE(diff_words_styles); i++) {
- if (o->word_diff == diff_words_styles[i].type) {
- ecbdata.diff_words->style =
- &diff_words_styles[i];
- break;
- }
- }
- if (want_color(o->use_color)) {
- struct diff_words_style *st = ecbdata.diff_words->style;
- st->old.color = diff_get_color_opt(o, DIFF_FILE_OLD);
- st->new.color = diff_get_color_opt(o, DIFF_FILE_NEW);
- st->ctx.color = diff_get_color_opt(o, DIFF_PLAIN);
- }
- }
+ if (o->word_diff)
+ init_diff_words_data(&ecbdata, o, one, two);
xdi_diff_outf(&mf1, &mf2, fn_out_consume, &ecbdata,
&xpp, &xecfg);
if (o->word_diff)
options->rename_limit = -1;
options->dirstat_permille = diff_dirstat_permille_default;
options->context = 3;
+ DIFF_OPT_SET(options, RENAME_EMPTY);
options->change = diff_change;
options->add_remove = diff_addremove;
}
else if (!strcmp(arg, "--no-renames"))
options->detect_rename = 0;
+ else if (!strcmp(arg, "--rename-empty"))
+ DIFF_OPT_SET(options, RENAME_EMPTY);
+ else if (!strcmp(arg, "--no-rename-empty"))
+ DIFF_OPT_CLR(options, RENAME_EMPTY);
else if (!strcmp(arg, "--relative"))
DIFF_OPT_SET(options, RELATIVE_NAME);
else if (!prefixcmp(arg, "--relative=")) {
else if (!strcmp(arg, "--ignore-space-at-eol"))
DIFF_XDL_SET(options, IGNORE_WHITESPACE_AT_EOL);
else if (!strcmp(arg, "--patience"))
- DIFF_XDL_SET(options, PATIENCE_DIFF);
+ options->xdl_opts = DIFF_WITH_ALG(options, PATIENCE_DIFF);
else if (!strcmp(arg, "--histogram"))
- DIFF_XDL_SET(options, HISTOGRAM_DIFF);
+ options->xdl_opts = DIFF_WITH_ALG(options, HISTOGRAM_DIFF);
/* flags options */
else if (!strcmp(arg, "--binary")) {
if (output_format & DIFF_FORMAT_PATCH) {
if (separator) {
+ if (options->output_prefix) {
+ struct strbuf *msg = NULL;
+ msg = options->output_prefix(options,
+ options->output_prefix_data);
+ fwrite(msg->buf, msg->len, 1, stdout);
+ }
putc(options->line_termination, options->file);
if (options->stat_sep) {
/* attach patch instead of inline */
#define DIFF_OPT_SILENT_ON_REMOVE (1 << 5)
#define DIFF_OPT_FIND_COPIES_HARDER (1 << 6)
#define DIFF_OPT_FOLLOW_RENAMES (1 << 7)
-/* (1 << 8) unused */
+#define DIFF_OPT_RENAME_EMPTY (1 << 8)
/* (1 << 9) unused */
#define DIFF_OPT_HAS_CHANGES (1 << 10)
#define DIFF_OPT_QUICK (1 << 11)
#define DIFF_XDL_SET(opts, flag) ((opts)->xdl_opts |= XDF_##flag)
#define DIFF_XDL_CLR(opts, flag) ((opts)->xdl_opts &= ~XDF_##flag)
+#define DIFF_WITH_ALG(opts, flag) (((opts)->xdl_opts & ~XDF_DIFF_ALGORITHM_MASK) | XDF_##flag)
+
enum diff_words_type {
DIFF_WORDS_NONE = 0,
DIFF_WORDS_PORCELAIN,
else if (options->single_follow &&
strcmp(options->single_follow, p->two->path))
continue; /* not interested */
+ else if (!DIFF_OPT_TST(options, RENAME_EMPTY) &&
+ is_empty_blob_sha1(p->two->sha1))
+ continue;
else
locate_rename_dst(p->two, 1);
}
+ else if (!DIFF_OPT_TST(options, RENAME_EMPTY) &&
+ is_empty_blob_sha1(p->one->sha1))
+ continue;
else if (!DIFF_PAIR_UNMERGED(p) && !DIFF_FILE_VALID(p->two)) {
/*
* If the source is a broken "delete", and
return ret;
}
-int remove_dir_recursively(struct strbuf *path, int flag)
+static int remove_dir_recurse(struct strbuf *path, int flag, int *kept_up)
{
DIR *dir;
struct dirent *e;
- int ret = 0, original_len = path->len, len;
+ int ret = 0, original_len = path->len, len, kept_down = 0;
int only_empty = (flag & REMOVE_DIR_EMPTY_ONLY);
+ int keep_toplevel = (flag & REMOVE_DIR_KEEP_TOPLEVEL);
unsigned char submodule_head[20];
if ((flag & REMOVE_DIR_KEEP_NESTED_GIT) &&
- !resolve_gitlink_ref(path->buf, "HEAD", submodule_head))
+ !resolve_gitlink_ref(path->buf, "HEAD", submodule_head)) {
/* Do not descend and nuke a nested git work tree. */
+ if (kept_up)
+ *kept_up = 1;
return 0;
+ }
+ flag &= ~REMOVE_DIR_KEEP_TOPLEVEL;
dir = opendir(path->buf);
- if (!dir)
- return rmdir(path->buf);
+ if (!dir) {
+		/* an empty dir could be removed even if it is unreadable */
+ if (!keep_toplevel)
+ return rmdir(path->buf);
+ else
+ return -1;
+ }
if (path->buf[original_len - 1] != '/')
strbuf_addch(path, '/');
if (lstat(path->buf, &st))
; /* fall thru */
else if (S_ISDIR(st.st_mode)) {
- if (!remove_dir_recursively(path, only_empty))
+ if (!remove_dir_recurse(path, flag, &kept_down))
continue; /* happy */
} else if (!only_empty && !unlink(path->buf))
continue; /* happy, too */
closedir(dir);
strbuf_setlen(path, original_len);
- if (!ret)
+ if (!ret && !keep_toplevel && !kept_down)
ret = rmdir(path->buf);
+ else if (kept_up)
+ /*
+		 * report to the level above that it is not an error that
+		 * we did not rmdir() our directory.
+ */
+ *kept_up = !ret;
return ret;
}
+int remove_dir_recursively(struct strbuf *path, int flag)
+{
+ return remove_dir_recurse(path, flag, NULL);
+}
+
void setup_standard_excludes(struct dir_struct *dir)
{
const char *path;
#define REMOVE_DIR_EMPTY_ONLY 01
#define REMOVE_DIR_KEEP_NESTED_GIT 02
+#define REMOVE_DIR_KEEP_TOPLEVEL 04
extern int remove_dir_recursively(struct strbuf *path, int flag);
/* tries to remove the path with empty directories along it, ignores ENOENT */
const struct checkout *state, int to_tempfile,
int *fstat_done, struct stat *statbuf)
{
- struct git_istream *st;
- enum object_type type;
- unsigned long sz;
int result = -1;
- ssize_t kept = 0;
- int fd = -1;
-
- st = open_istream(ce->sha1, &type, &sz, filter);
- if (!st)
- return -1;
- if (type != OBJ_BLOB)
- goto close_and_exit;
+ int fd;
fd = open_output_fd(path, ce, to_tempfile);
- if (fd < 0)
- goto close_and_exit;
-
- for (;;) {
- char buf[1024 * 16];
- ssize_t wrote, holeto;
- ssize_t readlen = read_istream(st, buf, sizeof(buf));
-
- if (!readlen)
- break;
- if (sizeof(buf) == readlen) {
- for (holeto = 0; holeto < readlen; holeto++)
- if (buf[holeto])
- break;
- if (readlen == holeto) {
- kept += holeto;
- continue;
- }
- }
-
- if (kept && lseek(fd, kept, SEEK_CUR) == (off_t) -1)
- goto close_and_exit;
- else
- kept = 0;
- wrote = write_in_full(fd, buf, readlen);
-
- if (wrote != readlen)
- goto close_and_exit;
- }
- if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
- write(fd, "", 1) != 1))
- goto close_and_exit;
- *fstat_done = fstat_output(fd, state, statbuf);
-
-close_and_exit:
- close_istream(st);
- if (0 <= fd)
+ if (0 <= fd) {
+ result = stream_blob_to_fd(fd, ce->sha1, filter, 1);
+ *fstat_done = fstat_output(fd, state, statbuf);
result = close(fd);
+ }
if (result && 0 <= fd)
unlink(path);
return result;
unsigned whitespace_rule_cfg = WS_DEFAULT_RULE;
enum branch_track git_branch_track = BRANCH_TRACK_REMOTE;
enum rebase_setup_type autorebase = AUTOREBASE_NEVER;
-enum push_default_type push_default = PUSH_DEFAULT_MATCHING;
+enum push_default_type push_default = PUSH_DEFAULT_UNSPECIFIED;
#ifndef OBJECT_CREATION_MODE
#define OBJECT_CREATION_MODE OBJECT_CREATION_USES_HARDLINKS
#endif
trace_argv_printf(nargv, "trace: exec:");
/* execvp() can only ever return if it fails */
- execvp("git", (char **)nargv);
+ sane_execvp("git", (char **)nargv);
trace_printf("trace: exec failed: %s\n", strerror(errno));
return do_change_note_fanout(root, root, hex_sha1, 0, path, 0, fanout);
}
+/*
+ * Given a pointer into a string, parse a mark reference:
+ *
+ * idnum ::= ':' bigint;
+ *
+ * Return the mark number; *endptr is set to point at the first
+ * character after the value.
+ *
+ * The callers complain if the character that follows the mark is not
+ * what they expect: a space, or the end of the string.
+ */
+static uintmax_t parse_mark_ref(const char *p, char **endptr)
+{
+ uintmax_t mark;
+
+ assert(*p == ':');
+ p++;
+ mark = strtoumax(p, endptr, 10);
+ if (*endptr == p)
+ die("No value after ':' in mark: %s", command_buf.buf);
+ return mark;
+}
+
+/*
+ * Parse the mark reference, and complain if this is not the end of
+ * the string.
+ */
+static uintmax_t parse_mark_ref_eol(const char *p)
+{
+ char *end;
+ uintmax_t mark;
+
+ mark = parse_mark_ref(p, &end);
+ if (*end != '\0')
+ die("Garbage after mark: %s", command_buf.buf);
+ return mark;
+}
+
+/*
+ * Parse the mark reference, demanding a trailing space. Return a
+ * pointer to the space.
+ */
+static uintmax_t parse_mark_ref_space(const char **p)
+{
+ uintmax_t mark;
+ char *end;
+
+ mark = parse_mark_ref(*p, &end);
+ if (*end != ' ')
+ die("Missing space after mark: %s", command_buf.buf);
+ *p = end;
+ return mark;
+}
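+
+/*
+ * Illustrative example (hypothetical input): given ":42 path",
+ * parse_mark_ref_space() returns 42 and advances *p to the space
+ * before "path", whereas parse_mark_ref_eol() would die() on the
+ * same input because of the trailing text after the mark.
+ */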
+
static void file_change_m(struct branch *b)
{
const char *p = command_buf.buf + 2;
}
if (*p == ':') {
- char *x;
- oe = find_mark(strtoumax(p + 1, &x, 10));
+ oe = find_mark(parse_mark_ref_space(&p));
hashcpy(sha1, oe->idx.sha1);
- p = x;
- } else if (!prefixcmp(p, "inline")) {
+ } else if (!prefixcmp(p, "inline ")) {
inline_data = 1;
- p += 6;
+ p += strlen("inline"); /* advance to space */
} else {
if (get_sha1_hex(p, sha1))
- die("Invalid SHA1: %s", command_buf.buf);
+ die("Invalid dataref: %s", command_buf.buf);
oe = find_object(sha1);
p += 40;
+ if (*p != ' ')
+ die("Missing space after SHA1: %s", command_buf.buf);
}
- if (*p++ != ' ')
- die("Missing space after SHA1: %s", command_buf.buf);
+ assert(*p == ' ');
+ p++; /* skip space */
strbuf_reset(&uq);
if (!unquote_c_style(&uq, p, &endp)) {
/* Now parse the notemodify command. */
/* <dataref> or 'inline' */
if (*p == ':') {
- char *x;
- oe = find_mark(strtoumax(p + 1, &x, 10));
+ oe = find_mark(parse_mark_ref_space(&p));
hashcpy(sha1, oe->idx.sha1);
- p = x;
- } else if (!prefixcmp(p, "inline")) {
+ } else if (!prefixcmp(p, "inline ")) {
inline_data = 1;
- p += 6;
+ p += strlen("inline"); /* advance to space */
} else {
if (get_sha1_hex(p, sha1))
- die("Invalid SHA1: %s", command_buf.buf);
+ die("Invalid dataref: %s", command_buf.buf);
oe = find_object(sha1);
p += 40;
+ if (*p != ' ')
+ die("Missing space after SHA1: %s", command_buf.buf);
}
- if (*p++ != ' ')
- die("Missing space after SHA1: %s", command_buf.buf);
+ assert(*p == ' ');
+ p++; /* skip space */
/* <committish> */
s = lookup_branch(p);
die("Can't add a note on empty branch.");
hashcpy(commit_sha1, s->sha1);
} else if (*p == ':') {
- uintmax_t commit_mark = strtoumax(p + 1, NULL, 10);
+ uintmax_t commit_mark = parse_mark_ref_eol(p);
struct object_entry *commit_oe = find_mark(commit_mark);
if (commit_oe->type != OBJ_COMMIT)
die("Mark :%" PRIuMAX " not a commit", commit_mark);
hashcpy(b->branch_tree.versions[0].sha1, t);
hashcpy(b->branch_tree.versions[1].sha1, t);
} else if (*from == ':') {
- uintmax_t idnum = strtoumax(from + 1, NULL, 10);
+ uintmax_t idnum = parse_mark_ref_eol(from);
struct object_entry *oe = find_mark(idnum);
if (oe->type != OBJ_COMMIT)
die("Mark :%" PRIuMAX " not a commit", idnum);
if (s)
hashcpy(n->sha1, s->sha1);
else if (*from == ':') {
- uintmax_t idnum = strtoumax(from + 1, NULL, 10);
+ uintmax_t idnum = parse_mark_ref_eol(from);
struct object_entry *oe = find_mark(idnum);
if (oe->type != OBJ_COMMIT)
die("Mark :%" PRIuMAX " not a commit", idnum);
type = OBJ_COMMIT;
} else if (*from == ':') {
struct object_entry *oe;
- from_mark = strtoumax(from + 1, NULL, 10);
+ from_mark = parse_mark_ref_eol(from);
oe = find_mark(from_mark);
type = oe->type;
hashcpy(sha1, oe->idx.sha1);
/* cat-blob SP <object> LF */
p = command_buf.buf + strlen("cat-blob ");
if (*p == ':') {
- char *x;
- oe = find_mark(strtoumax(p + 1, &x, 10));
- if (x == p + 1)
- die("Invalid mark: %s", command_buf.buf);
+ oe = find_mark(parse_mark_ref_eol(p));
if (!oe)
die("Unknown mark: %s", command_buf.buf);
- if (*x)
- die("Garbage after mark: %s", command_buf.buf);
hashcpy(sha1, oe->idx.sha1);
} else {
if (get_sha1_hex(p, sha1))
- die("Invalid SHA1: %s", command_buf.buf);
+ die("Invalid dataref: %s", command_buf.buf);
if (p[40])
die("Garbage after SHA1: %s", command_buf.buf);
oe = find_object(sha1);
struct object_entry *e;
if (**p == ':') { /* <mark> */
- char *endptr;
- e = find_mark(strtoumax(*p + 1, &endptr, 10));
- if (endptr == *p + 1)
- die("Invalid mark: %s", command_buf.buf);
+ e = find_mark(parse_mark_ref_space(p));
if (!e)
die("Unknown mark: %s", command_buf.buf);
- *p = endptr;
hashcpy(sha1, e->idx.sha1);
} else { /* <sha1> */
if (get_sha1_hex(*p, sha1))
- die("Invalid SHA1: %s", command_buf.buf);
+ die("Invalid dataref: %s", command_buf.buf);
e = find_object(sha1);
*p += 40;
}
lock_pack:1,
use_thin_pack:1,
fetch_all:1,
+ stdin_refs:1,
verbose:1,
no_progress:1,
include_tag:1,
# FILE: is file different from index?
# INDEX_ADDDEL: is it add/delete between HEAD and index?
# FILE_ADDDEL: is it add/delete between index and file?
+# UNMERGED: is the path unmerged?
sub list_modified {
my ($only) = @_;
}
}
- for (run_cmd_pipe(qw(git diff-files --numstat --summary --), @tracked)) {
+ for (run_cmd_pipe(qw(git diff-files --numstat --summary --raw --), @tracked)) {
if (($add, $del, $file) =
/^([-\d]+) ([-\d]+) (.*)/) {
$file = unquote_path($file);
- if (!exists $data{$file}) {
- $data{$file} = +{
- INDEX => 'unchanged',
- BINARY => 0,
- };
- }
my ($change, $bin);
if ($add eq '-' && $del eq '-') {
$change = 'binary';
$file = unquote_path($file);
$data{$file}{FILE_ADDDEL} = $adddel;
}
+ elsif (/^:[0-7]+ [0-7]+ [0-9a-f]+ [0-9a-f]+ (.) (.*)$/) {
+ $file = unquote_path($2);
+ if (!exists $data{$file}) {
+ $data{$file} = +{
+ INDEX => 'unchanged',
+ BINARY => 0,
+ };
+ }
+ if ($1 eq 'U') {
+ $data{$file}{UNMERGED} = 1;
+ }
+ }
}
for (sort keys %data) {
sub patch_update_cmd {
my @all_mods = list_modified($patch_mode_flavour{FILTER});
+ error_msg "ignoring unmerged: $_->{VALUE}\n"
+ for grep { $_->{UNMERGED} } @all_mods;
+ @all_mods = grep { !$_->{UNMERGED} } @all_mods;
+
my @mods = grep { !($_->{BINARY}) } @all_mods;
my @them;
ignore-whitespace pass it through git-apply
directory= pass it through git-apply
exclude= pass it through git-apply
+include= pass it through git-apply
C= pass it through git-apply
p= pass it through git-apply
patch-format= format the patch(es) are in
say Using index info to reconstruct a base tree...
cmd='GIT_INDEX_FILE="$dotest/patch-merge-tmp-index"'
+
+ if test -z "$GIT_QUIET"
+ then
+ eval "$cmd git diff-index --cached --diff-filter=AM --name-status HEAD"
+ fi
+
cmd="$cmd git apply --cached $git_apply_opt"' <"$dotest/patch"'
if eval "$cmd"
then
;;
--resolvemsg)
shift; resolvemsg=$1 ;;
- --whitespace|--directory|--exclude)
+ --whitespace|--directory|--exclude|--include)
git_apply_opt="$git_apply_opt $(sq "$1=$2")"; shift ;;
-C|-p)
git_apply_opt="$git_apply_opt $(sq "$1$2")"; shift ;;
--- /dev/null
+#!/usr/bin/env python
+#
+# git-p4.py -- A tool for bidirectional operation between a Perforce depot and git.
+#
+# Author: Simon Hausmann <simon@lst.de>
+# Copyright: 2007 Simon Hausmann <simon@lst.de>
+# 2007 Trolltech ASA
+# License: MIT <http://www.opensource.org/licenses/mit-license.php>
+#
+
+import optparse, sys, os, marshal, subprocess, shelve
+import tempfile, getopt, os.path, time, platform
+import re, shutil
+
+verbose = False
+
+
+def p4_build_cmd(cmd):
+ """Build a suitable p4 command line.
+
+ This consolidates building and returning a p4 command line into one
+ location. It means that hooking into the environment, or other configuration
+ can be done more easily.
+ """
+ real_cmd = ["p4"]
+
+ user = gitConfig("git-p4.user")
+ if len(user) > 0:
+ real_cmd += ["-u",user]
+
+ password = gitConfig("git-p4.password")
+ if len(password) > 0:
+ real_cmd += ["-P", password]
+
+ port = gitConfig("git-p4.port")
+ if len(port) > 0:
+ real_cmd += ["-p", port]
+
+ host = gitConfig("git-p4.host")
+ if len(host) > 0:
+ real_cmd += ["-H", host]
+
+ client = gitConfig("git-p4.client")
+ if len(client) > 0:
+ real_cmd += ["-c", client]
+
+
+ if isinstance(cmd,basestring):
+ real_cmd = ' '.join(real_cmd) + ' ' + cmd
+ else:
+ real_cmd += cmd
+ return real_cmd
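+
+# Illustrative example (hypothetical config values): with
+# "git config git-p4.user alice" and "git config git-p4.port perforce:1666"
+# set, p4_build_cmd("info") returns the string
+# "p4 -u alice -p perforce:1666 info", while p4_build_cmd(["info"])
+# returns the list ["p4", "-u", "alice", "-p", "perforce:1666", "info"].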
+
+def chdir(dir):
+ # P4 uses the PWD environment variable rather than getcwd(). Since we're
+ # not using the shell, we have to set it ourselves. This path could
+ # be relative, so go there first, then figure out where we ended up.
+ os.chdir(dir)
+ os.environ['PWD'] = os.getcwd()
+
+def die(msg):
+ if verbose:
+ raise Exception(msg)
+ else:
+ sys.stderr.write(msg + "\n")
+ sys.exit(1)
+
+def write_pipe(c, stdin):
+ if verbose:
+ sys.stderr.write('Writing pipe: %s\n' % str(c))
+
+ expand = isinstance(c,basestring)
+ p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand)
+ pipe = p.stdin
+ val = pipe.write(stdin)
+ pipe.close()
+ if p.wait():
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_write_pipe(c, stdin):
+ real_cmd = p4_build_cmd(c)
+ return write_pipe(real_cmd, stdin)
+
+def read_pipe(c, ignore_error=False):
+ if verbose:
+ sys.stderr.write('Reading pipe: %s\n' % str(c))
+
+ expand = isinstance(c,basestring)
+ p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
+ pipe = p.stdout
+ val = pipe.read()
+ if p.wait() and not ignore_error:
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_read_pipe(c, ignore_error=False):
+ real_cmd = p4_build_cmd(c)
+ return read_pipe(real_cmd, ignore_error)
+
+def read_pipe_lines(c):
+ if verbose:
+ sys.stderr.write('Reading pipe: %s\n' % str(c))
+
+ expand = isinstance(c, basestring)
+ p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
+ pipe = p.stdout
+ val = pipe.readlines()
+ if pipe.close() or p.wait():
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_read_pipe_lines(c):
+ """Specifically invoke p4 on the command supplied. """
+ real_cmd = p4_build_cmd(c)
+ return read_pipe_lines(real_cmd)
+
+def system(cmd):
+ expand = isinstance(cmd,basestring)
+ if verbose:
+ sys.stderr.write("executing %s\n" % str(cmd))
+ subprocess.check_call(cmd, shell=expand)
+
+def p4_system(cmd):
+ """Specifically invoke p4 as the system command. """
+ real_cmd = p4_build_cmd(cmd)
+ expand = isinstance(real_cmd, basestring)
+ subprocess.check_call(real_cmd, shell=expand)
+
+def p4_integrate(src, dest):
+ p4_system(["integrate", "-Dt", src, dest])
+
+def p4_sync(path):
+ p4_system(["sync", path])
+
+def p4_add(f):
+ p4_system(["add", f])
+
+def p4_delete(f):
+ p4_system(["delete", f])
+
+def p4_edit(f):
+ p4_system(["edit", f])
+
+def p4_revert(f):
+ p4_system(["revert", f])
+
+def p4_reopen(type, file):
+ p4_system(["reopen", "-t", type, file])
+
+#
+# Canonicalize the p4 type and return a tuple of the
+# base type, plus any modifiers. See "p4 help filetypes"
+# for a list and explanation.
+#
+def split_p4_type(p4type):
+
+ p4_filetypes_historical = {
+ "ctempobj": "binary+Sw",
+ "ctext": "text+C",
+ "cxtext": "text+Cx",
+ "ktext": "text+k",
+ "kxtext": "text+kx",
+ "ltext": "text+F",
+ "tempobj": "binary+FSw",
+ "ubinary": "binary+F",
+ "uresource": "resource+F",
+ "uxbinary": "binary+Fx",
+ "xbinary": "binary+x",
+ "xltext": "text+Fx",
+ "xtempobj": "binary+Swx",
+ "xtext": "text+x",
+ "xunicode": "unicode+x",
+ "xutf16": "utf16+x",
+ }
+ if p4type in p4_filetypes_historical:
+ p4type = p4_filetypes_historical[p4type]
+ s = p4type.split("+")
+ base = s[0]
+ mods = ""
+ if len(s) > 1:
+ mods = s[1]
+ return (base, mods)
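+# Illustrative (not part of the original script): split_p4_type("text+ko")
+# returns ("text", "ko"); the historical alias "ktext" is first mapped to
+# "text+k" and so returns ("text", "k").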
+
+#
+# return the raw p4 type of a file (text, text+ko, etc)
+#
+def p4_type(file):
+ results = p4CmdList(["fstat", "-T", "headType", file])
+ return results[0]['headType']
+
+#
+# Given a type base and modifier, return a regexp matching
+# the keywords that can be expanded in the file
+#
+def p4_keywords_regexp_for_type(base, type_mods):
+ if base in ("text", "unicode", "binary"):
+ kwords = None
+ if "ko" in type_mods:
+ kwords = 'Id|Header'
+ elif "k" in type_mods:
+ kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision'
+ else:
+ return None
+ pattern = r"""
+ \$ # Starts with a dollar, followed by...
+ (%s) # one of the keywords, followed by...
+ (:[^$]+)? # possibly an old expansion, followed by...
+ \$ # another dollar
+ """ % kwords
+ return pattern
+ else:
+ return None
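+# Illustrative (not part of the original script): for a "text+k" file the
+# pattern above matches expanded keywords such as "$Id: //depot/f.c#3 $"
+# as well as unexpanded ones like "$Revision$"; for "text+ko" only Id and
+# Header are matched.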
+
+#
+# Given a file, return a regexp matching the possible
+# RCS keywords that will be expanded, or None for files
+# with kw expansion turned off.
+#
+def p4_keywords_regexp_for_file(file):
+ if not os.path.exists(file):
+ return None
+ else:
+ (type_base, type_mods) = split_p4_type(p4_type(file))
+ return p4_keywords_regexp_for_type(type_base, type_mods)
+
+def setP4ExecBit(file, mode):
+ # Reopens an already open file and changes the execute bit to match
+ # the execute bit setting in the passed in mode.
+
+ p4Type = "+x"
+
+ if not isModeExec(mode):
+ p4Type = getP4OpenedType(file)
+ p4Type = re.sub('^([cku]?)x(.*)', '\\1\\2', p4Type)
+ p4Type = re.sub('(.*?\+.*?)x(.*?)', '\\1\\2', p4Type)
+ if p4Type[-1] == "+":
+ p4Type = p4Type[0:-1]
+
+ p4_reopen(p4Type, file)
+
+def getP4OpenedType(file):
+ # Returns the perforce file type for the given file.
+
+ result = p4_read_pipe(["opened", file])
+ match = re.match(".*\((.+)\)\r?$", result)
+ if match:
+ return match.group(1)
+ else:
+ die("Could not determine file type for %s (result: '%s')" % (file, result))
+
+def diffTreePattern():
+ # This is a simple generator for the diff tree regex pattern. This could be
+ # a class variable if this and parseDiffTreeEntry were a part of a class.
+ pattern = re.compile(':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')
+ while True:
+ yield pattern
+
+def parseDiffTreeEntry(entry):
+ """Parses a single diff tree entry into its component elements.
+
+ See git-diff-tree(1) manpage for details about the format of the diff
+ output. This method returns a dictionary with the following elements:
+
+ src_mode - The mode of the source file
+ dst_mode - The mode of the destination file
+ src_sha1 - The sha1 for the source file
+ dst_sha1 - The sha1 for the destination file
+ status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
+ status_score - The score for the status (applicable for 'C' and 'R'
+ statuses). This is None if there is no score.
+ src - The path for the source file.
+ dst - The path for the destination file. This is only present for
+ copy or renames. If it is not present, this is None.
+
+ If the pattern is not matched, None is returned."""
+
+ match = diffTreePattern().next().match(entry)
+ if match:
+ return {
+ 'src_mode': match.group(1),
+ 'dst_mode': match.group(2),
+ 'src_sha1': match.group(3),
+ 'dst_sha1': match.group(4),
+ 'status': match.group(5),
+ 'status_score': match.group(6),
+ 'src': match.group(7),
+ 'dst': match.group(10)
+ }
+ return None
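+# Illustrative (not part of the original script): the diff-tree line
+# ":100644 100755 abc1234 def5678 M\tfoo.c" parses to src_mode "100644",
+# dst_mode "100755", status "M", src "foo.c" and dst None.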
+
+def isModeExec(mode):
+ # Returns True if the given git mode represents an executable file,
+ # otherwise False.
+ return mode[-3:] == "755"
+
+def isModeExecChanged(src_mode, dst_mode):
+ return isModeExec(src_mode) != isModeExec(dst_mode)
+
+def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None):
+
+ if isinstance(cmd,basestring):
+ cmd = "-G " + cmd
+ expand = True
+ else:
+ cmd = ["-G"] + cmd
+ expand = False
+
+ cmd = p4_build_cmd(cmd)
+ if verbose:
+ sys.stderr.write("Opening pipe: %s\n" % str(cmd))
+
+ # Use a temporary file to avoid deadlocks without
+ # subprocess.communicate(), which would put another copy
+ # of stdout into memory.
+ stdin_file = None
+ if stdin is not None:
+ stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode)
+ if isinstance(stdin,basestring):
+ stdin_file.write(stdin)
+ else:
+ for i in stdin:
+ stdin_file.write(i + '\n')
+ stdin_file.flush()
+ stdin_file.seek(0)
+
+ p4 = subprocess.Popen(cmd,
+ shell=expand,
+ stdin=stdin_file,
+ stdout=subprocess.PIPE)
+
+ result = []
+ try:
+ while True:
+ entry = marshal.load(p4.stdout)
+ if cb is not None:
+ cb(entry)
+ else:
+ result.append(entry)
+ except EOFError:
+ pass
+ exitCode = p4.wait()
+ if exitCode != 0:
+ entry = {}
+ entry["p4ExitCode"] = exitCode
+ result.append(entry)
+
+ return result
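+# Illustrative usage (not part of the original script):
+# p4CmdList(["describe", "12345"]) runs "p4 -G describe 12345" and returns
+# a list of dictionaries decoded with marshal; on failure the last entry
+# carries a "p4ExitCode" key.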
+
+def p4Cmd(cmd):
+ list = p4CmdList(cmd)
+ result = {}
+ for entry in list:
+ result.update(entry)
+ return result;
+
+def p4Where(depotPath):
+ if not depotPath.endswith("/"):
+ depotPath += "/"
+ depotPath = depotPath + "..."
+ outputList = p4CmdList(["where", depotPath])
+ output = None
+ for entry in outputList:
+ if "depotFile" in entry:
+ if entry["depotFile"] == depotPath:
+ output = entry
+ break
+ elif "data" in entry:
+ data = entry.get("data")
+ space = data.find(" ")
+ if data[:space] == depotPath:
+ output = entry
+ break
+ if output == None:
+ return ""
+ if output["code"] == "error":
+ return ""
+ clientPath = ""
+ if "path" in output:
+ clientPath = output.get("path")
+ elif "data" in output:
+ data = output.get("data")
+ lastSpace = data.rfind(" ")
+ clientPath = data[lastSpace + 1:]
+
+ if clientPath.endswith("..."):
+ clientPath = clientPath[:-3]
+ return clientPath
+
+def currentGitBranch():
+ return read_pipe("git name-rev HEAD").split(" ")[1].strip()
+
+def isValidGitDir(path):
+ if (os.path.exists(path + "/HEAD")
+ and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
+ return True;
+ return False
+
+def parseRevision(ref):
+ return read_pipe("git rev-parse %s" % ref).strip()
+
+def branchExists(ref):
+ rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref],
+ ignore_error=True)
+ return len(rev) > 0
+
+def extractLogMessageFromGitCommit(commit):
+ logMessage = ""
+
+ ## fixme: title is first line of commit, not 1st paragraph.
+ foundTitle = False
+ for log in read_pipe_lines("git cat-file commit %s" % commit):
+ if not foundTitle:
+ if len(log) == 1:
+ foundTitle = True
+ continue
+
+ logMessage += log
+ return logMessage
+
+def extractSettingsGitLog(log):
+ values = {}
+ for line in log.split("\n"):
+ line = line.strip()
+ m = re.search (r"^ *\[git-p4: (.*)\]$", line)
+ if not m:
+ continue
+
+ assignments = m.group(1).split (':')
+ for a in assignments:
+ vals = a.split ('=')
+ key = vals[0].strip()
+ val = ('='.join (vals[1:])).strip()
+ if val.endswith ('\"') and val.startswith('"'):
+ val = val[1:-1]
+
+ values[key] = val
+
+ paths = values.get("depot-paths")
+ if not paths:
+ paths = values.get("depot-path")
+ if paths:
+ values['depot-paths'] = paths.split(',')
+ return values
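+# Illustrative (not part of the original script): a commit message line
+# [git-p4: depot-paths = "//depot/main/": change = 12345]
+# produces {'depot-paths': ['//depot/main/'], 'change': '12345'}.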
+
+def gitBranchExists(branch):
+ proc = subprocess.Popen(["git", "rev-parse", branch],
+ stderr=subprocess.PIPE, stdout=subprocess.PIPE);
+ return proc.wait() == 0;
+
+_gitConfig = {}
+def gitConfig(key, args = None): # set args to "--bool", for instance
+ if not _gitConfig.has_key(key):
+ argsFilter = ""
+ if args != None:
+ argsFilter = "%s " % args
+ cmd = "git config %s%s" % (argsFilter, key)
+ _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip()
+ return _gitConfig[key]
+
+def gitConfigList(key):
+ if not _gitConfig.has_key(key):
+ _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep)
+ return _gitConfig[key]
+
+def p4BranchesInGit(branchesAreInRemotes = True):
+ branches = {}
+
+ cmdline = "git rev-parse --symbolic "
+ if branchesAreInRemotes:
+ cmdline += " --remotes"
+ else:
+ cmdline += " --branches"
+
+ for line in read_pipe_lines(cmdline):
+ line = line.strip()
+
+ ## only import to p4/
+ if not line.startswith('p4/') or line == "p4/HEAD":
+ continue
+ branch = line
+
+ # strip off p4
+ branch = re.sub ("^p4/", "", line)
+
+ branches[branch] = parseRevision(line)
+ return branches
+
+def findUpstreamBranchPoint(head = "HEAD"):
+ branches = p4BranchesInGit()
+ # map from depot-path to branch name
+ branchByDepotPath = {}
+ for branch in branches.keys():
+ tip = branches[branch]
+ log = extractLogMessageFromGitCommit(tip)
+ settings = extractSettingsGitLog(log)
+ if settings.has_key("depot-paths"):
+ paths = ",".join(settings["depot-paths"])
+ branchByDepotPath[paths] = "remotes/p4/" + branch
+
+ settings = None
+ parent = 0
+ while parent < 65535:
+ commit = head + "~%s" % parent
+ log = extractLogMessageFromGitCommit(commit)
+ settings = extractSettingsGitLog(log)
+ if settings.has_key("depot-paths"):
+ paths = ",".join(settings["depot-paths"])
+ if branchByDepotPath.has_key(paths):
+ return [branchByDepotPath[paths], settings]
+
+ parent = parent + 1
+
+ return ["", settings]
+
+def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
+ if not silent:
+ print ("Creating/updating branch(es) in %s based on origin branch(es)"
+ % localRefPrefix)
+
+ originPrefix = "origin/p4/"
+
+ for line in read_pipe_lines("git rev-parse --symbolic --remotes"):
+ line = line.strip()
+ if (not line.startswith(originPrefix)) or line.endswith("HEAD"):
+ continue
+
+ headName = line[len(originPrefix):]
+ remoteHead = localRefPrefix + headName
+ originHead = line
+
+ original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
+ if (not original.has_key('depot-paths')
+ or not original.has_key('change')):
+ continue
+
+ update = False
+ if not gitBranchExists(remoteHead):
+ if verbose:
+ print "creating %s" % remoteHead
+ update = True
+ else:
+ settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
+ if settings.has_key('change') > 0:
+ if settings['depot-paths'] == original['depot-paths']:
+ originP4Change = int(original['change'])
+ p4Change = int(settings['change'])
+ if originP4Change > p4Change:
+ print ("%s (%s) is newer than %s (%s). "
+ "Updating p4 branch from origin."
+ % (originHead, originP4Change,
+ remoteHead, p4Change))
+ update = True
+ else:
+ print ("Ignoring: %s was imported from %s while "
+ "%s was imported from %s"
+ % (originHead, ','.join(original['depot-paths']),
+ remoteHead, ','.join(settings['depot-paths'])))
+
+ if update:
+ system("git update-ref %s %s" % (remoteHead, originHead))
+
+def originP4BranchesExist():
+ return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
+
+def p4ChangesForPaths(depotPaths, changeRange):
+ assert depotPaths
+ cmd = ['changes']
+ for p in depotPaths:
+ cmd += ["%s...%s" % (p, changeRange)]
+ output = p4_read_pipe_lines(cmd)
+
+ changes = {}
+ for line in output:
+ changeNum = int(line.split(" ")[1])
+ changes[changeNum] = True
+
+ changelist = changes.keys()
+ changelist.sort()
+ return changelist
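+# Illustrative (not part of the original script):
+# p4ChangesForPaths(["//depot/main/"], "@1,100") runs
+# "p4 changes //depot/main/...@1,100" and returns the sorted list of
+# change numbers found in the output.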
+
+def p4PathStartsWith(path, prefix):
+ # This method tries to remedy a potential mixed-case issue:
+ #
+ # If UserA adds //depot/DirA/file1
+ # and UserB adds //depot/dira/file2
+ #
+ # we may or may not have a problem. If you have core.ignorecase=true,
+ # we treat DirA and dira as the same directory
+ ignorecase = gitConfig("core.ignorecase", "--bool") == "true"
+ if ignorecase:
+ return path.lower().startswith(prefix.lower())
+ return path.startswith(prefix)
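+# Illustrative (not part of the original script): with core.ignorecase=true,
+# p4PathStartsWith("//depot/DirA/file1", "//depot/dira/") is True; otherwise
+# a plain case-sensitive prefix check is used.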
+
+def getClientSpec():
+ """Look at the p4 client spec, create a View() object that contains
+ all the mappings, and return it."""
+
+ specList = p4CmdList("client -o")
+ if len(specList) != 1:
+ die('Output from "client -o" is %d lines, expecting 1' %
+ len(specList))
+
+ # dictionary of all client parameters
+ entry = specList[0]
+
+ # just the keys that start with "View"
+ view_keys = [ k for k in entry.keys() if k.startswith("View") ]
+
+ # hold this new View
+ view = View()
+
+ # append the lines, in order, to the view
+ for view_num in range(len(view_keys)):
+ k = "View%d" % view_num
+ if k not in view_keys:
+ die("Expected view key %s missing" % k)
+ view.append(entry[k])
+
+ return view
+
+def getClientRoot():
+ """Grab the client directory."""
+
+ output = p4CmdList("client -o")
+ if len(output) != 1:
+ die('Output from "client -o" is %d lines, expecting 1' % len(output))
+
+ entry = output[0]
+ if "Root" not in entry:
+ die('Client has no "Root"')
+
+ return entry["Root"]
+
+class Command:
+ def __init__(self):
+ self.usage = "usage: %prog [options]"
+ self.needsGit = True
+
+class P4UserMap:
+ def __init__(self):
+ self.userMapFromPerforceServer = False
+ self.myP4UserId = None
+
+ def p4UserId(self):
+ if self.myP4UserId:
+ return self.myP4UserId
+
+ results = p4CmdList("user -o")
+ for r in results:
+ if r.has_key('User'):
+ self.myP4UserId = r['User']
+ return r['User']
+ die("Could not find your p4 user id")
+
+ def p4UserIsMe(self, p4User):
+ # return True if the given p4 user is actually me
+ me = self.p4UserId()
+ if not p4User or p4User != me:
+ return False
+ else:
+ return True
+
+ def getUserCacheFilename(self):
+ home = os.environ.get("HOME", os.environ.get("USERPROFILE"))
+ return home + "/.gitp4-usercache.txt"
+
+ def getUserMapFromPerforceServer(self):
+ if self.userMapFromPerforceServer:
+ return
+ self.users = {}
+ self.emails = {}
+
+ for output in p4CmdList("users"):
+ if not output.has_key("User"):
+ continue
+ self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
+ self.emails[output["Email"]] = output["User"]
+
+
+ s = ''
+ for (key, val) in self.users.items():
+ s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
+
+ open(self.getUserCacheFilename(), "wb").write(s)
+ self.userMapFromPerforceServer = True
+
+ def loadUserMapFromCache(self):
+ self.users = {}
+ self.userMapFromPerforceServer = False
+ try:
+ cache = open(self.getUserCacheFilename(), "rb")
+ lines = cache.readlines()
+ cache.close()
+ for line in lines:
+ entry = line.strip().split("\t")
+ self.users[entry[0]] = entry[1]
+ except IOError:
+ self.getUserMapFromPerforceServer()
+
+class P4Debug(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [
+ optparse.make_option("--verbose", dest="verbose", action="store_true",
+ default=False),
+ ]
+ self.description = "A tool to debug the output of p4 -G."
+ self.needsGit = False
+ self.verbose = False
+
+ def run(self, args):
+ j = 0
+ for output in p4CmdList(args):
+ print 'Element: %d' % j
+ j += 1
+ print output
+ return True
+
+class P4RollBack(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [
+ optparse.make_option("--verbose", dest="verbose", action="store_true"),
+ optparse.make_option("--local", dest="rollbackLocalBranches", action="store_true")
+ ]
+ self.description = "A tool to debug the multi-branch import. Don't use :)"
+ self.verbose = False
+ self.rollbackLocalBranches = False
+
+ def run(self, args):
+ if len(args) != 1:
+ return False
+ maxChange = int(args[0])
+
+ if "p4ExitCode" in p4Cmd("changes -m 1"):
+ die("Problems executing p4");
+
+ if self.rollbackLocalBranches:
+ refPrefix = "refs/heads/"
+ lines = read_pipe_lines("git rev-parse --symbolic --branches")
+ else:
+ refPrefix = "refs/remotes/"
+ lines = read_pipe_lines("git rev-parse --symbolic --remotes")
+
+ for line in lines:
+ if self.rollbackLocalBranches or (line.startswith("p4/") and line != "p4/HEAD\n"):
+ line = line.strip()
+ ref = refPrefix + line
+ log = extractLogMessageFromGitCommit(ref)
+ settings = extractSettingsGitLog(log)
+
+ depotPaths = settings['depot-paths']
+ change = settings['change']
+
+ changed = False
+
+ if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
+ for p in depotPaths]))) == 0:
+ print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
+ system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
+ continue
+
+ while change and int(change) > maxChange:
+ changed = True
+ if self.verbose:
+ print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
+ system("git update-ref %s \"%s^\"" % (ref, ref))
+ log = extractLogMessageFromGitCommit(ref)
+ settings = extractSettingsGitLog(log)
+
+
+ depotPaths = settings['depot-paths']
+ change = settings['change']
+
+ if changed:
+ print "%s rewound to %s" % (ref, change)
+
+ return True
+
+class P4Submit(Command, P4UserMap):
+ def __init__(self):
+ Command.__init__(self)
+ P4UserMap.__init__(self)
+ self.options = [
+ optparse.make_option("--verbose", dest="verbose", action="store_true"),
+ optparse.make_option("--origin", dest="origin"),
+ optparse.make_option("-M", dest="detectRenames", action="store_true"),
+ # preserve the user, requires relevant p4 permissions
+ optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"),
+ ]
+ self.description = "Submit changes from git to the perforce depot."
+ self.usage += " [name of git branch to submit into perforce depot]"
+ self.interactive = True
+ self.origin = ""
+ self.detectRenames = False
+ self.verbose = False
+ self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
+ self.isWindows = (platform.system() == "Windows")
+
+ def check(self):
+ if len(p4CmdList("opened ...")) > 0:
+ die("You have files opened with perforce! Close them before starting the sync.")
+
+ # replaces everything between 'Description:' and the next P4 submit template field with the
+ # commit message
+ def prepareLogMessage(self, template, message):
+ result = ""
+
+ inDescriptionSection = False
+
+ for line in template.split("\n"):
+ if line.startswith("#"):
+ result += line + "\n"
+ continue
+
+ if inDescriptionSection:
+ if line.startswith("Files:") or line.startswith("Jobs:"):
+ inDescriptionSection = False
+ else:
+ continue
+ else:
+ if line.startswith("Description:"):
+ inDescriptionSection = True
+ line += "\n"
+ for messageLine in message.split("\n"):
+ line += "\t" + messageLine + "\n"
+
+ result += line + "\n"
+
+ return result
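+ # Illustrative sketch (not part of the original script): given a template
+ # containing "Description:\n\t<enter description here>\n\nFiles:\n...",
+ # prepareLogMessage() replaces the placeholder description with the git
+ # commit message, one tab-indented line per message line, and copies the
+ # Files: section through unchanged.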
+
+ def patchRCSKeywords(self, file, pattern):
+ # Attempt to zap the RCS keywords in a p4 controlled file matching the given pattern
+ (handle, outFileName) = tempfile.mkstemp(dir='.')
+ try:
+ outFile = os.fdopen(handle, "w+")
+ inFile = open(file, "r")
+ regexp = re.compile(pattern, re.VERBOSE)
+ for line in inFile.readlines():
+ line = regexp.sub(r'$\1$', line)
+ outFile.write(line)
+ inFile.close()
+ outFile.close()
+ # Forcibly overwrite the original file
+ os.unlink(file)
+ shutil.move(outFileName, file)
+ except:
+ # cleanup our temporary file
+ os.unlink(outFileName)
+ print "Failed to strip RCS keywords in %s" % file
+ raise
+
+ print "Patched up RCS keywords in %s" % file
+
+ def p4UserForCommit(self,id):
+ # Return the tuple (perforce user,git email) for a given git commit id
+ self.getUserMapFromPerforceServer()
+ gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id)
+ gitEmail = gitEmail.strip()
+ if not self.emails.has_key(gitEmail):
+ return (None,gitEmail)
+ else:
+ return (self.emails[gitEmail],gitEmail)
+
+ def checkValidP4Users(self,commits):
+ # check if any git authors cannot be mapped to p4 users
+ for id in commits:
+ (user,email) = self.p4UserForCommit(id)
+ if not user:
+ msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
+ if gitConfig('git-p4.allowMissingP4Users').lower() == "true":
+ print "%s" % msg
+ else:
+ die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
+
+ def lastP4Changelist(self):
+ # Get back the last changelist number submitted in this client spec. This
+ # then gets used to patch up the username in the change. If the same
+ # client spec is being used by multiple processes then this might go
+ # wrong.
+ results = p4CmdList("client -o") # find the current client
+ client = None
+ for r in results:
+ if r.has_key('Client'):
+ client = r['Client']
+ break
+ if not client:
+ die("could not get client spec")
+ results = p4CmdList(["changes", "-c", client, "-m", "1"])
+ for r in results:
+ if r.has_key('change'):
+ return r['change']
+ die("Could not get changelist number for last submit - cannot patch up user details")
+
+ def modifyChangelistUser(self, changelist, newUser):
+ # fixup the user field of a changelist after it has been submitted.
+ changes = p4CmdList("change -o %s" % changelist)
+ if len(changes) != 1:
+ die("Bad output from p4 change modifying %s to user %s" %
+ (changelist, newUser))
+
+ c = changes[0]
+ if c['User'] == newUser: return # nothing to do
+ c['User'] = newUser
+ input = marshal.dumps(c)
+
+ result = p4CmdList("change -f -i", stdin=input)
+ for r in result:
+ if r.has_key('code'):
+ if r['code'] == 'error':
+ die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data']))
+ if r.has_key('data'):
+ print("Updated user field for changelist %s to %s" % (changelist, newUser))
+ return
+ die("Could not modify user field of changelist %s to %s" % (changelist, newUser))
+
+ def canChangeChangelists(self):
+ # check to see if we have p4 admin or super-user permissions, either of
+ # which are required to modify changelists.
+ results = p4CmdList(["protects", self.depotPath])
+ for r in results:
+ if r.has_key('perm'):
+ if r['perm'] == 'admin':
+ return 1
+ if r['perm'] == 'super':
+ return 1
+ return 0
+
+ def prepareSubmitTemplate(self):
+ # remove lines in the Files section that show changes to files outside the depot path we're committing into
+ template = ""
+ inFilesSection = False
+ for line in p4_read_pipe_lines(['change', '-o']):
+ if line.endswith("\r\n"):
+ line = line[:-2] + "\n"
+ if inFilesSection:
+ if line.startswith("\t"):
+ # path starts and ends with a tab
+ path = line[1:]
+ lastTab = path.rfind("\t")
+ if lastTab != -1:
+ path = path[:lastTab]
+ if not p4PathStartsWith(path, self.depotPath):
+ continue
+ else:
+ inFilesSection = False
+ else:
+ if line.startswith("Files:"):
+ inFilesSection = True
+
+ template += line
+
+ return template
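+ # Illustrative (not part of the original script): a "p4 change -o" template
+ # whose Files: section lists "\t//depot/main/a.c" and "\t//depot/other/b.c"
+ # is reduced to only the //depot/main entry when self.depotPath is
+ # "//depot/main/".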
+
+ def edit_template(self, template_file):
+ """Invoke the editor to let the user change the submission
+ message. Return true if okay to continue with the submit."""
+
+ # if configured to skip the editing part, just submit
+ if gitConfig("git-p4.skipSubmitEdit") == "true":
+ return True
+
+ # look at the modification time, to check later if the user saved
+ # the file
+ mtime = os.stat(template_file).st_mtime
+
+ # invoke the editor
+ if os.environ.has_key("P4EDITOR"):
+ editor = os.environ.get("P4EDITOR")
+ else:
+ editor = read_pipe("git var GIT_EDITOR").strip()
+ system(editor + " " + template_file)
+
+ # If the file was not saved, prompt to see if this patch should
+ # be skipped. But skip this verification step if configured so.
+ if gitConfig("git-p4.skipSubmitEditCheck") == "true":
+ return True
+
+ # modification time updated means user saved the file
+ if os.stat(template_file).st_mtime > mtime:
+ return True
+
+ while True:
+ response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
+ if response == 'y':
+ return True
+ if response == 'n':
+ return False
+
+ def applyCommit(self, id):
+ print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
+
+ (p4User, gitEmail) = self.p4UserForCommit(id)
+
+ if not self.detectRenames:
+ # If not explicitly set check the config variable
+ self.detectRenames = gitConfig("git-p4.detectRenames")
+
+ # -M on the command line sets this to True; git config returns a string
+ if self.detectRenames is True or self.detectRenames.lower() == "true":
+ diffOpts = "-M"
+ elif self.detectRenames == "" or self.detectRenames.lower() == "false":
+ diffOpts = ""
+ else:
+ diffOpts = "-M%s" % self.detectRenames
+
+ detectCopies = gitConfig("git-p4.detectCopies")
+ if detectCopies.lower() == "true":
+ diffOpts += " -C"
+ elif detectCopies != "" and detectCopies.lower() != "false":
+ diffOpts += " -C%s" % detectCopies
+
+ if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
+ diffOpts += " --find-copies-harder"
+
+ diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
+ filesToAdd = set()
+ filesToDelete = set()
+ editedFiles = set()
+ filesToChangeExecBit = {}
+
+ for line in diff:
+ diff = parseDiffTreeEntry(line)
+ modifier = diff['status']
+ path = diff['src']
+ if modifier == "M":
+ p4_edit(path)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ filesToChangeExecBit[path] = diff['dst_mode']
+ editedFiles.add(path)
+ elif modifier == "A":
+ filesToAdd.add(path)
+ filesToChangeExecBit[path] = diff['dst_mode']
+ if path in filesToDelete:
+ filesToDelete.remove(path)
+ elif modifier == "D":
+ filesToDelete.add(path)
+ if path in filesToAdd:
+ filesToAdd.remove(path)
+ elif modifier == "C":
+ src, dest = diff['src'], diff['dst']
+ p4_integrate(src, dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ p4_edit(dest)
+ filesToChangeExecBit[dest] = diff['dst_mode']
+ os.unlink(dest)
+ editedFiles.add(dest)
+ elif modifier == "R":
+ src, dest = diff['src'], diff['dst']
+ p4_integrate(src, dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ p4_edit(dest)
+ filesToChangeExecBit[dest] = diff['dst_mode']
+ os.unlink(dest)
+ editedFiles.add(dest)
+ filesToDelete.add(src)
+ else:
+ die("unknown modifier %s for %s" % (modifier, path))
+
+ diffcmd = "git format-patch -k --stdout \"%s^\"..\"%s\"" % (id, id)
+ patchcmd = diffcmd + " | git apply "
+ tryPatchCmd = patchcmd + "--check -"
+ applyPatchCmd = patchcmd + "--check --apply -"
+ patch_succeeded = True
+
+ if os.system(tryPatchCmd) != 0:
+ fixed_rcs_keywords = False
+ patch_succeeded = False
+ print "Unfortunately applying the change failed!"
+
+ # Patch failed, maybe it's just RCS keyword woes. Look through
+ # the patch to see if that's possible.
+ if gitConfig("git-p4.attemptRCSCleanup","--bool") == "true":
+ file = None
+ pattern = None
+ kwfiles = {}
+ for file in editedFiles | filesToDelete:
+ # did this file's delta contain RCS keywords?
+ pattern = p4_keywords_regexp_for_file(file)
+
+ if pattern:
+ # this file is a possibility...look for RCS keywords.
+ regexp = re.compile(pattern, re.VERBOSE)
+ for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]):
+ if regexp.search(line):
+ if verbose:
+ print "got keyword match on %s in %s in %s" % (pattern, line, file)
+ kwfiles[file] = pattern
+ break
+
+ for file in kwfiles:
+ if verbose:
+ print "zapping %s with %s" % (line,pattern)
+ self.patchRCSKeywords(file, kwfiles[file])
+ fixed_rcs_keywords = True
+
+ if fixed_rcs_keywords:
+ print "Retrying the patch with RCS keywords cleaned up"
+ if os.system(tryPatchCmd) == 0:
+ patch_succeeded = True
+
+ if not patch_succeeded:
+ print "What do you want to do?"
+ response = "x"
+ while response != "s" and response != "a" and response != "w":
+ response = raw_input("[s]kip this patch / [a]pply the patch forcibly "
+ "and with .rej files / [w]rite the patch to a file (patch.txt) ")
+ if response == "s":
+ print "Skipping! Good luck with the next patches..."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ os.remove(f)
+ return
+ elif response == "a":
+ os.system(applyPatchCmd)
+ if len(filesToAdd) > 0:
+ print "You may also want to call p4 add on the following files:"
+ print " ".join(filesToAdd)
+ if len(filesToDelete):
+ print "The following files should be scheduled for deletion with p4 delete:"
+ print " ".join(filesToDelete)
+ die("Please resolve and submit the conflict manually and "
+ + "continue afterwards with git p4 submit --continue")
+ elif response == "w":
+ system(diffcmd + " > patch.txt")
+ print "Patch saved to patch.txt in %s !" % self.clientPath
+ die("Please resolve and submit the conflict manually and "
+ "continue afterwards with git p4 submit --continue")
+
+ system(applyPatchCmd)
+
+ for f in filesToAdd:
+ p4_add(f)
+ for f in filesToDelete:
+ p4_revert(f)
+ p4_delete(f)
+
+ # Set/clear executable bits
+ for f in filesToChangeExecBit.keys():
+ mode = filesToChangeExecBit[f]
+ setP4ExecBit(f, mode)
+
+ logMessage = extractLogMessageFromGitCommit(id)
+ logMessage = logMessage.strip()
+
+ template = self.prepareSubmitTemplate()
+
+ if self.interactive:
+ submitTemplate = self.prepareLogMessage(template, logMessage)
+
+ if self.preserveUser:
+ submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
+
+ if os.environ.has_key("P4DIFF"):
+ del(os.environ["P4DIFF"])
+ diff = ""
+ for editedFile in editedFiles:
+ diff += p4_read_pipe(['diff', '-du', editedFile])
+
+ newdiff = ""
+ for newFile in filesToAdd:
+ newdiff += "==== new file ====\n"
+ newdiff += "--- /dev/null\n"
+ newdiff += "+++ %s\n" % newFile
+ f = open(newFile, "r")
+ for line in f.readlines():
+ newdiff += "+" + line
+ f.close()
+
+ if self.checkAuthorship and not self.p4UserIsMe(p4User):
+ submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
+ submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
+ submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
+
+ separatorLine = "######## everything below this line is just the diff #######\n"
+
+ (handle, fileName) = tempfile.mkstemp()
+ tmpFile = os.fdopen(handle, "w+")
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\n", "\r\n")
+ separatorLine = separatorLine.replace("\n", "\r\n")
+ newdiff = newdiff.replace("\n", "\r\n")
+ tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ tmpFile.close()
+
+ if self.edit_template(fileName):
+ # read the edited message and submit
+ tmpFile = open(fileName, "rb")
+ message = tmpFile.read()
+ tmpFile.close()
+ submitTemplate = message[:message.index(separatorLine)]
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\r\n", "\n")
+ p4_write_pipe(['submit', '-i'], submitTemplate)
+
+ if self.preserveUser:
+ if p4User:
+ # Get last changelist number. Cannot easily get it from
+ # the submit command output as the output is
+ # unmarshalled.
+ changelist = self.lastP4Changelist()
+ self.modifyChangelistUser(changelist, p4User)
+ else:
+ # skip this patch
+ print "Submission cancelled, undoing p4 changes."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ p4_revert(f)
+ os.remove(f)
+
+ os.remove(fileName)
+ else:
+ fileName = "submit.txt"
+ file = open(fileName, "w+")
+ file.write(self.prepareLogMessage(template, logMessage))
+ file.close()
+ print ("Perforce submit template written as %s. "
+ + "Please review/edit and then use p4 submit -i < %s to submit directly!"
+ % (fileName, fileName))
+
+ def run(self, args):
+ if len(args) == 0:
+ self.master = currentGitBranch()
+ if len(self.master) == 0 or not gitBranchExists("refs/heads/%s" % self.master):
+ die("Detecting current git branch failed!")
+ elif len(args) == 1:
+ self.master = args[0]
+ if not branchExists(self.master):
+ die("Branch %s does not exist" % self.master)
+ else:
+ return False
+
+ allowSubmit = gitConfig("git-p4.allowSubmit")
+ if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
+ die("%s is not in git-p4.allowSubmit" % self.master)
+
+ [upstream, settings] = findUpstreamBranchPoint()
+ self.depotPath = settings['depot-paths'][0]
+ if len(self.origin) == 0:
+ self.origin = upstream
+
+ if self.preserveUser:
+ if not self.canChangeChangelists():
+ die("Cannot preserve user names without p4 super-user or admin permissions")
+
+ if self.verbose:
+ print "Origin branch is " + self.origin
+
+ if len(self.depotPath) == 0:
+ print "Internal error: cannot locate perforce depot path from existing branches"
+ sys.exit(128)
+
+ self.useClientSpec = False
+ if gitConfig("git-p4.useclientspec", "--bool") == "true":
+ self.useClientSpec = True
+ if self.useClientSpec:
+ self.clientSpecDirs = getClientSpec()
+
+ if self.useClientSpec:
+ # all files are relative to the client spec
+ self.clientPath = getClientRoot()
+ else:
+ self.clientPath = p4Where(self.depotPath)
+
+ if self.clientPath == "":
+ die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath)
+
+ print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
+ self.oldWorkingDirectory = os.getcwd()
+
+ # ensure the clientPath exists
+ if not os.path.exists(self.clientPath):
+ os.makedirs(self.clientPath)
+
+ chdir(self.clientPath)
+ print "Synchronizing p4 checkout..."
+ p4_sync("...")
+ self.check()
+
+ commits = []
+ for line in read_pipe_lines("git rev-list --no-merges %s..%s" % (self.origin, self.master)):
+ commits.append(line.strip())
+ commits.reverse()
+
+ if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"):
+ self.checkAuthorship = False
+ else:
+ self.checkAuthorship = True
+
+ if self.preserveUser:
+ self.checkValidP4Users(commits)
+
+ while len(commits) > 0:
+ commit = commits[0]
+ commits = commits[1:]
+ self.applyCommit(commit)
+ if not self.interactive:
+ break
+
+ if len(commits) == 0:
+ print "All changes applied!"
+ chdir(self.oldWorkingDirectory)
+
+ sync = P4Sync()
+ sync.run([])
+
+ rebase = P4Rebase()
+ rebase.rebase()
+
+ return True
+
+class View(object):
+ """Represent a p4 view ("p4 help views"), and map files in a
+ repo according to the view."""
+
+ class Path(object):
+ """A depot or client path, possibly containing wildcards.
+ The only one supported is ... at the end, currently.
+ Initialize with the full path, with //depot or //client."""
+
+ def __init__(self, path, is_depot):
+ self.path = path
+ self.is_depot = is_depot
+ self.find_wildcards()
+ # remember the prefix bit, useful for relative mappings
+ m = re.match("(//[^/]+/)", self.path)
+ if not m:
+ die("Path %s does not start with //prefix/" % self.path)
+ prefix = m.group(1)
+ if not self.is_depot:
+ # strip //client/ on client paths
+ self.path = self.path[len(prefix):]
+
+ def find_wildcards(self):
+ """Make sure wildcards are valid, and set up internal
+ variables."""
+
+ self.ends_triple_dot = False
+ # There are three wildcards allowed in p4 views
+ # (see "p4 help views"). This code knows how to
+ # handle "..." (only at the end), but cannot deal with
+ # "%%n" or "*". Only check the depot_side, as p4 should
+ # validate that the client_side matches too.
+ if re.search(r'%%[1-9]', self.path):
+ die("Can't handle %%n wildcards in view: %s" % self.path)
+ if self.path.find("*") >= 0:
+ die("Can't handle * wildcards in view: %s" % self.path)
+ triple_dot_index = self.path.find("...")
+ if triple_dot_index >= 0:
+ if triple_dot_index != len(self.path) - 3:
+ die("Can handle only single ... wildcard, at end: %s" %
+ self.path)
+ self.ends_triple_dot = True
+
+ def ensure_compatible(self, other_path):
+ """Make sure the wildcards agree."""
+ if self.ends_triple_dot != other_path.ends_triple_dot:
+ die("Both paths must end with ... if either does;\n" +
+ "paths: %s %s" % (self.path, other_path.path))
+
+ def match_wildcards(self, test_path):
+ """See if this test_path matches us, and fill in the value
+ of the wildcards if so. Returns a tuple of
+ (True|False, wildcards[]). For now, only the ... at end
+ is supported, so at most one wildcard."""
+ if self.ends_triple_dot:
+ dotless = self.path[:-3]
+ if test_path.startswith(dotless):
+ wildcard = test_path[len(dotless):]
+ return (True, [ wildcard ])
+ else:
+ if test_path == self.path:
+ return (True, [])
+ return (False, [])
+
+ def match(self, test_path):
+ """Just return if it matches; don't bother with the wildcards."""
+ b, _ = self.match_wildcards(test_path)
+ return b
+
+ def fill_in_wildcards(self, wildcards):
+ """Return the relative path, with the wildcards filled in
+ if there are any."""
+ if self.ends_triple_dot:
+ return self.path[:-3] + wildcards[0]
+ else:
+ return self.path
+
+ class Mapping(object):
+ def __init__(self, depot_side, client_side, overlay, exclude):
+ # depot_side is without the trailing /... if it had one
+ self.depot_side = View.Path(depot_side, is_depot=True)
+ self.client_side = View.Path(client_side, is_depot=False)
+ self.overlay = overlay # started with "+"
+ self.exclude = exclude # started with "-"
+ assert not (self.overlay and self.exclude)
+ self.depot_side.ensure_compatible(self.client_side)
+
+ def __str__(self):
+ c = " "
+ if self.overlay:
+ c = "+"
+ if self.exclude:
+ c = "-"
+ return "View.Mapping: %s%s -> %s" % \
+ (c, self.depot_side.path, self.client_side.path)
+
+ def map_depot_to_client(self, depot_path):
+ """Calculate the client path if using this mapping on the
+ given depot path; does not consider the effect of other
+ mappings in a view. Even excluded mappings are returned."""
+ matches, wildcards = self.depot_side.match_wildcards(depot_path)
+ if not matches:
+ return ""
+ client_path = self.client_side.fill_in_wildcards(wildcards)
+ return client_path
+
+ #
+ # View methods
+ #
+ def __init__(self):
+ self.mappings = []
+
+ def append(self, view_line):
+ """Parse a view line, splitting it into depot and client
+ sides. Append to self.mappings, preserving order."""
+
+ # Split the view line into exactly two words. P4 enforces
+ # structure on these lines that simplifies this quite a bit.
+ #
+ # Either or both words may be double-quoted.
+ # Single quotes do not matter.
+ # Double-quote marks cannot occur inside the words.
+ # A + or - prefix is also inside the quotes.
+ # There are no quotes unless they contain a space.
+ # The line is already white-space stripped.
+ # The two words are separated by a single space.
+ #
+ if view_line[0] == '"':
+ # First word is double quoted. Find its end.
+ close_quote_index = view_line.find('"', 1)
+ if close_quote_index <= 0:
+ die("No first-word closing quote found: %s" % view_line)
+ depot_side = view_line[1:close_quote_index]
+ # skip closing quote and space
+ rhs_index = close_quote_index + 1 + 1
+ else:
+ space_index = view_line.find(" ")
+ if space_index <= 0:
+ die("No word-splitting space found: %s" % view_line)
+ depot_side = view_line[0:space_index]
+ rhs_index = space_index + 1
+
+ if view_line[rhs_index] == '"':
+ # Second word is double quoted. Make sure there is a
+ # double quote at the end too.
+ if not view_line.endswith('"'):
+ die("View line with rhs quote should end with one: %s" %
+ view_line)
+ # skip the quotes
+ client_side = view_line[rhs_index+1:-1]
+ else:
+ client_side = view_line[rhs_index:]
+
+ # prefix + means overlay on previous mapping
+ overlay = False
+ if depot_side.startswith("+"):
+ overlay = True
+ depot_side = depot_side[1:]
+
+ # prefix - means exclude this path
+ exclude = False
+ if depot_side.startswith("-"):
+ exclude = True
+ depot_side = depot_side[1:]
+
+ m = View.Mapping(depot_side, client_side, overlay, exclude)
+ self.mappings.append(m)
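+ # Illustrative view lines (not part of the original script) that append()
+ # accepts:
+ # //depot/proj/... //client/ws/proj/...
+ # -//depot/proj/doc/... //client/ws/proj/doc/...
+ # "+//depot/a b/..." "//client/ws/a b/..."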
+
+ def map_in_client(self, depot_path):
+ """Return the relative location in the client where this
+ depot file should live. Returns "" if the file should
+ not be mapped in the client."""
+
+ paths_filled = []
+ client_path = ""
+
+ # look at later entries first
+ for m in self.mappings[::-1]:
+
+ # see where will this path end up in the client
+ p = m.map_depot_to_client(depot_path)
+
+ if p == "":
+ # Depot path does not belong in client. Must remember
+ # this, as previous items should not cause files to
+ # exist in this path either. Remember that the list is
+ # being walked from the end, which has higher precedence.
+ # Overlap mappings do not exclude previous mappings.
+ if not m.overlay:
+ paths_filled.append(m.client_side)
+
+ else:
+ # This mapping matched; no need to search any further.
+ # But, the mapping could be rejected if the client path
+ # has already been claimed by an earlier mapping (i.e.
+ # one later in the list, which we are walking backwards).
+ already_mapped_in_client = False
+ for f in paths_filled:
+ # this is View.Path.match
+ if f.match(p):
+ already_mapped_in_client = True
+ break
+ if not already_mapped_in_client:
+ # Include this file, unless it is from a line that
+ # explicitly said to exclude it.
+ if not m.exclude:
+ client_path = p
+
+ # a match, even if rejected, always stops the search
+ break
+
+ return client_path
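+ # Illustrative (not part of the original script): with the two mappings
+ # "//depot/main/... //client/main/..." followed by
+ # "-//depot/main/tests/... //client/main/tests/...",
+ # map_in_client("//depot/main/foo.c") returns "main/foo.c" while
+ # map_in_client("//depot/main/tests/t1") returns "" because the later
+ # exclusion wins.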
+
+class P4Sync(Command, P4UserMap):
+ delete_actions = ( "delete", "move/delete", "purge" )
+
+ def __init__(self):
+ Command.__init__(self)
+ P4UserMap.__init__(self)
+ self.options = [
+ optparse.make_option("--branch", dest="branch"),
+ optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"),
+ optparse.make_option("--changesfile", dest="changesFile"),
+ optparse.make_option("--silent", dest="silent", action="store_true"),
+ optparse.make_option("--detect-labels", dest="detectLabels", action="store_true"),
+ optparse.make_option("--verbose", dest="verbose", action="store_true"),
+ optparse.make_option("--import-local", dest="importIntoRemotes", action="store_false",
+ help="Import into refs/heads/ , not refs/remotes"),
+ optparse.make_option("--max-changes", dest="maxChanges"),
+ optparse.make_option("--keep-path", dest="keepRepoPath", action='store_true',
+ help="Keep entire BRANCH/DIR/SUBDIR prefix during import"),
+ optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
+ help="Only sync files that are included in the Perforce Client Spec")
+ ]
+ self.description = """Imports from Perforce into a git repository.\n
+ example:
+ //depot/my/project/ -- to import the current head
+ //depot/my/project/@all -- to import everything
+ //depot/my/project/@1,6 -- to import only from revision 1 to 6
+
+ (a ... is not needed in the p4 path specification; it is added implicitly)"""
+
+ self.usage += " //depot/path[@revRange]"
+ self.silent = False
+ self.createdBranches = set()
+ self.committedChanges = set()
+ self.branch = ""
+ self.detectBranches = False
+ self.detectLabels = False
+ self.changesFile = ""
+ self.syncWithOrigin = True
+ self.verbose = False
+ self.importIntoRemotes = True
+ self.maxChanges = ""
+ self.isWindows = (platform.system() == "Windows")
+ self.keepRepoPath = False
+ self.depotPaths = None
+ self.p4BranchesInGit = []
+ self.cloneExclude = []
+ self.useClientSpec = False
+ self.useClientSpec_from_options = False
+ self.clientSpecDirs = None
+ self.tempBranches = []
+ self.tempBranchLocation = "git-p4-tmp"
+
+ if gitConfig("git-p4.syncFromOrigin") == "false":
+ self.syncWithOrigin = False
+
+ #
+ # P4 wildcards are not allowed in filenames. P4 complains
+ # if you simply add them, but you can force it with "-f", in
+ # which case it translates them into %xx encoding internally.
+ # Search for and fix just these four characters. Do % last so
+ # that fixing it does not inadvertently create new %-escapes.
+ #
+ def wildcard_decode(self, path):
+ # Cannot have * in a filename in windows; untested as to
+ # what p4 would do in such a case.
+ if not self.isWindows:
+ path = path.replace("%2A", "*")
+ path = path.replace("%23", "#") \
+ .replace("%40", "@") \
+ .replace("%25", "%")
+ return path
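+ # Illustrative (not part of the original script):
+ # wildcard_decode("dir/a%40b%25c") returns "dir/a@b%c"; doing the "%25"
+ # substitution last keeps an encoded "%2540" from turning into "@".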
+
+ # Force a checkpoint in fast-import and wait for it to finish
+ def checkpoint(self):
+ self.gitStream.write("checkpoint\n\n")
+ self.gitStream.write("progress checkpoint\n\n")
+ out = self.gitOutput.readline()
+ if self.verbose:
+ print "checkpoint finished: " + out
+
+ def extractFilesFromCommit(self, commit):
+ self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
+ for path in self.cloneExclude]
+ files = []
+ fnum = 0
+ while commit.has_key("depotFile%s" % fnum):
+ path = commit["depotFile%s" % fnum]
+
+ if [p for p in self.cloneExclude
+ if p4PathStartsWith(path, p)]:
+ found = False
+ else:
+ found = [p for p in self.depotPaths
+ if p4PathStartsWith(path, p)]
+ if not found:
+ fnum = fnum + 1
+ continue
+
+ file = {}
+ file["path"] = path
+ file["rev"] = commit["rev%s" % fnum]
+ file["action"] = commit["action%s" % fnum]
+ file["type"] = commit["type%s" % fnum]
+ files.append(file)
+ fnum = fnum + 1
+ return files
+
+ def stripRepoPath(self, path, prefixes):
+ if self.useClientSpec:
+ return self.clientSpecDirs.map_in_client(path)
+
+ if self.keepRepoPath:
+ prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])]
+
+ for p in prefixes:
+ if p4PathStartsWith(path, p):
+ path = path[len(p):]
+
+ return path
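+ # Illustrative (not part of the original script): with depot prefix
+ # "//depot/proj/" and no client spec,
+ # stripRepoPath("//depot/proj/src/a.c", ["//depot/proj/"]) returns
+ # "src/a.c"; with --keep-path it returns "proj/src/a.c".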
+
+ def splitFilesIntoBranches(self, commit):
+ branches = {}
+ fnum = 0
+ while commit.has_key("depotFile%s" % fnum):
+ path = commit["depotFile%s" % fnum]
+ found = [p for p in self.depotPaths
+ if p4PathStartsWith(path, p)]
+ if not found:
+ fnum = fnum + 1
+ continue
+
+ file = {}
+ file["path"] = path
+ file["rev"] = commit["rev%s" % fnum]
+ file["action"] = commit["action%s" % fnum]
+ file["type"] = commit["type%s" % fnum]
+ fnum = fnum + 1
+
+ relPath = self.stripRepoPath(path, self.depotPaths)
+
+ for branch in self.knownBranches.keys():
+
+ # add a trailing slash so that a commit into qt/4.2foo doesn't end up in qt/4.2
+ if relPath.startswith(branch + "/"):
+ if branch not in branches:
+ branches[branch] = []
+ branches[branch].append(file)
+ break
+
+ return branches
+
+ # output one file from the P4 stream
+ # - helper for streamP4Files
+
+ def streamOneP4File(self, file, contents):
+ relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes)
+ relPath = self.wildcard_decode(relPath)
+ if verbose:
+ sys.stderr.write("%s\n" % relPath)
+
+ (type_base, type_mods) = split_p4_type(file["type"])
+
+ git_mode = "100644"
+ if "x" in type_mods:
+ git_mode = "100755"
+ if type_base == "symlink":
+ git_mode = "120000"
+ # p4 print on a symlink contains "target\n"; remove the newline
+ data = ''.join(contents)
+ contents = [data[:-1]]
+
+ if type_base == "utf16":
+ # p4 delivers different text in the python output to -G
+ # than it does when using "print -o", or normal p4 client
+ # operations. utf16 is converted to ascii or utf8, perhaps.
+ # But ascii text saved as -t utf16 is completely mangled.
+ # Invoke print -o to get the real contents.
+ text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']])
+ contents = [ text ]
+
+ if type_base == "apple":
+ # Apple filetype files will be streamed as a concatenation of
+ # the appledouble header and the contents. This is useless
+ # on both macs and non-macs. If using "print -q -o xx", it
+ # will create "xx" with the data, and "%xx" with the header.
+ # This is also not very useful.
+ #
+ # Ideally, someday, this script can learn how to generate
+ # appledouble files directly and import those to git, but
+ # non-mac machines can never find a use for apple filetype.
+ print "\nIgnoring apple filetype file %s" % file['depotFile']
+ return
+
+ # Perhaps unicode and utf16 files on Windows should have their
+ # newlines translated too, but this code does not do that.
+ if self.isWindows and type_base == "text":
+ mangled = []
+ for data in contents:
+ data = data.replace("\r\n", "\n")
+ mangled.append(data)
+ contents = mangled
+
+ # Note that we do not try to de-mangle keywords on utf16 files,
+ # even though in theory somebody may want that.
+ pattern = p4_keywords_regexp_for_type(type_base, type_mods)
+ if pattern:
+ regexp = re.compile(pattern, re.VERBOSE)
+ text = ''.join(contents)
+ text = regexp.sub(r'$\1$', text)
+ contents = [ text ]
+
+ self.gitStream.write("M %s inline %s\n" % (git_mode, relPath))
+
+ # total length...
+ length = 0
+ for d in contents:
+ length = length + len(d)
+
+ self.gitStream.write("data %d\n" % length)
+ for d in contents:
+ self.gitStream.write(d)
+ self.gitStream.write("\n")
+
+ def streamOneP4Deletion(self, file):
+ relPath = self.stripRepoPath(file['path'], self.branchPrefixes)
+ if verbose:
+ sys.stderr.write("delete %s\n" % relPath)
+ self.gitStream.write("D %s\n" % relPath)
+
+ # handle another chunk of streaming data
+ def streamP4FilesCb(self, marshalled):
+
+ if marshalled.has_key('depotFile') and self.stream_have_file_info:
+ # start of a new file - output the old one first
+ self.streamOneP4File(self.stream_file, self.stream_contents)
+ self.stream_file = {}
+ self.stream_contents = []
+ self.stream_have_file_info = False
+
+ # pick up the new file information... for the
+ # 'data' field we need to append to our array
+ for k in marshalled.keys():
+ if k == 'data':
+ self.stream_contents.append(marshalled['data'])
+ else:
+ self.stream_file[k] = marshalled[k]
+
+ self.stream_have_file_info = True
+
+ # Stream directly from "p4 files" into "git fast-import"
+ def streamP4Files(self, files):
+ filesForCommit = []
+ filesToRead = []
+ filesToDelete = []
+
+ for f in files:
+ # if using a client spec, only add the files that have
+ # a path in the client
+ if self.clientSpecDirs:
+ if self.clientSpecDirs.map_in_client(f['path']) == "":
+ continue
+
+ filesForCommit.append(f)
+ if f['action'] in self.delete_actions:
+ filesToDelete.append(f)
+ else:
+ filesToRead.append(f)
+
+ # deleted files...
+ for f in filesToDelete:
+ self.streamOneP4Deletion(f)
+
+ if len(filesToRead) > 0:
+ self.stream_file = {}
+ self.stream_contents = []
+ self.stream_have_file_info = False
+
+ # curry self argument
+ def streamP4FilesCbSelf(entry):
+ self.streamP4FilesCb(entry)
+
+ fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead]
+
+ p4CmdList(["-x", "-", "print"],
+ stdin=fileArgs,
+ cb=streamP4FilesCbSelf)
+
+ # do the last chunk
+ if self.stream_file.has_key('depotFile'):
+ self.streamOneP4File(self.stream_file, self.stream_contents)
+
+ def make_email(self, userid):
+ if userid in self.users:
+ return self.users[userid]
+ else:
+ return "%s <a@b>" % userid
+
+ def commit(self, details, files, branch, branchPrefixes, parent = ""):
+ epoch = details["time"]
+ author = details["user"]
+ self.branchPrefixes = branchPrefixes
+
+ if self.verbose:
+ print "commit into %s" % branch
+
+ # start with reading files; if that fails, we should not
+ # create a commit.
+ new_files = []
+ for f in files:
+ if [p for p in branchPrefixes if p4PathStartsWith(f['path'], p)]:
+ new_files.append (f)
+ else:
+ sys.stderr.write("Ignoring file outside of prefix: %s\n" % f['path'])
+
+ self.gitStream.write("commit %s\n" % branch)
+# gitStream.write("mark :%s\n" % details["change"])
+ self.committedChanges.add(int(details["change"]))
+ committer = ""
+ if author not in self.users:
+ self.getUserMapFromPerforceServer()
+ committer = "%s %s %s" % (self.make_email(author), epoch, self.tz)
+
+ self.gitStream.write("committer %s\n" % committer)
+
+ self.gitStream.write("data <<EOT\n")
+ self.gitStream.write(details["desc"])
+ self.gitStream.write("\n[git-p4: depot-paths = \"%s\": change = %s"
+ % (','.join (branchPrefixes), details["change"]))
+ if len(details['options']) > 0:
+ self.gitStream.write(": options = %s" % details['options'])
+ self.gitStream.write("]\nEOT\n\n")
+
+ if len(parent) > 0:
+ if self.verbose:
+ print "parent %s" % parent
+ self.gitStream.write("from %s\n" % parent)
+
+ self.streamP4Files(new_files)
+ self.gitStream.write("\n")
+
+ change = int(details["change"])
+
+ if self.labels.has_key(change):
+ label = self.labels[change]
+ labelDetails = label[0]
+ labelRevisions = label[1]
+ if self.verbose:
+ print "Change %s is labelled %s" % (change, labelDetails)
+
+ files = p4CmdList(["files"] + ["%s...@%s" % (p, change)
+ for p in branchPrefixes])
+
+ if len(files) == len(labelRevisions):
+
+ cleanedFiles = {}
+ for info in files:
+ if info["action"] in self.delete_actions:
+ continue
+ cleanedFiles[info["depotFile"]] = info["rev"]
+
+ if cleanedFiles == labelRevisions:
+ self.gitStream.write("tag tag_%s\n" % labelDetails["label"])
+ self.gitStream.write("from %s\n" % branch)
+
+ owner = labelDetails["Owner"]
+
+ # Try to use the owner of the p4 label, or failing that,
+ # the current p4 user id.
+ if owner:
+ email = self.make_email(owner)
+ else:
+ email = self.make_email(self.p4UserId())
+ tagger = "%s %s %s" % (email, epoch, self.tz)
+
+ self.gitStream.write("tagger %s\n" % tagger)
+
+ description = labelDetails["Description"]
+ self.gitStream.write("data %d\n" % len(description))
+ self.gitStream.write(description)
+ self.gitStream.write("\n")
+
+ else:
+ if not self.silent:
+ print ("Tag %s does not match with change %s: files do not match."
+ % (labelDetails["label"], change))
+
+ else:
+ if not self.silent:
+ print ("Tag %s does not match with change %s: file count is different."
+ % (labelDetails["label"], change))
+
+ def getLabels(self):
+ self.labels = {}
+
+ l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
+ if len(l) > 0 and not self.silent:
+ print "Finding files belonging to labels in %s" % `self.depotPaths`
+
+ for output in l:
+ label = output["label"]
+ revisions = {}
+ newestChange = 0
+ if self.verbose:
+ print "Querying files for label %s" % label
+ for file in p4CmdList(["files"] +
+ ["%s...@%s" % (p, label)
+ for p in self.depotPaths]):
+ revisions[file["depotFile"]] = file["rev"]
+ change = int(file["change"])
+ if change > newestChange:
+ newestChange = change
+
+ self.labels[newestChange] = [output, revisions]
+
+ if self.verbose:
+ print "Label changes: %s" % self.labels.keys()
+
+ def guessProjectName(self):
+ for p in self.depotPaths:
+ if p.endswith("/"):
+ p = p[:-1]
+ p = p[p.strip().rfind("/") + 1:]
+ if not p.endswith("/"):
+ p += "/"
+ return p
+
+ def getBranchMapping(self):
+ lostAndFoundBranches = set()
+
+ user = gitConfig("git-p4.branchUser")
+ if len(user) > 0:
+ command = "branches -u %s" % user
+ else:
+ command = "branches"
+
+ for info in p4CmdList(command):
+ details = p4Cmd(["branch", "-o", info["branch"]])
+ viewIdx = 0
+ while details.has_key("View%s" % viewIdx):
+ paths = details["View%s" % viewIdx].split(" ")
+ viewIdx = viewIdx + 1
+ # require standard //depot/foo/... //depot/bar/... mapping
+ if len(paths) != 2 or not paths[0].endswith("/...") or not paths[1].endswith("/..."):
+ continue
+ source = paths[0]
+ destination = paths[1]
+ ## HACK
+ if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]):
+ source = source[len(self.depotPaths[0]):-4]
+ destination = destination[len(self.depotPaths[0]):-4]
+
+ if destination in self.knownBranches:
+ if not self.silent:
+ print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
+ print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
+ continue
+
+ self.knownBranches[destination] = source
+
+ lostAndFoundBranches.discard(destination)
+
+ if source not in self.knownBranches:
+ lostAndFoundBranches.add(source)
+
+ # Perforce does not strictly require branches to be defined, so we also
+ # check git config for a branch list.
+ #
+ # Example of branch definition in git config file:
+ # [git-p4]
+ # branchList=main:branchA
+ # branchList=main:branchB
+ # branchList=branchA:branchC
+ configBranches = gitConfigList("git-p4.branchList")
+ for branch in configBranches:
+ if branch:
+ (source, destination) = branch.split(":")
+ self.knownBranches[destination] = source
+
+ lostAndFoundBranches.discard(destination)
+
+ if source not in self.knownBranches:
+ lostAndFoundBranches.add(source)
+
+
+ for branch in lostAndFoundBranches:
+ self.knownBranches[branch] = branch
+
+ def getBranchMappingFromGitBranches(self):
+ branches = p4BranchesInGit(self.importIntoRemotes)
+ for branch in branches.keys():
+ if branch == "master":
+ branch = "main"
+ else:
+ branch = branch[len(self.projectName):]
+ self.knownBranches[branch] = branch
+
+ def listExistingP4GitBranches(self):
+ # branches holds mapping from name to commit
+ branches = p4BranchesInGit(self.importIntoRemotes)
+ self.p4BranchesInGit = branches.keys()
+ for branch in branches.keys():
+ self.initialParents[self.refPrefix + branch] = branches[branch]
+
+ def updateOptionDict(self, d):
+ option_keys = {}
+ if self.keepRepoPath:
+ option_keys['keepRepoPath'] = 1
+
+ d["options"] = ' '.join(sorted(option_keys.keys()))
+
+ def readOptions(self, d):
+ self.keepRepoPath = (d.has_key('options')
+ and ('keepRepoPath' in d['options']))
+
+ def gitRefForBranch(self, branch):
+ if branch == "main":
+ return self.refPrefix + "master"
+
+ if len(branch) <= 0:
+ return branch
+
+ return self.refPrefix + self.projectName + branch
+
+ def gitCommitByP4Change(self, ref, change):
+ if self.verbose:
+ print "looking in ref " + ref + " for change %s using bisect..." % change
+
+ earliestCommit = ""
+ latestCommit = parseRevision(ref)
+
+ while True:
+ if self.verbose:
+ print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
+ next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
+ if len(next) == 0:
+ if self.verbose:
+ print "argh"
+ return ""
+ log = extractLogMessageFromGitCommit(next)
+ settings = extractSettingsGitLog(log)
+ currentChange = int(settings['change'])
+ if self.verbose:
+ print "current change %s" % currentChange
+
+ if currentChange == change:
+ if self.verbose:
+ print "found %s" % next
+ return next
+
+ if currentChange < change:
+ earliestCommit = "^%s" % next
+ else:
+ latestCommit = "%s" % next
+
+ return ""
+
+ def importNewBranch(self, branch, maxChange):
+ # make fast-import flush all changes to disk and update the refs using the checkpoint
+ # command so that we can try to find the branch parent in the git history
+ self.gitStream.write("checkpoint\n\n");
+ self.gitStream.flush();
+ branchPrefix = self.depotPaths[0] + branch + "/"
+ range = "@1,%s" % maxChange
+ #print "prefix" + branchPrefix
+ changes = p4ChangesForPaths([branchPrefix], range)
+ if len(changes) <= 0:
+ return False
+ firstChange = changes[0]
+ #print "first change in branch: %s" % firstChange
+ sourceBranch = self.knownBranches[branch]
+ sourceDepotPath = self.depotPaths[0] + sourceBranch
+ sourceRef = self.gitRefForBranch(sourceBranch)
+ #print "source " + sourceBranch
+
+ branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"])
+ #print "branch parent: %s" % branchParentChange
+ gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
+ if len(gitParent) > 0:
+ self.initialParents[self.gitRefForBranch(branch)] = gitParent
+ #print "parent git commit: %s" % gitParent
+
+ self.importChanges(changes)
+ return True
+
+ def searchParent(self, parent, branch, target):
+ parentFound = False
+ for blob in read_pipe_lines(["git", "rev-list", "--reverse", "--no-merges", parent]):
+ blob = blob.strip()
+ if len(read_pipe(["git", "diff-tree", blob, target])) == 0:
+ parentFound = True
+ if self.verbose:
+ print "Found parent of %s in commit %s" % (branch, blob)
+ break
+ if parentFound:
+ return blob
+ else:
+ return None
+
+ def importChanges(self, changes):
+ cnt = 1
+ for change in changes:
+ description = p4Cmd(["describe", str(change)])
+ self.updateOptionDict(description)
+
+ if not self.silent:
+ sys.stdout.write("\rImporting revision %s (%s%%)" % (change, cnt * 100 / len(changes)))
+ sys.stdout.flush()
+ cnt = cnt + 1
+
+ try:
+ if self.detectBranches:
+ branches = self.splitFilesIntoBranches(description)
+ for branch in branches.keys():
+ ## HACK --hwn
+ branchPrefix = self.depotPaths[0] + branch + "/"
+
+ parent = ""
+
+ filesForCommit = branches[branch]
+
+ if self.verbose:
+ print "branch is %s" % branch
+
+ self.updatedBranches.add(branch)
+
+ if branch not in self.createdBranches:
+ self.createdBranches.add(branch)
+ parent = self.knownBranches[branch]
+ if parent == branch:
+ parent = ""
+ else:
+ fullBranch = self.projectName + branch
+ if fullBranch not in self.p4BranchesInGit:
+ if not self.silent:
+ print("\n Importing new branch %s" % fullBranch);
+ if self.importNewBranch(branch, change - 1):
+ parent = ""
+ self.p4BranchesInGit.append(fullBranch)
+ if not self.silent:
+ print("\n Resuming with change %s" % change);
+
+ if self.verbose:
+ print "parent determined through known branches: %s" % parent
+
+ branch = self.gitRefForBranch(branch)
+ parent = self.gitRefForBranch(parent)
+
+ if self.verbose:
+ print "looking for initial parent for %s; current parent is %s" % (branch, parent)
+
+ if len(parent) == 0 and branch in self.initialParents:
+ parent = self.initialParents[branch]
+ del self.initialParents[branch]
+
+ blob = None
+ if len(parent) > 0:
+ tempBranch = os.path.join(self.tempBranchLocation, "%d" % (change))
+ if self.verbose:
+ print "Creating temporary branch: " + tempBranch
+ self.commit(description, filesForCommit, tempBranch, [branchPrefix])
+ self.tempBranches.append(tempBranch)
+ self.checkpoint()
+ blob = self.searchParent(parent, branch, tempBranch)
+ if blob:
+ self.commit(description, filesForCommit, branch, [branchPrefix], blob)
+ else:
+ if self.verbose:
+ print "Parent of %s not found. Committing into head of %s" % (branch, parent)
+ self.commit(description, filesForCommit, branch, [branchPrefix], parent)
+ else:
+ files = self.extractFilesFromCommit(description)
+ self.commit(description, files, self.branch, self.depotPaths,
+ self.initialParent)
+ self.initialParent = ""
+ except IOError:
+ print self.gitError.read()
+ sys.exit(1)
+
+ def importHeadRevision(self, revision):
+ print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
+
+ details = {}
+ details["user"] = "git perforce import user"
+ details["desc"] = ("Initial import of %s from the state at revision %s\n"
+ % (' '.join(self.depotPaths), revision))
+ details["change"] = revision
+ newestRevision = 0
+
+ fileCnt = 0
+ fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths]
+
+ for info in p4CmdList(["files"] + fileArgs):
+
+ if 'code' in info and info['code'] == 'error':
+ sys.stderr.write("p4 returned an error: %s\n"
+ % info['data'])
+ if info['data'].find("must refer to client") >= 0:
+ sys.stderr.write("This particular p4 error is misleading.\n")
+ sys.stderr.write("Perhaps the depot path was misspelled.\n");
+ sys.stderr.write("Depot path: %s\n" % " ".join(self.depotPaths))
+ sys.exit(1)
+ if 'p4ExitCode' in info:
+ sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode'])
+ sys.exit(1)
+
+
+ change = int(info["change"])
+ if change > newestRevision:
+ newestRevision = change
+
+ if info["action"] in self.delete_actions:
+ # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
+ #fileCnt = fileCnt + 1
+ continue
+
+ for prop in ["depotFile", "rev", "action", "type" ]:
+ details["%s%s" % (prop, fileCnt)] = info[prop]
+
+ fileCnt = fileCnt + 1
+
+ details["change"] = newestRevision
+
+ # Use time from top-most change so that all git p4 clones of
+ # the same p4 repo have the same commit SHA1s.
+ res = p4CmdList("describe -s %d" % newestRevision)
+ newestTime = None
+ for r in res:
+ if r.has_key('time'):
+ newestTime = int(r['time'])
+ if newestTime is None:
+ die("\"describe -s\" on newest change %d did not give a time")
+ details["time"] = newestTime
+
+ self.updateOptionDict(details)
+ try:
+ self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths)
+ except IOError:
+ print "IO error with git fast-import. Is your git version recent enough?"
+ print self.gitError.read()
+
+
+ def run(self, args):
+ self.depotPaths = []
+ self.changeRange = ""
+ self.initialParent = ""
+ self.previousDepotPaths = []
+
+ # map from branch depot path to parent branch
+ self.knownBranches = {}
+ self.initialParents = {}
+ self.hasOrigin = originP4BranchesExist()
+ if not self.syncWithOrigin:
+ self.hasOrigin = False
+
+ if self.importIntoRemotes:
+ self.refPrefix = "refs/remotes/p4/"
+ else:
+ self.refPrefix = "refs/heads/p4/"
+
+ if self.syncWithOrigin and self.hasOrigin:
+ if not self.silent:
+ print "Syncing with origin first by calling git fetch origin"
+ system("git fetch origin")
+
+ if len(self.branch) == 0:
+ self.branch = self.refPrefix + "master"
+ if gitBranchExists("refs/heads/p4") and self.importIntoRemotes:
+ system("git update-ref %s refs/heads/p4" % self.branch)
+ system("git branch -D p4");
+ # create it /after/ importing, when master exists
+ if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch):
+ system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch))
+
+ # accept either the command-line option, or the configuration variable
+ if self.useClientSpec:
+ # will use this after clone to set the variable
+ self.useClientSpec_from_options = True
+ else:
+ if gitConfig("git-p4.useclientspec", "--bool") == "true":
+ self.useClientSpec = True
+ if self.useClientSpec:
+ self.clientSpecDirs = getClientSpec()
+
+ # TODO: should always look at previous commits,
+ # merge with previous imports, if possible.
+ if args == []:
+ if self.hasOrigin:
+ createOrUpdateBranchesFromOrigin(self.refPrefix, self.silent)
+ self.listExistingP4GitBranches()
+
+ if len(self.p4BranchesInGit) > 1:
+ if not self.silent:
+ print "Importing from/into multiple branches"
+ self.detectBranches = True
+
+ if self.verbose:
+ print "branches: %s" % self.p4BranchesInGit
+
+ p4Change = 0
+ for branch in self.p4BranchesInGit:
+ logMsg = extractLogMessageFromGitCommit(self.refPrefix + branch)
+
+ settings = extractSettingsGitLog(logMsg)
+
+ self.readOptions(settings)
+ if (settings.has_key('depot-paths')
+ and settings.has_key ('change')):
+ change = int(settings['change']) + 1
+ p4Change = max(p4Change, change)
+
+ depotPaths = sorted(settings['depot-paths'])
+ if self.previousDepotPaths == []:
+ self.previousDepotPaths = depotPaths
+ else:
+ paths = []
+ for (prev, cur) in zip(self.previousDepotPaths, depotPaths):
+ prev_list = prev.split("/")
+ cur_list = cur.split("/")
+ for i in range(0, min(len(cur_list), len(prev_list))):
+ if cur_list[i] <> prev_list[i]:
+ i = i - 1
+ break
+
+ paths.append ("/".join(cur_list[:i + 1]))
+
+ self.previousDepotPaths = paths
+
+ if p4Change > 0:
+ self.depotPaths = sorted(self.previousDepotPaths)
+ self.changeRange = "@%s,#head" % p4Change
+ if not self.detectBranches:
+ self.initialParent = parseRevision(self.branch)
+ if not self.silent and not self.detectBranches:
+ print "Performing incremental import into %s git branch" % self.branch
+
+ if not self.branch.startswith("refs/"):
+ self.branch = "refs/heads/" + self.branch
+
+ if len(args) == 0 and self.depotPaths:
+ if not self.silent:
+ print "Depot paths: %s" % ' '.join(self.depotPaths)
+ else:
+ if self.depotPaths and self.depotPaths != args:
+ print ("previous import used depot path %s and now %s was specified. "
+ "This doesn't work!" % (' '.join (self.depotPaths),
+ ' '.join (args)))
+ sys.exit(1)
+
+ self.depotPaths = sorted(args)
+
+ revision = ""
+ self.users = {}
+
+ # Make sure no revision specifiers are used when --changesfile
+ # is specified.
+ bad_changesfile = False
+ if len(self.changesFile) > 0:
+ for p in self.depotPaths:
+ if p.find("@") >= 0 or p.find("#") >= 0:
+ bad_changesfile = True
+ break
+ if bad_changesfile:
+ die("Option --changesfile is incompatible with revision specifiers")
+
+ newPaths = []
+ for p in self.depotPaths:
+ if p.find("@") != -1:
+ atIdx = p.index("@")
+ self.changeRange = p[atIdx:]
+ if self.changeRange == "@all":
+ self.changeRange = ""
+ elif ',' not in self.changeRange:
+ revision = self.changeRange
+ self.changeRange = ""
+ p = p[:atIdx]
+ elif p.find("#") != -1:
+ hashIdx = p.index("#")
+ revision = p[hashIdx:]
+ p = p[:hashIdx]
+ elif self.previousDepotPaths == []:
+ # pay attention to changesfile, if given, else import
+ # the entire p4 tree at the head revision
+ if len(self.changesFile) == 0:
+ revision = "#head"
+
+ p = re.sub ("\.\.\.$", "", p)
+ if not p.endswith("/"):
+ p += "/"
+
+ newPaths.append(p)
+
+ self.depotPaths = newPaths
+
+
+ self.loadUserMapFromCache()
+ self.labels = {}
+ if self.detectLabels:
+ self.getLabels();
+
+ if self.detectBranches:
+ ## FIXME - what's a P4 projectName ?
+ self.projectName = self.guessProjectName()
+
+ if self.hasOrigin:
+ self.getBranchMappingFromGitBranches()
+ else:
+ self.getBranchMapping()
+ if self.verbose:
+ print "p4-git branches: %s" % self.p4BranchesInGit
+ print "initial parents: %s" % self.initialParents
+ for b in self.p4BranchesInGit:
+ if b != "master":
+
+ ## FIXME
+ b = b[len(self.projectName):]
+ self.createdBranches.add(b)
+
+ self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
+
+ importProcess = subprocess.Popen(["git", "fast-import"],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE);
+ self.gitOutput = importProcess.stdout
+ self.gitStream = importProcess.stdin
+ self.gitError = importProcess.stderr
+
+ if revision:
+ self.importHeadRevision(revision)
+ else:
+ changes = []
+
+ if len(self.changesFile) > 0:
+ output = open(self.changesFile).readlines()
+ changeSet = set()
+ for line in output:
+ changeSet.add(int(line))
+
+ for change in changeSet:
+ changes.append(change)
+
+ changes.sort()
+ else:
+ # catch "git p4 sync" with no new branches, in a repo that
+ # does not have any existing p4 branches
+ if len(args) == 0 and not self.p4BranchesInGit:
+ die("No remote p4 branches. Perhaps you never did \"git p4 clone\" in here.");
+ if self.verbose:
+ print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
+ self.changeRange)
+ changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
+
+ if len(self.maxChanges) > 0:
+ changes = changes[:min(int(self.maxChanges), len(changes))]
+
+ if len(changes) == 0:
+ if not self.silent:
+ print "No changes to import!"
+ return True
+
+ if not self.silent and not self.detectBranches:
+ print "Import destination: %s" % self.branch
+
+ self.updatedBranches = set()
+
+ self.importChanges(changes)
+
+ if not self.silent:
+ print ""
+ if len(self.updatedBranches) > 0:
+ sys.stdout.write("Updated branches: ")
+ for b in self.updatedBranches:
+ sys.stdout.write("%s " % b)
+ sys.stdout.write("\n")
+
+ self.gitStream.close()
+ if importProcess.wait() != 0:
+ die("fast-import failed: %s" % self.gitError.read())
+ self.gitOutput.close()
+ self.gitError.close()
+
+ # Cleanup temporary branches created during import
+ if self.tempBranches != []:
+ for branch in self.tempBranches:
+ read_pipe("git update-ref -d %s" % branch)
+ os.rmdir(os.path.join(os.environ.get("GIT_DIR", ".git"), self.tempBranchLocation))
+
+ return True
+
+class P4Rebase(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [ ]
+ self.description = ("Fetches the latest revision from perforce and "
+ + "rebases the current work (branch) against it")
+ self.verbose = False
+
+ def run(self, args):
+ sync = P4Sync()
+ sync.run([])
+
+ return self.rebase()
+
+ def rebase(self):
+ if os.system("git update-index --refresh") != 0:
+ die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
+ if len(read_pipe("git diff-index HEAD --")) > 0:
+ die("You have uncommited changes. Please commit them before rebasing or stash them away with git stash.");
+
+ [upstream, settings] = findUpstreamBranchPoint()
+ if len(upstream) == 0:
+ die("Cannot find upstream branchpoint for rebase")
+
+ # the branchpoint may be p4/foo~3, so strip off the parent
+ upstream = re.sub("~[0-9]+$", "", upstream)
+
+ print "Rebasing the current branch onto %s" % upstream
+ oldHead = read_pipe("git rev-parse HEAD").strip()
+ system("git rebase %s" % upstream)
+ system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
+ return True
+
+class P4Clone(P4Sync):
+ def __init__(self):
+ P4Sync.__init__(self)
+ self.description = "Creates a new git repository and imports from Perforce into it"
+ self.usage = "usage: %prog [options] //depot/path[@revRange]"
+ self.options += [
+ optparse.make_option("--destination", dest="cloneDestination",
+ action='store', default=None,
+ help="where to leave result of the clone"),
+ optparse.make_option("-/", dest="cloneExclude",
+ action="append", type="string",
+ help="exclude depot path"),
+ optparse.make_option("--bare", dest="cloneBare",
+ action="store_true", default=False),
+ ]
+ self.cloneDestination = None
+ self.needsGit = False
+ self.cloneBare = False
+
+ # This is required for the "append" cloneExclude action
+ def ensure_value(self, attr, value):
+ if not hasattr(self, attr) or getattr(self, attr) is None:
+ setattr(self, attr, value)
+ return getattr(self, attr)
+
+ def defaultDestination(self, args):
+ ## TODO: use common prefix of args?
+ depotPath = args[0]
+ depotDir = re.sub("(@[^@]*)$", "", depotPath)
+ depotDir = re.sub("(#[^#]*)$", "", depotDir)
+ depotDir = re.sub(r"\.\.\.$", "", depotDir)
+ depotDir = re.sub(r"/$", "", depotDir)
+ return os.path.split(depotDir)[1]
+
+ def run(self, args):
+ if len(args) < 1:
+ return False
+
+ if self.keepRepoPath and not self.cloneDestination:
+ sys.stderr.write("Must specify destination for --keep-path\n")
+ sys.exit(1)
+
+ depotPaths = args
+
+ if not self.cloneDestination and len(depotPaths) > 1:
+ self.cloneDestination = depotPaths[-1]
+ depotPaths = depotPaths[:-1]
+
+ self.cloneExclude = ["/"+p for p in self.cloneExclude]
+ for p in depotPaths:
+ if not p.startswith("//"):
+ return False
+
+ if not self.cloneDestination:
+ self.cloneDestination = self.defaultDestination(args)
+
+ print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
+
+ if not os.path.exists(self.cloneDestination):
+ os.makedirs(self.cloneDestination)
+ chdir(self.cloneDestination)
+
+ init_cmd = [ "git", "init" ]
+ if self.cloneBare:
+ init_cmd.append("--bare")
+ subprocess.check_call(init_cmd)
+
+ if not P4Sync.run(self, depotPaths):
+ return False
+ if self.branch != "master":
+ if self.importIntoRemotes:
+ masterbranch = "refs/remotes/p4/master"
+ else:
+ masterbranch = "refs/heads/p4/master"
+ if gitBranchExists(masterbranch):
+ system("git branch master %s" % masterbranch)
+ if not self.cloneBare:
+ system("git checkout -f")
+ else:
+ print "Could not detect main branch. No checkout/master branch created."
+
+ # auto-set this variable if invoked with --use-client-spec
+ if self.useClientSpec_from_options:
+ system("git config --bool git-p4.useclientspec true")
+
+ return True
+
+class P4Branches(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [ ]
+ self.description = ("Shows the git branches that hold imports and their "
+ + "corresponding perforce depot paths")
+ self.verbose = False
+
+ def run(self, args):
+ if originP4BranchesExist():
+ createOrUpdateBranchesFromOrigin()
+
+ cmdline = "git rev-parse --symbolic "
+ cmdline += " --remotes"
+
+ for line in read_pipe_lines(cmdline):
+ line = line.strip()
+
+ if not line.startswith('p4/') or line == "p4/HEAD":
+ continue
+ branch = line
+
+ log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
+ settings = extractSettingsGitLog(log)
+
+ print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
+ return True
+
+class HelpFormatter(optparse.IndentedHelpFormatter):
+ def __init__(self):
+ optparse.IndentedHelpFormatter.__init__(self)
+
+ def format_description(self, description):
+ if description:
+ return description + "\n"
+ else:
+ return ""
+
+def printUsage(commands):
+ print "usage: %s <command> [options]" % sys.argv[0]
+ print ""
+ print "valid commands: %s" % ", ".join(commands)
+ print ""
+ print "Try %s <command> --help for command specific help." % sys.argv[0]
+ print ""
+
+commands = {
+ "debug" : P4Debug,
+ "submit" : P4Submit,
+ "commit" : P4Submit,
+ "sync" : P4Sync,
+ "rebase" : P4Rebase,
+ "clone" : P4Clone,
+ "rollback" : P4RollBack,
+ "branches" : P4Branches
+}
+
+
+def main():
+ if len(sys.argv[1:]) == 0:
+ printUsage(commands.keys())
+ sys.exit(2)
+
+ cmd = ""
+ cmdName = sys.argv[1]
+ try:
+ klass = commands[cmdName]
+ cmd = klass()
+ except KeyError:
+ print "unknown command %s" % cmdName
+ print ""
+ printUsage(commands.keys())
+ sys.exit(2)
+
+ options = cmd.options
+ cmd.gitdir = os.environ.get("GIT_DIR", None)
+
+ args = sys.argv[2:]
+
+ if len(options) > 0:
+ if cmd.needsGit:
+ options.append(optparse.make_option("--git-dir", dest="gitdir"))
+
+ parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
+ options,
+ description = cmd.description,
+ formatter = HelpFormatter())
+
+ (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
+ global verbose
+ verbose = cmd.verbose
+ if cmd.needsGit:
+ if cmd.gitdir == None:
+ cmd.gitdir = os.path.abspath(".git")
+ if not isValidGitDir(cmd.gitdir):
+ cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
+ if os.path.exists(cmd.gitdir):
+ cdup = read_pipe("git rev-parse --show-cdup").strip()
+ if len(cdup) > 0:
+ chdir(cdup);
+
+ if not isValidGitDir(cmd.gitdir):
+ if isValidGitDir(cmd.gitdir + "/.git"):
+ cmd.gitdir += "/.git"
+ else:
+ die("fatal: cannot locate git repository at %s" % cmd.gitdir)
+
+ os.environ["GIT_DIR"] = cmd.gitdir
+
+ if not cmd.run(args):
+ parser.print_help()
+ sys.exit(2)
+
+
+if __name__ == '__main__':
+ main()
case "$action" in
continue)
# do we have anything to commit?
- if git diff-index --cached --quiet --ignore-submodules HEAD --
+ if git diff-index --cached --quiet HEAD --
then
: Nothing to commit -- skip this
else
# f, fixup = like "squash", but discard this commit's log message
# x, exec = run command (the rest of the line) using shell
#
+# These lines can be re-ordered; they are executed from top to bottom.
+#
# If you remove a line here THAT COMMIT WILL BE LOST.
# However, if you remove everything, the rebase will be aborted.
#
find () {
/usr/bin/find "$@"
}
+ # git sees Windows-style pwd
+ pwd () {
+ builtin pwd -W
+ }
is_absolute_path () {
case "$1" in
[/\\]* | [A-Za-z]:*)
module_name()
{
# Do we have "submodule.<something>.path = $1" defined in .gitmodules file?
+ sm_path="$1"
re=$(printf '%s\n' "$1" | sed -e 's/[].[^$\\*]/\\&/g')
name=$( git config -f .gitmodules --get-regexp '^submodule\..*\.path$' |
sed -n -e 's|^submodule\.\(.*\)\.path '"$re"'$|\1|p' )
test -z "$name" &&
- die "$(eval_gettext "No submodule mapping found in .gitmodules for path '\$path'")"
+ die "$(eval_gettext "No submodule mapping found in .gitmodules for path '\$sm_path'")"
echo "$name"
}
#
module_clone()
{
- path=$1
+ sm_path=$1
url=$2
reference="$3"
quiet=
gitdir=
gitdir_base=
- name=$(module_name "$path" 2>/dev/null)
- test -n "$name" || name="$path"
+ name=$(module_name "$sm_path" 2>/dev/null)
+ test -n "$name" || name="$sm_path"
base_name=$(dirname "$name")
gitdir=$(git rev-parse --git-dir)
if test -d "$gitdir"
then
- mkdir -p "$path"
+ mkdir -p "$sm_path"
rm -f "$gitdir/index"
else
mkdir -p "$gitdir_base"
git clone $quiet -n ${reference:+"$reference"} \
- --separate-git-dir "$gitdir" "$url" "$path" ||
- die "$(eval_gettext "Clone of '\$url' into submodule path '\$path' failed")"
+ --separate-git-dir "$gitdir" "$url" "$sm_path" ||
+ die "$(eval_gettext "Clone of '\$url' into submodule path '\$sm_path' failed")"
fi
a=$(cd "$gitdir" && pwd)/
- b=$(cd "$path" && pwd)/
+ b=$(cd "$sm_path" && pwd)/
# normalize Windows-style absolute paths to POSIX-style absolute paths
case $a in [a-zA-Z]:/*) a=/${a%%:*}${a#*:} ;; esac
case $b in [a-zA-Z]:/*) b=/${b%%:*}${b#*:} ;; esac
a=${a%/}
b=${b%/}
- rel=$(echo $b | sed -e 's|[^/]*|..|g')
- echo "gitdir: $rel/$a" >"$path/.git"
+ # Turn each leading "*/" component into "../"
+ rel=$(echo $b | sed -e 's|[^/][^/]*|..|g')
+ echo "gitdir: $rel/$a" >"$sm_path/.git"
- rel=$(echo $a | sed -e 's|[^/]*|..|g')
- (clear_local_git_env; cd "$path" && GIT_WORK_TREE=. git config core.worktree "$rel/$b")
+ rel=$(echo $a | sed -e 's|[^/][^/]*|..|g')
+ (clear_local_git_env; cd "$sm_path" && GIT_WORK_TREE=. git config core.worktree "$rel/$b")
}
#
done
repo=$1
- path=$2
+ sm_path=$2
- if test -z "$path"; then
- path=$(echo "$repo" |
+ if test -z "$sm_path"; then
+ sm_path=$(echo "$repo" |
sed -e 's|/$||' -e 's|:*/*\.git$||' -e 's|.*[/:]||g')
fi
- if test -z "$repo" -o -z "$path"; then
+ if test -z "$repo" -o -z "$sm_path"; then
usage
fi
# normalize path:
# multiple //; leading ./; /./; /../; trailing /
- path=$(printf '%s/\n' "$path" |
+ sm_path=$(printf '%s/\n' "$sm_path" |
sed -e '
s|//*|/|g
s|^\(\./\)*||
tstart
s|/*$||
')
- git ls-files --error-unmatch "$path" > /dev/null 2>&1 &&
- die "$(eval_gettext "'\$path' already exists in the index")"
+ git ls-files --error-unmatch "$sm_path" > /dev/null 2>&1 &&
+ die "$(eval_gettext "'\$sm_path' already exists in the index")"
- if test -z "$force" && ! git add --dry-run --ignore-missing "$path" > /dev/null 2>&1
+ if test -z "$force" && ! git add --dry-run --ignore-missing "$sm_path" > /dev/null 2>&1
then
eval_gettextln "The following path is ignored by one of your .gitignore files:
-\$path
+\$sm_path
Use -f if you really want to add it." >&2
exit 1
fi
# perhaps the path exists and is already a git repo, else clone it
- if test -e "$path"
+ if test -e "$sm_path"
then
- if test -d "$path"/.git -o -f "$path"/.git
+ if test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
- eval_gettextln "Adding existing repo at '\$path' to the index"
+ eval_gettextln "Adding existing repo at '\$sm_path' to the index"
else
- die "$(eval_gettext "'\$path' already exists and is not a valid git repo")"
+ die "$(eval_gettext "'\$sm_path' already exists and is not a valid git repo")"
fi
else
- module_clone "$path" "$realrepo" "$reference" || exit
+ module_clone "$sm_path" "$realrepo" "$reference" || exit
(
clear_local_git_env
- cd "$path" &&
+ cd "$sm_path" &&
# ash fails to wordsplit ${branch:+-b "$branch"...}
case "$branch" in
'') git checkout -f -q ;;
?*) git checkout -f -q -B "$branch" "origin/$branch" ;;
esac
- ) || die "$(eval_gettext "Unable to checkout submodule '\$path'")"
+ ) || die "$(eval_gettext "Unable to checkout submodule '\$sm_path'")"
fi
- git config submodule."$path".url "$realrepo"
+ git config submodule."$sm_path".url "$realrepo"
- git add $force "$path" ||
- die "$(eval_gettext "Failed to add submodule '\$path'")"
+ git add $force "$sm_path" ||
+ die "$(eval_gettext "Failed to add submodule '\$sm_path'")"
- git config -f .gitmodules submodule."$path".path "$path" &&
- git config -f .gitmodules submodule."$path".url "$repo" &&
+ git config -f .gitmodules submodule."$sm_path".path "$sm_path" &&
+ git config -f .gitmodules submodule."$sm_path".url "$repo" &&
git add --force .gitmodules ||
- die "$(eval_gettext "Failed to register submodule '\$path'")"
+ die "$(eval_gettext "Failed to register submodule '\$sm_path'")"
}
#
exec 3<&0
module_list |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- if test -e "$path"/.git
+ if test -e "$sm_path"/.git
then
- say "$(eval_gettext "Entering '\$prefix\$path'")"
- name=$(module_name "$path")
+ say "$(eval_gettext "Entering '\$prefix\$sm_path'")"
+ name=$(module_name "$sm_path")
(
- prefix="$prefix$path/"
+ prefix="$prefix$sm_path/"
clear_local_git_env
- cd "$path" &&
+ # we make $path available to scripts ...
+ path=$sm_path
+ cd "$sm_path" &&
eval "$@" &&
if test -n "$recursive"
then
cmd_foreach "--recursive" "$@"
fi
) <&3 3<&- ||
- die "$(eval_gettext "Stopping at '\$path'; script returned non-zero status.")"
+ die "$(eval_gettext "Stopping at '\$sm_path'; script returned non-zero status.")"
fi
done
}
done
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
# Skip already registered paths
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
if test -z "$(git config "submodule.$name.url")"
then
url=$(git config -f .gitmodules submodule."$name".url)
test -z "$url" &&
- die "$(eval_gettext "No url found for submodule path '\$path' in .gitmodules")"
+ die "$(eval_gettext "No url found for submodule path '\$sm_path' in .gitmodules")"
# Possibly a url relative to parent
case "$url" in
;;
esac
git config submodule."$name".url "$url" ||
- die "$(eval_gettext "Failed to register url for submodule path '\$path'")"
+ die "$(eval_gettext "Failed to register url for submodule path '\$sm_path'")"
fi
# Copy "update" setting when it is not set yet
test -z "$upd" ||
test -n "$(git config submodule."$name".update)" ||
git config submodule."$name".update "$upd" ||
- die "$(eval_gettext "Failed to register update mode for submodule path '\$path'")"
+ die "$(eval_gettext "Failed to register update mode for submodule path '\$sm_path'")"
- say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$path'")"
+ say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$sm_path'")"
done
}
cloned_modules=
module_list "$@" | {
err=
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
if test "$stage" = U
then
- echo >&2 "Skipping unmerged submodule $path"
+ echo >&2 "Skipping unmerged submodule $sm_path"
continue
fi
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
url=$(git config submodule."$name".url)
if ! test -z "$update"
then
if test "$update_module" = "none"
then
- echo "Skipping submodule '$path'"
+ echo "Skipping submodule '$sm_path'"
continue
fi
# Only mention uninitialized submodules when their
# paths have been specified
test "$#" != "0" &&
- say "$(eval_gettext "Submodule path '\$path' not initialized
+ say "$(eval_gettext "Submodule path '\$sm_path' not initialized
Maybe you want to use 'update --init'?")"
continue
fi
- if ! test -d "$path"/.git -o -f "$path"/.git
+ if ! test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
- module_clone "$path" "$url" "$reference"|| exit
+ module_clone "$sm_path" "$url" "$reference"|| exit
cloned_modules="$cloned_modules;$name"
subsha1=
else
- subsha1=$(clear_local_git_env; cd "$path" &&
+ subsha1=$(clear_local_git_env; cd "$sm_path" &&
git rev-parse --verify HEAD) ||
- die "$(eval_gettext "Unable to find current revision in submodule path '\$path'")"
+ die "$(eval_gettext "Unable to find current revision in submodule path '\$sm_path'")"
fi
if test "$subsha1" != "$sha1"
then
# Run fetch only if $sha1 isn't present or it
# is not reachable from a ref.
- (clear_local_git_env; cd "$path" &&
+ (clear_local_git_env; cd "$sm_path" &&
( (rev=$(git rev-list -n 1 $sha1 --not --all 2>/dev/null) &&
test -z "$rev") || git-fetch)) ||
- die "$(eval_gettext "Unable to fetch in submodule path '\$path'")"
+ die "$(eval_gettext "Unable to fetch in submodule path '\$sm_path'")"
fi
# Is this something we just cloned?
case "$update_module" in
rebase)
command="git rebase"
- die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': rebased into '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': rebased into '\$sha1'")"
must_die_on_failure=yes
;;
merge)
command="git merge"
- die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': merged in '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': merged in '\$sha1'")"
must_die_on_failure=yes
;;
*)
command="git checkout $subforce -q"
- die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': checked out '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': checked out '\$sha1'")"
;;
esac
- if (clear_local_git_env; cd "$path" && $command "$sha1")
+ if (clear_local_git_env; cd "$sm_path" && $command "$sha1")
then
say "$say_msg"
elif test -n "$must_die_on_failure"
if test -n "$recursive"
then
- (clear_local_git_env; cd "$path" && eval cmd_update "$orig_flags")
+ (clear_local_git_env; cd "$sm_path" && eval cmd_update "$orig_flags")
res=$?
if test $res -gt 0
then
- die_msg="$(eval_gettext "Failed to recurse into submodule path '\$path'")"
+ die_msg="$(eval_gettext "Failed to recurse into submodule path '\$sm_path'")"
if test $res -eq 1
then
err="${err};$die_msg"
done
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
url=$(git config submodule."$name".url)
- displaypath="$prefix$path"
+ displaypath="$prefix$sm_path"
if test "$stage" = U
then
say "U$sha1 $displaypath"
continue
fi
- if test -z "$url" || ! test -d "$path"/.git -o -f "$path"/.git
+ if test -z "$url" || ! test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
say "-$sha1 $displaypath"
continue;
fi
- set_name_rev "$path" "$sha1"
- if git diff-files --ignore-submodules=dirty --quiet -- "$path"
+ set_name_rev "$sm_path" "$sha1"
+ if git diff-files --ignore-submodules=dirty --quiet -- "$sm_path"
then
say " $sha1 $displaypath$revname"
else
if test -z "$cached"
then
- sha1=$(clear_local_git_env; cd "$path" && git rev-parse --verify HEAD)
- set_name_rev "$path" "$sha1"
+ sha1=$(clear_local_git_env; cd "$sm_path" && git rev-parse --verify HEAD)
+ set_name_rev "$sm_path" "$sha1"
fi
say "+$sha1 $displaypath$revname"
fi
(
prefix="$displaypath/"
clear_local_git_env
- cd "$path" &&
+ cd "$sm_path" &&
eval cmd_status "$orig_args"
) ||
- die "$(eval_gettext "Failed to recurse into submodule path '\$path'")"
+ die "$(eval_gettext "Failed to recurse into submodule path '\$sm_path'")"
fi
done
}
done
cd_to_toplevel
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- name=$(module_name "$path")
+ name=$(module_name "$sm_path")
url=$(git config -f .gitmodules --get submodule."$name".url)
# Possibly a url relative to parent
say "$(eval_gettext "Synchronizing submodule url for '\$name'")"
git config submodule."$name".url "$url"
- if test -e "$path"/.git
+ if test -e "$sm_path"/.git
then
(
clear_local_git_env
- cd "$path"
+ cd "$sm_path"
remote=$(get_default_remote)
git config remote."$remote".url "$url"
)
$| = 1; # unbuffer STDOUT
sub fatal (@) { print STDERR "@_\n"; exit 1 }
+
+# All SVN commands do it. Otherwise we may die on SIGPIPE when the remote
+# repository decides to close the connection which we expect to be kept alive.
+$SIG{PIPE} = 'IGNORE';
+
sub _req_svn {
require SVN::Core; # use()-ing this causes segfaults for me... *shrug*
require SVN::Ra;
use Time::Local;
use Memoize; # core since 5.8.0, Jul 2002
use Memoize::Storable;
+use POSIX qw(:signal_h);
my ($_gc_nr, $_gc_period);
length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
my $db = $self->map_path($uuid);
my $db_lock = "$db.lock";
- my $sig;
+ my $sigmask;
$update_ref ||= 0;
if ($update_ref) {
- $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} =
- $SIG{USR1} = $SIG{USR2} = sub { $sig = $_[0] };
+ $sigmask = POSIX::SigSet->new();
+ my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
+ SIGALRM, SIGUSR1, SIGUSR2);
+ sigprocmask(SIG_BLOCK, $signew, $sigmask) or
+ croak "Can't block signals: $!";
}
mkfile($db);
"$db_lock => $db ($!)\n";
delete $LOCKFILES{$db_lock};
if ($update_ref) {
- $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} =
- $SIG{USR1} = $SIG{USR2} = 'DEFAULT';
- kill $sig, $$ if defined $sig;
+ sigprocmask(SIG_SETMASK, $sigmask) or
+ croak "Can't restore signal mask: $!";
}
}
# '<span class="mark">foo</span>bar'
sub esc_html_hl_regions {
my ($str, $css_class, @sel) = @_;
- return esc_html($str) unless @sel;
+ my %opts = grep { ref($_) ne 'ARRAY' } @sel;
+ @sel = grep { ref($_) eq 'ARRAY' } @sel;
+ return esc_html($str, %opts) unless @sel;
my $out = '';
my $pos = 0;
for my $s (@sel) {
- $out .= esc_html(substr($str, $pos, $s->[0] - $pos))
- if ($s->[0] - $pos > 0);
- $out .= $cgi->span({-class => $css_class},
- esc_html(substr($str, $s->[0], $s->[1] - $s->[0])));
+ my ($begin, $end) = @$s;
- $pos = $s->[1];
+ # Don't create empty <span> elements.
+ next if $end <= $begin;
+
+ my $escaped = esc_html(substr($str, $begin, $end - $begin),
+ %opts);
+
+ $out .= esc_html(substr($str, $pos, $begin - $pos), %opts)
+ if ($begin - $pos > 0);
+ $out .= $cgi->span({-class => $css_class}, $escaped);
+
+ $pos = $end;
}
- $out .= esc_html(substr($str, $pos))
+ $out .= esc_html(substr($str, $pos), %opts)
if ($pos < length($str));
return $out;
}
# process patch (diff) line (not to be used for diff headers),
-# returning class and HTML-formatted (but not wrapped) line
-sub process_diff_line {
- my $line = shift;
- my ($from, $to) = @_;
-
- my $diff_class = diff_line_class($line, $from, $to);
-
- chomp $line;
- $line = untabify($line);
+# returning HTML-formatted (but not wrapped) line.
+# If the line is passed as a reference, it is treated as HTML and not
+# esc_html()'ed.
+sub format_diff_line {
+ my ($line, $diff_class, $from, $to) = @_;
+
+ if (ref($line)) {
+ $line = $$line;
+ } else {
+ chomp $line;
+ $line = untabify($line);
- if ($from && $to && $line =~ m/^\@{2} /) {
- $line = format_unidiff_chunk_header($line, $from, $to);
- return $diff_class, $line;
+ if ($from && $to && $line =~ m/^\@{2} /) {
+ $line = format_unidiff_chunk_header($line, $from, $to);
+ } elsif ($from && $to && $line =~ m/^\@{3}/) {
+ $line = format_cc_diff_chunk_header($line, $from, $to);
+ } else {
+ $line = esc_html($line, -nbsp=>1);
+ }
+ }
- } elsif ($from && $to && $line =~ m/^\@{3}/) {
- $line = format_cc_diff_chunk_header($line, $from, $to);
- return $diff_class, $line;
+ my $diff_classes = "diff";
+ $diff_classes .= " $diff_class" if ($diff_class);
+ $line = "<div class=\"$diff_classes\">$line</div>\n";
- }
- return $diff_class, esc_html($line, -nbsp=>1);
+ return $line;
}
# Generates undef or something like "_snapshot_" or "snapshot (_tbz2_ _zip_)",
print "</table>\n";
}
-sub print_sidebyside_diff_chunk {
- my @chunk = @_;
+# Print context lines and then rem/add lines in a side-by-side manner.
+sub print_sidebyside_diff_lines {
+ my ($ctx, $rem, $add) = @_;
+
+ # print context block before add/rem block
+ if (@$ctx) {
+ print join '',
+ '<div class="chunk_block ctx">',
+ '<div class="old">',
+ @$ctx,
+ '</div>',
+ '<div class="new">',
+ @$ctx,
+ '</div>',
+ '</div>';
+ }
+
+ if (!@$add) {
+ # pure removal
+ print join '',
+ '<div class="chunk_block rem">',
+ '<div class="old">',
+ @$rem,
+ '</div>',
+ '</div>';
+ } elsif (!@$rem) {
+ # pure addition
+ print join '',
+ '<div class="chunk_block add">',
+ '<div class="new">',
+ @$add,
+ '</div>',
+ '</div>';
+ } else {
+ print join '',
+ '<div class="chunk_block chg">',
+ '<div class="old">',
+ @$rem,
+ '</div>',
+ '<div class="new">',
+ @$add,
+ '</div>',
+ '</div>';
+ }
+}
+
+# Print context lines and then rem/add lines in an inline manner.
+sub print_inline_diff_lines {
+ my ($ctx, $rem, $add) = @_;
+
+ print @$ctx, @$rem, @$add;
+}
+
+# Format removed and added lines, mark the changed parts and HTML-format them.
+# Implementation is based on contrib/diff-highlight
+sub format_rem_add_lines_pair {
+ my ($rem, $add, $num_parents) = @_;
+
+ # We need to untabify lines before split()'ing them;
+ # otherwise offsets would be invalid.
+ chomp $rem;
+ chomp $add;
+ $rem = untabify($rem);
+ $add = untabify($add);
+
+ my @rem = split(//, $rem);
+ my @add = split(//, $add);
+ my ($esc_rem, $esc_add);
+ # Ignore leading +/- characters for each parent.
+ my ($prefix_len, $suffix_len) = ($num_parents, 0);
+ my ($prefix_has_nonspace, $suffix_has_nonspace);
+
+ my $shorter = (@rem < @add) ? @rem : @add;
+ while ($prefix_len < $shorter) {
+ last if ($rem[$prefix_len] ne $add[$prefix_len]);
+
+ $prefix_has_nonspace = 1 if ($rem[$prefix_len] !~ /\s/);
+ $prefix_len++;
+ }
+
+ while ($prefix_len + $suffix_len < $shorter) {
+ last if ($rem[-1 - $suffix_len] ne $add[-1 - $suffix_len]);
+
+ $suffix_has_nonspace = 1 if ($rem[-1 - $suffix_len] !~ /\s/);
+ $suffix_len++;
+ }
+
+ # Mark lines that are different from each other, but have some common
+ # part that isn't whitespace. If lines are completely different, don't
+ # mark them because that would make output unreadable, especially if
+ # diff consists of multiple lines.
+ if ($prefix_has_nonspace || $suffix_has_nonspace) {
+ $esc_rem = esc_html_hl_regions($rem, 'marked',
+ [$prefix_len, @rem - $suffix_len], -nbsp=>1);
+ $esc_add = esc_html_hl_regions($add, 'marked',
+ [$prefix_len, @add - $suffix_len], -nbsp=>1);
+ } else {
+ $esc_rem = esc_html($rem, -nbsp=>1);
+ $esc_add = esc_html($add, -nbsp=>1);
+ }
+
+ return format_diff_line(\$esc_rem, 'rem'),
+ format_diff_line(\$esc_add, 'add');
+}
+
+# HTML-format diff context, removed and added lines.
+sub format_ctx_rem_add_lines {
+ my ($ctx, $rem, $add, $num_parents) = @_;
+ my (@new_ctx, @new_rem, @new_add);
+ my $can_highlight = 0;
+ my $is_combined = ($num_parents > 1);
+
+ # Highlight if every removed line has a corresponding added line.
+ if (@$add > 0 && @$add == @$rem) {
+ $can_highlight = 1;
+
+ # Highlight lines in combined diff only if the chunk contains
+ # diff between the same version, e.g.
+ #
+ # - a
+ # - b
+ # + c
+ # + d
+ #
+ # Otherwise the highlighting would be confusing.
+ if ($is_combined) {
+ for (my $i = 0; $i < @$add; $i++) {
+ my $prefix_rem = substr($rem->[$i], 0, $num_parents);
+ my $prefix_add = substr($add->[$i], 0, $num_parents);
+
+ $prefix_rem =~ s/-/+/g;
+
+ if ($prefix_rem ne $prefix_add) {
+ $can_highlight = 0;
+ last;
+ }
+ }
+ }
+ }
+
+ if ($can_highlight) {
+ for (my $i = 0; $i < @$add; $i++) {
+ my ($line_rem, $line_add) = format_rem_add_lines_pair(
+ $rem->[$i], $add->[$i], $num_parents);
+ push @new_rem, $line_rem;
+ push @new_add, $line_add;
+ }
+ } else {
+ @new_rem = map { format_diff_line($_, 'rem') } @$rem;
+ @new_add = map { format_diff_line($_, 'add') } @$add;
+ }
+
+ @new_ctx = map { format_diff_line($_, 'ctx') } @$ctx;
+
+ return (\@new_ctx, \@new_rem, \@new_add);
+}
+
+# Print context lines and then rem/add lines.
+sub print_diff_lines {
+ my ($ctx, $rem, $add, $diff_style, $num_parents) = @_;
+ my $is_combined = $num_parents > 1;
+
+ ($ctx, $rem, $add) = format_ctx_rem_add_lines($ctx, $rem, $add,
+ $num_parents);
+
+ if ($diff_style eq 'sidebyside' && !$is_combined) {
+ print_sidebyside_diff_lines($ctx, $rem, $add);
+ } else {
+ # default 'inline' style and unknown styles
+ print_inline_diff_lines($ctx, $rem, $add);
+ }
+}
+
+sub print_diff_chunk {
+ my ($diff_style, $num_parents, $from, $to, @chunk) = @_;
my (@ctx, @rem, @add);
+ # The class of the previous line.
+ my $prev_class = '';
+
return unless @chunk;
# incomplete last line might be among removed or added lines,
# print chunk headers
if ($class && $class eq 'chunk_header') {
- print $line;
+ print format_diff_line($line, $class, $from, $to);
next;
}
- ## print from accumulator when type of class of lines change
- # empty contents block on start rem/add block, or end of chunk
- if (@ctx && (!$class || $class eq 'rem' || $class eq 'add')) {
- print join '',
- '<div class="chunk_block ctx">',
- '<div class="old">',
- @ctx,
- '</div>',
- '<div class="new">',
- @ctx,
- '</div>',
- '</div>';
- @ctx = ();
- }
- # empty add/rem block on start context block, or end of chunk
- if ((@rem || @add) && (!$class || $class eq 'ctx')) {
- if (!@add) {
- # pure removal
- print join '',
- '<div class="chunk_block rem">',
- '<div class="old">',
- @rem,
- '</div>',
- '</div>';
- } elsif (!@rem) {
- # pure addition
- print join '',
- '<div class="chunk_block add">',
- '<div class="new">',
- @add,
- '</div>',
- '</div>';
- } else {
- # assume that it is change
- print join '',
- '<div class="chunk_block chg">',
- '<div class="old">',
- @rem,
- '</div>',
- '<div class="new">',
- @add,
- '</div>',
- '</div>';
- }
- @rem = @add = ();
+ ## print from accumulator when we have some add/rem lines or
+ # reach the end of the chunk (flush context lines), or when we
+ # have both add and rem lines and a new block is reached
+ # (otherwise add/rem lines could be reordered)
+ if (!$class || ((@rem || @add) && $class eq 'ctx') ||
+ (@rem && @add && $class ne $prev_class)) {
+ print_diff_lines(\@ctx, \@rem, \@add,
+ $diff_style, $num_parents);
+ @ctx = @rem = @add = ();
}
## adding lines to accumulator
if ($class eq 'ctx') {
push @ctx, $line;
}
+
+ $prev_class = $class;
}
}
next PATCH if ($patch_line =~ m/^diff /);
- my ($class, $line) = process_diff_line($patch_line, \%from, \%to);
- my $diff_classes = "diff";
- $diff_classes .= " $class" if ($class);
- $line = "<div class=\"$diff_classes\">$line</div>\n";
+ my $class = diff_line_class($patch_line, \%from, \%to);
- if ($diff_style eq 'sidebyside' && !$is_combined) {
- if ($class eq 'chunk_header') {
- print_sidebyside_diff_chunk(@chunk);
- @chunk = ( [ $class, $line ] );
- } else {
- push @chunk, [ $class, $line ];
- }
- } else {
- # default 'inline' style and unknown styles
- print $line;
+ if ($class eq 'chunk_header') {
+ print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk);
+ @chunk = ();
}
+
+ push @chunk, [ $class, $patch_line ];
}
} continue {
if (@chunk) {
- print_sidebyside_diff_chunk(@chunk);
+ print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk);
@chunk = ();
}
print "</div>\n"; # class="patch"
return wantarray ? ($name, $name) : $name;
}
+sub exit_if_unmodified_since {
+ my ($latest_epoch) = @_;
+ our $cgi;
+
+ my $if_modified = $cgi->http('IF_MODIFIED_SINCE');
+ if (defined $if_modified) {
+ my $since;
+ if (eval { require HTTP::Date; 1; }) {
+ $since = HTTP::Date::str2time($if_modified);
+ } elsif (eval { require Time::ParseDate; 1; }) {
+ $since = Time::ParseDate::parsedate($if_modified, GMT => 1);
+ }
+ if (defined $since && $latest_epoch <= $since) {
+ my %latest_date = parse_date($latest_epoch);
+ print $cgi->header(
+ -last_modified => $latest_date{'rfc2822'},
+ -status => '304 Not Modified');
+ goto DONE_GITWEB;
+ }
+ }
+}
+
sub git_snapshot {
my $format = $input_params{'snapshot_format'};
if (!@snapshot_fmts) {
my ($name, $prefix) = snapshot_name($project, $hash);
my $filename = "$name$known_snapshot_formats{$format}{'suffix'}";
+
+ my %co = parse_commit($hash);
+ exit_if_unmodified_since($co{'committer_epoch'}) if %co;
+
my $cmd = quote_command(
git_cmd(), 'archive',
"--format=$known_snapshot_formats{$format}{'format'}",
}
$filename =~ s/(["\\])/\\$1/g;
+ my %latest_date;
+ if (%co) {
+ %latest_date = parse_date($co{'committer_epoch'}, $co{'committer_tz'});
+ }
+
print $cgi->header(
-type => $known_snapshot_formats{$format}{'type'},
-content_disposition => 'inline; filename="' . $filename . '"',
+ %co ? (-last_modified => $latest_date{'rfc2822'}) : (),
-status => '200 OK');
open my $fd, "-|", $cmd
if (defined($commitlist[0])) {
%latest_commit = %{$commitlist[0]};
my $latest_epoch = $latest_commit{'committer_epoch'};
- %latest_date = parse_date($latest_epoch, $latest_commit{'comitter_tz'});
- my $if_modified = $cgi->http('IF_MODIFIED_SINCE');
- if (defined $if_modified) {
- my $since;
- if (eval { require HTTP::Date; 1; }) {
- $since = HTTP::Date::str2time($if_modified);
- } elsif (eval { require Time::ParseDate; 1; }) {
- $since = Time::ParseDate::parsedate($if_modified, GMT => 1);
- }
- if (defined $since && $latest_epoch <= $since) {
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8',
- -last_modified => $latest_date{'rfc2822'},
- -status => '304 Not Modified');
- return;
- }
- }
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8',
- -last_modified => $latest_date{'rfc2822'});
- } else {
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8');
+ exit_if_unmodified_since($latest_epoch);
+ %latest_date = parse_date($latest_epoch, $latest_commit{'committer_tz'});
}
+ print $cgi->header(
+ -type => $content_type,
+ -charset => 'utf-8',
+ %latest_date ? (-last_modified => $latest_date{'rfc2822'}) : (),
+ -status => '200 OK');
# Optimization: skip generating the body if client asks only
# for Last-Modified date.
color: #008800;
}
+div.diff.add span.marked {
+ background-color: #aaffaa;
+}
+
div.diff.from_file a.path,
div.diff.from_file {
color: #aa0000;
color: #cc0000;
}
+div.diff.rem span.marked {
+ background-color: #ffaaaa;
+}
+
div.diff.chunk_header a,
div.diff.chunk_header {
color: #990099;
#include "run-command.h"
#include "string-list.h"
#include "url.h"
+#include "argv-array.h"
static const char content_type[] = "Content-Type";
static const char content_length[] = "Content-Length";
const char *encoding = getenv("HTTP_CONTENT_ENCODING");
const char *user = getenv("REMOTE_USER");
const char *host = getenv("REMOTE_ADDR");
- char *env[3];
- struct strbuf buf = STRBUF_INIT;
+ struct argv_array env = ARGV_ARRAY_INIT;
int gzipped_request = 0;
struct child_process cld;
if (!host || !*host)
host = "(none)";
- memset(&env, 0, sizeof(env));
- strbuf_addf(&buf, "GIT_COMMITTER_NAME=%s", user);
- env[0] = strbuf_detach(&buf, NULL);
-
- strbuf_addf(&buf, "GIT_COMMITTER_EMAIL=%s@http.%s", user, host);
- env[1] = strbuf_detach(&buf, NULL);
- env[2] = NULL;
+ if (!getenv("GIT_COMMITTER_NAME"))
+ argv_array_pushf(&env, "GIT_COMMITTER_NAME=%s", user);
+ if (!getenv("GIT_COMMITTER_EMAIL"))
+ argv_array_pushf(&env, "GIT_COMMITTER_EMAIL=%s@http.%s",
+ user, host);
memset(&cld, 0, sizeof(cld));
cld.argv = argv;
- cld.env = (const char *const *)env;
+ cld.env = env.argv;
if (gzipped_request)
cld.in = -1;
cld.git_cmd = 1;
if (finish_command(&cld))
exit(1);
- free(env[0]);
- free(env[1]);
- strbuf_release(&buf);
+ argv_array_clear(&env);
}
static int show_text_ref(const char *name, const unsigned char *sha1,
return offset;
}
+/*
+ * Reverse of fmt_ident(); given an ident line, split the fields
+ * to allow the caller to parse it.
+ * Signal a success by returning 0, but date/tz fields of the result
+ * can still be NULL if the input line only has the name/email part
+ * (e.g. reading from a reflog entry).
+ */
+int split_ident_line(struct ident_split *split, const char *line, int len)
+{
+ const char *cp;
+ size_t span;
+ int status = -1;
+
+ memset(split, 0, sizeof(*split));
+
+ split->name_begin = line;
+ for (cp = line; *cp && cp < line + len; cp++)
+ if (*cp == '<') {
+ split->mail_begin = cp + 1;
+ break;
+ }
+ if (!split->mail_begin)
+ return status;
+
+ for (cp = split->mail_begin - 2; line < cp; cp--)
+ if (!isspace(*cp)) {
+ split->name_end = cp + 1;
+ break;
+ }
+ if (!split->name_end)
+ return status;
+
+ for (cp = split->mail_begin; cp < line + len; cp++)
+ if (*cp == '>') {
+ split->mail_end = cp;
+ break;
+ }
+ if (!split->mail_end)
+ return status;
+
+ for (cp = split->mail_end + 1; cp < line + len && isspace(*cp); cp++)
+ ;
+ if (line + len <= cp)
+ goto person_only;
+ split->date_begin = cp;
+ span = strspn(cp, "0123456789");
+ if (!span)
+ goto person_only;
+ split->date_end = split->date_begin + span;
+ for (cp = split->date_end; cp < line + len && isspace(*cp); cp++)
+ ;
+ if (line + len <= cp || (*cp != '+' && *cp != '-'))
+ goto person_only;
+ split->tz_begin = cp;
+ span = strspn(cp + 1, "0123456789");
+ if (!span)
+ goto person_only;
+ split->tz_end = split->tz_begin + 1 + span;
+ return 0;
+
+person_only:
+ split->date_begin = NULL;
+ split->date_end = NULL;
+ split->tz_begin = NULL;
+ split->tz_end = NULL;
+ return 0;
+}
+
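As a minimal usage sketch of the helper added above (the sample caller name and the printing are illustrative only, and the struct ident_split declaration is assumed to come from cache.h, as for the rest of ident.c):

	/* Illustrative sketch only; not part of the patch above. */
	static void show_ident_parts(const char *line)
	{
		struct ident_split s;

		if (split_ident_line(&s, line, strlen(line)) < 0)
			return; /* not a well-formed "Name <mail> date tz" line */
		printf("name: %.*s\n", (int)(s.name_end - s.name_begin), s.name_begin);
		printf("mail: %.*s\n", (int)(s.mail_end - s.mail_begin), s.mail_begin);
		if (s.date_begin) /* date/tz may be absent, e.g. for reflog entries */
			printf("date: %.*s %.*s\n",
			       (int)(s.date_end - s.date_begin), s.date_begin,
			       (int)(s.tz_end - s.tz_begin), s.tz_begin);
	}

The pretty.c hunk near the end of this series shows the same pattern used to replace hand-rolled ident parsing.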
static const char *env_hint =
"\n"
"*** Please tell me who you are.\n"
opt->verbose_header &&
opt->commit_format != CMIT_FMT_ONELINE) {
int pch = DIFF_FORMAT_DIFFSTAT | DIFF_FORMAT_PATCH;
- if ((pch & opt->diffopt.output_format) == pch)
- printf("---");
if (opt->diffopt.output_prefix) {
struct strbuf *msg = NULL;
msg = opt->diffopt.output_prefix(&opt->diffopt,
opt->diffopt.output_prefix_data);
fwrite(msg->buf, msg->len, 1, stdout);
}
+ if ((pch & opt->diffopt.output_format) == pch) {
+ printf("---");
+ }
putchar('\n');
}
}
renames = xcalloc(1, sizeof(struct string_list));
diff_setup(&opts);
DIFF_OPT_SET(&opts, RECURSIVE);
+ DIFF_OPT_CLR(&opts, RENAME_EMPTY);
opts.detect_rename = DIFF_DETECT_RENAME;
opts.rename_limit = o->merge_rename_limit >= 0 ? o->merge_rename_limit :
o->diff_rename_limit >= 0 ? o->diff_rename_limit :
/* if there is no common ancestor, use an empty tree */
struct tree *tree;
- tree = lookup_tree((const unsigned char *)EMPTY_TREE_SHA1_BIN);
+ tree = lookup_tree(EMPTY_TREE_SHA1_BIN);
merged_common_ancestors = make_virtual_commit(tree, "ancestor");
}
else if (!prefixcmp(s, "subtree="))
o->subtree_shift = s + strlen("subtree=");
else if (!strcmp(s, "patience"))
- o->xdl_opts |= XDF_PATIENCE_DIFF;
+ o->xdl_opts = DIFF_WITH_ALG(o, PATIENCE_DIFF);
else if (!strcmp(s, "histogram"))
- o->xdl_opts |= XDF_HISTOGRAM_DIFF;
+ o->xdl_opts = DIFF_WITH_ALG(o, HISTOGRAM_DIFF);
else if (!strcmp(s, "ignore-space-change"))
o->xdl_opts |= XDF_IGNORE_WHITESPACE_CHANGE;
else if (!strcmp(s, "ignore-all-space"))
--- /dev/null
+#include "cache.h"
+#include "mergesort.h"
+
+struct mergesort_sublist {
+ void *ptr;
+ unsigned long len;
+};
+
+static void *get_nth_next(void *list, unsigned long n,
+ void *(*get_next_fn)(const void *))
+{
+ while (n-- && list)
+ list = get_next_fn(list);
+ return list;
+}
+
+static void *pop_item(struct mergesort_sublist *l,
+ void *(*get_next_fn)(const void *))
+{
+ void *p = l->ptr;
+ l->ptr = get_next_fn(l->ptr);
+ l->len = l->ptr ? (l->len - 1) : 0;
+ return p;
+}
+
+void *llist_mergesort(void *list,
+ void *(*get_next_fn)(const void *),
+ void (*set_next_fn)(void *, void *),
+ int (*compare_fn)(const void *, const void *))
+{
+ unsigned long l;
+
+ if (!list)
+ return NULL;
+ for (l = 1; ; l *= 2) {
+ void *curr;
+ struct mergesort_sublist p, q;
+
+ p.ptr = list;
+ q.ptr = get_nth_next(p.ptr, l, get_next_fn);
+ if (!q.ptr)
+ break;
+ p.len = q.len = l;
+
+ if (compare_fn(p.ptr, q.ptr) > 0)
+ list = curr = pop_item(&q, get_next_fn);
+ else
+ list = curr = pop_item(&p, get_next_fn);
+
+ while (p.ptr) {
+ while (p.len || q.len) {
+ void *prev = curr;
+
+ if (!p.len)
+ curr = pop_item(&q, get_next_fn);
+ else if (!q.len)
+ curr = pop_item(&p, get_next_fn);
+ else if (compare_fn(p.ptr, q.ptr) > 0)
+ curr = pop_item(&q, get_next_fn);
+ else
+ curr = pop_item(&p, get_next_fn);
+ set_next_fn(prev, curr);
+ }
+ p.ptr = q.ptr;
+ p.len = l;
+ q.ptr = get_nth_next(p.ptr, l, get_next_fn);
+ q.len = q.ptr ? l : 0;
+
+ }
+ set_next_fn(curr, NULL);
+ }
+ return list;
+}
--- /dev/null
+#ifndef MERGESORT_H
+#define MERGESORT_H
+
+/*
+ * Sort linked list in place.
+ * - get_next_fn() returns the next element given an element of a linked list.
+ * - set_next_fn() takes two elements A and B, and makes B the "next" element
+ * of A on the list.
+ * - compare_fn() takes two elements A and B, and returns negative, 0, positive
+ * as the same sign as "subtracting" B from A.
+ */
+void *llist_mergesort(void *list,
+ void *(*get_next_fn)(const void *),
+ void (*set_next_fn)(void *, void *),
+ int (*compare_fn)(const void *, const void *));
+
+#endif
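As a usage sketch for the API documented above (struct item and the three callbacks are hypothetical, chosen only to illustrate the calling convention):

	struct item {
		int value;
		struct item *next;
	};

	static void *item_get_next(const void *a)
	{
		return ((const struct item *)a)->next;
	}

	static void item_set_next(void *a, void *b)
	{
		((struct item *)a)->next = b;
	}

	static int item_compare(const void *a, const void *b)
	{
		int av = ((const struct item *)a)->value;
		int bv = ((const struct item *)b)->value;
		return (av > bv) - (av < bv);
	}

	/* given a populated list head, sort it in place: */
	/* head = llist_mergesort(head, item_get_next, item_set_next, item_compare); */

The callbacks let the sort operate on any intrusive singly linked list in place, without allocating auxiliary nodes.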
* Must establish NOTES_MERGE_WORKTREE.
* Abort if NOTES_MERGE_WORKTREE already exists
*/
- if (file_exists(git_path(NOTES_MERGE_WORKTREE))) {
+ if (file_exists(git_path(NOTES_MERGE_WORKTREE)) &&
+ !is_empty_dir(git_path(NOTES_MERGE_WORKTREE))) {
if (advice_resolve_conflict)
die("You have not concluded your previous "
"notes merge (%s exists).\nPlease, use "
{
/*
* Iterate through files in .git/NOTES_MERGE_WORKTREE and add all
- * found notes to 'partial_tree'. Write the updates notes tree to
+ * found notes to 'partial_tree'. Write the updated notes tree to
* the DB, and commit the resulting tree object while reusing the
* commit message and parents from 'partial_commit'.
* Finally store the new commit object SHA1 into 'result_sha1'.
*/
- struct dir_struct dir;
- char *path = xstrdup(git_path(NOTES_MERGE_WORKTREE "/"));
- int path_len = strlen(path), i;
+ DIR *dir;
+ struct dirent *e;
+ struct strbuf path = STRBUF_INIT;
char *msg = strstr(partial_commit->buffer, "\n\n");
struct strbuf sb_msg = STRBUF_INIT;
+ int baselen;
+ strbuf_addstr(&path, git_path(NOTES_MERGE_WORKTREE));
if (o->verbosity >= 3)
- printf("Committing notes in notes merge worktree at %.*s\n",
- path_len - 1, path);
+ printf("Committing notes in notes merge worktree at %s\n",
+ path.buf);
if (!msg || msg[2] == '\0')
die("partial notes commit has empty message");
msg += 2;
- memset(&dir, 0, sizeof(dir));
- read_directory(&dir, path, path_len, NULL);
- for (i = 0; i < dir.nr; i++) {
- struct dir_entry *ent = dir.entries[i];
+ dir = opendir(path.buf);
+ if (!dir)
+ die_errno("could not open %s", path.buf);
+
+ strbuf_addch(&path, '/');
+ baselen = path.len;
+ while ((e = readdir(dir)) != NULL) {
struct stat st;
- const char *relpath = ent->name + path_len;
unsigned char obj_sha1[20], blob_sha1[20];
- if (ent->len - path_len != 40 || get_sha1_hex(relpath, obj_sha1)) {
+ if (is_dot_or_dotdot(e->d_name))
+ continue;
+
+ if (strlen(e->d_name) != 40 || get_sha1_hex(e->d_name, obj_sha1)) {
if (o->verbosity >= 3)
- printf("Skipping non-SHA1 entry '%s'\n",
- ent->name);
+ printf("Skipping non-SHA1 entry '%s%s'\n",
+ path.buf, e->d_name);
continue;
}
+ strbuf_addstr(&path, e->d_name);
/* write file as blob, and add to partial_tree */
- if (stat(ent->name, &st))
- die_errno("Failed to stat '%s'", ent->name);
- if (index_path(blob_sha1, ent->name, &st, HASH_WRITE_OBJECT))
- die("Failed to write blob object from '%s'", ent->name);
+ if (stat(path.buf, &st))
+ die_errno("Failed to stat '%s'", path.buf);
+ if (index_path(blob_sha1, path.buf, &st, HASH_WRITE_OBJECT))
+ die("Failed to write blob object from '%s'", path.buf);
if (add_note(partial_tree, obj_sha1, blob_sha1, NULL))
die("Failed to add resolved note '%s' to notes tree",
- ent->name);
+ path.buf);
if (o->verbosity >= 4)
printf("Added resolved note for object %s: %s\n",
sha1_to_hex(obj_sha1), sha1_to_hex(blob_sha1));
+ strbuf_setlen(&path, baselen);
}
strbuf_attach(&sb_msg, msg, strlen(msg), strlen(msg) + 1);
if (o->verbosity >= 4)
printf("Finalized notes merge commit: %s\n",
sha1_to_hex(result_sha1));
- free(path);
+ strbuf_release(&path);
+ closedir(dir);
return 0;
}
int notes_merge_abort(struct notes_merge_options *o)
{
- /* Remove .git/NOTES_MERGE_WORKTREE directory and all files within */
+ /*
+ * Remove all files within .git/NOTES_MERGE_WORKTREE. We do not remove
+ * the .git/NOTES_MERGE_WORKTREE directory itself, since it might be
+ * the current working directory of the user.
+ */
struct strbuf buf = STRBUF_INIT;
int ret;
strbuf_addstr(&buf, git_path(NOTES_MERGE_WORKTREE));
if (o->verbosity >= 3)
- printf("Removing notes merge worktree at %s\n", buf.buf);
- ret = remove_dir_recursively(&buf, 0);
+ printf("Removing notes merge worktree at %s/*\n", buf.buf);
+ ret = remove_dir_recursively(&buf, REMOVE_DIR_KEEP_TOPLEVEL);
strbuf_release(&buf);
return ret;
}
if (obj && obj->parsed)
return obj;
+ if ((obj && obj->type == OBJ_BLOB) ||
+ (!obj && has_sha1_file(sha1) &&
+ sha1_object_info(sha1, NULL) == OBJ_BLOB)) {
+ if (check_sha1_signature(repl, NULL, 0, NULL) < 0) {
+ error("sha1 mismatch %s\n", sha1_to_hex(repl));
+ return NULL;
+ }
+ parse_blob_buffer(lookup_blob(sha1), NULL, 0);
+ return lookup_object(sha1);
+ }
+
buffer = read_sha1_file(sha1, &type, &size);
if (buffer) {
if (check_sha1_signature(repl, buffer, size, typename(type)) < 0) {
array->nr = dst;
}
}
+
+void clear_object_flags(unsigned flags)
+{
+ int i;
+
+ for (i = 0; i < obj_hash_size; i++) {
+ struct object *obj = obj_hash[i];
+ if (obj)
+ obj->flags &= ~flags;
+ }
+}
void add_object_array_with_mode(struct object *obj, const char *name, struct object_array *array, unsigned mode);
void object_array_remove_duplicates(struct object_array *);
+void clear_object_flags(unsigned flags);
+
#endif /* OBJECT_H */
{
/* currently all placeholders have same length */
const int placeholder_len = 2;
- int start, end, tz = 0;
+ int tz;
unsigned long date = 0;
- char *ep;
- const char *name_start, *name_end, *mail_start, *mail_end, *msg_end = msg+len;
char person_name[1024];
char person_mail[1024];
+ struct ident_split s;
+ const char *name_start, *name_end, *mail_start, *mail_end;
- /* advance 'end' to point to email start delimiter */
- for (end = 0; end < len && msg[end] != '<'; end++)
- ; /* do nothing */
-
- /*
- * When end points at the '<' that we found, it should have
- * matching '>' later, which means 'end' must be strictly
- * below len - 1.
- */
- if (end >= len - 2)
+ if (split_ident_line(&s, msg, len) < 0)
goto skip;
- /* Seek for both name and email part */
- name_start = msg;
- name_end = msg+end;
- while (name_end > name_start && isspace(*(name_end-1)))
- name_end--;
- mail_start = msg+end+1;
- mail_end = mail_start;
- while (mail_end < msg_end && *mail_end != '>')
- mail_end++;
- if (mail_end == msg_end)
- goto skip;
- end = mail_end-msg;
+ name_start = s.name_begin;
+ name_end = s.name_end;
+ mail_start = s.mail_begin;
+ mail_end = s.mail_end;
if (part == 'N' || part == 'E') { /* mailmap lookup */
- strlcpy(person_name, name_start, name_end-name_start+1);
- strlcpy(person_mail, mail_start, mail_end-mail_start+1);
+ strlcpy(person_name, name_start, name_end - name_start + 1);
+ strlcpy(person_mail, mail_start, mail_end - mail_start + 1);
mailmap_name(person_mail, sizeof(person_mail), person_name, sizeof(person_name));
name_start = person_name;
name_end = name_start + strlen(person_name);
return placeholder_len;
}
- /* advance 'start' to point to date start delimiter */
- for (start = end + 1; start < len && isspace(msg[start]); start++)
- ; /* do nothing */
- if (start >= len)
- goto skip;
- date = strtoul(msg + start, &ep, 10);
- if (msg + start == ep)
+ if (!s.date_begin)
goto skip;
+ date = strtoul(s.date_begin, NULL, 10);
+
if (part == 't') { /* date, UNIX timestamp */
- strbuf_add(sb, msg + start, ep - (msg + start));
+ strbuf_add(sb, s.date_begin, s.date_end - s.date_begin);
return placeholder_len;
}
/* parse tz */
- for (start = ep - msg + 1; start < len && isspace(msg[start]); start++)
- ; /* do nothing */
- if (start + 1 < len) {
- tz = strtoul(msg + start + 1, NULL, 10);
- if (msg[start] == '-')
- tz = -tz;
- }
+ tz = strtoul(s.tz_begin + 1, NULL, 10);
+ if (*s.tz_begin == '-')
+ tz = -tz;
switch (part) {
case 'd': /* date */
skip:
/*
- * bogus commit, 'sb' cannot be updated, but we still need to
- * compute a valid return value.
+ * reading from either a bogus commit, or a reflog entry with
+ * %gn, %ge, etc.; 'sb' cannot be updated, but we still need
+ * to compute a valid return value.
*/
if (part == 'n' || part == 'e' || part == 't' || part == 'd'
|| part == 'D' || part == 'r' || part == 'i')
return 0;
}
-static int is_empty_blob_sha1(const unsigned char *sha1)
-{
- static const unsigned char empty_blob_sha1[20] = {
- 0xe6,0x9d,0xe2,0x9b,0xb2,0xd1,0xd6,0x43,0x4b,0x8b,
- 0x29,0xae,0x77,0x5a,0xd8,0xc2,0xe4,0x8c,0x53,0x91
- };
-
- return !hashcmp(sha1, empty_blob_sha1);
-}
-
static int ce_match_stat_basic(struct cache_entry *ce, struct stat *st)
{
unsigned int changed = 0;
#include "tag.h"
#include "dir.h"
-/* ISSYMREF=0x01, ISPACKED=0x02 and ISBROKEN=0x04 are public interfaces */
-#define REF_KNOWS_PEELED 0x10
+/*
+ * Make sure "ref" is something reasonable to have under ".git/refs/";
+ * We do not like it if:
+ *
+ * - any path component of it begins with ".", or
+ * - it has double dots "..", or
+ * - it has ASCII control character, "~", "^", ":" or SP, anywhere, or
+ * - it ends with a "/".
+ * - it ends with ".lock"
+ * - it contains a "\" (backslash)
+ */
-struct ref_entry {
- unsigned char flag; /* ISSYMREF? ISPACKED? */
+/* Return true iff ch is not allowed in reference names. */
+static inline int bad_ref_char(int ch)
+{
+ if (((unsigned) ch) <= ' ' || ch == 0x7f ||
+ ch == '~' || ch == '^' || ch == ':' || ch == '\\')
+ return 1;
+ /* 2.13 Pattern Matching Notation */
+ if (ch == '*' || ch == '?' || ch == '[') /* Unsupported */
+ return 1;
+ return 0;
+}
+
+/*
+ * Try to read one refname component from the front of refname. Return
+ * the length of the component found, or -1 if the component is not
+ * legal.
+ */
+static int check_refname_component(const char *refname, int flags)
+{
+ const char *cp;
+ char last = '\0';
+
+ for (cp = refname; ; cp++) {
+ char ch = *cp;
+ if (ch == '\0' || ch == '/')
+ break;
+ if (bad_ref_char(ch))
+ return -1; /* Illegal character in refname. */
+ if (last == '.' && ch == '.')
+ return -1; /* Refname contains "..". */
+ if (last == '@' && ch == '{')
+ return -1; /* Refname contains "@{". */
+ last = ch;
+ }
+ if (cp == refname)
+ return 0; /* Component has zero length. */
+ if (refname[0] == '.') {
+ if (!(flags & REFNAME_DOT_COMPONENT))
+ return -1; /* Component starts with '.'. */
+ /*
+ * Even if leading dots are allowed, don't allow "."
+ * as a component (".." is prevented by a rule above).
+ */
+ if (refname[1] == '\0')
+ return -1; /* Component equals ".". */
+ }
+ if (cp - refname >= 5 && !memcmp(cp - 5, ".lock", 5))
+ return -1; /* Refname ends with ".lock". */
+ return cp - refname;
+}
+
+int check_refname_format(const char *refname, int flags)
+{
+ int component_len, component_count = 0;
+
+ while (1) {
+ /* We are at the start of a path component. */
+ component_len = check_refname_component(refname, flags);
+ if (component_len <= 0) {
+ if ((flags & REFNAME_REFSPEC_PATTERN) &&
+ refname[0] == '*' &&
+ (refname[1] == '\0' || refname[1] == '/')) {
+ /* Accept one wildcard as a full refname component. */
+ flags &= ~REFNAME_REFSPEC_PATTERN;
+ component_len = 1;
+ } else {
+ return -1;
+ }
+ }
+ component_count++;
+ if (refname[component_len] == '\0')
+ break;
+ /* Skip to next component. */
+ refname += component_len + 1;
+ }
+
+ if (refname[component_len - 1] == '.')
+ return -1; /* Refname ends with '.'. */
+ if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
+ return -1; /* Refname has only one component. */
+ return 0;
+}
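
As a rough illustration of the rules implemented above, a sketch written as if it lived next to check_refname_format() in refs.c (never called, purely to show expected results; the flag names are the ones used in the code above):

	static void refname_format_examples(void)
	{
		assert(check_refname_format("refs/heads/master", 0) == 0);
		assert(check_refname_format("refs/heads/.hidden", 0) != 0);	/* component starts with '.' */
		assert(check_refname_format("refs/heads/a..b", 0) != 0);	/* contains ".." */
		assert(check_refname_format("refs/heads/topic.lock", 0) != 0);	/* ends with ".lock" */
		assert(check_refname_format("HEAD", 0) != 0);			/* only one component */
		assert(check_refname_format("HEAD", REFNAME_ALLOW_ONELEVEL) == 0);
		assert(check_refname_format("refs/heads/*", REFNAME_REFSPEC_PATTERN) == 0);	/* one "*" component */
	}
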
+
+struct ref_entry;
+
+struct ref_value {
unsigned char sha1[20];
unsigned char peeled[20];
- /* The full name of the reference (e.g., "refs/heads/master"): */
- char name[FLEX_ARRAY];
};
-struct ref_array {
+struct ref_dir {
int nr, alloc;
/*
*/
int sorted;
- struct ref_entry **refs;
+ struct ref_entry **entries;
};
+/* ISSYMREF=0x01, ISPACKED=0x02, and ISBROKEN=0x04 are public interfaces */
+#define REF_KNOWS_PEELED 0x08
+#define REF_DIR 0x10
+
/*
- * Parse one line from a packed-refs file. Write the SHA1 to sha1.
- * Return a pointer to the refname within the line (null-terminated),
- * or NULL if there was a problem.
+ * A ref_entry represents either a reference or a "subdirectory" of
+ * references. Each directory in the reference namespace is
+ * represented by a ref_entry with (flags & REF_DIR) set and
+ * containing a subdir member that holds the entries in that
+ * directory. References are represented by a ref_entry with (flags &
+ * REF_DIR) unset and a value member that describes the reference's
+ * value. The flag member is at the ref_entry level, but it is also
+ * needed to interpret the contents of the value field (in other
+ * words, a ref_value object is not very much use without the
+ * enclosing ref_entry).
+ *
+ * Reference names cannot end with a slash, and directory names are
+ * always stored with a trailing slash (except for the top-level
+ * directory, which is always denoted by ""). This has two nice
+ * consequences: (1) when the entries in each subdir are sorted
+ * lexicographically by name (as they usually are), the references in
+ * a whole tree can be generated in lexicographic order by traversing
+ * the tree in left-to-right, depth-first order; (2) the names of
+ * references and subdirectories cannot conflict, and therefore the
+ * presence of an empty subdirectory does not block the creation of a
+ * similarly-named reference. (The fact that reference names with the
+ * same leading components can conflict *with each other* is a
+ * separate issue that is regulated by is_refname_available().)
+ *
+ * Please note that the name field contains the fully-qualified
+ * reference (or subdirectory) name. Space could be saved by only
+ * storing the relative names. But that would require the full names
+ * to be generated on the fly when iterating in do_for_each_ref(), and
+ * would break callback functions, which have always been able to assume
+ * that the name strings that they are passed will not be freed during
+ * the iteration.
*/
-static const char *parse_ref_line(char *line, unsigned char *sha1)
-{
+struct ref_entry {
+ unsigned char flag; /* ISSYMREF? ISPACKED? */
+ union {
+ struct ref_value value; /* if not (flags&REF_DIR) */
+ struct ref_dir subdir; /* if (flags&REF_DIR) */
+ } u;
/*
- * 42: the answer to everything.
- *
- * In this case, it happens to be the answer to
- * 40 (length of sha1 hex representation)
- * +1 (space in between hex and name)
- * +1 (newline at the end of the line)
+ * The full name of the reference (e.g., "refs/heads/master")
+ * or the full name of the directory with a trailing slash
+ * (e.g., "refs/heads/"):
*/
- int len = strlen(line) - 42;
-
- if (len <= 0)
- return NULL;
- if (get_sha1_hex(line, sha1) < 0)
- return NULL;
- if (!isspace(line[40]))
- return NULL;
- line += 41;
- if (isspace(*line))
- return NULL;
- if (line[len] != '\n')
- return NULL;
- line[len] = 0;
-
- return line;
-}
+ char name[FLEX_ARRAY];
+};
static struct ref_entry *create_ref_entry(const char *refname,
const unsigned char *sha1, int flag,
die("Reference has invalid format: '%s'", refname);
len = strlen(refname) + 1;
ref = xmalloc(sizeof(struct ref_entry) + len);
- hashcpy(ref->sha1, sha1);
- hashclr(ref->peeled);
+ hashcpy(ref->u.value.sha1, sha1);
+ hashclr(ref->u.value.peeled);
memcpy(ref->name, refname, len);
ref->flag = flag;
return ref;
}
-/* Add a ref_entry to the end of the ref_array (unsorted). */
-static void add_ref(struct ref_array *refs, struct ref_entry *ref)
+static void clear_ref_dir(struct ref_dir *dir);
+
+static void free_ref_entry(struct ref_entry *entry)
+{
+ if (entry->flag & REF_DIR)
+ clear_ref_dir(&entry->u.subdir);
+ free(entry);
+}
+
+/*
+ * Add a ref_entry to the end of dir (unsorted). Entry is always
+ * stored directly in dir; no recursion into subdirectories is
+ * done.
+ */
+static void add_entry_to_dir(struct ref_dir *dir, struct ref_entry *entry)
{
- ALLOC_GROW(refs->refs, refs->nr + 1, refs->alloc);
- refs->refs[refs->nr++] = ref;
+ ALLOC_GROW(dir->entries, dir->nr + 1, dir->alloc);
+ dir->entries[dir->nr++] = entry;
+}
+
+/*
+ * Clear and free all entries in dir, recursively.
+ */
+static void clear_ref_dir(struct ref_dir *dir)
+{
+ int i;
+ for (i = 0; i < dir->nr; i++)
+ free_ref_entry(dir->entries[i]);
+ free(dir->entries);
+ dir->sorted = dir->nr = dir->alloc = 0;
+ dir->entries = NULL;
+}
+
+/*
+ * Create a struct ref_entry object for the specified dirname.
+ * dirname is the name of the directory with a trailing slash (e.g.,
+ * "refs/heads/") or "" for the top-level directory.
+ */
+static struct ref_entry *create_dir_entry(const char *dirname)
+{
+ struct ref_entry *direntry;
+ int len = strlen(dirname);
+ direntry = xcalloc(1, sizeof(struct ref_entry) + len + 1);
+ memcpy(direntry->name, dirname, len + 1);
+ direntry->flag = REF_DIR;
+ return direntry;
}
static int ref_entry_cmp(const void *a, const void *b)
return strcmp(one->name, two->name);
}
+static void sort_ref_dir(struct ref_dir *dir);
+
+/*
+ * Return the entry with the given refname from the ref_dir
+ * (non-recursively), sorting dir if necessary. Return NULL if no
+ * such entry is found.
+ */
+static struct ref_entry *search_ref_dir(struct ref_dir *dir, const char *refname)
+{
+ struct ref_entry *e, **r;
+ int len;
+
+ if (refname == NULL || !dir->nr)
+ return NULL;
+
+ sort_ref_dir(dir);
+
+ len = strlen(refname) + 1;
+ e = xmalloc(sizeof(struct ref_entry) + len);
+ memcpy(e->name, refname, len);
+
+ r = bsearch(&e, dir->entries, dir->nr, sizeof(*dir->entries), ref_entry_cmp);
+
+ free(e);
+
+ if (r == NULL)
+ return NULL;
+
+ return *r;
+}
+
+/*
+ * If refname is a reference name, find the ref_dir within the dir
+ * tree that should hold refname. If refname is a directory name
+ * (i.e., ends in '/'), then return that ref_dir itself. dir must
+ * represent the top-level directory. Sort ref_dirs and recurse into
+ * subdirectories as necessary. If mkdir is set, then create any
+ * missing directories; otherwise, return NULL if the desired
+ * directory cannot be found.
+ */
+static struct ref_dir *find_containing_dir(struct ref_dir *dir,
+ const char *refname, int mkdir)
+{
+ char *refname_copy = xstrdup(refname);
+ char *slash;
+ struct ref_entry *entry;
+ for (slash = strchr(refname_copy, '/'); slash; slash = strchr(slash + 1, '/')) {
+ char tmp = slash[1];
+ slash[1] = '\0';
+ entry = search_ref_dir(dir, refname_copy);
+ if (!entry) {
+ if (!mkdir) {
+ dir = NULL;
+ break;
+ }
+ entry = create_dir_entry(refname_copy);
+ add_entry_to_dir(dir, entry);
+ }
+ slash[1] = tmp;
+ assert(entry->flag & REF_DIR);
+ dir = &entry->u.subdir;
+ }
+
+ free(refname_copy);
+ return dir;
+}
+
+/*
+ * Find the value entry with the given name in dir, sorting ref_dirs
+ * and recursing into subdirectories as necessary. If the name is not
+ * found or it corresponds to a directory entry, return NULL.
+ */
+static struct ref_entry *find_ref(struct ref_dir *dir, const char *refname)
+{
+ struct ref_entry *entry;
+ dir = find_containing_dir(dir, refname, 0);
+ if (!dir)
+ return NULL;
+ entry = search_ref_dir(dir, refname);
+ return (entry && !(entry->flag & REF_DIR)) ? entry : NULL;
+}
+
+/*
+ * Add a ref_entry to the ref_dir (unsorted), recursing into
+ * subdirectories as necessary. dir must represent the top-level
+ * directory. Return 0 on success.
+ */
+static int add_ref(struct ref_dir *dir, struct ref_entry *ref)
+{
+ dir = find_containing_dir(dir, ref->name, 1);
+ if (!dir)
+ return -1;
+ add_entry_to_dir(dir, ref);
+ return 0;
+}
+
/*
* Emit a warning and return true iff ref1 and ref2 have the same name
* and the same sha1. Die if they have the same name but different
*/
static int is_dup_ref(const struct ref_entry *ref1, const struct ref_entry *ref2)
{
- if (!strcmp(ref1->name, ref2->name)) {
- /* Duplicate name; make sure that the SHA1s match: */
- if (hashcmp(ref1->sha1, ref2->sha1))
- die("Duplicated ref, and SHA1s don't match: %s",
- ref1->name);
- warning("Duplicated ref: %s", ref1->name);
- return 1;
- } else {
+ if (strcmp(ref1->name, ref2->name))
return 0;
- }
+
+ /* Duplicate name; make sure that they don't conflict: */
+
+ if ((ref1->flag & REF_DIR) || (ref2->flag & REF_DIR))
+ /* This is impossible by construction */
+ die("Reference directory conflict: %s", ref1->name);
+
+ if (hashcmp(ref1->u.value.sha1, ref2->u.value.sha1))
+ die("Duplicated ref, and SHA1s don't match: %s", ref1->name);
+
+ warning("Duplicated ref: %s", ref1->name);
+ return 1;
}
/*
- * Sort the entries in array (if they are not already sorted).
+ * Sort the entries in dir non-recursively (if they are not already
+ * sorted) and remove any duplicate entries.
*/
-static void sort_ref_array(struct ref_array *array)
+static void sort_ref_dir(struct ref_dir *dir)
{
int i, j;
+ struct ref_entry *last = NULL;
/*
* This check also prevents passing a zero-length array to qsort(),
* which is a problem on some platforms.
*/
- if (array->sorted == array->nr)
+ if (dir->sorted == dir->nr)
return;
- qsort(array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
+ qsort(dir->entries, dir->nr, sizeof(*dir->entries), ref_entry_cmp);
- /* Remove any duplicates from the ref_array */
- i = 0;
- for (j = 1; j < array->nr; j++) {
- if (is_dup_ref(array->refs[i], array->refs[j])) {
- free(array->refs[j]);
- continue;
+ /* Remove any duplicates: */
+ for (i = 0, j = 0; j < dir->nr; j++) {
+ struct ref_entry *entry = dir->entries[j];
+ if (last && is_dup_ref(last, entry))
+ free_ref_entry(entry);
+ else
+ last = dir->entries[i++] = entry;
+ }
+ dir->sorted = dir->nr = i;
+}
+
+#define DO_FOR_EACH_INCLUDE_BROKEN 01
+
+static struct ref_entry *current_ref;
+
+static int do_one_ref(const char *base, each_ref_fn fn, int trim,
+ int flags, void *cb_data, struct ref_entry *entry)
+{
+ int retval;
+ if (prefixcmp(entry->name, base))
+ return 0;
+
+ if (!(flags & DO_FOR_EACH_INCLUDE_BROKEN)) {
+ if (entry->flag & REF_ISBROKEN)
+ return 0; /* ignore broken refs e.g. dangling symref */
+ if (!has_sha1_file(entry->u.value.sha1)) {
+ error("%s does not point to a valid object!", entry->name);
+ return 0;
}
- array->refs[++i] = array->refs[j];
}
- array->sorted = array->nr = i + 1;
+ current_ref = entry;
+ retval = fn(entry->name + trim, entry->u.value.sha1, entry->flag, cb_data);
+ current_ref = NULL;
+ return retval;
}
-static struct ref_entry *search_ref_array(struct ref_array *array, const char *refname)
+/*
+ * Call fn for each reference in dir that has index in the range
+ * offset <= index < dir->nr. Recurse into subdirectories that are in
+ * that index range, sorting them before iterating. This function
+ * does not sort dir itself; it should be sorted beforehand.
+ */
+static int do_for_each_ref_in_dir(struct ref_dir *dir, int offset,
+ const char *base,
+ each_ref_fn fn, int trim, int flags, void *cb_data)
{
- struct ref_entry *e, **r;
- int len;
+ int i;
+ assert(dir->sorted == dir->nr);
+ for (i = offset; i < dir->nr; i++) {
+ struct ref_entry *entry = dir->entries[i];
+ int retval;
+ if (entry->flag & REF_DIR) {
+ sort_ref_dir(&entry->u.subdir);
+ retval = do_for_each_ref_in_dir(&entry->u.subdir, 0,
+ base, fn, trim, flags, cb_data);
+ } else {
+ retval = do_one_ref(base, fn, trim, flags, cb_data, entry);
+ }
+ if (retval)
+ return retval;
+ }
+ return 0;
+}
- if (refname == NULL)
- return NULL;
+/*
+ * Call fn for each reference in the union of dir1 and dir2, in order
+ * by refname. Recurse into subdirectories. If a value entry appears
+ * in both dir1 and dir2, then only process the version that is in
+ * dir2. The input dirs must already be sorted, but subdirs will be
+ * sorted as needed.
+ */
+static int do_for_each_ref_in_dirs(struct ref_dir *dir1,
+ struct ref_dir *dir2,
+ const char *base, each_ref_fn fn, int trim,
+ int flags, void *cb_data)
+{
+ int retval;
+ int i1 = 0, i2 = 0;
- if (!array->nr)
- return NULL;
- sort_ref_array(array);
- len = strlen(refname) + 1;
- e = xmalloc(sizeof(struct ref_entry) + len);
- memcpy(e->name, refname, len);
+ assert(dir1->sorted == dir1->nr);
+ assert(dir2->sorted == dir2->nr);
+ while (1) {
+ struct ref_entry *e1, *e2;
+ int cmp;
+ if (i1 == dir1->nr) {
+ return do_for_each_ref_in_dir(dir2, i2,
+ base, fn, trim, flags, cb_data);
+ }
+ if (i2 == dir2->nr) {
+ return do_for_each_ref_in_dir(dir1, i1,
+ base, fn, trim, flags, cb_data);
+ }
+ e1 = dir1->entries[i1];
+ e2 = dir2->entries[i2];
+ cmp = strcmp(e1->name, e2->name);
+ if (cmp == 0) {
+ if ((e1->flag & REF_DIR) && (e2->flag & REF_DIR)) {
+ /* Both are directories; descend them in parallel. */
+ sort_ref_dir(&e1->u.subdir);
+ sort_ref_dir(&e2->u.subdir);
+ retval = do_for_each_ref_in_dirs(
+ &e1->u.subdir, &e2->u.subdir,
+ base, fn, trim, flags, cb_data);
+ i1++;
+ i2++;
+ } else if (!(e1->flag & REF_DIR) && !(e2->flag & REF_DIR)) {
+ /* Both are references; ignore the one from dir1. */
+ retval = do_one_ref(base, fn, trim, flags, cb_data, e2);
+ i1++;
+ i2++;
+ } else {
+ die("conflict between reference and directory: %s",
+ e1->name);
+ }
+ } else {
+ struct ref_entry *e;
+ if (cmp < 0) {
+ e = e1;
+ i1++;
+ } else {
+ e = e2;
+ i2++;
+ }
+ if (e->flag & REF_DIR) {
+ sort_ref_dir(&e->u.subdir);
+ retval = do_for_each_ref_in_dir(
+ &e->u.subdir, 0,
+ base, fn, trim, flags, cb_data);
+ } else {
+ retval = do_one_ref(base, fn, trim, flags, cb_data, e);
+ }
+ }
+ if (retval)
+ return retval;
+ }
+ if (i1 < dir1->nr)
+ return do_for_each_ref_in_dir(dir1, i1,
+ base, fn, trim, flags, cb_data);
+ if (i2 < dir2->nr)
+ return do_for_each_ref_in_dir(dir2, i2,
+ base, fn, trim, flags, cb_data);
+ return 0;
+}
- r = bsearch(&e, array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
+/*
+ * Return true iff refname1 and refname2 conflict with each other.
+ * Two reference names conflict if one of them exactly matches the
+ * leading components of the other; e.g., "foo/bar" conflicts with
+ * both "foo" and with "foo/bar/baz" but not with "foo/bar" or
+ * "foo/barbados".
+ */
+static int names_conflict(const char *refname1, const char *refname2)
+{
+ for (; *refname1 && *refname1 == *refname2; refname1++, refname2++)
+ ;
+ return (*refname1 == '\0' && *refname2 == '/')
+ || (*refname1 == '/' && *refname2 == '\0');
+}
- free(e);
+struct name_conflict_cb {
+ const char *refname;
+ const char *oldrefname;
+ const char *conflicting_refname;
+};
- if (r == NULL)
- return NULL;
+static int name_conflict_fn(const char *existingrefname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct name_conflict_cb *data = (struct name_conflict_cb *)cb_data;
+ if (data->oldrefname && !strcmp(data->oldrefname, existingrefname))
+ return 0;
+ if (names_conflict(data->refname, existingrefname)) {
+ data->conflicting_refname = existingrefname;
+ return 1;
+ }
+ return 0;
+}
- return *r;
+/*
+ * Return true iff a reference named refname could be created without
+ * conflicting with the name of an existing reference in dir. If
+ * oldrefname is non-NULL, ignore potential conflicts with oldrefname
+ * (e.g., because oldrefname is scheduled for deletion in the same
+ * operation).
+ */
+static int is_refname_available(const char *refname, const char *oldrefname,
+ struct ref_dir *dir)
+{
+ struct name_conflict_cb data;
+ data.refname = refname;
+ data.oldrefname = oldrefname;
+ data.conflicting_refname = NULL;
+
+ sort_ref_dir(dir);
+ if (do_for_each_ref_in_dir(dir, 0, "", name_conflict_fn,
+ 0, DO_FOR_EACH_INCLUDE_BROKEN,
+ &data)) {
+ error("'%s' exists; cannot create '%s'",
+ data.conflicting_refname, refname);
+ return 0;
+ }
+ return 1;
}
/*
struct ref_cache *next;
char did_loose;
char did_packed;
- struct ref_array loose;
- struct ref_array packed;
+ struct ref_dir loose;
+ struct ref_dir packed;
/* The submodule name, or "" for the main repo. */
char name[FLEX_ARRAY];
} *ref_cache;
-static struct ref_entry *current_ref;
-
-static void clear_ref_array(struct ref_array *array)
-{
- int i;
- for (i = 0; i < array->nr; i++)
- free(array->refs[i]);
- free(array->refs);
- array->sorted = array->nr = array->alloc = 0;
- array->refs = NULL;
-}
-
static void clear_packed_ref_cache(struct ref_cache *refs)
{
if (refs->did_packed)
- clear_ref_array(&refs->packed);
+ clear_ref_dir(&refs->packed);
refs->did_packed = 0;
}
static void clear_loose_ref_cache(struct ref_cache *refs)
{
if (refs->did_loose)
- clear_ref_array(&refs->loose);
+ clear_ref_dir(&refs->loose);
refs->did_loose = 0;
}
refs = refs->next;
}
- refs = create_ref_cache(submodule);
- refs->next = ref_cache;
- ref_cache = refs;
- return refs;
-}
+ refs = create_ref_cache(submodule);
+ refs->next = ref_cache;
+ ref_cache = refs;
+ return refs;
+}
+
+void invalidate_ref_cache(const char *submodule)
+{
+ struct ref_cache *refs = get_ref_cache(submodule);
+ clear_packed_ref_cache(refs);
+ clear_loose_ref_cache(refs);
+}
+
+/*
+ * Parse one line from a packed-refs file. Write the SHA1 to sha1.
+ * Return a pointer to the refname within the line (null-terminated),
+ * or NULL if there was a problem.
+ */
+static const char *parse_ref_line(char *line, unsigned char *sha1)
+{
+ /*
+ * 42: the answer to everything.
+ *
+ * In this case, it happens to be the answer to
+ * 40 (length of sha1 hex representation)
+ * +1 (space in between hex and name)
+ * +1 (newline at the end of the line)
+ */
+ int len = strlen(line) - 42;
+
+ if (len <= 0)
+ return NULL;
+ if (get_sha1_hex(line, sha1) < 0)
+ return NULL;
+ if (!isspace(line[40]))
+ return NULL;
+ line += 41;
+ if (isspace(*line))
+ return NULL;
+ if (line[len] != '\n')
+ return NULL;
+ line[len] = 0;
-void invalidate_ref_cache(const char *submodule)
-{
- struct ref_cache *refs = get_ref_cache(submodule);
- clear_packed_ref_cache(refs);
- clear_loose_ref_cache(refs);
+ return line;
}
-static void read_packed_refs(FILE *f, struct ref_array *array)
+static void read_packed_refs(FILE *f, struct ref_dir *dir)
{
struct ref_entry *last = NULL;
char refline[PATH_MAX];
refname = parse_ref_line(refline, sha1);
if (refname) {
last = create_ref_entry(refname, sha1, flag, 1);
- add_ref(array, last);
+ add_ref(dir, last);
continue;
}
if (last &&
strlen(refline) == 42 &&
refline[41] == '\n' &&
!get_sha1_hex(refline + 1, sha1))
- hashcpy(last->peeled, sha1);
+ hashcpy(last->u.value.peeled, sha1);
}
}
-static struct ref_array *get_packed_refs(struct ref_cache *refs)
+static struct ref_dir *get_packed_refs(struct ref_cache *refs)
{
if (!refs->did_packed) {
const char *packed_refs_file;
}
static void get_ref_dir(struct ref_cache *refs, const char *base,
- struct ref_array *array)
+ struct ref_dir *dir)
{
- DIR *dir;
+ DIR *d;
const char *path;
if (*refs->name)
else
path = git_path("%s", base);
-
- dir = opendir(path);
-
- if (dir) {
+ d = opendir(path);
+ if (d) {
struct dirent *de;
int baselen = strlen(base);
char *refname = xmalloc(baselen + 257);
if (baselen && base[baselen-1] != '/')
refname[baselen++] = '/';
- while ((de = readdir(dir)) != NULL) {
+ while ((de = readdir(d)) != NULL) {
unsigned char sha1[20];
struct stat st;
int flag;
if (stat(refdir, &st) < 0)
continue;
if (S_ISDIR(st.st_mode)) {
- get_ref_dir(refs, refname, array);
+ get_ref_dir(refs, refname, dir);
continue;
}
if (*refs->name) {
hashclr(sha1);
flag |= REF_ISBROKEN;
}
- add_ref(array, create_ref_entry(refname, sha1, flag, 1));
+ add_ref(dir, create_ref_entry(refname, sha1, flag, 1));
}
free(refname);
- closedir(dir);
+ closedir(d);
}
}
-struct warn_if_dangling_data {
- FILE *fp;
- const char *refname;
- const char *msg_fmt;
-};
-
-static int warn_if_dangling_symref(const char *refname, const unsigned char *sha1,
- int flags, void *cb_data)
-{
- struct warn_if_dangling_data *d = cb_data;
- const char *resolves_to;
- unsigned char junk[20];
-
- if (!(flags & REF_ISSYMREF))
- return 0;
-
- resolves_to = resolve_ref_unsafe(refname, junk, 0, NULL);
- if (!resolves_to || strcmp(resolves_to, d->refname))
- return 0;
-
- fprintf(d->fp, d->msg_fmt, refname);
- return 0;
-}
-
-void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname)
-{
- struct warn_if_dangling_data data;
-
- data.fp = fp;
- data.refname = refname;
- data.msg_fmt = msg_fmt;
- for_each_rawref(warn_if_dangling_symref, &data);
-}
-
-static struct ref_array *get_loose_refs(struct ref_cache *refs)
+static struct ref_dir *get_loose_refs(struct ref_cache *refs)
{
if (!refs->did_loose) {
get_ref_dir(refs, "refs", &refs->loose);
const char *refname, unsigned char *sha1)
{
struct ref_entry *ref;
- struct ref_array *array = get_packed_refs(refs);
+ struct ref_dir *dir = get_packed_refs(refs);
- ref = search_ref_array(array, refname);
+ ref = find_ref(dir, refname);
if (ref == NULL)
return -1;
- memcpy(sha1, ref->sha1, 20);
+ memcpy(sha1, ref->u.value.sha1, 20);
return 0;
}
*/
static int get_packed_ref(const char *refname, unsigned char *sha1)
{
- struct ref_array *packed = get_packed_refs(get_ref_cache(NULL));
- struct ref_entry *entry = search_ref_array(packed, refname);
+ struct ref_dir *packed = get_packed_refs(get_ref_cache(NULL));
+ struct ref_entry *entry = find_ref(packed, refname);
if (entry) {
- hashcpy(sha1, entry->sha1);
+ hashcpy(sha1, entry->u.value.sha1);
return 0;
}
return -1;
return read_ref_full(refname, sha1, 1, NULL);
}
-#define DO_FOR_EACH_INCLUDE_BROKEN 01
-static int do_one_ref(const char *base, each_ref_fn fn, int trim,
- int flags, void *cb_data, struct ref_entry *entry)
+int ref_exists(const char *refname)
{
- if (prefixcmp(entry->name, base))
- return 0;
-
- if (!(flags & DO_FOR_EACH_INCLUDE_BROKEN)) {
- if (entry->flag & REF_ISBROKEN)
- return 0; /* ignore broken refs e.g. dangling symref */
- if (!has_sha1_file(entry->sha1)) {
- error("%s does not point to a valid object!", entry->name);
- return 0;
- }
- }
- current_ref = entry;
- return fn(entry->name + trim, entry->sha1, entry->flag, cb_data);
+ unsigned char sha1[20];
+ return !!resolve_ref_unsafe(refname, sha1, 1, NULL);
}
static int filter_refs(const char *refname, const unsigned char *sha1, int flags,
if (current_ref && (current_ref->name == refname
|| !strcmp(current_ref->name, refname))) {
if (current_ref->flag & REF_KNOWS_PEELED) {
- hashcpy(sha1, current_ref->peeled);
+ hashcpy(sha1, current_ref->u.value.peeled);
return 0;
}
- hashcpy(base, current_ref->sha1);
+ hashcpy(base, current_ref->u.value.sha1);
goto fallback;
}
return -1;
if ((flag & REF_ISPACKED)) {
- struct ref_array *array = get_packed_refs(get_ref_cache(NULL));
- struct ref_entry *r = search_ref_array(array, refname);
+ struct ref_dir *dir = get_packed_refs(get_ref_cache(NULL));
+ struct ref_entry *r = find_ref(dir, refname);
if (r != NULL && r->flag & REF_KNOWS_PEELED) {
- hashcpy(sha1, r->peeled);
+ hashcpy(sha1, r->u.value.peeled);
return 0;
}
}
return -1;
}
+struct warn_if_dangling_data {
+ FILE *fp;
+ const char *refname;
+ const char *msg_fmt;
+};
+
+static int warn_if_dangling_symref(const char *refname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct warn_if_dangling_data *d = cb_data;
+ const char *resolves_to;
+ unsigned char junk[20];
+
+ if (!(flags & REF_ISSYMREF))
+ return 0;
+
+ resolves_to = resolve_ref_unsafe(refname, junk, 0, NULL);
+ if (!resolves_to || strcmp(resolves_to, d->refname))
+ return 0;
+
+ fprintf(d->fp, d->msg_fmt, refname);
+ return 0;
+}
+
+void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname)
+{
+ struct warn_if_dangling_data data;
+
+ data.fp = fp;
+ data.refname = refname;
+ data.msg_fmt = msg_fmt;
+ for_each_rawref(warn_if_dangling_symref, &data);
+}
+
static int do_for_each_ref(const char *submodule, const char *base, each_ref_fn fn,
int trim, int flags, void *cb_data)
{
- int retval = 0, p = 0, l = 0;
struct ref_cache *refs = get_ref_cache(submodule);
- struct ref_array *packed = get_packed_refs(refs);
- struct ref_array *loose = get_loose_refs(refs);
-
- sort_ref_array(packed);
- sort_ref_array(loose);
- while (p < packed->nr && l < loose->nr) {
- struct ref_entry *entry;
- int cmp = strcmp(packed->refs[p]->name, loose->refs[l]->name);
- if (!cmp) {
- p++;
- continue;
- }
- if (cmp > 0) {
- entry = loose->refs[l++];
- } else {
- entry = packed->refs[p++];
- }
- retval = do_one_ref(base, fn, trim, flags, cb_data, entry);
- if (retval)
- goto end_each;
- }
-
- if (l < loose->nr) {
- p = l;
- packed = loose;
- }
+ struct ref_dir *packed_dir = get_packed_refs(refs);
+ struct ref_dir *loose_dir = get_loose_refs(refs);
+ int retval = 0;
- for (; p < packed->nr; p++) {
- retval = do_one_ref(base, fn, trim, flags, cb_data, packed->refs[p]);
- if (retval)
- goto end_each;
+ if (base && *base) {
+ packed_dir = find_containing_dir(packed_dir, base, 0);
+ loose_dir = find_containing_dir(loose_dir, base, 0);
+ }
+
+ if (packed_dir && loose_dir) {
+ sort_ref_dir(packed_dir);
+ sort_ref_dir(loose_dir);
+ retval = do_for_each_ref_in_dirs(
+ packed_dir, loose_dir,
+ base, fn, trim, flags, cb_data);
+ } else if (packed_dir) {
+ sort_ref_dir(packed_dir);
+ retval = do_for_each_ref_in_dir(
+ packed_dir, 0,
+ base, fn, trim, flags, cb_data);
+ } else if (loose_dir) {
+ sort_ref_dir(loose_dir);
+ retval = do_for_each_ref_in_dir(
+ loose_dir, 0,
+ base, fn, trim, flags, cb_data);
}
-end_each:
- current_ref = NULL;
return retval;
}
-
static int do_head_ref(const char *submodule, each_ref_fn fn, void *cb_data)
{
unsigned char sha1[20];
DO_FOR_EACH_INCLUDE_BROKEN, cb_data);
}
-/*
- * Make sure "ref" is something reasonable to have under ".git/refs/";
- * We do not like it if:
- *
- * - any path component of it begins with ".", or
- * - it has double dots "..", or
- * - it has ASCII control character, "~", "^", ":" or SP, anywhere, or
- * - it ends with a "/".
- * - it ends with ".lock"
- * - it contains a "\" (backslash)
- */
-
-/* Return true iff ch is not allowed in reference names. */
-static inline int bad_ref_char(int ch)
-{
- if (((unsigned) ch) <= ' ' || ch == 0x7f ||
- ch == '~' || ch == '^' || ch == ':' || ch == '\\')
- return 1;
- /* 2.13 Pattern Matching Notation */
- if (ch == '*' || ch == '?' || ch == '[') /* Unsupported */
- return 1;
- return 0;
-}
-
-/*
- * Try to read one refname component from the front of refname. Return
- * the length of the component found, or -1 if the component is not
- * legal.
- */
-static int check_refname_component(const char *refname, int flags)
-{
- const char *cp;
- char last = '\0';
-
- for (cp = refname; ; cp++) {
- char ch = *cp;
- if (ch == '\0' || ch == '/')
- break;
- if (bad_ref_char(ch))
- return -1; /* Illegal character in refname. */
- if (last == '.' && ch == '.')
- return -1; /* Refname contains "..". */
- if (last == '@' && ch == '{')
- return -1; /* Refname contains "@{". */
- last = ch;
- }
- if (cp == refname)
- return -1; /* Component has zero length. */
- if (refname[0] == '.') {
- if (!(flags & REFNAME_DOT_COMPONENT))
- return -1; /* Component starts with '.'. */
- /*
- * Even if leading dots are allowed, don't allow "."
- * as a component (".." is prevented by a rule above).
- */
- if (refname[1] == '\0')
- return -1; /* Component equals ".". */
- }
- if (cp - refname >= 5 && !memcmp(cp - 5, ".lock", 5))
- return -1; /* Refname ends with ".lock". */
- return cp - refname;
-}
-
-int check_refname_format(const char *refname, int flags)
-{
- int component_len, component_count = 0;
-
- while (1) {
- /* We are at the start of a path component. */
- component_len = check_refname_component(refname, flags);
- if (component_len < 0) {
- if ((flags & REFNAME_REFSPEC_PATTERN) &&
- refname[0] == '*' &&
- (refname[1] == '\0' || refname[1] == '/')) {
- /* Accept one wildcard as a full refname component. */
- flags &= ~REFNAME_REFSPEC_PATTERN;
- component_len = 1;
- } else {
- return -1;
- }
- }
- component_count++;
- if (refname[component_len] == '\0')
- break;
- /* Skip to next component. */
- refname += component_len + 1;
- }
-
- if (refname[component_len - 1] == '.')
- return -1; /* Refname ends with '.'. */
- if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
- return -1; /* Refname has only one component. */
- return 0;
-}
-
const char *prettify_refname(const char *name)
{
return name + (
return result;
}
-/*
- * Return true iff a reference named refname could be created without
- * conflicting with the name of an existing reference. If oldrefname
- * is non-NULL, ignore potential conflicts with oldrefname (e.g.,
- * because oldrefname is scheduled for deletion in the same
- * operation).
- */
-static int is_refname_available(const char *refname, const char *oldrefname,
- struct ref_array *array)
-{
- int i, namlen = strlen(refname); /* e.g. 'foo/bar' */
- for (i = 0; i < array->nr; i++ ) {
- struct ref_entry *entry = array->refs[i];
- /* entry->name could be 'foo' or 'foo/bar/baz' */
- if (!oldrefname || strcmp(oldrefname, entry->name)) {
- int len = strlen(entry->name);
- int cmplen = (namlen < len) ? namlen : len;
- const char *lead = (namlen < len) ? entry->name : refname;
- if (!strncmp(refname, entry->name, cmplen) &&
- lead[cmplen] == '/') {
- error("'%s' exists; cannot create '%s'",
- entry->name, refname);
- return 0;
- }
- }
- }
- return 1;
-}
-
/*
* *string and *len will only be substituted, and *string returned (for
* later free()ing) if the string passed in is a magic short-hand form
return lock_ref_sha1_basic(refname, old_sha1, flags, NULL);
}
+struct repack_without_ref_sb {
+ const char *refname;
+ int fd;
+};
+
+static int repack_without_ref_fn(const char *refname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct repack_without_ref_sb *data = cb_data;
+ char line[PATH_MAX + 100];
+ int len;
+
+ if (!strcmp(data->refname, refname))
+ return 0;
+ len = snprintf(line, sizeof(line), "%s %s\n",
+ sha1_to_hex(sha1), refname);
+ /* this should not happen but just being defensive */
+ if (len > sizeof(line))
+ die("too long a refname '%s'", refname);
+ write_or_die(data->fd, line, len);
+ return 0;
+}
+
static struct lock_file packlock;
static int repack_without_ref(const char *refname)
{
- struct ref_array *packed;
- int fd, i;
-
- packed = get_packed_refs(get_ref_cache(NULL));
- if (search_ref_array(packed, refname) == NULL)
+ struct repack_without_ref_sb data;
+ struct ref_dir *packed = get_packed_refs(get_ref_cache(NULL));
+ if (find_ref(packed, refname) == NULL)
return 0;
- fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
- if (fd < 0) {
+ data.refname = refname;
+ data.fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
+ if (data.fd < 0) {
unable_to_lock_error(git_path("packed-refs"), errno);
return error("cannot delete '%s' from packed refs", refname);
}
-
- for (i = 0; i < packed->nr; i++) {
- char line[PATH_MAX + 100];
- int len;
- struct ref_entry *ref = packed->refs[i];
-
- if (!strcmp(refname, ref->name))
- continue;
- len = snprintf(line, sizeof(line), "%s %s\n",
- sha1_to_hex(ref->sha1), ref->name);
- /* this should not happen but just being defensive */
- if (len > sizeof(line))
- die("too long a refname '%s'", ref->name);
- write_or_die(fd, line, len);
- }
+ do_for_each_ref_in_dir(packed, 0, "", repack_without_ref_fn, 0, 0, &data);
return commit_lock_file(&packlock);
}
static int do_for_each_reflog(const char *base, each_ref_fn fn, void *cb_data)
{
- DIR *dir = opendir(git_path("logs/%s", base));
+ DIR *d = opendir(git_path("logs/%s", base));
int retval = 0;
- if (dir) {
+ if (d) {
struct dirent *de;
int baselen = strlen(base);
char *log = xmalloc(baselen + 257);
if (baselen && base[baselen-1] != '/')
log[baselen++] = '/';
- while ((de = readdir(dir)) != NULL) {
+ while ((de = readdir(d)) != NULL) {
struct stat st;
int namelen;
break;
}
free(log);
- closedir(dir);
+ closedir(d);
}
else if (*base)
return errno;
return 0;
}
-int ref_exists(const char *refname)
-{
- unsigned char sha1[20];
- return !!resolve_ref_unsafe(refname, sha1, 1, NULL);
-}
-
struct ref *find_ref_by_name(const struct ref *list, const char *name)
{
for ( ; list; list = list->next)
#define REF_ISBROKEN 0x04
/*
- * Calls the specified function for each ref file until it returns nonzero,
- * and returns the value
+ * Calls the specified function for each ref file until it returns
+ * nonzero, and returns the value. Please note that it is not safe to
+ * modify references while an iteration is in progress, unless the
+ * same callback function invocation that modifies the reference also
+ * returns a nonzero value to immediately stop the iteration.
*/
typedef int each_ref_fn(const char *refname, const unsigned char *sha1, int flags, void *cb_data);
extern int head_ref(each_ref_fn, void *);
struct rpc_state {
const char *service_name;
const char **argv;
+ struct strbuf *stdin_preamble;
char *service_url;
char *hdr_content_type;
char *hdr_accept;
{
const char *svc = rpc->service_name;
struct strbuf buf = STRBUF_INIT;
+ struct strbuf *preamble = rpc->stdin_preamble;
struct child_process client;
int err = 0;
client.argv = rpc->argv;
if (start_command(&client))
exit(1);
+ if (preamble)
+ write_or_die(client.in, preamble->buf, preamble->len);
if (heads)
write_or_die(client.in, heads->buf, heads->len);
int nr_heads, struct ref **to_fetch)
{
struct rpc_state rpc;
+ struct strbuf preamble = STRBUF_INIT;
char *depth_arg = NULL;
- const char **argv;
int argc = 0, i, err;
+ const char *argv[15];
- argv = xmalloc((15 + nr_heads) * sizeof(char*));
argv[argc++] = "fetch-pack";
argv[argc++] = "--stateless-rpc";
+ argv[argc++] = "--stdin";
argv[argc++] = "--lock-pack";
if (options.followtags)
argv[argc++] = "--include-tag";
argv[argc++] = depth_arg;
}
argv[argc++] = url;
+ argv[argc++] = NULL;
+
for (i = 0; i < nr_heads; i++) {
struct ref *ref = to_fetch[i];
if (!ref->name || !*ref->name)
die("cannot fetch by sha1 over smart http");
- argv[argc++] = ref->name;
+ packet_buf_write(&preamble, "%s\n", ref->name);
}
- argv[argc++] = NULL;
+ packet_buf_flush(&preamble);
memset(&rpc, 0, sizeof(rpc));
rpc.service_name = "git-upload-pack",
rpc.argv = argv;
+ rpc.stdin_preamble = &preamble;
rpc.gzip_request = 1;
err = rpc_service(&rpc, heads);
if (rpc.result.len)
safe_write(1, rpc.result.buf, rpc.result.len);
strbuf_release(&rpc.result);
- free(argv);
+ strbuf_release(&preamble);
free(depth_arg);
return err;
}
}
}
+void reset_revision_walk(void)
+{
+ clear_object_flags(SEEN | ADDED | SHOWN);
+}
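
A sketch of the intended calling pattern, written as if it sat next to reset_revision_walk() in revision.c; the walk_once() wrapper is hypothetical, but the submodule changes further down rely on exactly this sequence:

	static void walk_once(struct rev_info *revs)
	{
		struct commit *commit;

		if (prepare_revision_walk(revs))
			die("revision walk setup failed");
		while ((commit = get_revision(revs)) != NULL)
			; /* ... examine commit ... */
		/*
		 * Clear the SEEN/ADDED/SHOWN flags left on in-core objects
		 * so that a later traversal in the same process starts clean.
		 */
		reset_revision_walk();
	}
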
+
int prepare_revision_walk(struct rev_info *revs)
{
int nr = revs->pending.nr;
if (commit) {
if (!(commit->object.flags & SEEN)) {
commit->object.flags |= SEEN;
- commit_list_insert_by_date(commit, &revs->commits);
+ commit_list_insert(commit, &revs->commits);
}
}
e++;
}
+ commit_list_reverse(&revs->commits);
+ commit_list_sort_by_date(&revs->commits);
if (!revs->leak_pending)
free(list);
const char * const usagestr[]);
extern int handle_revision_arg(const char *arg, struct rev_info *revs,int flags,int cant_be_filename);
+extern void reset_revision_walk(void);
extern int prepare_revision_walk(struct rev_info *revs);
extern struct commit *get_revision(struct rev_info *revs);
extern char *get_revision_mark(const struct rev_info *revs, const struct commit *commit);
#include "sigchain.h"
#include "argv-array.h"
+#ifndef SHELL_PATH
+# define SHELL_PATH "/bin/sh"
+#endif
+
struct child_to_clean {
pid_t pid;
struct child_to_clean *next;
}
#endif
+static char *locate_in_PATH(const char *file)
+{
+ const char *p = getenv("PATH");
+ struct strbuf buf = STRBUF_INIT;
+
+ if (!p || !*p)
+ return NULL;
+
+ while (1) {
+ const char *end = strchrnul(p, ':');
+
+ strbuf_reset(&buf);
+
+ /* POSIX specifies an empty entry as the current directory. */
+ if (end != p) {
+ strbuf_add(&buf, p, end - p);
+ strbuf_addch(&buf, '/');
+ }
+ strbuf_addstr(&buf, file);
+
+ if (!access(buf.buf, F_OK))
+ return strbuf_detach(&buf, NULL);
+
+ if (!*end)
+ break;
+ p = end + 1;
+ }
+
+ strbuf_release(&buf);
+ return NULL;
+}
+
+static int exists_in_PATH(const char *file)
+{
+ char *r = locate_in_PATH(file);
+ free(r);
+ return r != NULL;
+}
+
+int sane_execvp(const char *file, char * const argv[])
+{
+ if (!execvp(file, argv))
+ return 0; /* cannot happen ;-) */
+
+ /*
+ * When a command can't be found because one of the directories
+ * listed in $PATH is unsearchable, execvp reports EACCES, but
+ * careful usability testing (read: analysis of occasional bug
+ * reports) reveals that "No such file or directory" is more
+ * intuitive.
+ *
+ * We avoid commands with "/", because execvp will not do $PATH
+ * lookups in that case.
+ *
+ * The reassignment of EACCES to errno looks like a no-op below,
+ * but we need to protect against exists_in_PATH overwriting errno.
+ */
+ if (errno == EACCES && !strchr(file, '/'))
+ errno = exists_in_PATH(file) ? EACCES : ENOENT;
+ return -1;
+}
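
A hypothetical extra caller, written as if it lived in run-command.c next to sane_execvp(), only to show the intended errno behaviour; the real exec sites are switched over in the hunks below:

	static void exec_or_die(const char **argv)
	{
		sane_execvp(argv[0], (char *const *)argv);
		/* Only reached on failure; errno is ENOENT or EACCES as described above. */
		die_errno("cannot exec '%s'", argv[0]);
	}
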
+
static const char **prepare_shell_cmd(const char **argv)
{
int argc, nargc = 0;
die("BUG: shell command is empty");
if (strcspn(argv[0], "|&;<>()$`\\\"' \t\n*?[#~=%") != strlen(argv[0])) {
+#ifndef WIN32
+ nargv[nargc++] = SHELL_PATH;
+#else
nargv[nargc++] = "sh";
+#endif
nargv[nargc++] = "-c";
if (argc < 2)
{
const char **nargv = prepare_shell_cmd(argv);
trace_argv_printf(nargv, "trace: exec:");
- execvp(nargv[0], (char **)nargv);
+ sane_execvp(nargv[0], (char **)nargv);
free(nargv);
return -1;
}
} else if (cmd->use_shell) {
execv_shell_cmd(cmd->argv);
} else {
- execvp(cmd->argv[0], (char *const*) cmd->argv);
+ sane_execvp(cmd->argv[0], (char *const*) cmd->argv);
}
if (errno == ENOENT) {
if (!cmd->silent_exec_failure)
static struct tree *empty_tree(void)
{
- return lookup_tree((const unsigned char *)EMPTY_TREE_SHA1_BIN);
+ return lookup_tree(EMPTY_TREE_SHA1_BIN);
}
static int error_dirty_index(struct replay_opts *opts)
#include "pack-revindex.h"
#include "sha1-lookup.h"
#include "bulk-checkin.h"
+#include "streaming.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
return NULL;
}
-int check_sha1_signature(const unsigned char *sha1, void *map, unsigned long size, const char *type)
+/*
+ * With in-core object data in "map", rehash it to make sure the
+ * object name actually matches "sha1" to detect object corruption.
+ * With "map" == NULL, try reading the object named with "sha1" using
+ * the streaming interface and rehash it to do the same.
+ */
+int check_sha1_signature(const unsigned char *sha1, void *map,
+ unsigned long size, const char *type)
{
unsigned char real_sha1[20];
- hash_sha1_file(map, size, type, real_sha1);
+ enum object_type obj_type;
+ struct git_istream *st;
+ git_SHA_CTX c;
+ char hdr[32];
+ int hdrlen;
+
+ if (map) {
+ hash_sha1_file(map, size, type, real_sha1);
+ return hashcmp(sha1, real_sha1) ? -1 : 0;
+ }
+
+ st = open_istream(sha1, &obj_type, &size, NULL);
+ if (!st)
+ return -1;
+
+ /* Generate the header */
+ hdrlen = sprintf(hdr, "%s %lu", typename(obj_type), size) + 1;
+
+ /* Sha1.. */
+ git_SHA1_Init(&c);
+ git_SHA1_Update(&c, hdr, hdrlen);
+ for (;;) {
+ char buf[1024 * 16];
+ ssize_t readlen = read_istream(st, buf, sizeof(buf));
+
+ if (!readlen)
+ break;
+ git_SHA1_Update(&c, buf, readlen);
+ }
+ git_SHA1_Final(real_sha1, &c);
+ close_istream(st);
return hashcmp(sha1, real_sha1) ? -1 : 0;
}
len = cp + tmp_len - name;
cp = xstrndup(name, cp - name);
upstream = branch_get(*cp ? cp : NULL);
- if (!upstream
- || !upstream->merge
- || !upstream->merge[0]->dst)
- return error("No upstream branch found for '%s'", cp);
+ /*
+ * Upstream can be NULL only if cp refers to HEAD and HEAD
+ * points to something other than a branch.
+ */
+ if (!upstream)
+ return error(_("HEAD does not point to a branch"));
+ if (!upstream->merge || !upstream->merge[0]->dst) {
+ if (!ref_exists(upstream->refname))
+ return error(_("No such branch: '%s'"), cp);
+ if (!upstream->merge)
+ return error(_("No upstream configured for branch '%s'"),
+ upstream->name);
+ return error(
+ _("Upstream branch '%s' not stored as a remote-tracking branch"),
+ upstream->merge[0]->src);
+ }
free(cp);
cp = shorten_unambiguous_ref(upstream->merge[0]->dst, 0);
strbuf_reset(buf);
return st->u.incore.buf ? 0 : -1;
}
+
+
+/****************************************************************
+ * Users of streaming interface
+ ****************************************************************/
+
+int stream_blob_to_fd(int fd, const unsigned char *sha1, struct stream_filter *filter,
+ int can_seek)
+{
+ struct git_istream *st;
+ enum object_type type;
+ unsigned long sz;
+ ssize_t kept = 0;
+ int result = -1;
+
+ st = open_istream(sha1, &type, &sz, filter);
+ if (!st)
+ return result;
+ if (type != OBJ_BLOB)
+ goto close_and_exit;
+ for (;;) {
+ char buf[1024 * 16];
+ ssize_t wrote, holeto;
+ ssize_t readlen = read_istream(st, buf, sizeof(buf));
+
+ if (!readlen)
+ break;
+ if (can_seek && sizeof(buf) == readlen) {
+ for (holeto = 0; holeto < readlen; holeto++)
+ if (buf[holeto])
+ break;
+ if (readlen == holeto) {
+ kept += holeto;
+ continue;
+ }
+ }
+
+ if (kept && lseek(fd, kept, SEEK_CUR) == (off_t) -1)
+ goto close_and_exit;
+ else
+ kept = 0;
+ wrote = write_in_full(fd, buf, readlen);
+
+ if (wrote != readlen)
+ goto close_and_exit;
+ }
+ if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
+ write(fd, "", 1) != 1))
+ goto close_and_exit;
+ result = 0;
+
+ close_and_exit:
+ close_istream(st);
+ return result;
+}
extern int close_istream(struct git_istream *);
extern ssize_t read_istream(struct git_istream *, char *, size_t);
+extern int stream_blob_to_fd(int fd, const unsigned char *, struct stream_filter *, int can_seek);
+
#endif /* STREAMING_H */
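
A minimal sketch of a caller of the new helper; the function name is hypothetical. Passing a NULL filter and can_seek=0 is the conservative choice when the descriptor may be a pipe:

	#include "cache.h"
	#include "streaming.h"

	/* Stream the blob named by "sha1" to stdout without loading it whole. */
	static int dump_blob_to_stdout(const unsigned char *sha1)
	{
		/*
		 * No conversion filter, and stdout may be a pipe, so do not
		 * let stream_blob_to_fd() try to lseek() over runs of zeros.
		 */
		return stream_blob_to_fd(1, sha1, NULL, 0);
	}
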
void *data)
{
int i;
- int *needs_pushing = data;
+ struct string_list *needs_pushing = data;
for (i = 0; i < q->nr; i++) {
struct diff_filepair *p = q->queue[i];
if (!S_ISGITLINK(p->two->mode))
continue;
- if (submodule_needs_pushing(p->two->path, p->two->sha1)) {
- *needs_pushing = 1;
- break;
- }
+ if (submodule_needs_pushing(p->two->path, p->two->sha1))
+ string_list_insert(needs_pushing, p->two->path);
}
}
-
-static void commit_need_pushing(struct commit *commit, int *needs_pushing)
+static void find_unpushed_submodule_commits(struct commit *commit,
+ struct string_list *needs_pushing)
{
struct rev_info rev;
diff_tree_combined_merge(commit, 1, &rev);
}
-int check_submodule_needs_pushing(unsigned char new_sha1[20], const char *remotes_name)
+int find_unpushed_submodules(unsigned char new_sha1[20],
+ const char *remotes_name, struct string_list *needs_pushing)
{
struct rev_info rev;
struct commit *commit;
const char *argv[] = {NULL, NULL, "--not", "NULL", NULL};
int argc = ARRAY_SIZE(argv) - 1;
char *sha1_copy;
- int needs_pushing = 0;
+
struct strbuf remotes_arg = STRBUF_INIT;
strbuf_addf(&remotes_arg, "--remotes=%s", remotes_name);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
- while ((commit = get_revision(&rev)) && !needs_pushing)
- commit_need_pushing(commit, &needs_pushing);
+ while ((commit = get_revision(&rev)) != NULL)
+ find_unpushed_submodule_commits(commit, needs_pushing);
+ reset_revision_walk();
free(sha1_copy);
strbuf_release(&remotes_arg);
- return needs_pushing;
+ return needs_pushing->nr;
+}
+
+static int push_submodule(const char *path)
+{
+ if (add_submodule_odb(path))
+ return 1;
+
+ if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
+ struct child_process cp;
+ const char *argv[] = {"push", NULL};
+
+ memset(&cp, 0, sizeof(cp));
+ cp.argv = argv;
+ cp.env = local_repo_env;
+ cp.git_cmd = 1;
+ cp.no_stdin = 1;
+ cp.dir = path;
+ if (run_command(&cp))
+ return 0;
+ close(cp.out);
+ }
+
+ return 1;
+}
+
+int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name)
+{
+ int i, ret = 1;
+ struct string_list needs_pushing;
+
+ memset(&needs_pushing, 0, sizeof(struct string_list));
+ needs_pushing.strdup_strings = 1;
+
+ if (!find_unpushed_submodules(new_sha1, remotes_name, &needs_pushing))
+ return 1;
+
+ for (i = 0; i < needs_pushing.nr; i++) {
+ const char *path = needs_pushing.items[i].string;
+ fprintf(stderr, "Pushing submodule '%s'\n", path);
+ if (!push_submodule(path)) {
+ fprintf(stderr, "Unable to push submodule '%s'\n", path);
+ ret = 0;
+ }
+ }
+
+ string_list_clear(&needs_pushing, 0);
+
+ return ret;
}
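
A sketch of an expected call site, written as if it sat next to the code above; the remote name and the surrounding transport wiring are assumptions, not part of this excerpt:

	static void push_submodules_first(unsigned char new_sha1[20])
	{
		/*
		 * Make sure submodule commits reachable from new_sha1 are on
		 * the remote "origin" before the superproject itself is pushed.
		 */
		if (!push_unpushed_submodules(new_sha1, "origin"))
			die("Failed to push all needed submodules!");
	}
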
static int is_submodule_commit_present(const char *path, unsigned char sha1[20])
if (in_merge_bases(b, &commit, 1))
add_object_array(o, NULL, &merges);
}
+ reset_revision_walk();
/* Now we've got all merges that contain a and b. Prune all
* merges that contain another found merge and save them in
unsigned is_submodule_modified(const char *path, int ignore_untracked);
int merge_submodule(unsigned char result[20], const char *path, const unsigned char base[20],
const unsigned char a[20], const unsigned char b[20], int search);
-int check_submodule_needs_pushing(unsigned char new_sha1[20], const char *remotes_name);
+int find_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name,
+ struct string_list *needs_pushing);
+int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name);
#endif
#
-# Library code for git-p4 tests
+# Library code for git p4 tests
#
. ./test-lib.sh
if ! test_have_prereq PYTHON; then
- skip_all='skipping git-p4 tests; python not available'
+ skip_all='skipping git p4 tests; python not available'
test_done
fi
( p4 -h && p4d -h ) >/dev/null 2>&1 || {
- skip_all='skipping git-p4 tests; no p4 or p4d'
+ skip_all='skipping git p4 tests; no p4 or p4d'
test_done
}
-GITP4="$GIT_BUILD_DIR/contrib/fast-import/git-p4"
-
# Try to pick a unique port: guess a large number, then hope
# no more than one of each test is running.
#
'
test_expect_success 'non-fast-forward push shows help message' '
- test_i18ngrep "To prevent you from losing history, non-fast-forward updates were rejected" output
+ test_i18ngrep "Updates were rejected because" output
'
}
<Location /smart_noexport/>
SetEnv GIT_EXEC_PATH ${GIT_EXEC_PATH}
</Location>
+<Location /smart_custom_env/>
+ SetEnv GIT_EXEC_PATH ${GIT_EXEC_PATH}
+ SetEnv GIT_HTTP_EXPORT_ALL
+ SetEnv GIT_COMMITTER_NAME "Custom User"
+ SetEnv GIT_COMMITTER_EMAIL custom@example.com
+</Location>
ScriptAlias /smart/ ${GIT_EXEC_PATH}/git-http-backend/
ScriptAlias /smart_noexport/ ${GIT_EXEC_PATH}/git-http-backend/
+ScriptAlias /smart_custom_env/ ${GIT_EXEC_PATH}/git-http-backend/
<Directory ${GIT_EXEC_PATH}>
Options None
</Directory>
grep "fatal: cannot exec.*hello.sh" err
'
+test_expect_success POSIXPERM 'unreadable directory in PATH' '
+ mkdir local-command &&
+ test_when_finished "chmod u+rwx local-command && rm -fr local-command" &&
+ git config alias.nitfol "!echo frotz" &&
+ chmod a-rx local-command &&
+ (
+ PATH=./local-command:$PATH &&
+ git nitfol >actual
+ ) &&
+ echo frotz >expect &&
+ test_cmp expect actual
+'
+
test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2012 Heiko Voigt
+#
+
+test_description='Test revision walking api'
+
+. ./test-lib.sh
+
+cat >run_twice_expected <<-EOF
+1st
+ > add b
+ > add a
+2nd
+ > add b
+ > add a
+EOF
+
+test_expect_success 'setup' '
+ echo a > a &&
+ git add a &&
+ git commit -m "add a" &&
+ echo b > b &&
+ git add b &&
+ git commit -m "add b"
+'
+
+test_expect_success 'revision walking can be done twice' '
+ test-revision-walking run-twice >run_twice_actual &&
+ test_cmp run_twice_expected run_twice_actual
+'
+
+test_done
#!/bin/sh
-test_description='external credential helper tests'
-. ./test-lib.sh
-. "$TEST_DIRECTORY"/lib-credential.sh
+test_description='external credential helper tests
-pre_test() {
- test -z "$GIT_TEST_CREDENTIAL_HELPER_SETUP" ||
- eval "$GIT_TEST_CREDENTIAL_HELPER_SETUP"
+This is a tool for authors of external helper tools to sanity-check
+their helpers. If you have written the "git-credential-foo" helper,
+you check it with:
+
+ make GIT_TEST_CREDENTIAL_HELPER=foo t0303-credential-external.sh
+
+This assumes that your helper is capable of both storing and
+retrieving credentials (some helpers may be read-only, and they will
+fail these tests).
+
+Please note that the individual tests do not verify all of the
+preconditions themselves, but rather build on each other. A failing
+test means that tests later in the sequence can return false "OK"
+results.
+
+If your helper supports time-based expiration with a configurable
+timeout, you can test that feature with:
+
+ make GIT_TEST_CREDENTIAL_HELPER=foo \
+ GIT_TEST_CREDENTIAL_HELPER_TIMEOUT="foo --timeout=1" \
+ t0303-credential-external.sh
- # clean before the test in case there is cruft left
- # over from a previous run that would impact results
- helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
-}
+If your helper requires additional setup before the tests are started,
+you can set GIT_TEST_CREDENTIAL_HELPER_SETUP to a sequence of shell
+commands.
+'
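+# For example, if a hypothetical "foo" helper needed to be initialized
+# before it can store anything, you might run (illustrative only):
+#
+#	make GIT_TEST_CREDENTIAL_HELPER=foo \
+#		GIT_TEST_CREDENTIAL_HELPER_SETUP="git credential-foo --init" \
+#		t0303-credential-external.sh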
-post_test() {
- # clean afterwards so that we are good citizens
- # and don't leave cruft in the helper's storage, which
- # might be long-term system storage
- helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
-}
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
if test -z "$GIT_TEST_CREDENTIAL_HELPER"; then
- say "# skipping external helper tests (set GIT_TEST_CREDENTIAL_HELPER)"
-else
- pre_test
- helper_test "$GIT_TEST_CREDENTIAL_HELPER"
- post_test
+ skip_all="used to test external credential helpers"
+ test_done
fi
+test -z "$GIT_TEST_CREDENTIAL_HELPER_SETUP" ||
+ eval "$GIT_TEST_CREDENTIAL_HELPER_SETUP"
+
+# clean before the test in case there is cruft left
+# over from a previous run that would impact results
+helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+
+helper_test "$GIT_TEST_CREDENTIAL_HELPER"
+
if test -z "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"; then
- say "# skipping external helper timeout tests"
+ say "# skipping timeout tests (GIT_TEST_CREDENTIAL_HELPER_TIMEOUT not set)"
else
- pre_test
helper_test_timeout "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"
- post_test
fi
+# clean afterwards so that we are good citizens
+# and don't leave cruft in the helper's storage, which
+# might be long-term system storage
+helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+
test_done
. ./test-lib.sh
test_expect_success setup '
- git config core.bigfilethreshold 200k &&
+ # clone does not allow us to pass core.bigfilethreshold to
+ # new repos, so set core.bigfilethreshold globally
+ git config --global core.bigfilethreshold 200k &&
echo X | dd of=large1 bs=1k seek=2000 &&
echo X | dd of=large2 bs=1k seek=2000 &&
echo X | dd of=large3 bs=1k seek=2000 &&
- echo Y | dd of=huge bs=1k seek=2500
+ echo Y | dd of=huge bs=1k seek=2500 &&
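+	# GIT_ALLOC_LIMIT caps the size of a single allocation; keeping it
+	# below the size of the files above makes any code path that slurps
+	# a whole large blob into core fail loudly instead of silently
+	# defeating the streaming tests that follow.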
+ GIT_ALLOC_LIMIT=1500 &&
+ export GIT_ALLOC_LIMIT
'
test_expect_success 'add a large file or two' '
)
'
+test_expect_success 'diff --raw' '
+ git commit -q -m initial &&
+ echo modified >>large1 &&
+ git add large1 &&
+ git commit -q -m modified &&
+ git diff --raw HEAD^
+'
+
+test_expect_success 'hash-object' '
+ git hash-object large1
+'
+
+test_expect_success 'cat-file a large file' '
+ git cat-file blob :large1 >/dev/null
+'
+
+test_expect_success 'cat-file a large file from a tag' '
+ git tag -m largefile largefiletag :large1 &&
+ git cat-file blob largefiletag >/dev/null
+'
+
+test_expect_success 'git show a large file' '
+	git show :large1 >/dev/null
+'
+
+test_expect_success 'repack' '
+ git repack -ad
+'
+
test_done
check_fsck &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success rewind '
check_have A B C D E F G H I J K L &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 5
+ test_line_count = 5 .git/logs/refs/heads/master
'
test_expect_success 'corrupt and check' '
--stale-fix \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 5 &&
+ test_line_count = 5 .git/logs/refs/heads/master &&
check_fsck "missing blob $F"
'
--stale-fix \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 2 &&
+ test_line_count = 2 .git/logs/refs/heads/master &&
check_fsck "dangling commit $K"
'
test_expect_success 'rewind2' '
test_tick && git reset --hard HEAD~2 &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
-
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success '--expire=never' '
--expire=never \
--expire-unreachable=never \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
-
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success 'gc.reflogexpire=never' '
git config gc.reflogexpire never &&
git config gc.reflogexpireunreachable never &&
git reflog expire --verbose --all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success 'gc.reflogexpire=false' '
git config gc.reflogexpire false &&
git config gc.reflogexpireunreachable false &&
git reflog expire --verbose --all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4 &&
+ test_line_count = 4 .git/logs/refs/heads/master &&
git config --unset gc.reflogexpire &&
git config --unset gc.reflogexpireunreachable
)
'
-test_expect_failure 'empty prefix is actually written out' '
+test_expect_success 'empty prefix is actually written out' '
echo >expected &&
(
cd work &&
test_commit 3 &&
(cd clone &&
test_commit 4 &&
- git branch --track my-side origin/side)
-
+ git branch --track my-side origin/side &&
+ git branch --track local-master master &&
+ git remote add -t master master-only .. &&
+ git fetch master-only &&
+ git branch bad-upstream &&
+ git config branch.bad-upstream.remote master-only &&
+ git config branch.bad-upstream.merge refs/heads/side
+ )
'
+sq="'"
+
full_name () {
(cd clone &&
git rev-parse --symbolic-full-name "$@")
git show -s --pretty=format:%s "$@")
}
+error_message () {
+ (cd clone &&
+ test_must_fail git rev-parse --verify "$@")
+}
+
test_expect_success '@{upstream} resolves to correct full name' '
test refs/remotes/origin/master = "$(full_name @{upstream})"
'
test_expect_success 'merge my-side@{u} records the correct name' '
(
- sq="'\''" &&
cd clone || exit
git checkout master || exit
git branch -D new ;# can fail but is ok
test_cmp expect actual
'
+test_expect_success 'branch@{u} works when tracking a local branch' '
+ test refs/heads/master = "$(full_name local-master@{u})"
+'
+
+test_expect_success 'branch@{u} error message when no upstream' '
+ cat >expect <<-EOF &&
+ error: No upstream configured for branch ${sq}non-tracking${sq}
+ fatal: Needed a single revision
+ EOF
+ error_message non-tracking@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success '@{u} error message when no upstream' '
+ cat >expect <<-EOF &&
+ error: No upstream configured for branch ${sq}master${sq}
+ fatal: Needed a single revision
+ EOF
+ test_must_fail git rev-parse --verify @{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'branch@{u} error message with misspelt branch' '
+ cat >expect <<-EOF &&
+ error: No such branch: ${sq}no-such-branch${sq}
+ fatal: Needed a single revision
+ EOF
+ error_message no-such-branch@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success '@{u} error message when not on a branch' '
+ cat >expect <<-EOF &&
+ error: HEAD does not point to a branch
+ fatal: Needed a single revision
+ EOF
+ git checkout HEAD^0 &&
+ test_must_fail git rev-parse --verify @{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'branch@{u} error message if upstream branch not fetched' '
+ cat >expect <<-EOF &&
+ error: Upstream branch ${sq}refs/heads/side${sq} not stored as a remote-tracking branch
+ fatal: Needed a single revision
+ EOF
+ error_message bad-upstream@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'pull works when tracking a local branch' '
+(
+ cd clone &&
+ git checkout local-master &&
+ git pull
+)
+'
+
+# makes sense if the previous one succeeded
+test_expect_success '@{u} works when tracking a local branch' '
+ test refs/heads/master = "$(full_name @{u})"
+'
+
cat >expect <<EOF
commit 8f489d01d0cc65c3b0f09504ec50b5ed02a70bd5
Reflog: master@{0} (C O Mitter <committer@example.com>)
rm -f path* .merge_* out .git/index &&
git read-tree $t1 &&
git checkout-index --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
rm -f path* .merge_* out .git/index &&
git read-tree $t1 &&
git checkout-index -a --temp >out &&
-test $(wc -l <out) = 5 &&
+test_line_count = 5 out &&
for f in path0 path1 path3 path4 asubdir/path5
do
test $(grep $f out | cut "-d " -f2) = $f &&
'checkout one stage 2 to temporary file' '
rm -f path* .merge_* out &&
git checkout-index --stage=2 --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
'checkout all stage 2 to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --all --stage=2 --temp >out &&
-test $(wc -l <out) = 3 &&
+test_line_count = 3 out &&
for f in path1 path2 path4
do
test $(grep $f out | cut "-d " -f2) = $f &&
'checkout all stages/one file to nothing' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path0 >out &&
-test $(wc -l <out) = 0'
+test_line_count = 0 out'
test_expect_success \
'checkout all stages/one file to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
cut "-d " -f1 out | (read s1 s2 s3 &&
test -f $s1 &&
'checkout some stages/one file to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path2 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path2 &&
cut "-d " -f1 out | (read s1 s2 s3 &&
test $s1 = . &&
'checkout all stages/all files to temporary files' '
rm -f path* .merge_* out &&
git checkout-index -a --stage=all --temp >out &&
-test $(wc -l <out) = 5'
+test_line_count = 5 out'
test_expect_success \
'-- path0: no entry' '
'checkout --temp within subdir' '
(cd asubdir &&
git checkout-index -a --stage=all >out &&
- test $(wc -l <out) = 1 &&
+ test_line_count = 1 out &&
test $(grep path5 out | cut "-d " -f2) = path5 &&
grep path5 out | cut "-d " -f1 | (read s1 s2 s3 &&
test -f ../$s1 &&
rm -f .git/index &&
git read-tree $t4 &&
git checkout-index --temp -a >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = a &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
git config branch.child.merge refs/heads/master &&
git checkout child^ &&
git checkout child >stdout &&
- test_cmp expect stdout
+ test_i18ncmp expect stdout
'
test_done
prime_resolve_undo &&
git update-index --unresolve fi/le &&
git ls-files -u >actual &&
- test $(wc -l <actual) = 3
+ test_line_count = 3 actual
'
test_expect_success 'rerere and rerere forget' '
p1='tabs ," (dq) and spaces'
p2='just space'
-cat >"$p0" <<\EOF
-1. A quick brown fox jumps over the lazy cat, oops dog.
-2. A quick brown fox jumps over the lazy cat, oops dog.
-3. A quick brown fox jumps over the lazy cat, oops dog.
-EOF
-
-cat 2>/dev/null >"$p1" "$p0"
-echo 'Foo Bar Baz' >"$p2"
+test_expect_success 'setup' '
+ cat >"$p0" <<-\EOF &&
+ 1. A quick brown fox jumps over the lazy cat, oops dog.
+ 2. A quick brown fox jumps over the lazy cat, oops dog.
+ 3. A quick brown fox jumps over the lazy cat, oops dog.
+ EOF
+
+ { cat "$p0" >"$p1" || :; } &&
+ { echo "Foo Bar Baz" >"$p2" || :; } &&
+
+ if test -f "$p1" && cmp "$p0" "$p1"
+ then
+ test_set_prereq TABS_IN_FILENAMES
+ fi
+'
-if test -f "$p1" && cmp "$p0" "$p1"
+if ! test_have_prereq TABS_IN_FILENAMES
then
- test_set_prereq TABS_IN_FILENAMES
-else
# since FAT/NTFS does not allow tabs in filenames, skip this test
- say 'Your filesystem does not allow tabs in filenames'
+ skip_all='Your filesystem does not allow tabs in filenames'
+ test_done
fi
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'just space
-no-funny' >expected
-"
+test_expect_success 'setup: populate index and tree' '
+ git update-index --add "$p0" "$p2" &&
+ t0=$(git write-tree)
+'
-test_expect_success TABS_IN_FILENAMES 'git ls-files no-funny' \
- 'git update-index --add "$p0" "$p2" &&
+test_expect_success 'ls-files prints space in filename verbatim' '
+ printf "%s\n" "just space" no-funny >expected &&
git ls-files >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-t0=`git write-tree` &&
-echo "$t0" >t0 &&
+ test_cmp expected current
+'
-cat > expected <<\EOF
-just space
-no-funny
-"tabs\t,\" (dq) and spaces"
-EOF
+test_expect_success 'setup: add funny filename' '
+ git update-index --add "$p1" &&
+ t1=$(git write-tree)
'
-test_expect_success TABS_IN_FILENAMES 'git ls-files with-funny' \
- 'git update-index --add "$p1" &&
+test_expect_success 'ls-files quotes funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ "tabs\t,\" (dq) and spaces"
+ EOF
git ls-files >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'just space
-no-funny
-tabs ,\" (dq) and spaces' >expected
-"
-
-test_expect_success TABS_IN_FILENAMES 'git ls-files -z with-funny' \
- 'git ls-files -z | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-t1=`git write-tree` &&
-echo "$t1" >t1 &&
-
-cat > expected <<\EOF
-just space
-no-funny
-"tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git ls-tree with funny' \
- 'git ls-tree -r $t1 | sed -e "s/^[^ ]* //" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-A "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-index with-funny' \
- 'git diff-index --name-status $t0 >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree with-funny' \
- 'git diff-tree --name-status $t0 $t1 >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'A
-tabs ,\" (dq) and spaces' >expected
-"
-
-test_expect_success TABS_IN_FILENAMES 'git diff-index -z with-funny' \
- 'git diff-index -z --name-status $t0 | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree -z with-funny' \
- 'git diff-tree -z --name-status $t0 $t1 | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-CNUM no-funny "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree -C with-funny' \
- 'git diff-tree -C --find-copies-harder --name-status \
- $t0 $t1 | sed -e 's/^C[0-9]*/CNUM/' >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-RNUM no-funny "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git update-index --force-remove "$p0" &&
- git diff-index -M --name-status \
- $t0 | sed -e 's/^R[0-9]*/RNUM/' >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
-similarity index NUM%
-rename from no-funny
-rename to "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git diff-index -M -p $t0 |
- sed -e "s/index [0-9]*%/index NUM%/" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-chmod +x "$p1" &&
-cat > expected <<\EOF
-diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
-old mode 100644
-new mode 100755
-similarity index NUM%
-rename from no-funny
-rename to "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git diff-index -M -p $t0 |
- sed -e "s/index [0-9]*%/index NUM%/" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat >expected <<\EOF
- "tabs\t,\" (dq) and spaces"
- 1 file changed, 0 insertions(+), 0 deletions(-)
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree rename with-funny applied' \
- 'git diff-index -M -p $t0 |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
- no-funny
- "tabs\t,\" (dq) and spaces"
- 2 files changed, 3 insertions(+), 3 deletions(-)
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny applied' \
- 'git diff-index -p $t0 |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git apply non-git diff' \
- 'git diff-index -p $t0 |
- sed -ne "/^[-+@]/p" |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
+ test_cmp expected current
+'
+
+test_expect_success 'ls-files -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ tabs ," (dq) and spaces
+ EOF
+ git ls-files -z >ls-files.z &&
+ perl -pe "y/\000/\012/" <ls-files.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'ls-tree quotes funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ "tabs\t,\" (dq) and spaces"
+ EOF
+ git ls-tree -r $t1 >ls-tree &&
+ sed -e "s/^[^ ]* //" <ls-tree >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index --name-status quotes funny filename' '
+ cat >expected <<-\EOF &&
+ A "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index --name-status $t0 >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree --name-status quotes funny filename' '
+ cat >expected <<-\EOF &&
+ A "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-tree --name-status $t0 $t1 >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ A
+ tabs ," (dq) and spaces
+ EOF
+ git diff-index -z --name-status $t0 >diff-index.z &&
+ perl -pe "y/\000/\012/" <diff-index.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ A
+ tabs ," (dq) and spaces
+ EOF
+ git diff-tree -z --name-status $t0 $t1 >diff-tree.z &&
+ perl -pe y/\\000/\\012/ <diff-tree.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree --find-copies-harder quotes funny filename' '
+ cat >expected <<-\EOF &&
+ CNUM no-funny "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-tree -C --find-copies-harder --name-status $t0 $t1 >out &&
+ sed -e "s/^C[0-9]*/CNUM/" <out >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'setup: remove unfunny index entry' '
+ git update-index --force-remove "$p0"
+'
+
+test_expect_success 'diff-tree -M quotes funny filename' '
+ cat >expected <<-\EOF &&
+ RNUM no-funny "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M --name-status $t0 >out &&
+ sed -e "s/^R[0-9]*/RNUM/" <out >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index -M -p quotes funny filename' '
+ cat >expected <<-\EOF &&
+ diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
+ similarity index NUM%
+ rename from no-funny
+ rename to "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ sed -e "s/index [0-9]*%/index NUM%/" <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'setup: mode change' '
+ chmod +x "$p1"
+'
+
+test_expect_success 'diff-index -M -p with mode change quotes funny filename' '
+ cat >expected <<-\EOF &&
+ diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
+ old mode 100644
+ new mode 100755
+ similarity index NUM%
+ rename from no-funny
+ rename to "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ sed -e "s/index [0-9]*%/index NUM%/" <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diffstat for rename quotes funny filename' '
+ cat >expected <<-\EOF &&
+ "tabs\t,\" (dq) and spaces"
+ 1 file changed, 0 insertions(+), 0 deletions(-)
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ git apply --stat <diff >diffstat &&
+ sed -e "s/|.*//" -e "s/ *\$//" <diffstat >current &&
+ test_i18ncmp expected current
+'
+
+test_expect_success 'numstat for rename quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'numstat without -M quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 3 no-funny
+ 3 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -p $t0 >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'numstat for non-git rename diff quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 3 no-funny
+ 3 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -p $t0 >git-diff &&
+ sed -ne "/^[-+@]/p" <git-diff >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
test_done
EOF
git notes merge --commit &&
# No .git/NOTES_MERGE_* files left
- test_must_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
+ test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# Merge commit has pre-merge y and pre-merge z as parents
test "$(git rev-parse refs/notes/m^1)" = "$(cat pre_merge_y)" &&
test_expect_success 'abort notes merge' '
git notes merge --abort &&
# No .git/NOTES_MERGE_* files left
- test_must_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
+ test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# m has not moved (still == y)
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)" &&
# Finalize merge
git notes merge --commit &&
# No .git/NOTES_MERGE_* files left
- test_must_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
+ test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# Merge commit has pre-merge y and pre-merge z as parents
test "$(git rev-parse refs/notes/m^1)" = "$(cat pre_merge_y)" &&
test_expect_success 'resolve situation by aborting the notes merge' '
git notes merge --abort &&
# No .git/NOTES_MERGE_* files left
- test_must_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
+ test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# m has not moved (still == w)
test "$(git rev-parse refs/notes/m)" = "$(git rev-parse refs/notes/w)" &&
verify_notes z
'
+cat >expect_notes <<EOF
+foo
+bar
+EOF
+
+test_expect_success 'switch cwd before committing notes merge' '
+ git notes add -m foo HEAD &&
+ git notes --ref=other add -m bar HEAD &&
+ test_must_fail git notes merge refs/notes/other &&
+ (
+ cd .git/NOTES_MERGE_WORKTREE &&
+ echo "foo" > $(git rev-parse HEAD) &&
+ echo "bar" >> $(git rev-parse HEAD) &&
+ git notes merge --commit
+ ) &&
+ git notes show HEAD > actual_notes &&
+ test_cmp expect_notes actual_notes
+'
+
test_done
FAKE_LINES="1 squash 2 3" git rebase -i A
'
+test_expect_success 'submodule conflict setup' '
+ git tag submodule-base &&
+ git checkout HEAD^ &&
+ (
+ cd sub && git checkout HEAD^ && echo 4 >elif &&
+ git add elif && git commit -m "submodule conflict"
+ ) &&
+ git add sub &&
+ test_tick &&
+ git commit -m "Conflict in submodule" &&
+ git tag submodule-topic
+'
+
+test_expect_success 'rebase -i continue with only submodule staged' '
+ test_must_fail git rebase -i submodule-base &&
+ git add sub &&
+ git rebase --continue &&
+ test $(git rev-parse submodule-base) != $(git rev-parse HEAD)
+'
+
+test_expect_success 'rebase -i continue with unstaged submodule' '
+ git checkout submodule-topic &&
+ git reset --hard &&
+ test_must_fail git rebase -i submodule-base &&
+ git reset &&
+ git rebase --continue &&
+ test $(git rev-parse submodule-base) = $(git rev-parse HEAD)
+'
+
test_expect_success 'avoid unnecessary reset' '
git checkout master &&
+ git reset --hard &&
test-chmtime =123456789 file3 &&
git update-index --refresh &&
HEAD=$(git rev-parse HEAD) &&
test_tick &&
git rebase $2 -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code $1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep first | wc -l)
test_tick &&
git rebase $2 -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code $1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 2 = $(git cat-file commit HEAD^ | grep first | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 4 = $(wc -l <actual) &&
+ test_line_count = 4 actual &&
git diff --exit-code final-missquash &&
test 0 = $(git rev-list final-missquash...HEAD | wc -l)
'
test_tick &&
git rebase --autosquash -i HEAD~4 &&
git log --oneline >actual &&
- test 4 = $(wc -l <actual) &&
+ test_line_count = 4 actual &&
git diff --exit-code final-multisquash &&
test 1 = "$(git cat-file blob HEAD^^:file1)" &&
test 2 = $(git cat-file commit HEAD^^ | grep first | wc -l) &&
test_tick &&
git rebase --autosquash -i HEAD~4 &&
git log --oneline >actual &&
- test 5 = $(wc -l <actual) &&
+ test_line_count = 5 actual &&
git diff --exit-code final-presquash &&
test 0 = "$(git cat-file blob HEAD^^:file1)" &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-shasquash &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep squash | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-longshasquash &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep squash | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-commit-$1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test $2 = $(git cat-file commit HEAD^ | grep first | wc -l)
'
test_expect_success 'cherry-pick first..fourth works' '
+ git checkout -f master &&
+ git reset --hard first &&
+ test_tick &&
+ git cherry-pick first..fourth &&
+ git diff --quiet other &&
+ git diff --quiet HEAD other &&
+ check_head_differs_from fourth
+'
+
+test_expect_success 'output to keep user entertained during multi-pick' '
cat <<-\EOF >expected &&
[master OBJID] second
Author: A U Thor <author@example.com>
git reset --hard first &&
test_tick &&
git cherry-pick first..fourth >actual &&
+ sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
+ test_line_count -ge 3 actual.fuzzy &&
+ test_i18ncmp expected actual.fuzzy
+'
+
+test_expect_success 'cherry-pick --strategy resolve first..fourth works' '
+ git checkout -f master &&
+ git reset --hard first &&
+ test_tick &&
+ git cherry-pick --strategy resolve first..fourth &&
git diff --quiet other &&
git diff --quiet HEAD other &&
-
- sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
- test_cmp expected actual.fuzzy &&
check_head_differs_from fourth
'
-test_expect_success 'cherry-pick --strategy resolve first..fourth works' '
+test_expect_success 'output during multi-pick indicates merge strategy' '
cat <<-\EOF >expected &&
Trying simple merge.
[master OBJID] second
git reset --hard first &&
test_tick &&
git cherry-pick --strategy resolve first..fourth >actual &&
- git diff --quiet other &&
- git diff --quiet HEAD other &&
sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
- test_cmp expected actual.fuzzy &&
- check_head_differs_from fourth
+ test_i18ncmp expected actual.fuzzy
'
test_expect_success 'cherry-pick --ff first..fourth works' '
! grep "^+15" actual
'
+test_expect_success 'patch mode ignores unmerged entries' '
+ git reset --hard &&
+ test_commit conflict &&
+ test_commit non-conflict &&
+ git checkout -b side &&
+ test_commit side conflict.t &&
+ git checkout master &&
+ test_commit master conflict.t &&
+ test_must_fail git merge side &&
+ echo changed >non-conflict.t &&
+ echo y | git add -p >output &&
+ ! grep a/conflict.t output &&
+ cat >expected <<-\EOF &&
+ * Unmerged path conflict.t
+ diff --git a/non-conflict.t b/non-conflict.t
+ index f766221..5ea2ed4 100644
+ --- a/non-conflict.t
+ +++ b/non-conflict.t
+ @@ -1 +1 @@
+ -non-conflict
+ +changed
+ EOF
+ git diff --cached >diff &&
+ test_cmp expected diff
+'
+
test_done
git config --unset-all i18n.commitencoding &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual)
+ test_line_count = 3 actual
'
}
test $(git ls-files --modified | wc -l) -eq 1
'
-test_expect_success 'stash show - stashes on stack, stash-like argument' '
+test_expect_success 'stash show format defaults to --stat' '
git stash clear &&
test_when_finished "git reset --hard HEAD" &&
git reset --hard &&
1 file changed, 1 insertion(+)
EOF
git stash show ${STASH_ID} >actual &&
+ test_i18ncmp expected actual
+'
+
+test_expect_success 'stash show - stashes on stack, stash-like argument' '
+ git stash clear &&
+ test_when_finished "git reset --hard HEAD" &&
+ git reset --hard &&
+ echo foo >> file &&
+ git stash &&
+ test_when_finished "git stash drop" &&
+ echo bar >> file &&
+ STASH_ID=$(git stash create) &&
+ git reset --hard &&
+ echo "1 0 file" >expected &&
+ git stash show --numstat ${STASH_ID} >actual &&
test_cmp expected actual
'
echo foo >> file &&
STASH_ID=$(git stash create) &&
git reset --hard &&
- cat >expected <<-EOF &&
- file | 1 +
- 1 file changed, 1 insertion(+)
- EOF
- git stash show ${STASH_ID} >actual &&
+ echo "1 0 file" >expected &&
+ git stash show --numstat ${STASH_ID} >actual &&
test_cmp expected actual
'
. ./test-lib.sh
+cat >expect.binary-numstat <<\EOF
+1 1 a
+- - b
+1 1 c
+- - d
+EOF
+
test_expect_success 'prepare repository' \
'echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
git update-index --add a b c d &&
d | Bin
4 files changed, 2 insertions(+), 2 deletions(-)
EOF
-test_expect_success 'diff without --binary' \
- 'git diff | git apply --stat --summary >current &&
- test_cmp expected current'
+test_expect_success '"apply --stat" output for binary file change' '
+ git diff >diff &&
+ git apply --stat --summary <diff >current &&
+ test_i18ncmp expected current
+'
-test_expect_success 'diff with --binary' \
- 'git diff --binary | git apply --stat --summary >current &&
- test_cmp expected current'
+test_expect_success 'apply --numstat notices binary file change' '
+ git diff >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expect.binary-numstat current
+'
+
+test_expect_success 'apply --numstat understands diff --binary format' '
+ git diff --binary >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expect.binary-numstat current
+'
# apply needs to be able to skip the binary material correctly
# in order to report the line number of a corrupt patch.
} >"$actual" &&
if test -f "$expect"
then
- test_cmp "$expect" "$actual" &&
+ case $cmd in
+ *format-patch* | *-stat*)
+ test_i18ncmp "$expect" "$actual";;
+ *)
+ test_cmp "$expect" "$actual";;
+ esac &&
rm -f "$actual"
else
# this is to help developing new tests.
'
cat > expect << EOF
----
- file | 16 ++++++++++++++++
- 1 file changed, 16 insertions(+)
-
-diff --git a/file b/file
index 40f36c6..2dc5c23 100644
--- a/file
+++ b/file
test_expect_success 'format-patch respects -U' '
git format-patch -U4 -2 &&
- sed -e "1,/^\$/d" -e "/^+5/q" < 0001-This-is-an-excessively-long-subject-line-for-a-messa.patch > output &&
+ sed -e "1,/^diff/d" -e "/^+5/q" \
+ <0001-This-is-an-excessively-long-subject-line-for-a-messa.patch \
+ >output &&
test_cmp expect output
'
test_cmp expect actual
'
-test_expect_success TABS_IN_FILENAMES 'setup expected files' '
-cat >expect <<\EOF
- pathname.1 => "Rpathname\twith HT.0" | 0
- pathname.3 => "Rpathname\nwith LF.0" | 0
- "pathname\twith HT.3" => "Rpathname\nwith LF.1" | 0
- pathname.2 => Rpathname with SP.0 | 0
- "pathname\twith HT.2" => Rpathname with SP.1 | 0
- pathname.0 => Rpathname.0 | 0
- "pathname\twith HT.0" => Rpathname.1 | 0
- 7 files changed, 0 insertions(+), 0 deletions(-)
-EOF
+test_expect_success TABS_IN_FILENAMES 'git diff --numstat -M HEAD' '
+ cat >expect <<-\EOF &&
+ 0 0 pathname.1 => "Rpathname\twith HT.0"
+ 0 0 pathname.3 => "Rpathname\nwith LF.0"
+ 0 0 "pathname\twith HT.3" => "Rpathname\nwith LF.1"
+ 0 0 pathname.2 => Rpathname with SP.0
+ 0 0 "pathname\twith HT.2" => Rpathname with SP.1
+ 0 0 pathname.0 => Rpathname.0
+ 0 0 "pathname\twith HT.0" => Rpathname.1
+ EOF
+ git diff --numstat -M HEAD >actual &&
+ test_cmp expect actual
'
test_expect_success TABS_IN_FILENAMES 'git diff --stat -M HEAD' '
+ cat >expect <<-\EOF &&
+ pathname.1 => "Rpathname\twith HT.0" | 0
+ pathname.3 => "Rpathname\nwith LF.0" | 0
+ "pathname\twith HT.3" => "Rpathname\nwith LF.1" | 0
+ pathname.2 => Rpathname with SP.0 | 0
+ "pathname\twith HT.2" => Rpathname with SP.1 | 0
+ pathname.0 => Rpathname.0 | 0
+ "pathname\twith HT.0" => Rpathname.1 | 0
+ 7 files changed, 0 insertions(+), 0 deletions(-)
+ EOF
git diff --stat -M HEAD >actual &&
- test_cmp expect actual
+ test_i18ncmp expect actual
'
test_done
test_expect_success 'diffstat does not run textconv' '
echo file diff=fail >.gitattributes &&
git diff --stat HEAD^ HEAD >actual &&
- test_cmp expect.stat actual
+ test_i18ncmp expect.stat actual &&
+
+ head -n1 <expect.stat >expect.line1 &&
+ head -n1 <actual >actual.line1 &&
+ test_cmp expect.line1 actual.line1
'
# restore working setup
echo file diff=foo >.gitattributes
grep "GIT binary patch" diff
'
-test_expect_success 'rewrite diff --stat shows binary changes' '
+test_expect_success 'rewrite diff --numstat shows binary changes' '
+ git diff -B --numstat --summary >diff &&
+ grep -e "- - " diff &&
+ grep " rewrite file" diff
+'
+
+test_expect_success 'diff --stat counts binary rewrite as 0 lines' '
git diff -B --stat --summary >diff &&
grep "Bin" diff &&
- grep "0 insertions.*0 deletions" diff &&
+ test_i18ngrep "0 insertions.*0 deletions" diff &&
grep " rewrite file" diff
'
test_description='word diff colors'
. ./test-lib.sh
+. "$TEST_DIRECTORY"/diff-lib.sh
cat >pre.simple <<-\EOF
h(4)
word_diff --word-diff=plain --word-diff=none
'
+test_expect_success 'unset default driver' '
+ test_unconfig diff.wordregex
+'
+
test_language_driver bibtex
test_language_driver cpp
test_language_driver csharp
word_diff --word-diff=plain
'
+test_expect_success 'setup history with two files' '
+ echo "a b; c" >a.tex &&
+ echo "a b; c" >z.txt &&
+ git add a.tex z.txt &&
+ git commit -minitial &&
+
+ # modify both
+ echo "a bx; c" >a.tex &&
+ echo "a bx; c" >z.txt &&
+ git commit -mmodified -a
+'
+
+test_expect_success 'wordRegex for the first file does not apply to the second' '
+ echo "*.tex diff=tex" >.gitattributes &&
+ git config diff.tex.wordRegex "[a-z]+|." &&
+ cat >expect <<-\EOF &&
+ diff --git a/a.tex b/a.tex
+ --- a/a.tex
+ +++ b/a.tex
+ @@ -1 +1 @@
+ a [-b-]{+bx+}; c
+ diff --git a/z.txt b/z.txt
+ --- a/z.txt
+ +++ b/z.txt
+ @@ -1 +1 @@
+ a [-b;-]{+bx;+} c
+ EOF
+ git diff --word-diff HEAD~ >actual &&
+ compare_diff_patch expect actual
+'
+
test_done
test_expect_success 'git diff-tree HEAD^ HEAD' '
git diff-tree --quiet HEAD^ HEAD >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
test_expect_success 'git diff-tree HEAD^ HEAD -- a' '
git diff-tree --quiet HEAD^ HEAD -- a >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-tree HEAD^ HEAD -- b' '
git diff-tree --quiet HEAD^ HEAD -- b >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
# this diff outputs one line: sha1 of the given head
test_expect_success 'echo HEAD | git diff-tree --stdin' '
echo $(git rev-parse HEAD) | git diff-tree --quiet --stdin >cnt
- test $? = 1 && test $(wc -l <cnt) = 1
+ test $? = 1 && test_line_count = 1 cnt
'
test_expect_success 'git diff-tree HEAD HEAD' '
git diff-tree --quiet HEAD HEAD >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-files' '
git diff-files --quiet >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD' '
git diff-index --quiet --cached HEAD >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD^' '
git diff-index --quiet --cached HEAD^ >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD^' '
echo text >>b &&
echo 3 >c &&
git add . && {
git diff-index --quiet --cached HEAD^ >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-tree -Stext HEAD^ HEAD -- b' '
git commit -m "text in b" && {
git diff-tree --quiet -Stext HEAD^ HEAD -- b >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-tree -Snot-found HEAD^ HEAD -- b' '
git diff-tree --quiet -Snot-found HEAD^ HEAD -- b >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-files' '
echo 3 >>c && {
git diff-files --quiet >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-index --cached HEAD' '
git update-index c && {
git diff-index --quiet --cached HEAD >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
'
cat > expected <<\EOF
- bar => sub/bar | Bin 5 -> 5 bytes
- foo => sub/foo | 0
- 2 files changed, 0 insertions(+), 0 deletions(-)
+- - bar => sub/bar
+0 0 foo => sub/foo
diff --git a/bar b/sub/bar
similarity index 100%
EOF
test_expect_success 'git show -C -C report renames' '
- git show -C -C --raw --binary --stat | tail -n 12 > current &&
+ git show -C -C --raw --binary --numstat >patch-with-stat &&
+ tail -n 11 patch-with-stat >current &&
test_cmp expected current
'
"
}
+check_numstat() {
+expect=$1; shift
+cat >expected <<EOF
+1 0 $expect
+EOF
+test_expect_success "--numstat $*" "
+ echo '1 0 $expect' >expected &&
+ git diff --numstat $* HEAD^ >actual &&
+ test_cmp expected actual
+"
+}
+
check_stat() {
expect=$1; shift
cat >expected <<EOF
EOF
test_expect_success "--stat $*" "
git diff --stat $* HEAD^ >actual &&
- test_cmp expected actual
+ test_i18ncmp expected actual
"
}
"
}
-for type in diff stat raw; do
+for type in diff numstat stat raw; do
check_$type file2 --relative=subdir/
check_$type file2 --relative=subdir
check_$type dir/file2 --relative=sub
'
cat <<EOF >expect_diff_stat
- changed/text | 2 +-
- dst/copy/changed/text | 10 ++++++++++
- dst/copy/rearranged/text | 10 ++++++++++
- dst/copy/unchanged/text | 10 ++++++++++
- dst/move/changed/text | 10 ++++++++++
- dst/move/rearranged/text | 10 ++++++++++
- dst/move/unchanged/text | 10 ++++++++++
- rearranged/text | 2 +-
- src/move/changed/text | 10 ----------
- src/move/rearranged/text | 10 ----------
- src/move/unchanged/text | 10 ----------
- 11 files changed, 62 insertions(+), 32 deletions(-)
+1 1 changed/text
+10 0 dst/copy/changed/text
+10 0 dst/copy/rearranged/text
+10 0 dst/copy/unchanged/text
+10 0 dst/move/changed/text
+10 0 dst/move/rearranged/text
+10 0 dst/move/unchanged/text
+1 1 rearranged/text
+0 10 src/move/changed/text
+0 10 src/move/rearranged/text
+0 10 src/move/unchanged/text
EOF
cat <<EOF >expect_diff_stat_M
- changed/text | 2 +-
- dst/copy/changed/text | 10 ++++++++++
- dst/copy/rearranged/text | 10 ++++++++++
- dst/copy/unchanged/text | 10 ++++++++++
- {src => dst}/move/changed/text | 2 +-
- {src => dst}/move/rearranged/text | 2 +-
- {src => dst}/move/unchanged/text | 0
- rearranged/text | 2 +-
- 8 files changed, 34 insertions(+), 4 deletions(-)
+1 1 changed/text
+10 0 dst/copy/changed/text
+10 0 dst/copy/rearranged/text
+10 0 dst/copy/unchanged/text
+1 1 {src => dst}/move/changed/text
+1 1 {src => dst}/move/rearranged/text
+0 0 {src => dst}/move/unchanged/text
+1 1 rearranged/text
EOF
cat <<EOF >expect_diff_stat_CC
- changed/text | 2 +-
- {src => dst}/copy/changed/text | 2 +-
- {src => dst}/copy/rearranged/text | 2 +-
- {src => dst}/copy/unchanged/text | 0
- {src => dst}/move/changed/text | 2 +-
- {src => dst}/move/rearranged/text | 2 +-
- {src => dst}/move/unchanged/text | 0
- rearranged/text | 2 +-
- 8 files changed, 6 insertions(+), 6 deletions(-)
-EOF
-
-test_expect_success 'sanity check setup (--stat)' '
- git diff --stat HEAD^..HEAD >actual_diff_stat &&
+1 1 changed/text
+1 1 {src => dst}/copy/changed/text
+1 1 {src => dst}/copy/rearranged/text
+0 0 {src => dst}/copy/unchanged/text
+1 1 {src => dst}/move/changed/text
+1 1 {src => dst}/move/rearranged/text
+0 0 {src => dst}/move/unchanged/text
+1 1 rearranged/text
+EOF
+
+test_expect_success 'sanity check setup (--numstat)' '
+ git diff --numstat HEAD^..HEAD >actual_diff_stat &&
test_cmp expect_diff_stat actual_diff_stat &&
- git diff --stat -M HEAD^..HEAD >actual_diff_stat_M &&
+ git diff --numstat -M HEAD^..HEAD >actual_diff_stat_M &&
test_cmp expect_diff_stat_M actual_diff_stat_M &&
- git diff --stat -C -C HEAD^..HEAD >actual_diff_stat_CC &&
+ git diff --numstat -C -C HEAD^..HEAD >actual_diff_stat_CC &&
test_cmp expect_diff_stat_CC actual_diff_stat_CC
'
2 files changed, 2 insertions(+)
EOF
git diff --stat --stat-count=2 >actual &&
- test_cmp expect actual
+ test_i18ncmp expect actual
'
test_done
test_expect_success "$title" '
git apply --stat --summary \
<"$TEST_DIRECTORY/t4100/t-apply-$num.patch" >current &&
- test_cmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
+ test_i18ncmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
'
test_expect_success "$title with recount" '
sed -e "$UNC" <"$TEST_DIRECTORY/t4100/t-apply-$num.patch" |
git apply --recount --stat --summary >current &&
- test_cmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
+ test_i18ncmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
'
done <<\EOF
rename
git reset --hard &&
touch empty-file &&
test_tick &&
- { git am empty-file > actual 2>&1 && false || :; } &&
+ test_must_fail git am empty-file 2>actual &&
echo Patch format detection failed. >expected &&
- test_cmp expected actual
+ test_i18ncmp expected actual
'
test_done
)
'
+cat >expect <<\EOF
+* commit COMMIT_OBJECT_NAME
+|\ Merge: MERGE_PARENTS
+| | Author: A U Thor <author@example.com>
+| |
+| | Merge HEADS DESCRIPTION
+| |
+| * commit COMMIT_OBJECT_NAME
+| | Author: A U Thor <author@example.com>
+| |
+| | reach
+| | ---
+| | reach.t | 1 +
+| | 1 file changed, 1 insertion(+)
+| |
+| | diff --git a/reach.t b/reach.t
+| | new file mode 100644
+| | index 0000000..10c9591
+| | --- /dev/null
+| | +++ b/reach.t
+| | @@ -0,0 +1 @@
+| | +reach
+| |
+| \
+*-. \ commit COMMIT_OBJECT_NAME
+|\ \ \ Merge: MERGE_PARENTS
+| | | | Author: A U Thor <author@example.com>
+| | | |
+| | | | Merge HEADS DESCRIPTION
+| | | |
+| | * | commit COMMIT_OBJECT_NAME
+| | |/ Author: A U Thor <author@example.com>
+| | |
+| | | octopus-b
+| | | ---
+| | | octopus-b.t | 1 +
+| | | 1 file changed, 1 insertion(+)
+| | |
+| | | diff --git a/octopus-b.t b/octopus-b.t
+| | | new file mode 100644
+| | | index 0000000..d5fcad0
+| | | --- /dev/null
+| | | +++ b/octopus-b.t
+| | | @@ -0,0 +1 @@
+| | | +octopus-b
+| | |
+| * | commit COMMIT_OBJECT_NAME
+| |/ Author: A U Thor <author@example.com>
+| |
+| | octopus-a
+| | ---
+| | octopus-a.t | 1 +
+| | 1 file changed, 1 insertion(+)
+| |
+| | diff --git a/octopus-a.t b/octopus-a.t
+| | new file mode 100644
+| | index 0000000..11ee015
+| | --- /dev/null
+| | +++ b/octopus-a.t
+| | @@ -0,0 +1 @@
+| | +octopus-a
+| |
+* | commit COMMIT_OBJECT_NAME
+|/ Author: A U Thor <author@example.com>
+|
+| seventh
+| ---
+| seventh.t | 1 +
+| 1 file changed, 1 insertion(+)
+|
+| diff --git a/seventh.t b/seventh.t
+| new file mode 100644
+| index 0000000..9744ffc
+| --- /dev/null
+| +++ b/seventh.t
+| @@ -0,0 +1 @@
+| +seventh
+|
+* commit COMMIT_OBJECT_NAME
+|\ Merge: MERGE_PARENTS
+| | Author: A U Thor <author@example.com>
+| |
+| | Merge branch 'tangle'
+| |
+| * commit COMMIT_OBJECT_NAME
+| |\ Merge: MERGE_PARENTS
+| | | Author: A U Thor <author@example.com>
+| | |
+| | | Merge branch 'side' (early part) into tangle
+| | |
+| * | commit COMMIT_OBJECT_NAME
+| |\ \ Merge: MERGE_PARENTS
+| | | | Author: A U Thor <author@example.com>
+| | | |
+| | | | Merge branch 'master' (early part) into tangle
+| | | |
+| * | | commit COMMIT_OBJECT_NAME
+| | | | Author: A U Thor <author@example.com>
+| | | |
+| | | | tangle-a
+| | | | ---
+| | | | tangle-a | 1 +
+| | | | 1 file changed, 1 insertion(+)
+| | | |
+| | | | diff --git a/tangle-a b/tangle-a
+| | | | new file mode 100644
+| | | | index 0000000..7898192
+| | | | --- /dev/null
+| | | | +++ b/tangle-a
+| | | | @@ -0,0 +1 @@
+| | | | +a
+| | | |
+* | | | commit COMMIT_OBJECT_NAME
+|\ \ \ \ Merge: MERGE_PARENTS
+| | | | | Author: A U Thor <author@example.com>
+| | | | |
+| | | | | Merge branch 'side'
+| | | | |
+| * | | | commit COMMIT_OBJECT_NAME
+| | |_|/ Author: A U Thor <author@example.com>
+| |/| |
+| | | | side-2
+| | | | ---
+| | | | 2 | 1 +
+| | | | 1 file changed, 1 insertion(+)
+| | | |
+| | | | diff --git a/2 b/2
+| | | | new file mode 100644
+| | | | index 0000000..0cfbf08
+| | | | --- /dev/null
+| | | | +++ b/2
+| | | | @@ -0,0 +1 @@
+| | | | +2
+| | | |
+| * | | commit COMMIT_OBJECT_NAME
+| | | | Author: A U Thor <author@example.com>
+| | | |
+| | | | side-1
+| | | | ---
+| | | | 1 | 1 +
+| | | | 1 file changed, 1 insertion(+)
+| | | |
+| | | | diff --git a/1 b/1
+| | | | new file mode 100644
+| | | | index 0000000..d00491f
+| | | | --- /dev/null
+| | | | +++ b/1
+| | | | @@ -0,0 +1 @@
+| | | | +1
+| | | |
+* | | | commit COMMIT_OBJECT_NAME
+| | | | Author: A U Thor <author@example.com>
+| | | |
+| | | | Second
+| | | | ---
+| | | | one | 1 +
+| | | | 1 file changed, 1 insertion(+)
+| | | |
+| | | | diff --git a/one b/one
+| | | | new file mode 100644
+| | | | index 0000000..9a33383
+| | | | --- /dev/null
+| | | | +++ b/one
+| | | | @@ -0,0 +1 @@
+| | | | +case
+| | | |
+* | | | commit COMMIT_OBJECT_NAME
+| |_|/ Author: A U Thor <author@example.com>
+|/| |
+| | | sixth
+| | | ---
+| | | a/two | 1 -
+| | | 1 file changed, 1 deletion(-)
+| | |
+| | | diff --git a/a/two b/a/two
+| | | deleted file mode 100644
+| | | index 9245af5..0000000
+| | | --- a/a/two
+| | | +++ /dev/null
+| | | @@ -1 +0,0 @@
+| | | -ni
+| | |
+* | | commit COMMIT_OBJECT_NAME
+| | | Author: A U Thor <author@example.com>
+| | |
+| | | fifth
+| | | ---
+| | | a/two | 1 +
+| | | 1 file changed, 1 insertion(+)
+| | |
+| | | diff --git a/a/two b/a/two
+| | | new file mode 100644
+| | | index 0000000..9245af5
+| | | --- /dev/null
+| | | +++ b/a/two
+| | | @@ -0,0 +1 @@
+| | | +ni
+| | |
+* | | commit COMMIT_OBJECT_NAME
+|/ / Author: A U Thor <author@example.com>
+| |
+| | fourth
+| | ---
+| | ein | 1 +
+| | 1 file changed, 1 insertion(+)
+| |
+| | diff --git a/ein b/ein
+| | new file mode 100644
+| | index 0000000..9d7e69f
+| | --- /dev/null
+| | +++ b/ein
+| | @@ -0,0 +1 @@
+| | +ichi
+| |
+* | commit COMMIT_OBJECT_NAME
+|/ Author: A U Thor <author@example.com>
+|
+| third
+| ---
+| ichi | 1 +
+| one | 1 -
+| 2 files changed, 1 insertion(+), 1 deletion(-)
+|
+| diff --git a/ichi b/ichi
+| new file mode 100644
+| index 0000000..9d7e69f
+| --- /dev/null
+| +++ b/ichi
+| @@ -0,0 +1 @@
+| +ichi
+| diff --git a/one b/one
+| deleted file mode 100644
+| index 9d7e69f..0000000
+| --- a/one
+| +++ /dev/null
+| @@ -1 +0,0 @@
+| -ichi
+|
+* commit COMMIT_OBJECT_NAME
+| Author: A U Thor <author@example.com>
+|
+| second
+| ---
+| one | 2 +-
+| 1 file changed, 1 insertion(+), 1 deletion(-)
+|
+| diff --git a/one b/one
+| index 5626abf..9d7e69f 100644
+| --- a/one
+| +++ b/one
+| @@ -1 +1 @@
+| -one
+| +ichi
+|
+* commit COMMIT_OBJECT_NAME
+ Author: A U Thor <author@example.com>
+
+ initial
+ ---
+ one | 1 +
+ 1 file changed, 1 insertion(+)
+
+ diff --git a/one b/one
+ new file mode 100644
+ index 0000000..5626abf
+ --- /dev/null
+ +++ b/one
+ @@ -0,0 +1 @@
+ +one
+EOF
+
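+# Replace volatile bits (commit object names, merge parent lists, merged
+# heads descriptions) with fixed tokens, strip trailing whitespace, and
+# smooth over diffstat pluralization so the output can be compared with
+# the static expectation above.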
+sanitize_output () {
+ sed -e 's/ *$//' \
+ -e 's/commit [0-9a-f]*$/commit COMMIT_OBJECT_NAME/' \
+ -e 's/Merge: [ 0-9a-f]*$/Merge: MERGE_PARENTS/' \
+ -e 's/Merge tag.*/Merge HEADS DESCRIPTION/' \
+ -e 's/Merge commit.*/Merge HEADS DESCRIPTION/' \
+ -e 's/, 0 deletions(-)//' \
+ -e 's/, 0 insertions(+)//' \
+ -e 's/ 1 files changed, / 1 file changed, /' \
+ -e 's/, 1 deletions(-)/, 1 deletion(-)/' \
+ -e 's/, 1 insertions(+)/, 1 insertion(+)/'
+}
+
+test_expect_success 'log --graph with diff and stats' '
+ git log --graph --pretty=short --stat -p >actual &&
+ sanitize_output >actual.sanitized <actual &&
+ test_cmp expect actual.sanitized
+'
+
test_done
git mailsplit -d3 -o. "$TEST_DIRECTORY"/t5100/nul-plain &&
test_cmp "$TEST_DIRECTORY"/t5100/nul-plain 001 &&
(cat 001 | git mailinfo msg patch) &&
- test 4 = $(wc -l < patch)
+ test_line_count = 4 patch
'
git request-pull initial "$downstream_url" >../request
) &&
<request sed -nf fuzz.sed >request.fuzzy &&
- test_cmp expect request.fuzzy
+ test_i18ncmp expect request.fuzzy
'
test_cmp count7.expected count7.actual
'
+test_expect_success 'setup tests for the --stdin parameter' '
+ for head in C D E F
+ do
+ add $head
+ done &&
+ for head in A B C D E F
+ do
+ git tag $head $head
+ done &&
+	cat >input <<-\EOF &&
+ refs/heads/C
+ refs/heads/A
+ refs/heads/D
+ refs/tags/C
+ refs/heads/B
+ refs/tags/A
+ refs/heads/E
+ refs/tags/B
+ refs/tags/E
+ refs/tags/D
+ EOF
+ sort <input >expect &&
+ (
+ echo refs/heads/E &&
+ echo refs/tags/E &&
+ cat input
+ ) >input.dup
+'
+
+test_expect_success 'fetch refs from cmdline' '
+ (
+ cd client &&
+ git fetch-pack --no-progress .. $(cat ../input)
+ ) >output &&
+ cut -d " " -f 2 <output | sort >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'fetch refs from stdin' '
+ (
+ cd client &&
+ git fetch-pack --stdin --no-progress .. <../input
+ ) >output &&
+ cut -d " " -f 2 <output | sort >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'fetch mixed refs from cmdline and stdin' '
+ (
+ cd client &&
+ tail -n +5 ../input |
+ git fetch-pack --stdin --no-progress .. $(head -n 4 ../input)
+ ) >output &&
+ cut -d " " -f 2 <output | sort >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'test duplicate refs from stdin' '
+ (
+ cd client &&
+ test_must_fail git fetch-pack --stdin --no-progress .. <../input.dup
+ ) >output &&
+ cut -d " " -f 2 <output | sort >actual &&
+ test_cmp expect actual
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='check various push.default settings'
+. ./test-lib.sh
+
+test_expect_success 'setup bare remotes' '
+ git init --bare repo1 &&
+ git remote add parent1 repo1 &&
+ git init --bare repo2 &&
+ git remote add parent2 repo2 &&
+ test_commit one &&
+ git push parent1 HEAD &&
+ git push parent2 HEAD
+'
+
+test_expect_success '"upstream" pushes to configured upstream' '
+ git checkout master &&
+ test_config branch.master.remote parent1 &&
+ test_config branch.master.merge refs/heads/foo &&
+ test_config push.default upstream &&
+ test_commit two &&
+ git push &&
+ echo two >expect &&
+ git --git-dir=repo1 log -1 --format=%s foo >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success '"upstream" does not push on unconfigured remote' '
+ git checkout master &&
+ test_unconfig branch.master.remote &&
+ test_config push.default upstream &&
+ test_commit three &&
+ test_must_fail git push
+'
+
+test_expect_success '"upstream" does not push on unconfigured branch' '
+ git checkout master &&
+ test_config branch.master.remote parent1 &&
+ test_unconfig branch.master.merge &&
+	test_config push.default upstream &&
+ test_commit four &&
+ test_must_fail git push
+'
+
+test_expect_success '"upstream" does not push when remotes do not match' '
+ git checkout master &&
+ test_config branch.master.remote parent1 &&
+ test_config branch.master.merge refs/heads/foo &&
+ test_config push.default upstream &&
+ test_commit five &&
+ test_must_fail git push parent2
+'
+
+test_done
)
'
+test_expect_success 'push unpushed submodules when not needed' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git checkout master &&
+ >junk5 &&
+ git add junk5 &&
+ git commit -m "Fifth junk" &&
+ git push &&
+ git rev-parse origin/master >../../../expected
+ ) &&
+ git checkout master &&
+ git add gar/bage &&
+ git commit -m "Fifth commit for gar/bage" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushed submodules when not needed 2' '
+ (
+ cd submodule.git &&
+ git rev-parse master >../expected
+ ) &&
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ >junk6 &&
+ git add junk6 &&
+ git commit -m "Sixth junk"
+ ) &&
+ >junk2 &&
+ git add junk2 &&
+ git commit -m "Second junk for work" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushed submodules recursively' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git checkout master &&
+ > junk7 &&
+ git add junk7 &&
+ git commit -m "Seventh junk" &&
+ git rev-parse master >../../../expected
+ ) &&
+ git checkout master &&
+ git add gar/bage &&
+ git commit -m "Seventh commit for gar/bage" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushable submodule recursively fails' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git rev-parse origin/master >../../../expected &&
+ git checkout master~0 &&
+ > junk8 &&
+ git add junk8 &&
+ git commit -m "Eighth junk"
+ ) &&
+ git add gar/bage &&
+ git commit -m "Eighth commit for gar/bage" &&
+ test_must_fail git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
test_done
git clone --bare test_repo test_repo.git &&
cd test_repo.git &&
git config http.receivepack true &&
+ git config core.logallrefupdates true &&
ORIG_HEAD=$(git rev-parse --verify HEAD) &&
cd - &&
mv test_repo.git "$HTTPD_DOCUMENT_ROOT_PATH"
'
test_expect_success 'push fails for non-fast-forward refs unmatched by remote helper: our output' '
- test_i18ngrep "To prevent you from losing history, non-fast-forward updates were rejected" \
+ test_i18ngrep "Updates were rejected because" \
output
'
test_cmp /dev/null output
'
+test_expect_success 'http push gives sane defaults to reflog' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit reflog-test &&
+ git push "$HTTPD_URL"/smart/test_repo.git &&
+ git --git-dir="$HTTPD_DOCUMENT_ROOT_PATH/test_repo.git" \
+ log -g -1 --format="%gn <%ge>" >actual &&
+ echo "anonymous <anonymous@http.127.0.0.1>" >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'http push respects GIT_COMMITTER_* in reflog' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit custom-reflog-test &&
+ git push "$HTTPD_URL"/smart_custom_env/test_repo.git &&
+ git --git-dir="$HTTPD_DOCUMENT_ROOT_PATH/test_repo.git" \
+ log -g -1 --format="%gn <%ge>" >actual &&
+ echo "Custom User <custom@example.com>" >expect &&
+ test_cmp expect actual
+'
+
stop_httpd
test_done
git clone $HTTPD_URL/smart-redir-temp/repo.git --quiet repo-t
'
+test -n "$GIT_TEST_LONG" && test_set_prereq EXPENSIVE
+
+test_expect_success EXPENSIVE 'create 50,000 tags in the repo' '
+ (
+ cd "$HTTPD_DOCUMENT_ROOT_PATH/repo.git" &&
+ for i in `seq 50000`
+ do
+ echo "commit refs/heads/too-many-refs"
+ echo "mark :$i"
+ echo "committer git <git@example.com> $i +0000"
+ echo "data 0"
+ echo "M 644 inline bla.txt"
+ echo "data 4"
+ echo "bla"
+ # make every commit dangling by always
+ # rewinding the branch after each commit
+ echo "reset refs/heads/too-many-refs"
+ echo "from :1"
+ done | git fast-import --export-marks=marks &&
+
+ # now assign tags to all the dangling commits we created above
+ tag=$(perl -e "print \"bla\" x 30") &&
+ sed -e "s/^:\(.\+\) \(.\+\)$/\2 refs\/tags\/$tag-\1/" <marks >>packed-refs
+ )
+'
+
+test_expect_success EXPENSIVE 'clone the 50,000 tag repo to check OS command line overflow' '
+ git clone $HTTPD_URL/smart/repo.git too-many-refs 2>err &&
+ test_line_count = 0 err
+'
+
stop_httpd
test_done
cd "$base_dir"
test_expect_success 'existence of info/alternates' \
-'test `wc -l <C/.git/objects/info/alternates` = 2'
+'test_line_count = 2 C/.git/objects/info/alternates'
cd "$base_dir"
cd "$base_dir"
test_expect_success 'existence of info/alternates' \
-'test `wc -l <D/.git/objects/info/alternates` = 1'
+'test_line_count = 1 D/.git/objects/info/alternates'
cd "$base_dir"
test_valid_repo() {
git fsck --full > fsck.log &&
- test `wc -l < fsck.log` = 0
+ test_line_count = 0 fsck.log
}
base_dir=`pwd`
test_expect_success 'del LF before empty (1)' '
git show -s --pretty=format:"%s%n%-b%nThanks%n" HEAD^^ >actual &&
- test $(wc -l <actual) = 2
+ test_line_count = 2 actual
'
test_expect_success 'del LF before empty (2)' '
git show -s --pretty=format:"%s%n%-b%nThanks%n" HEAD >actual &&
- test $(wc -l <actual) = 6 &&
+ test_line_count = 6 actual &&
grep "^$" actual
'
test_expect_success 'add LF before non-empty (1)' '
git show -s --pretty=format:"%s%+b%nThanks%n" HEAD^^ >actual &&
- test $(wc -l <actual) = 2
+ test_line_count = 2 actual
'
test_expect_success 'add LF before non-empty (2)' '
git show -s --pretty=format:"%s%+b%nThanks%n" HEAD >actual &&
- test $(wc -l <actual) = 6 &&
+ test_line_count = 6 actual &&
grep "^$" actual
'
git commit -m "dummy" --allow-empty &&
git filter-branch --msg-filter "sed -e s/dummy//" HEAD^^.. &&
git rev-list --oneline HEAD >test.txt &&
- test $(git rev-list --oneline HEAD | wc -l) -eq 5 &&
- test $(git rev-list --oneline --graph HEAD | wc -l) -eq 5
+ test_line_count = 5 test.txt &&
+ git rev-list --oneline --graph HEAD >testg.txt &&
+ test_line_count = 5 testg.txt
'
test_done
! grep "refusing to lose untracked file" errors.txt
'
+test_expect_success 'do not follow renames for empty files' '
+ git checkout -f -b empty-base &&
+ >empty1 &&
+ git add empty1 &&
+ git commit -m base &&
+ echo content >empty1 &&
+ git add empty1 &&
+ git commit -m fill &&
+ git checkout -b empty-topic HEAD^ &&
+ git mv empty1 empty2 &&
+ git commit -m rename &&
+ test_must_fail git merge empty-base &&
+ >expect &&
+ test_cmp expect empty2
+'
+
test_done
git merge -m "merge HASH7 and SIDE_HASH7" "$HASH7" &&
B_HASH=$(git rev-parse --verify HEAD) &&
git merge-base --all "$A_HASH" "$B_HASH" > merge_bases.txt &&
- test $(wc -l < merge_bases.txt) = "2" &&
+ test_line_count = 2 merge_bases.txt &&
grep "$HASH5" merge_bases.txt &&
grep "$SIDE_HASH5" merge_bases.txt
'
test_expect_success 'massive simple rename does not spam added files' '
sane_unset GIT_MERGE_VERBOSITY &&
git merge --no-stat simple-rename | grep -v Removing >output &&
- test 5 -gt "$(wc -l < output)"
+ test_line_count -lt 5 output
'
test_done
(
cd test && git checkout b1
) >actual &&
- grep "have 1 and 1 different" actual
+ test_i18ngrep "have 1 and 1 different" actual
'
test_expect_success 'checkout with local tracked branch' '
git checkout master &&
git checkout follower >actual &&
- grep "is ahead of" actual
+ test_i18ngrep "is ahead of" actual
'
test_expect_success 'status' '
# reports nothing to commit
test_must_fail git commit --dry-run
) >actual &&
- grep "have 1 and 1 different" actual
+ test_i18ngrep "have 1 and 1 different" actual
'
test_expect_success 'fail to track lightweight tags' '
git checkout master &&
git tag light &&
test_must_fail git branch --track lighttrack light >actual &&
- test_must_fail grep "set up to track" actual &&
+ test_i18ngrep ! "set up to track" actual &&
test_must_fail git checkout lighttrack
'
git checkout master &&
git tag -m heavy heavy &&
test_must_fail git branch --track heavytrack heavy >actual &&
- test_must_fail grep "set up to track" actual &&
+ test_i18ngrep ! "set up to track" actual &&
test_must_fail git checkout heavytrack
'
test 0 -eq $(git ls-files -u | wc -l) &&
test 0 -eq $(git ls-files -o | wc -l) &&
- test 6 -eq $(wc -l < c) &&
+ test_line_count = 6 c &&
test $(git rev-parse HEAD:a) = $(git rev-parse B:a) &&
test $(git rev-parse HEAD:b) = $(git rev-parse A:b)
'
echo "l3" >two &&
test_tick &&
- git commit -a -m "Left #3" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #3" &&
echo "l4" >two &&
test_tick &&
- git commit -a -m "Left #4" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #4" &&
echo "l5" >two &&
test_tick &&
- git commit -a -m "Left #5" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #5" &&
git tag tag-l5 &&
git checkout right &&
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left: (5 commits)
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left: (5 commits)
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected.log <<-EOF &&
Sync with left
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* ${apos}left${apos} of $(pwd):
Left #5
Left #4
cat >expected <<-EOF
Merge branches ${apos}left${apos} and ${apos}right${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
Common #2
Common #1
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* tag ${apos}tag-l5${apos}:
Left #5
Left #4
Common #2
Common #1
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
git checkout -f renamer && git clean -f &&
git checkout renamer^ 2>messages &&
test_i18ngrep "HEAD is now at 7329388" messages &&
- test 1 -eq $(wc -l <messages) &&
+ test_line_count = 1 messages &&
H=$(git rev-parse --verify HEAD) &&
M=$(git show-ref -s --verify refs/heads/master) &&
test "z$H" = "z$M" &&
git checkout -f renamer && git clean -f &&
git checkout renamer^ 2>messages &&
test_i18ngrep "HEAD is now at 7329388" messages &&
- test 1 -lt $(wc -l <messages) &&
+ test_line_count -gt 1 messages &&
H=$(git rev-parse --verify HEAD) &&
M=$(git show-ref -s --verify refs/heads/master) &&
test "z$H" = "z$M" &&
'
test_expect_success 'nested git work tree' '
- rm -fr foo bar &&
- mkdir foo bar &&
+ rm -fr foo bar baz &&
+ mkdir -p foo bar baz/boo &&
(
cd foo &&
git init &&
cd bar &&
>goodbye.people
) &&
+ (
+ cd baz/boo &&
+ git init &&
+	>deeper.world &&
+ git add . &&
+ git commit -a -m deeply.nested
+ ) &&
git clean -f -d &&
test -f foo/.git/index &&
test -f foo/hello.world &&
+ test -f baz/boo/.git/index &&
+ test -f baz/boo/deeper.world &&
! test -d bar
'
test_expect_success 'force removal of nested git work tree' '
- rm -fr foo bar &&
- mkdir foo bar &&
+ rm -fr foo bar baz &&
+ mkdir -p foo bar baz/boo &&
(
cd foo &&
git init &&
cd bar &&
>goodbye.people
) &&
+ (
+ cd baz/boo &&
+ git init &&
+	>deeper.world &&
+ git add . &&
+ git commit -a -m deeply.nested
+ ) &&
git clean -f -f -d &&
! test -d foo &&
- ! test -d bar
+ ! test -d bar &&
+ ! test -d baz
'
test_expect_success 'git clean -e' '
test_expect_success 'status should only print one line' '
git submodule status >lines &&
- test $(wc -l <lines) = 1
+ test_line_count = 1 lines
'
test_expect_success 'setup - fetch commit name from submodule' '
cd "$base_dir"
test_expect_success 'after add: existence of info/alternates' \
-'test `wc -l <super/.git/modules/sub/objects/info/alternates` = 1'
+'test_line_count = 1 super/.git/modules/sub/objects/info/alternates'
cd "$base_dir"
cd "$base_dir"
test_expect_success 'after update: existence of info/alternates' \
-'test `wc -l <super-clone/.git/modules/sub/objects/info/alternates` = 1'
+'test_line_count = 1 super-clone/.git/modules/sub/objects/info/alternates'
cd "$base_dir"
'
test_expect_success '-m and -F do not mix' '
+ git checkout HEAD file && echo >>file && git add file &&
test_must_fail git commit -m foo -m bar -F file
'
test_expect_success '-m and -C do not mix' '
+ git checkout HEAD file && echo >>file && git add file &&
test_must_fail git commit -C HEAD -m illegal
'
test_must_fail git commit -F msg -a
'
+test_expect_success 'template "emptiness" check does not kick in with -F' '
+ git checkout HEAD file && echo >>file && git add file &&
+ git commit -t file -F file
+'
+
+test_expect_success 'template "emptiness" check' '
+ git checkout HEAD file && echo >>file && git add file &&
+ test_must_fail git commit -t file 2>err &&
+ test_i18ngrep "did not edit" err
+'
+
test_expect_success 'setup: commit message from file' '
+ git checkout HEAD file && echo >>file && git add file &&
echo this is the commit message, coming from a file >msg &&
git commit -F msg -a
'
git reset --hard &&
git commit -s -m "hello: kitty" --allow-empty &&
git cat-file commit HEAD | sed -e "1,/^$/d" >actual &&
- test $(wc -l <actual) = 3
+ test_line_count = 3 actual
'
git checkout -- file
'
+test_expect_success 'check the author in hook' '
+ write_script "$HOOK" <<-\EOF &&
+ test "$GIT_AUTHOR_NAME" = "New Author" &&
+ test "$GIT_AUTHOR_EMAIL" = "newauthor@example.com"
+ EOF
+ test_must_fail git commit --allow-empty -m "by a.u.thor" &&
+ (
+ GIT_AUTHOR_NAME="New Author" &&
+ GIT_AUTHOR_EMAIL="newauthor@example.com" &&
+ export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL &&
+ git commit --allow-empty -m "by new.author via env" &&
+ git show -s
+ ) &&
+ git commit --author="New Author <newauthor@example.com>" \
+ --allow-empty -m "by new.author via command line" &&
+ git show -s
+'
+
test_done
test_expect_success 'merge output uses pretty names' '
git reset --hard c1 &&
git merge c2 c3 c4 >actual &&
- test_cmp actual expected
+ test_i18ncmp expected actual
'
cat >expected <<\EOF
test_expect_success 'merge up-to-date output uses pretty names' '
git merge c4 c5 >actual &&
- test_cmp actual expected
+ test_i18ncmp expected actual
'
cat >expected <<\EOF
test_expect_success 'merge fast-forward output uses pretty names' '
git reset --hard c0 &&
git merge c1 c2 >actual &&
- test_cmp actual expected
+ test_i18ncmp expected actual
'
test_done
test_cmp important c1.c
'
+test_expect_failure 'will not overwrite unstaged changes in renamed file' '
+ git reset --hard c1 &&
+ git mv c1.c other.c &&
+ git commit -m rename &&
+ cp important other.c &&
+ git merge c1a &&
+ test_cmp important other.c
+'
+
test_expect_success 'will not overwrite untracked subtree' '
git reset --hard c0 &&
rm -rf sub &&
test "$diff" = ""
'
+test_expect_success PERL 'difftool forwards arguments to diff' '
+ >for-diff &&
+ git add for-diff &&
+ echo changes>for-diff &&
+ git add for-diff &&
+ diff=$(git difftool --cached --no-prompt -- for-diff) &&
+ test "$diff" = "" &&
+ git reset -- for-diff &&
+ rm for-diff
+'
+
test_expect_success PERL 'difftool honors --gui' '
git config merge.tool bogus-tool &&
git config diff.tool bogus-tool &&
'n=$(grep $a verify | wc -l) &&
test 1 = $n'
+###
+### series S
+###
+#
+# Make sure missing spaces and EOLs after mark references
+# cause errors.
+#
+# Setup:
+#
+#   1--2--4
+#    \   /
+#     -3-
+#
+#   commit marks:  301, 302, 303, 304
+#   blob marks:              403, 404, resp.
+#   note mark:          202
+#
+# The error message when a space is missing (not at the
+# end of the line) is:
+#
+# Missing space after ..
+#
+# or when extra characters come after the mark at the end
+# of the line:
+#
+# Garbage after ..
+#
+# or when the dataref is neither "inline " nor a known SHA1,
+#
+# Invalid dataref ..
+#
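+# For reference, a well-formed filemodify line gives the mode, then a
+# dataref (a mark such as ":403", a full SHA1, or the literal "inline"),
+# a single space, and the path, as in the setup stream above:
+#
+#   M 100644 :403 hello.c
+#   M 100644 inline hello.c
+#
+# The tests below append stray characters to the dataref (":403x",
+# "inlineX", "<sha1>x") to provoke each of the messages quoted above.
+#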
+test_tick
+
+cat >input <<INPUT_END
+commit refs/heads/S
+mark :301
+committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+data <<COMMIT
+commit 1
+COMMIT
+M 100644 inline hello.c
+data <<BLOB
+blob 1
+BLOB
+
+commit refs/heads/S
+mark :302
+committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+data <<COMMIT
+commit 2
+COMMIT
+from :301
+M 100644 inline hello.c
+data <<BLOB
+blob 2
+BLOB
+
+blob
+mark :403
+data <<BLOB
+blob 3
+BLOB
+
+blob
+mark :202
+data <<BLOB
+note 2
+BLOB
+INPUT_END
+
+test_expect_success 'S: initialize for S tests' '
+ git fast-import --export-marks=marks <input
+'
+
+#
+# filemodify, three datarefs
+#
+test_expect_success 'S: filemodify with garbage after mark must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit N
+ COMMIT
+ M 100644 :403x hello.c
+ EOF
+ cat err &&
+ test_i18ngrep "space after mark" err
+'
+
+# inline is misspelled; fast-import thinks it is some unknown dataref
+test_expect_success 'S: filemodify with garbage after inline must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit N
+ COMMIT
+ M 100644 inlineX hello.c
+ data <<BLOB
+ inline
+ BLOB
+ EOF
+ cat err &&
+ test_i18ngrep "nvalid dataref" err
+'
+
+test_expect_success 'S: filemodify with garbage after sha1 must fail' '
+	sha1=$(grep :403 marks | cut -d" " -f2) &&
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit N
+ COMMIT
+ M 100644 ${sha1}x hello.c
+ EOF
+ cat err &&
+ test_i18ngrep "space after SHA1" err
+'
+
+#
+# notemodify, three ways to say dataref
+#
+test_expect_success 'S: notemodify with garbage after mark dataref must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit S note dataref markref
+ COMMIT
+ N :202x :302
+ EOF
+ cat err &&
+ test_i18ngrep "space after mark" err
+'
+
+test_expect_success 'S: notemodify with garbage after inline dataref must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit S note dataref inline
+ COMMIT
+ N inlineX :302
+ data <<BLOB
+ note blob
+ BLOB
+ EOF
+ cat err &&
+ test_i18ngrep "nvalid dataref" err
+'
+
+test_expect_success 'S: notemodify with garbage after sha1 dataref must fail' '
+	sha1=$(grep :202 marks | cut -d" " -f2) &&
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit S note dataref sha1
+ COMMIT
+ N ${sha1}x :302
+ EOF
+ cat err &&
+ test_i18ngrep "space after SHA1" err
+'
+
+#
+# notemodify, mark in committish
+#
+test_expect_success 'S: notemodify with garbage after mark committish must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/Snotes
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit S note committish
+ COMMIT
+ N :202 :302x
+ EOF
+ cat err &&
+ test_i18ngrep "after mark" err
+'
+
+#
+# from
+#
+test_expect_success 'S: from with garbage after mark must fail' '
+ # no &&
+ git fast-import --import-marks=marks --export-marks=marks <<-EOF 2>err
+ commit refs/heads/S2
+ mark :303
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit 3
+ COMMIT
+ from :301x
+ M 100644 :403 hello.c
+ EOF
+
+ ret=$? &&
+ echo returned $ret &&
+ test $ret -ne 0 && # failed, but it created the commit
+
+ # go create the commit, need it for merge test
+ git fast-import --import-marks=marks --export-marks=marks <<-EOF &&
+ commit refs/heads/S2
+ mark :303
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ commit 3
+ COMMIT
+ from :301
+ M 100644 :403 hello.c
+ EOF
+
+ # now evaluate the error
+ cat err &&
+ test_i18ngrep "after mark" err
+'
+
+
+#
+# merge
+#
+test_expect_success 'S: merge with garbage after mark must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ commit refs/heads/S
+ mark :304
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<COMMIT
+ merge 4
+ COMMIT
+ from :302
+ merge :303x
+ M 100644 :403 hello.c
+ EOF
+ cat err &&
+ test_i18ngrep "after mark" err
+'
+
+#
+# tag, from markref
+#
+test_expect_success 'S: tag with garbage after mark must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ tag refs/tags/Stag
+ from :302x
+ tagger $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ data <<TAG
+ tag S
+ TAG
+ EOF
+ cat err &&
+ test_i18ngrep "after mark" err
+'
+
+#
+# cat-blob markref
+#
+test_expect_success 'S: cat-blob with garbage after mark must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ cat-blob :403x
+ EOF
+ cat err &&
+ test_i18ngrep "after mark" err
+'
+
+#
+# ls markref
+#
+test_expect_success 'S: ls with garbage after mark must fail' '
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ ls :302x hello.c
+ EOF
+ cat err &&
+ test_i18ngrep "space after mark" err
+'
+
+test_expect_success 'S: ls with garbage after sha1 must fail' '
+	sha1=$(grep :302 marks | cut -d" " -f2) &&
+ test_must_fail git fast-import --import-marks=marks <<-EOF 2>err &&
+ ls ${sha1}x hello.c
+ EOF
+ cat err &&
+ test_i18ngrep "space after tree-ish" err
+'
+
test_done
git checkout -b marks master &&
git fast-export --export-marks=tmp-marks HEAD &&
test -s tmp-marks &&
- test $(wc -l < tmp-marks) -eq 3 &&
+ test_line_count = 3 tmp-marks &&
test $(
git fast-export --import-marks=tmp-marks\
--export-marks=tmp-marks HEAD |
grep ^commit\ |
wc -l) \
-eq 1 &&
- test $(wc -l < tmp-marks) -eq 4
+ test_line_count = 4 tmp-marks
'
cd cvswork &&
GIT_CONFIG="$git_config" cvs update &&
GIT_CONFIG="$git_config" cvs status | grep "^File: status.file" >../out &&
- test $(wc -l <../out) = 2
+ test_line_count = 2 ../out
'
cd "$WORKDIR"
test_expect_success 'cvs status (nonrecursive)' '
cd cvswork &&
GIT_CONFIG="$git_config" cvs status -l | grep "^File: status.file" >../out &&
- test $(wc -l <../out) = 1
+ test_line_count = 1 ../out
'
cd "$WORKDIR"
cd "$WORKDIR"
test_expect_success 'cvs co -c (shows module database)' '
GIT_CONFIG="$git_config" cvs co -c > out &&
- grep "^master[ ]\+master$" < out &&
- ! grep -v "^master[ ]\+master$" < out
+ grep "^master[ ][ ]*master$" <out &&
+ ! grep -v "^master[ ][ ]*master$" <out
'
#------------
test_expect_success 'snapshots: bad tree-ish id (tagged object)' '
echo object > tag-object &&
git add tag-object &&
- git commit -m "Object to be tagged" &&
+ test_tick && git commit -m "Object to be tagged" &&
git tag tagged-object `git hash-object tag-object` &&
gitweb_run "p=.git;a=snapshot;h=tagged-object;sf=tgz" &&
grep "400 - Object is not a tree-ish" gitweb.output
'
test_debug 'cat gitweb.output'
+# ----------------------------------------------------------------------
+# modification times (Last-Modified and If-Modified-Since)
+
+test_expect_success 'modification: feed last-modified' '
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ grep "Last-modified: Thu, 7 Apr 2005 22:14:13 +0000" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: feed if-modified-since (modified)' '
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 200 OK" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: feed if-modified-since (unmodified)' '
+ export HTTP_IF_MODIFIED_SINCE="Thu, 7 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 304 Not Modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot last-modified' '
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ grep "Last-modified: Thu, 7 Apr 2005 22:14:13 +0000" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot if-modified-since (modified)' '
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot if-modified-since (unmodified)' '
+ export HTTP_IF_MODIFIED_SINCE="Thu, 7 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 304 Not Modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: tree snapshot' '
+ ID=`git rev-parse --verify HEAD^{tree}` &&
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=$ID;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ ! grep -i "last-modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
# ----------------------------------------------------------------------
# load checking
#!/bin/sh
-test_description='git-p4 tests'
+test_description='git p4 tests'
. ./lib-git-p4.sh
)
'
-test_expect_success 'basic git-p4 clone' '
- "$GITP4" clone --dest="$git" //depot &&
+test_expect_success 'basic git p4 clone' '
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
)
'
-test_expect_success 'git-p4 clone @all' '
- "$GITP4" clone --dest="$git" //depot@all &&
+test_expect_success 'git p4 clone @all' '
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
)
'
-test_expect_success 'git-p4 sync uninitialized repo' '
+test_expect_success 'git p4 sync uninitialized repo' '
test_create_repo "$git" &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- test_must_fail "$GITP4" sync
+ test_must_fail git p4 sync
)
'
# Create a git repo by hand. Add a commit so that HEAD is valid.
# Test imports a new p4 repository into a new git branch.
#
-test_expect_success 'git-p4 sync new branch' '
+test_expect_success 'git p4 sync new branch' '
test_create_repo "$git" &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_commit head &&
- "$GITP4" sync --branch=refs/remotes/p4/depot //depot@all &&
+ git p4 sync --branch=refs/remotes/p4/depot //depot@all &&
git log --oneline p4/depot >lines &&
test_line_count = 2 lines
)
p4 add sub2/f2 &&
p4 submit -d "sub2/f2"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1 //depot/sub2 &&
+ git p4 clone --dest="$git" //depot/sub1 //depot/sub2 &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub1/f3 &&
p4 submit -d "sub1/f3"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
+ git p4 clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub2/f3 &&
p4 submit -d "sub2/f3"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
+ git p4 clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
exit 1
EOF
chmod 755 "$badp4dir"/p4 &&
- PATH="$badp4dir:$PATH" "$GITP4" clone --dest="$git" //depot >errs 2>&1 ; retval=$? &&
+ PATH="$badp4dir:$PATH" git p4 clone --dest="$git" //depot >errs 2>&1 ; retval=$? &&
test $retval -eq 1 &&
test_must_fail grep -q Traceback errs
'
)
'
-test_expect_success 'wildcard files git-p4 clone' '
- "$GITP4" clone --dest="$git" //depot &&
+test_expect_success 'wildcard files git p4 clone' '
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
'
test_expect_success 'clone bare' '
- "$GITP4" clone --dest="$git" --bare //depot &&
+ git p4 clone --dest="$git" --bare //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4_add_user alice Alice &&
p4_add_user bob Bob &&
p4_grant_admin alice &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Alice <alice@localhost>" -m "a change by alice" file1 &&
git commit --author "Bob <bob@localhost>" -m "a change by bob" file2 &&
git config git-p4.skipSubmitEditCheck true &&
- P4EDITOR=touch P4USER=alice P4PASSWD=secret "$GITP4" commit --preserve-user &&
+ P4EDITOR=touch P4USER=alice P4PASSWD=secret git p4 commit --preserve-user &&
p4_check_commit_author file1 alice &&
p4_check_commit_author file2 bob
)
# Test username support, submitting as bob, who lacks admin rights. Should
# not submit change to p4 (git diff should show deltas).
test_expect_success 'refuse to preserve users without perms' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Alice <alice@localhost>" -m "perms: a change by alice" file1 &&
P4EDITOR=touch P4USER=bob P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- test_must_fail "$GITP4" commit --preserve-user &&
+ test_must_fail git p4 commit --preserve-user &&
! git diff --exit-code HEAD..p4/master
)
'
# What happens with unknown author? Without allowMissingP4Users it should fail.
test_expect_success 'preserve user where author is unknown to p4' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Charlie <charlie@localhost>" -m "preserve: a change by charlie" file1 &&
P4EDITOR=touch P4USER=alice P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- test_must_fail "$GITP4" commit --preserve-user &&
+ test_must_fail git p4 commit --preserve-user &&
! git diff --exit-code HEAD..p4/master &&
echo "$0: repeat with allowMissingP4Users enabled" &&
git config git-p4.allowMissingP4Users true &&
git config git-p4.preserveUser true &&
- "$GITP4" commit &&
+ git p4 commit &&
git diff --exit-code HEAD..p4/master &&
p4_check_commit_author file1 alice
)
'
-# If we're *not* using --preserve-user, git-p4 should warn if we're submitting
+# If we're *not* using --preserve-user, git p4 should warn if we're submitting
# changes that are not all ours.
# Test: user in p4 and user unknown to p4.
# Test: warning disabled and user is the same.
test_expect_success 'not preserving user with mixed authorship' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
make_change_by_user usernamefile3 Derek derek@localhost &&
P4EDITOR=cat P4USER=alice P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- "$GITP4" commit |\
+ git p4 commit |\
grep "git author derek@localhost does not match" &&
make_change_by_user usernamefile3 Charlie charlie@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
grep "git author charlie@localhost does not match" &&
make_change_by_user usernamefile3 alice alice@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
test_must_fail grep "git author.*does not match" &&
git config git-p4.skipUserNameCheck true &&
make_change_by_user usernamefile3 Charlie charlie@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
test_must_fail grep "git author.*does not match" &&
p4_check_commit_author usernamefile3 alice
p4change=$(p4 -G changes -m 1 //depot/... | marshal_dump change) &&
p4time=$(p4 -G changes -m 1 //depot/... | marshal_dump time) &&
sleep 3 &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
# Repeat, this time with a smaller threshold and confirm that the rename is
# detected in P4.
test_expect_success 'detect renames' '
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git mv file1 file4 &&
git commit -a -m "Rename file1 to file4" &&
git diff-tree -r -M HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file4 &&
p4 filelog //depot/file4 | test_must_fail grep -q "branch from" &&
git commit -a -m "Rename file4 to file5" &&
git diff-tree -r -M HEAD &&
git config git-p4.detectRenames true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file5 &&
p4 filelog //depot/file5 | grep -q "branch from //depot/file4" &&
level=$(git diff-tree -r -M HEAD | sed 1d | cut -f1 | cut -d" " -f5 | sed "s/R0*//") &&
test -n "$level" && test "$level" -gt 0 && test "$level" -lt 98 &&
git config git-p4.detectRenames $(($level + 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file6 &&
p4 filelog //depot/file6 | test_must_fail grep -q "branch from" &&
level=$(git diff-tree -r -M HEAD | sed 1d | cut -f1 | cut -d" " -f5 | sed "s/R0*//") &&
test -n "$level" && test "$level" -gt 2 && test "$level" -lt 100 &&
git config git-p4.detectRenames $(($level - 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file7 &&
p4 filelog //depot/file7 | grep -q "branch from //depot/file6"
)
# Modify and copy a file, configure a smaller threshold in detectCopies and
# confirm that copy is detected in P4.
test_expect_success 'detect copies' '
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git add file8 &&
git commit -a -m "Copy file2 to file8" &&
git diff-tree -r -C HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file8 &&
p4 filelog //depot/file8 | test_must_fail grep -q "branch from" &&
git commit -a -m "Copy file2 to file9" &&
git diff-tree -r -C HEAD &&
git config git-p4.detectCopies true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file9 &&
p4 filelog //depot/file9 | test_must_fail grep -q "branch from" &&
git add file2 file10 &&
git commit -a -m "Modify and copy file2 to file10" &&
git diff-tree -r -C HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file10 &&
p4 filelog //depot/file10 | grep -q "branch from //depot/file" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopiesHarder true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file11 &&
p4 filelog //depot/file11 | grep -q "branch from //depot/file" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopies $(($level + 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file12 &&
p4 filelog //depot/file12 | test_must_fail grep -q "branch from" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopies $(($level - 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file13 &&
p4 filelog //depot/file13 | grep -q "branch from //depot/file"
)
#!/bin/sh
-test_description='git-p4 p4 branching tests'
+test_description='git p4 tests for p4 branches'
. ./lib-git-p4.sh
test_expect_success 'import main, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/main@all &&
+ git p4 clone --dest="$git" //depot/main@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import branch1, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/branch1@all &&
+ git p4 clone --dest="$git" //depot/branch1@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import branch2, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/branch2@all &&
+ git p4 clone --dest="$git" //depot/branch2@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import depot, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import depot, branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" --detect-branches //depot@all &&
+ git p4 clone --dest="$git" --detect-branches //depot@all &&
(
cd "$git" &&
(
cd "$git" &&
git config git-p4.branchList main:branch1 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --oneline --graph --decorate --all &&
# Configure branches through git-config and clone them.
# All files are tested to make sure branches were cloned correctly.
# Finally, make an update to branch1 on P4 side to check if it is imported
-# correctly by git-p4.
-test_expect_success 'git-p4 clone simple branches' '
+# correctly by git p4.
+test_expect_success 'git p4 clone simple branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
cd "$git" &&
git config git-p4.branchList branch1:branch2 &&
git config --add git-p4.branchList branch1:branch3 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch1 &&
test -f file1 &&
p4 submit -d "update file2 in branch3" &&
cd "$git" &&
git reset --hard p4/depot/branch1 &&
- "$GITP4" rebase &&
+ git p4 rebase &&
grep file2_ file2
)
'
# Create a complex branch structure in P4 depot to check if they are correctly
-# cloned. The branches are created from older changelists to check if git-p4 is
+# cloned. The branches are created from older changelists to check if git p4 is
# able to correctly detect them.
# The final expected structure is:
# `branch1
# `- file1
# `- file2
# `- file3
-test_expect_success 'git-p4 add complex branches' '
+test_expect_success 'git p4 add complex branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
)
'
-# Configure branches through git-config and clone them. git-p4 will only be able
+# Configure branches through git-config and clone them. git p4 will only be able
# to clone the original structure if it is able to detect the origin changelist
# of each branch.
-test_expect_success 'git-p4 clone complex branches' '
+test_expect_success 'git p4 clone complex branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
git config --add git-p4.branchList branch1:branch3 &&
git config --add git-p4.branchList branch1:branch4 &&
git config --add git-p4.branchList branch1:branch5 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch1 &&
test_path_is_file file1 &&
#!/bin/sh
-test_description='git-p4 p4 filetype tests'
+test_description='git p4 filetype tests'
. ./lib-git-p4.sh
test_expect_success 'utf-16 file test' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
build_smush &&
test_when_finished rm -f k_smush.py ko_smush.py &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
"$PYTHON_PATH" "$TRASH_DIRECTORY/ko_smush.py" <"$cli/k-text-ko" >cli-k-text-ko-smush &&
test_cmp cli-k-text-ko-smush k-text-ko &&
- # utf16, even though p4 expands keywords, git-p4 does not
+ # utf16, even though p4 expands keywords, git p4 does not
# try to undo that
test_cmp "$cli/k-utf16-k" k-utf16-k &&
test_cmp "$cli/k-utf16-ko" k-utf16-ko
p4 submit -d appledouble
) &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
test ! -f double.png
#!/bin/sh
-test_description='git-p4 transparency to shell metachars in filenames'
+test_description='git p4 transparency to shell metachars in filenames'
. ./lib-git-p4.sh
'
test_expect_success 'shell metachars in filenames' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo f2 >"file with spaces" &&
git add "file with spaces" &&
git commit -m "add files" &&
- P4EDITOR=touch "$GITP4" submit
+ P4EDITOR=touch git p4 submit
) &&
(
cd "$cli" &&
'
test_expect_success 'deleting with shell metachars' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git rm foo\$bar &&
git rm file\ with\ spaces &&
git commit -m "remove files" &&
- P4EDITOR=touch "$GITP4" submit
+ P4EDITOR=touch git p4 submit
) &&
(
cd "$cli" &&
cd "$git" &&
git config git-p4.branchList main:branch\$3 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch\$3 &&
test -f shell_char_branch_file &&
#!/bin/sh
-test_description='git-p4 p4 label tests'
+test_description='git p4 label tests'
. ./lib-git-p4.sh
p4 labels ... &&
- "$GITP4" clone --dest="$git" --detect-labels //depot@all &&
+ git p4 clone --dest="$git" --detect-labels //depot@all &&
cd "$git" &&
git tag &&
p4 labels ... &&
- "$GITP4" clone --dest="$git" --detect-labels //depot@all &&
+ git p4 clone --dest="$git" --detect-labels //depot@all &&
cd "$git" &&
git tag | grep tag_f1 &&
#!/bin/sh
-test_description='git-p4 skipSubmitEdit config variables'
+test_description='git p4 skipSubmitEdit config variables'
. ./lib-git-p4.sh
# this works because EDITOR is set to :
test_expect_success 'no config, unedited, say yes' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 2" &&
- echo y | "$GITP4" submit &&
+ echo y | git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 2 wc
)
'
test_expect_success 'no config, unedited, say no' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 3 (not really)" &&
- printf "bad response\nn\n" | "$GITP4" submit &&
+ printf "bad response\nn\n" | git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 2 wc
)
'
test_expect_success 'skipSubmitEdit' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git config core.editor /bin/false &&
echo line >>file1 &&
git commit -a -m "change 3" &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 3 wc
)
'
test_expect_success 'skipSubmitEditCheck' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git config git-p4.skipSubmitEditCheck true &&
echo line >>file1 &&
git commit -a -m "change 4" &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 4 wc
)
# check the normal case, where the template really is edited
test_expect_success 'no config, edited' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
ed="$TRASH_DIRECTORY/ed.sh" &&
test_when_finished "rm \"$ed\"" &&
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 5" &&
- EDITOR="\"$ed\"" "$GITP4" submit &&
+ EDITOR="\"$ed\"" git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 5 wc
)
#!/bin/sh
-test_description='git-p4 options'
+test_description='git p4 options'
. ./lib-git-p4.sh
'
test_expect_success 'clone no --git-dir' '
- test_must_fail "$GITP4" clone --git-dir=xx //depot
+ test_must_fail git p4 clone --git-dir=xx //depot
'
test_expect_success 'clone --branch' '
- "$GITP4" clone --branch=refs/remotes/p4/sb --dest="$git" //depot &&
+ git p4 clone --branch=refs/remotes/p4/sb --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
cf="$TRASH_DIRECTORY/cf" &&
test_when_finished "rm \"$cf\"" &&
printf "1\n3\n" >"$cf" &&
- "$GITP4" clone --changesfile="$cf" --dest="$git" //depot &&
+ git p4 clone --changesfile="$cf" --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
cf="$TRASH_DIRECTORY/cf" &&
test_when_finished "rm \"$cf\"" &&
printf "1\n3\n" >"$cf" &&
- test_must_fail "$GITP4" clone --changesfile="$cf" --dest="$git" //depot@all
+ test_must_fail git p4 clone --changesfile="$cf" --dest="$git" //depot@all
'
# imports both master and p4/master in refs/heads
# requires --import-local on sync to find p4 refs/heads
# does not update master on sync, just p4/master
test_expect_success 'clone/sync --import-local' '
- "$GITP4" clone --import-local --dest="$git" //depot@1,2 &&
+ git p4 clone --import-local --dest="$git" //depot@1,2 &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_line_count = 2 lines &&
git log --oneline refs/heads/p4/master >lines &&
test_line_count = 2 lines &&
- test_must_fail "$GITP4" sync &&
+ test_must_fail git p4 sync &&
- "$GITP4" sync --import-local &&
+ git p4 sync --import-local &&
git log --oneline refs/heads/master >lines &&
test_line_count = 2 lines &&
git log --oneline refs/heads/p4/master >lines &&
'
test_expect_success 'clone --max-changes' '
- "$GITP4" clone --dest="$git" --max-changes 2 //depot@all &&
+ git p4 clone --dest="$git" --max-changes 2 //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub/dir/f4 &&
p4 submit -d "change 4"
) &&
- "$GITP4" clone --dest="$git" --keep-path //depot/sub/dir@all &&
+ git p4 clone --dest="$git" --keep-path //depot/sub/dir@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_path_is_file sub/dir/f4
) &&
cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/sub/dir@all &&
+ git p4 clone --dest="$git" //depot/sub/dir@all &&
(
cd "$git" &&
test_path_is_file f4 &&
(
# big usage message
exec >/dev/null &&
- test_must_fail "$GITP4" clone --dest="$git" --use-client-spec
+ test_must_fail git p4 clone --dest="$git" --use-client-spec
) &&
cli2="$TRASH_DIRECTORY/cli2" &&
mkdir -p "$cli2" &&
) &&
P4CLIENT=client2 &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" --use-client-spec //depot/... &&
+ git p4 clone --dest="$git" --use-client-spec //depot/... &&
(
cd "$git" &&
test_path_is_file bus/dir/f4 &&
cd "$git" &&
git init &&
git config git-p4.useClientSpec true &&
- "$GITP4" sync //depot/... &&
+ git p4 sync //depot/... &&
git checkout -b master p4/master &&
test_path_is_file bus/dir/f4 &&
test_path_is_missing file1
#!/bin/sh
-test_description='git-p4 submit'
+test_description='git p4 submit'
. ./lib-git-p4.sh
test_expect_success 'submit with no client dir' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo file2 >file2 &&
git commit -m "git commit 2" &&
rm -rf "$cli" &&
git config git-p4.skipSubmitEdit true &&
- "$GITP4" submit
+ git p4 submit
)
'
# make two commits, but tell it to apply only from HEAD^
test_expect_success 'submit --origin' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file3" &&
test_commit "file4" &&
git config git-p4.skipSubmitEdit true &&
- "$GITP4" submit --origin=HEAD^
+ git p4 submit --origin=HEAD^
) &&
(
cd "$cli" &&
test_expect_success 'submit with allowSubmit' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file5" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.allowSubmit "nobranch" &&
- test_must_fail "$GITP4" submit &&
+ test_must_fail git p4 submit &&
git config git-p4.allowSubmit "nobranch,master" &&
- "$GITP4" submit
+ git p4 submit
)
'
test_expect_success 'submit with master branch name from argv' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file6" &&
git config git-p4.skipSubmitEdit true &&
- test_must_fail "$GITP4" submit nobranch &&
+ test_must_fail git p4 submit nobranch &&
git branch otherbranch &&
git reset --hard HEAD^ &&
test_commit "file7" &&
- "$GITP4" submit otherbranch
+ git p4 submit otherbranch
) &&
(
cd "$cli" &&
#!/bin/sh
-test_description='git-p4 relative chdir'
+test_description='git p4 relative chdir'
. ./lib-git-p4.sh
(
P4CONFIG=p4config && export P4CONFIG &&
sane_unset P4PORT P4CLIENT &&
- "$GITP4" clone --verbose --dest="$git" //depot
+ git p4 clone --verbose --dest="$git" //depot
)
'
(
P4CONFIG=p4config && export P4CONFIG &&
sane_unset P4PORT P4CLIENT &&
- "$GITP4" clone --verbose --dest="git" //depot
+ git p4 clone --verbose --dest="git" //depot
)
'
#!/bin/sh
-test_description='git-p4 client view'
+test_description='git p4 client view'
. ./lib-git-p4.sh
test_expect_success 'unsupported view wildcard %%n' '
client_view "//depot/%%%%1/sub/... //client/sub/%%%%1/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'unsupported view wildcard *' '
client_view "//depot/*/bar/... //client/*/bar/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'wildcard ... only supported at end of spec 1' '
client_view "//depot/.../file11 //client/.../file11" &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'wildcard ... only supported at end of spec 2' '
client_view "//depot/.../a/... //client/.../a/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'basic map' '
files="cli1/file11 cli1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
client_view &&
client_verify &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify
'
files="file11" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
cli2/dir2/file21 cli2/dir2/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli12/file21 cli12/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
"-//depot/dir2/... //client/..." &&
client_verify &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify
'
files="dir1/file11 dir1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="dir1/file11 dir1/file12 dir2/file21" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli/file11 cli/file12 cli/file21 cli/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli/file11 cli/file12 cli/file21" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="dir1/file11 dir1/file12 dir2incl/file21 dir2incl/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
client_view "//depot/dir1/... \"//client/cdir 1/...\"" &&
client_verify "cdir 1/file11" "cdir 1/file12" &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify "cdir 1/file11" "cdir 1/file12"
'
test_expect_success 'clone --use-client-spec sets useClientSpec' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
(
cd "$git" &&
git config --bool git-p4.useClientSpec >actual &&
files="dir1/file11 dir1/file12 dir2/file21 dir2/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
git_verify dir1/file11 dir1/file12
'
test_expect_success 'subdir clone, submit modify' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
echo line >>dir1/file12 &&
git add dir1/file12 &&
git commit -m dir1/file12 &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit add' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
echo file13 >dir1/file13 &&
git add dir1/file13 &&
git commit -m dir1/file13 &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit delete' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git rm dir1/file12 &&
git commit -m "delete dir1/file12" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit copy' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
cp dir1/file11 dir1/file11a &&
git add dir1/file11a &&
git commit -m "copy to dir1/file11a" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit rename' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.detectRenames true &&
git mv dir1/file13 dir1/file13a &&
git commit -m "rename dir1/file13 to dir1/file13a" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
client_verify $files &&
test_cmp actual "$cli"/filecollide &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/filecollide
'
client_verify $files &&
test_cmp actual "$cli"/filecollide &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/filecollide
'
files="file11 file12 file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="file11 file12 file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
echo dir1/colA >actual &&
client_verify $files &&
test_cmp actual "$cli"/colA &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/colA
'
test_cmp actual "$cli"/colA &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_cmp actual "$cli"/colB &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files
echo dir1/colA >actual &&
client_verify $files &&
test_cmp actual "$cli"/colA &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/colA
'
test_cmp actual "$cli"/colA &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_cmp actual "$cli"/colB &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
files="cdir1/file11 cdir1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
client_verify $files
'
client_view "\"//depot/dir 1/...\" \"//client/cdir 1/...\"" &&
client_verify "cdir 1/file11" "cdir 1/file12" &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify "cdir 1/file11" "cdir 1/file12"
'
#
test_expect_success 'edit far away from RCS lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
sed -i "s/^line7/line7 edit/" filek &&
git commit -m "filek line7 edit" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'edit near RCS lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "s/^line4/line4 edit/" filek &&
git commit -m "filek line4 edit" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'edit keyword lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "/Revision/d" filek &&
git commit -m "filek remove Revision line" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'scrub ko files differently' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "s/^line4/line4 edit/" fileko &&
git commit -m "fileko line4 edit" fileko &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_ko_check fileko &&
! scrub_k_check fileko
)
#
test_expect_success 'do not scrub plain text' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
sed -i "s/^line5/line5 p4 edit/" file_text &&
p4 submit -d "file5 p4 edit"
) &&
- ! "$GITP4" submit &&
+ ! git p4 submit &&
(
# expect something like:
# file_text - file(s) not opened on this client
# even though the change itself would otherwise apply cleanly.
test_expect_success 'cope with rcs keyword expansion damage' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git add kwfile1.c &&
git commit -m "Zap an RCS kw line" &&
- "$GITP4" submit &&
- "$GITP4" rebase &&
+ git p4 submit &&
+ git p4 rebase &&
git diff p4/master &&
- "$GITP4" commit &&
+ git p4 commit &&
echo "try modifying in both" &&
cd "$cli" &&
p4 edit kwfile1.c &&
echo "line from git at the top" | cat - kwfile1.c >kwfile1.c.new &&
mv kwfile1.c.new kwfile1.c &&
git commit -m "Add line in git at the top" kwfile1.c &&
- "$GITP4" rebase &&
- "$GITP4" submit
+ git p4 rebase &&
+ git p4 submit
)
'
cat kwdelfile.c &&
grep 1 kwdelfile.c
) &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
grep Revision kwdelfile.c &&
git commit -m "Delete a file containing RCS keywords" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
# work fine without any special handling.
test_expect_success 'Add keywords in git which match the default p4 values' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo "NewKW: \$Revision\$" >>kwfile1.c &&
git commit -m "Adding RCS keywords in git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
#
test_expect_failure 'Add keywords in git which do not match the default p4 values' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo "NewKW2: \$Revision:1\$" >>kwfile1.c &&
git commit -m "Adding RCS keywords in git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
p4 add -t ktext merge2.c &&
p4 submit -d "add merge test file"
) &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
sed -e "/Hello/d" merge2.c >merge2.c.tmp &&
test -f merge2.c &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- !(echo "s" | "$GITP4" submit) &&
+ !(echo "s" | git p4 submit) &&
git rebase --skip &&
! test -f merge2.c
)
--- /dev/null
+#include "cache.h"
+#include "mergesort.h"
+
+struct line {
+ char *text;
+ struct line *next;
+};
+
+static void *get_next(const void *a)
+{
+ return ((const struct line *)a)->next;
+}
+
+static void set_next(void *a, void *b)
+{
+ ((struct line *)a)->next = b;
+}
+
+static int compare_strings(const void *a, const void *b)
+{
+ const struct line *x = a, *y = b;
+ return strcmp(x->text, y->text);
+}
+
+int main(int argc, const char **argv)
+{
+ struct line *line, *p = NULL, *lines = NULL;
+ struct strbuf sb = STRBUF_INIT;
+
+ for (;;) {
+ if (strbuf_getwholeline(&sb, stdin, '\n'))
+ break;
+ line = xmalloc(sizeof(struct line));
+ line->text = strbuf_detach(&sb, NULL);
+ if (p) {
+ line->next = p->next;
+ p->next = line;
+ } else {
+ line->next = NULL;
+ lines = line;
+ }
+ p = line;
+ }
+
+ lines = llist_mergesort(lines, get_next, set_next, compare_strings);
+
+ while (lines) {
+ printf("%s", lines->text);
+ lines = lines->next;
+ }
+ return 0;
+}
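
The helper above feeds stdin lines through llist_mergesort() via the three callbacks it expects: a next-pointer getter, a next-pointer setter and a comparator. As a rough, assumption-labelled illustration of that callback-driven interface, here is a standalone sketch that sorts a small int list the same way; it compiles without the git headers and is not the in-tree mergesort.c implementation (which avoids this kind of recursion).

/*
 * Standalone sketch only: sorts a tiny int list through get-next/set-next/
 * compare callbacks, mirroring the interface test-mergesort exercises.
 */
#include <stdio.h>

struct item { int value; struct item *next; };

static void *item_next(const void *e) { return ((const struct item *)e)->next; }
static void item_set_next(void *e, void *next) { ((struct item *)e)->next = next; }
static int item_cmp(const void *a, const void *b)
{
	return ((const struct item *)a)->value - ((const struct item *)b)->value;
}

/* Merge two sorted lists, smallest first, using only the callbacks. */
static void *merge(void *a, void *b,
		   void *(*next)(const void *), void (*set)(void *, void *),
		   int (*cmp)(const void *, const void *))
{
	void *head = NULL, *tail = NULL;

	while (a && b) {
		void **pick = cmp(a, b) <= 0 ? &a : &b;
		void *e = *pick;

		*pick = next(e);
		if (tail)
			set(tail, e);
		else
			head = e;
		tail = e;
	}
	if (tail)
		set(tail, a ? a : b);
	else
		head = a ? a : b;
	return head;
}

/* Split in the middle and merge the sorted halves (recursive for brevity). */
static void *sort_list(void *list,
		       void *(*next)(const void *), void (*set)(void *, void *),
		       int (*cmp)(const void *, const void *))
{
	void *slow = list, *fast, *second;

	if (!list || !next(list))
		return list;
	for (fast = next(list); fast && next(fast); fast = next(next(fast)))
		slow = next(slow);
	second = next(slow);
	set(slow, NULL);
	return merge(sort_list(list, next, set, cmp),
		     sort_list(second, next, set, cmp), next, set, cmp);
}

int main(void)
{
	struct item n[] = { { 3 }, { 1 }, { 2 } };
	struct item *p;

	n[0].next = &n[1];
	n[1].next = &n[2];
	n[2].next = NULL;
	for (p = sort_list(n, item_next, item_set_next, item_cmp); p; p = p->next)
		printf("%d\n", p->value);	/* prints 1, 2, 3 */
	return 0;
}
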
--- /dev/null
+/*
+ * test-revision-walking.c: test revision walking API.
+ *
+ * (C) 2012 Heiko Voigt <hvoigt@hvoigt.net>
+ *
+ * This code is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include "cache.h"
+#include "commit.h"
+#include "diff.h"
+#include "revision.h"
+
+static void print_commit(struct commit *commit)
+{
+ struct strbuf sb = STRBUF_INIT;
+ struct pretty_print_context ctx = {0};
+ ctx.date_mode = DATE_NORMAL;
+ format_commit_message(commit, " %m %s", &sb, &ctx);
+ printf("%s\n", sb.buf);
+ strbuf_release(&sb);
+}
+
+static int run_revision_walk(void)
+{
+ struct rev_info rev;
+ struct commit *commit;
+ const char *argv[] = {NULL, "--all", NULL};
+ int argc = ARRAY_SIZE(argv) - 1;
+ int got_revision = 0;
+
+ init_revisions(&rev, NULL);
+ setup_revisions(argc, argv, &rev, NULL);
+ if (prepare_revision_walk(&rev))
+ die("revision walk setup failed");
+
+ while ((commit = get_revision(&rev)) != NULL) {
+ print_commit(commit);
+ got_revision = 1;
+ }
+
+ reset_revision_walk();
+ return got_revision;
+}
+
+int main(int argc, char **argv)
+{
+ if (argc < 2)
+ return 1;
+
+ if (!strcmp(argv[1], "run-twice")) {
+ printf("1st\n");
+ if (!run_revision_walk())
+ return 1;
+ printf("2nd\n");
+ if (!run_revision_walk())
+ return 1;
+
+ return 0;
+ }
+
+ fprintf(stderr, "check usage\n");
+ return 1;
+}
#include "branch.h"
#include "url.h"
#include "submodule.h"
+#include "string-list.h"
/* rsync support */
{
struct ref *ref;
int n = 0;
+ unsigned char head_sha1[20];
+ char *head;
+
+ head = resolve_refdup("HEAD", head_sha1, 1, NULL);
if (verbose) {
for (ref = refs; ref; ref = ref->next)
ref->status != REF_STATUS_UPTODATE &&
ref->status != REF_STATUS_OK)
n += print_one_push_status(ref, dest, n, porcelain);
- if (ref->status == REF_STATUS_REJECT_NONFASTFORWARD)
- *nonfastforward = 1;
+ if (ref->status == REF_STATUS_REJECT_NONFASTFORWARD &&
+ *nonfastforward != NON_FF_HEAD) {
+ if (!strcmp(head, ref->name))
+ *nonfastforward = NON_FF_HEAD;
+ else
+ *nonfastforward = NON_FF_OTHER;
+ }
}
}
transport->progress = verbosity >= 0 && isatty(2);
}
+static void die_with_unpushed_submodules(struct string_list *needs_pushing)
+{
+ int i;
+
+ fprintf(stderr, "The following submodule paths contain changes that can\n"
+ "not be found on any remote:\n");
+ for (i = 0; i < needs_pushing->nr; i++)
+		fprintf(stderr, "  %s\n", needs_pushing->items[i].string);
+ fprintf(stderr, "\nPlease try\n\n"
+ " git push --recurse-submodules=on-demand\n\n"
+ "or cd to the path and use\n\n"
+ " git push\n\n"
+ "to push them to a remote.\n\n");
+
+ string_list_clear(needs_pushing, 0);
+
+ die("Aborting.");
+}
+
int transport_push(struct transport *transport,
int refspec_nr, const char **refspec, int flags,
int *nonfastforward)
flags & TRANSPORT_PUSH_MIRROR,
flags & TRANSPORT_PUSH_FORCE);
- if ((flags & TRANSPORT_RECURSE_SUBMODULES_CHECK) && !is_bare_repository()) {
+ if ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) && !is_bare_repository()) {
struct ref *ref = remote_refs;
for (; ref; ref = ref->next)
if (!is_null_sha1(ref->new_sha1) &&
- check_submodule_needs_pushing(ref->new_sha1,transport->remote->name))
- die("There are unpushed submodules, aborting.");
+ !push_unpushed_submodules(ref->new_sha1,
+ transport->remote->name))
+ die ("Failed to push all needed submodules!");
+ }
+
+ if ((flags & (TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND |
+ TRANSPORT_RECURSE_SUBMODULES_CHECK)) && !is_bare_repository()) {
+ struct ref *ref = remote_refs;
+ struct string_list needs_pushing;
+
+ memset(&needs_pushing, 0, sizeof(struct string_list));
+ needs_pushing.strdup_strings = 1;
+ for (; ref; ref = ref->next)
+ if (!is_null_sha1(ref->new_sha1) &&
+ find_unpushed_submodules(ref->new_sha1,
+ transport->remote->name, &needs_pushing))
+ die_with_unpushed_submodules(&needs_pushing);
}
push_ret = transport->push_refs(transport, remote_refs, flags);
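
Both new transport.c hunks collect submodule paths in a string_list before deciding how to fail (the memset plus strdup_strings = 1 dance is equivalent to the STRING_LIST_INIT_DUP initializer). A small sketch of that API, assuming the git tree's headers; report_paths() is a made-up helper:

/* Sketch only: the string_list calls used by the submodule-push hunks. */
#include "cache.h"
#include "string-list.h"

static void report_paths(const struct string_list *paths)
{
	int i;

	for (i = 0; i < paths->nr; i++)
		fprintf(stderr, "  %s\n", paths->items[i].string);
}

int main(void)
{
	struct string_list needs_pushing = STRING_LIST_INIT_DUP;

	/* strdup_strings is set, so the list stores its own copies */
	string_list_append(&needs_pushing, "sub/one");
	string_list_append(&needs_pushing, "sub/two");
	report_paths(&needs_pushing);
	string_list_clear(&needs_pushing, 0);	/* releases the copies */
	return 0;
}
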
#define TRANSPORT_PUSH_SET_UPSTREAM 32
#define TRANSPORT_RECURSE_SUBMODULES_CHECK 64
#define TRANSPORT_PUSH_PRUNE 128
+#define TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND 256
#define TRANSPORT_SUMMARY_WIDTH (2 * DEFAULT_ABBREV + 3)
void transport_set_verbosity(struct transport *transport, int verbosity,
int force_progress);
+#define NON_FF_HEAD 1
+#define NON_FF_OTHER 2
int transport_push(struct transport *connection,
int refspec_nr, const char **refspec, int flags,
int * nonfastforward);
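
transport_push() now distinguishes a rejected update of the current branch (NON_FF_HEAD) from a rejected update of some other ref (NON_FF_OTHER), which is what lets the push front-end print targeted advice for each case. The sketch below only illustrates that split; the function name and wording are made up, and the real message selection lives in builtin/push.c:

/*
 * Illustrative sketch only; assumes the git tree's cache.h, advice.h
 * and transport.h for error(), advise() and the NON_FF_* constants.
 */
#include "cache.h"
#include "advice.h"
#include "transport.h"

static void advise_on_non_ff(int nonfastforward)
{
	switch (nonfastforward) {
	case NON_FF_HEAD:
		/* the checked-out branch itself was rejected */
		advise("The tip of your current branch is behind its remote counterpart; integrate the remote changes (e.g. 'git pull') before pushing again.");
		break;
	case NON_FF_OTHER:
		/* some other pushed ref was rejected */
		advise("A pushed branch tip is behind its remote counterpart; check out that branch and integrate the remote changes before pushing again.");
		break;
	default:
		break;	/* no non-fast-forward rejection reported */
	}
}
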
opts->unpack_rejects[i].strdup_strings = 1;
}
-static void add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
- unsigned int set, unsigned int clear)
+static void do_add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
+ unsigned int set, unsigned int clear)
{
- unsigned int size = ce_size(ce);
- struct cache_entry *new = xmalloc(size);
-
clear |= CE_HASHED | CE_UNHASHED;
if (set & CE_REMOVE)
set |= CE_WT_REMOVE;
+ ce->next = NULL;
+ ce->ce_flags = (ce->ce_flags & ~clear) | set;
+ add_index_entry(&o->result, ce,
+ ADD_CACHE_OK_TO_ADD | ADD_CACHE_OK_TO_REPLACE);
+}
+
+static void add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
+ unsigned int set, unsigned int clear)
+{
+ unsigned int size = ce_size(ce);
+ struct cache_entry *new = xmalloc(size);
+
memcpy(new, ce, size);
- new->next = NULL;
- new->ce_flags = (new->ce_flags & ~clear) | set;
- add_index_entry(&o->result, new, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
+ do_add_entry(o, new, set, clear);
}
/*
for (i = 0; i < n; i++)
if (src[i] && src[i] != o->df_conflict_entry)
- add_entry(o, src[i], 0, 0);
+ do_add_entry(o, src[i], 0, 0);
return 0;
}
if (unpack_nondirectories(n, mask, dirmask, src, names, info) < 0)
return -1;
- if (src[0]) {
+ if (o->merge && src[0]) {
if (ce_stage(src[0]))
mark_ce_used_same_name(src[0], o);
else
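
The unpack-trees change splits add_entry() so that do_add_entry() inserts an already-allocated cache entry as-is while add_entry() still copies first; callers that previously duplicated entries they already owned no longer leak the copies. A toy, self-contained sketch of that copy-versus-take-ownership split (not unpack-trees.c):

/* Toy illustration only: insert_owned() takes the caller's allocation,
 * insert_copy() duplicates so the caller keeps ownership. */
#include <stdlib.h>
#include <string.h>

struct entry { struct entry *next; char name[64]; };
struct list { struct entry *head; };

static void insert_owned(struct list *l, struct entry *e)
{
	/* the list now owns e; the caller must not free or reuse it */
	e->next = l->head;
	l->head = e;
}

static void insert_copy(struct list *l, const struct entry *e)
{
	struct entry *copy = malloc(sizeof(*copy));

	if (!copy)
		abort();
	memcpy(copy, e, sizeof(*copy));
	insert_owned(l, copy);
}

int main(void)
{
	struct list l = { NULL };
	struct entry stack_entry = { NULL, "copied" };
	struct entry *heap_entry = malloc(sizeof(*heap_entry));

	if (!heap_entry)
		return 1;
	heap_entry->next = NULL;
	strcpy(heap_entry->name, "owned");

	insert_copy(&l, &stack_entry);	/* caller keeps stack_entry */
	insert_owned(&l, heap_entry);	/* list takes over heap_entry */

	while (l.head) {		/* every listed entry is heap-allocated */
		struct entry *e = l.head;
		l.head = e->next;
		free(e);
	}
	return 0;
}
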
static void (*try_to_free_routine)(size_t size) = do_nothing;
+static void memory_limit_check(size_t size)
+{
+ static int limit = -1;
+ if (limit == -1) {
+ const char *env = getenv("GIT_ALLOC_LIMIT");
+ limit = env ? atoi(env) * 1024 : 0;
+ }
+ if (limit && size > limit)
+ die("attempting to allocate %"PRIuMAX" over limit %d",
+ (intmax_t)size, limit);
+}
+
try_to_free_t set_try_to_free_routine(try_to_free_t routine)
{
try_to_free_t old = try_to_free_routine;
void *xmalloc(size_t size)
{
- void *ret = malloc(size);
+ void *ret;
+
+ memory_limit_check(size);
+ ret = malloc(size);
if (!ret && !size)
ret = malloc(1);
if (!ret) {
void *xrealloc(void *ptr, size_t size)
{
- void *ret = realloc(ptr, size);
+ void *ret;
+
+ memory_limit_check(size);
+ ret = realloc(ptr, size);
if (!ret && !size)
ret = realloc(ptr, 1);
if (!ret) {
void *xcalloc(size_t nmemb, size_t size)
{
- void *ret = calloc(nmemb, size);
+ void *ret;
+
+ memory_limit_check(size * nmemb);
+ ret = calloc(nmemb, size);
if (!ret && (!nmemb || !size))
ret = calloc(1, 1);
if (!ret) {
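
With the wrapper changes above, setting GIT_ALLOC_LIMIT (interpreted as kilobytes) makes any single xmalloc/xrealloc/xcalloc request above that size die immediately, which is useful for tests that want to prove a code path stays within a memory budget. A standalone sketch of the same env-capped allocation pattern, outside the git wrappers:

/* Standalone sketch of the GIT_ALLOC_LIMIT idea; not wrapper.c itself. */
#include <stdio.h>
#include <stdlib.h>

/* Allocation cap in bytes, read once from the environment (kilobytes). */
static size_t alloc_limit(void)
{
	static long limit = -1;

	if (limit == -1) {
		const char *env = getenv("GIT_ALLOC_LIMIT");
		limit = env ? atol(env) * 1024 : 0;
	}
	return (size_t)limit;
}

static void *checked_malloc(size_t size)
{
	size_t limit = alloc_limit();

	if (limit && size > limit) {
		fprintf(stderr, "refusing to allocate %lu bytes (limit %lu)\n",
			(unsigned long)size, (unsigned long)limit);
		exit(1);
	}
	return malloc(size);
}

int main(void)
{
	/* e.g. GIT_ALLOC_LIMIT=1 ./a.out refuses this 4 KiB request */
	void *p = checked_malloc(4096);

	free(p);
	return 0;
}
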
#define XDF_IGNORE_WHITESPACE (1 << 2)
#define XDF_IGNORE_WHITESPACE_CHANGE (1 << 3)
#define XDF_IGNORE_WHITESPACE_AT_EOL (1 << 4)
-#define XDF_PATIENCE_DIFF (1 << 5)
-#define XDF_HISTOGRAM_DIFF (1 << 6)
#define XDF_WHITESPACE_FLAGS (XDF_IGNORE_WHITESPACE | XDF_IGNORE_WHITESPACE_CHANGE | XDF_IGNORE_WHITESPACE_AT_EOL)
-#define XDL_PATCH_NORMAL '-'
-#define XDL_PATCH_REVERSE '+'
-#define XDL_PATCH_MODEMASK ((1 << 8) - 1)
-#define XDL_PATCH_IGNOREBSPACE (1 << 8)
+#define XDF_PATIENCE_DIFF (1 << 5)
+#define XDF_HISTOGRAM_DIFF (1 << 6)
+#define XDF_DIFF_ALGORITHM_MASK (XDF_PATIENCE_DIFF | XDF_HISTOGRAM_DIFF)
+#define XDF_DIFF_ALG(x) ((x) & XDF_DIFF_ALGORITHM_MASK)
#define XDL_EMIT_FUNCNAMES (1 << 0)
#define XDL_EMIT_COMMON (1 << 1)
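
After this change the patience and histogram bits form one exclusive "which algorithm" field, so code that switches algorithms should clear XDF_DIFF_ALGORITHM_MASK before setting a new bit rather than toggling bits independently. A minimal sketch, assuming xdiff/xdiff.h from the git tree (the helper name is made up):

/* Sketch: select exactly one diff algorithm by clearing the mask first. */
#include "xdiff/xdiff.h"

static void set_diff_algorithm(xpparam_t *xpp, unsigned long algo)
{
	/* algo is 0 (classic Myers), XDF_PATIENCE_DIFF or XDF_HISTOGRAM_DIFF */
	xpp->flags = (xpp->flags & ~XDF_DIFF_ALGORITHM_MASK) | algo;
}
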
xdalgoenv_t xenv;
diffdata_t dd1, dd2;
- if (xpp->flags & XDF_PATIENCE_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_PATIENCE_DIFF)
return xdl_do_patience_diff(mf1, mf2, xpp, xe);
- if (xpp->flags & XDF_HISTOGRAM_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF)
return xdl_do_histogram_diff(mf1, mf2, xpp, xe);
if (xdl_prepare_env(mf1, mf2, xpp, xe) < 0) {
int line1, int count1, int line2, int count2)
{
xpparam_t xpp;
- xpp.flags = index->xpp->flags & ~XDF_HISTOGRAM_DIFF;
+ xpp.flags = index->xpp->flags & ~XDF_DIFF_ALGORITHM_MASK;
return xdl_fall_back_diff(index->env, &xpp,
line1, count1, line2, count2);
int line1, int count1, int line2, int count2)
{
xpparam_t xpp;
- xpp.flags = map->xpp->flags & ~XDF_PATIENCE_DIFF;
+ xpp.flags = map->xpp->flags & ~XDF_DIFF_ALGORITHM_MASK;
return xdl_fall_back_diff(map->env, &xpp,
line1, count1, line2, count2);
if (!(recs = (xrecord_t **) xdl_malloc(narec * sizeof(xrecord_t *))))
goto abort;
- if (xpp->flags & XDF_HISTOGRAM_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF)
hbits = hsize = 0;
else {
hbits = xdl_hashbits((unsigned int) narec);
crec->ha = hav;
recs[nrec++] = crec;
- if (!(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_classify_record(pass, cf, rhash, hbits, crec) < 0)
+ if ((XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF) &&
+ xdl_classify_record(pass, cf, rhash, hbits, crec) < 0)
goto abort;
}
}
* (nrecs) will be updated correctly anyway by
* xdl_prepare_ctx().
*/
- sample = xpp->flags & XDF_HISTOGRAM_DIFF ? XDL_GUESS_NLINES2 : XDL_GUESS_NLINES1;
+ sample = (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF
+ ? XDL_GUESS_NLINES2 : XDL_GUESS_NLINES1);
enl1 = xdl_guess_lines(mf1, sample) + 1;
enl2 = xdl_guess_lines(mf2, sample) + 1;
- if (!(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_init_classifier(&cf, enl1 + enl2 + 1, xpp->flags) < 0) {
-
+ if (XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF &&
+ xdl_init_classifier(&cf, enl1 + enl2 + 1, xpp->flags) < 0)
return -1;
- }
if (xdl_prepare_ctx(1, mf1, enl1, xpp, &cf, &xe->xdf1) < 0) {
return -1;
}
- if (!(xpp->flags & XDF_PATIENCE_DIFF) &&
- !(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_optimize_ctxs(&cf, &xe->xdf1, &xe->xdf2) < 0) {
+ if ((XDF_DIFF_ALG(xpp->flags) != XDF_PATIENCE_DIFF) &&
+ (XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF) &&
+ xdl_optimize_ctxs(&cf, &xe->xdf1, &xe->xdf2) < 0) {
xdl_free_ctx(&xe->xdf2);
xdl_free_ctx(&xe->xdf1);