/git-cherry-pick
/git-clean
/git-clone
+/git-column
/git-commit
/git-commit-tree
/git-config
/git-name-rev
/git-mv
/git-notes
+/git-p4
/git-pack-redundant
/git-pack-objects
/git-pack-refs
/test-index-version
/test-line-buffer
/test-match-trees
+/test-mergesort
/test-mktemp
/test-parse-options
/test-path-utils
+/test-revision-walking
/test-run-command
/test-sha1
/test-sigchain
#
ifndef ASCIIDOC7
-ASCIIDOC_EXTRA += -a asciidoc7compatible -a no-inline-literal
+ASCIIDOC_EXTRA += -a asciidoc7compatible
endif
ifdef DOCBOOK_XSL_172
ASCIIDOC_EXTRA += -a git-asciidoc-no-roff
# Shell quote;
SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
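+
+# If the build defines DEFAULT_PAGER or DEFAULT_EDITOR (for example,
+# "make DEFAULT_EDITOR=nano", an illustrative value), shell-quote them
+# and pass them to asciidoc as document attributes so the formatted
+# documentation can refer to those build-time defaults.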
+ifdef DEFAULT_PAGER
+DEFAULT_PAGER_SQ = $(subst ','\'',$(DEFAULT_PAGER))
+ASCIIDOC_EXTRA += -a 'git-default-pager=$(DEFAULT_PAGER_SQ)'
+endif
+
+ifdef DEFAULT_EDITOR
+DEFAULT_EDITOR_SQ = $(subst ','\'',$(DEFAULT_EDITOR))
+ASCIIDOC_EXTRA += -a 'git-default-editor=$(DEFAULT_EDITOR_SQ)'
+endif
+
#
# Please note that there is a minor bug in asciidoc.
# The version after 6.0.3 _will_ include the patch found here:
--- /dev/null
+Git v1.7.11 Release Notes
+=========================
+
+Updates since v1.7.10
+---------------------
+
+UI, Workflows & Features
+
+ * A new mode for push, "simple", which is a cross between "current"
+ and "upstream", has been introduced. "git push" without any refspec
+ will push the current branch out to the same name at the remote
+ repository only when it is set to track the branch with the same
+ name over there. The plan is to make this mode the new default
+ value when push.default is not configured.
+
+ * A third-party tool "git subtree" is distributed in contrib/
+
+ * Error messages given when @{u} is used for a branch without its
+   upstream configured have been clarified.
+
+ * Even with the "-q"uiet option, "checkout" used to report setting up
+   tracking. Also "branch" learned the "-q"uiet option to squelch
+   informational messages.
+
+ * The smart-http backend used to always override GIT_COMMITTER_*
+ variables with REMOTE_USER and REMOTE_ADDR, but these variables are
+ now preserved when set.
+
+ * "include.path" mechanism of the configuration files learned to
+ understand "~/path" and "~user/path".
+
+ * "git am" learned the "--include" option, which is the opposite of
+   the existing "--exclude" option.
+
+ * When "git am -3" needs to fall back to applying the patch to a
+ synthesized preimage followed by a 3-way merge, the paths that
+ needed such treatment are now reported to the end user, so that the
+ result in them can be eyeballed with extra care.
+
+ * The output from "diff/log --stat" used to always allocate 4 columns
+ to show the number of modified lines, but not anymore.
+
+ * The "fmt-merge-msg" command learned to list the primary contributors
+ involved in the side topic you are merging.
+
+ * The cases in which "git push" fails due to a non-fast-forward update
+   can be broken into three categories; each case is given a separate
+   advice message.
+
+ * "git rebase" learned to optionally keep commits that do not
+ introduce any change in the original history.
+
+ * "git push --recurse-submodules" learned to optionally look into the
+ histories of submodules bound to the superproject and push them
+ out.
+
+ * A 'snapshot' request to "gitweb" honors the If-Modified-Since: header,
+ based on the commit date.
+
+ * "gitweb" learned to highlight the patch it outputs even more.
+
+Foreign Interface
+
+ * "git svn" used to die with unwanted SIGPIPE when talking with an HTTP
+ server that uses keep-alive.
+
+ * "git svn" learned to use platform specific authentication
+ providers, e.g. gnome-keyring, kwallet, etc.
+
+ * "git p4" has been moved out of contrib/ area and has seen more work
+ on importing labels as tags from (and exporting tags as labels to)
+ p4.
+
+Performance and Internal Implementation (please report possible regressions)
+
+ * An experimental "version 4" format of the index file has been
+ introduced to reduce on-disk footprint and I/O overhead.
+
+ * The code to compute hash values for lines used by the internal diff
+ engine was optimized on little-endian machines, using the same
+ trick the kernel folks came up with.
+
+ * "git apply" had some memory leaks plugged.
+
+ * "git repack" used to write out unreachable objects as loose objects
+   when repacking, even if such loose objects would immediately be
+   pruned due to their age.
+
+ * Setting up a revision traversal with many starting points was
+ inefficient as these were placed in a date-order priority queue
+ one-by-one. Now they are collected in the queue unordered first,
+ and sorted immediately before getting used.
+
+ * "git rev-parse --show-prefix" used to emit nothing when run at the
+ top-level of the working tree, but now it gives a blank line.
+
+ * Minor memory leak during unpack_trees (hence "merge" and "checkout"
+ to check out another branch) has been plugged.
+
+ * More lower-level commands learned to use the streaming API to read
+ from the object store without keeping everything in core.
+
+ * Because "sh" on the user's PATH may be utterly broken on some
+ systems, run-command API now uses SHELL_PATH, not /bin/sh, when
+ spawning an external command (not applicable to Windows port).
+
+ * The API to iterate over refs/ hierarchy has been tweaked to allow
+ walking only a subset of it more efficiently.
+
+Also contains minor documentation updates and code clean-ups.
+
+
+Fixes since v1.7.10
+-------------------
+
+Unless otherwise noted, all the fixes since v1.7.10 in the maintenance
+releases are contained in this release (see release notes to them for
+details).
+
+ * When using a Perl script on a system where "perl" found on user's
+ $PATH could be ancient or otherwise broken, we allow builders to
+ specify the path to a good copy of Perl with $PERL_PATH. The
+ gitweb test forgot to use that Perl when running its test.
+ (merge 0754e08 jk/maint-gitweb-test-use-sane-perl later to maint).
+
+ * A contrib script "rerere-train" did not work out of the box unless
+   the user futzed with her $PATH.
+ (merge 53876fc jc/rerere-train later to maint).
+
+ * "log --graph" was not very friendly with the "--stat" option and its
+   output had line breaks at the wrong places.
+ (merge bafa16e lp/diffstat-with-graph later to maint).
+
+ * "git config --rename-section" to rename an existing section into a
+ bogus one did not check the new name.
+ (merge 94a35b1 jk/maint-config-bogus-section later to maint).
+
+ * The test scaffolding for git-daemon was flaky.
+ (merge 46e3581 js/daemon-test-race-fix later to maint).
+
+ * The test scaffolding for fast-import was flaky.
+ (merge 7fb8e16 pw/t5800-import-race-fix later to maint).
+
+ * The octopus merge strategy did not correctly reduce the heads that
+   are recorded in the final commit.
+ (merge 5802f81 jc/merge-reduce-parents-early later to maint).
+
+ * In the older days, the header "Conflicts:" in "cherry-pick" and
+ "merge" was separated by a blank line from the list of paths that
+ follow for readability, but when "merge" was rewritten in C, we lost
+ it by mistake. Remove the newline from "cherry-pick" to make them
+ match again.
+ (merge 5112068 rt/cherry-revert-conflict-summary later to maint).
+
+ * The filesystem boundary was not correctly reported when .git
+ directory discovery stopped at a mount point.
+ (merge 2565b43 cb/maint-report-mount-point-correctly-in-setup later to maint).
+
+ * The command line parser choked on "git cherry-pick $name" when $name
+   can be both a revision name and a pathname, even though $name can
+ never be a path in the context of the command.
+ (merge 6d5b93f cb/cherry-pick-rev-path-confusion later to maint).
+
+ * HTTP transport that requires authentication did not work correctly
+   when multiple connections were used simultaneously.
+ (merge 6f4c347 cb/http-multi-curl-auth later to maint).
+
+ * The i18n of the error message from "git stash save" was not
+   properly done.
+ (merge ed3c400 rl/maint-stash-i18n-save-error later to maint).
+
+ * The report from "git fetch" said "new branch" even for a non-branch
+ ref.
+ (merge 0997ada mb/fetch-call-a-non-branch-a-ref later to maint).
+
+ * The "diff --no-index" codepath used limited-length buffers, risking
+ pathnames getting truncated. Update it to use the strbuf API.
+ (merge 875b91b jm/maint-strncpy-diff-no-index later to maint).
found at the location of the include directive. If the value of the
`include.path` variable is a relative path, the path is considered to be
relative to the configuration file in which the include directive was
-found. See below for examples.
+found. The value of `include.path` is subject to tilde expansion: `~/`
+is expanded to the value of `$HOME`, and `~user/` to the specified
+user's home directory. See below for examples.
Example
~~~~~~~
[include]
path = /path/to/foo.inc ; include by absolute path
path = foo ; expand "foo" relative to the current file
+ path = ~/foo ; expand "foo" in your $HOME directory
Variables
~~~~~~~~~
+
--
pushNonFastForward::
- Advice shown when linkgit:git-push[1] refuses
- non-fast-forward refs.
+ Set this variable to 'false' if you want to disable
+ 'pushNonFFCurrent', 'pushNonFFDefault', and
+ 'pushNonFFMatching' simultaneously.
+ pushNonFFCurrent::
+ Advice shown when linkgit:git-push[1] fails due to a
+ non-fast-forward update to the current branch.
+ pushNonFFDefault::
+ Advice to set 'push.default' to 'upstream' or 'current'
+ when you ran linkgit:git-push[1] and pushed 'matching
+ refs' by default (i.e. you did not provide an explicit
+ refspec, and no 'push.default' configuration was set)
+ and it resulted in a non-fast-forward error.
+ pushNonFFMatching::
+ Advice shown when you ran linkgit:git-push[1] and pushed
+ 'matching refs' explicitly (i.e. you used ':', or
+ specified a refspec that isn't your current branch) and
+ it resulted in a non-fast-forward error.
statusHints::
Directions on how to stage/unstage/add shown in the
output of linkgit:git-status[1] and the template shown
core.excludesfile::
In addition to '.gitignore' (per-directory) and
'.git/info/exclude', git looks into this file for patterns
- of files which are not meant to be tracked. "{tilde}/" is expanded
- to the value of `$HOME` and "{tilde}user/" to the specified user's
+ of files which are not meant to be tracked. "`~/`" is expanded
+ to the value of `$HOME` and "`~user/`" to the specified user's
home directory. See linkgit:gitignore[5].
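++
+For example (the filename is only an illustration),
+`git config --global core.excludesfile '~/.gitignore'` makes git read
+additional exclude patterns from that file in your home directory.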
core.askpass::
`never` if you prefer git commands not to use color unless enabled
explicitly with some other configuration or the `--color` option.
+column.ui::
+ Specify whether supported commands should output in columns.
+ This variable consists of a list of tokens separated by spaces
+ or commas:
++
+--
+`always`;;
+ always show in columns
+`never`;;
+ never show in columns
+`auto`;;
+ show in columns if the output is to the terminal
+`column`;;
+ fill columns before rows (default)
+`row`;;
+ fill rows before columns
+`plain`;;
+ show in one column
+`dense`;;
+ make unequal size columns to utilize more space
+`nodense`;;
+ make equal size columns
+--
++
+ This option defaults to 'never'.
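++
+For example, `git config column.ui "auto,dense"` (an illustrative
+value) asks supported commands to use columns when writing to a
+terminal, with unequal column widths to save space.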
+
+column.branch::
+ Specify whether to output branch listing in `git branch` in columns.
+ See `column.ui` for details.
+
+column.status::
+ Specify whether to output untracked files in `git status` in columns.
+ See `column.ui` for details.
+
+column.tag::
+ Specify whether to output tag listing in `git tag` in columns.
+ See `column.ui` for details.
+
commit.status::
A boolean to enable/disable inclusion of status information in the
commit message template when using an editor to prepare the commit
commit.template::
Specify a file to use as the template for new commit messages.
- "{tilde}/" is expanded to the value of `$HOME` and "{tilde}user/" to the
+ "`~/`" is expanded to the value of `$HOME` and "`~user/`" to the
specified user's home directory.
credential.helper::
a boolean value, or `shallow` or `deep`. `shallow` threading
makes every mail a reply to the head of the series,
where the head is chosen from the cover letter, the
- `\--in-reply-to`, and the first patch mail, in this order.
+ `--in-reply-to`, and the first patch mail, in this order.
`deep` threading makes every mail a reply to the previous one.
A true boolean value is the same as `shallow`, and a false
value disables threading.
interactive.singlekey::
In interactive commands, allow the user to provide one-letter
input with a single key (i.e., without hitting enter).
- Currently this is used by the `\--patch` mode of
+ Currently this is used by the `--patch` mode of
linkgit:git-add[1], linkgit:git-checkout[1], linkgit:git-commit[1],
linkgit:git-reset[1], and linkgit:git-stash[1]. Note that this
setting is silently ignored if portable keystroke input
log.abbrevCommit::
If true, makes linkgit:git-log[1], linkgit:git-show[1], and
- linkgit:git-whatchanged[1] assume `\--abbrev-commit`. You may
- override this option with `\--no-abbrev-commit`.
+ linkgit:git-whatchanged[1] assume `--abbrev-commit`. You may
+ override this option with `--no-abbrev-commit`.
log.date::
Set the default date-time mode for the 'log' command.
Setting a value for log.date is similar to using 'git log''s
- `\--date` option. Possible values are `relative`, `local`,
+ `--date` option. Possible values are `relative`, `local`,
`default`, `iso`, `rfc`, and `short`; see linkgit:git-log[1]
for details.
and this config option ignored whenever the corresponding pack is
larger than 2 GB.
+
-If you have an old git that does not understand the version 2 `{asterisk}.idx` file,
+If you have an old git that does not understand the version 2 `*.idx` file,
cloning or fetching over a non native protocol (e.g. "http" and "rsync")
-that will copy both `{asterisk}.pack` file and corresponding `{asterisk}.idx` file from the
+that will copy both `*.pack` file and corresponding `*.idx` file from the
other side may give you a repository that cannot be accessed with your
-older version of git. If the `{asterisk}.pack` file is smaller than 2 GB, however,
+older version of git. If the `*.pack` file is smaller than 2 GB, however,
you can use linkgit:git-index-pack[1] on the *.pack file to regenerate
-the `{asterisk}.idx` file.
+the `*.idx` file.
pack.packSizeLimit::
The maximum size of a pack. This setting only affects
packing to a file when repacking, i.e. the git:// protocol
- is unaffected. It can be overridden by the `\--max-pack-size`
+ is unaffected. It can be overridden by the `--max-pack-size`
option of linkgit:git-repack[1]. The minimum size allowed is
limited to 1 MiB. The default is unlimited.
Common unit suffixes of 'k', 'm', or 'g' are
If the value is boolean, turns on or off pagination of the
output of a particular git subcommand when writing to a tty.
Otherwise, turns on pagination for the subcommand using the
- pager specified by the value of `pager.<cmd>`. If `\--paginate`
- or `\--no-pager` is specified on the command line, it takes
+ pager specified by the value of `pager.<cmd>`. If `--paginate`
+ or `--no-pager` is specified on the command line, it takes
precedence over this option. To disable pagination for all
commands, set `core.pager` or `GIT_PAGER` to `cat`.
Alias for a --pretty= format string, as specified in
linkgit:git-log[1]. Any aliases defined here can be used just
as the built-in pretty formats could. For example,
- running `git config pretty.changelog "format:{asterisk} %H %s"`
+ running `git config pretty.changelog "format:* %H %s"`
would cause the invocation `git log --pretty=changelog`
- to be equivalent to running `git log "--pretty=format:{asterisk} %H %s"`.
+ to be equivalent to running `git log "--pretty=format:* %H %s"`.
Note that an alias with the same name as a built-in format
will be silently ignored.
line. Possible values are:
+
* `nothing` - do not push anything.
-* `matching` - push all matching branches.
- All branches having the same name in both ends are considered to be
- matching. This is the default.
+* `matching` - push all branches having the same name in both ends.
+ This is for those who prepare all the branches into a publishable
+ shape and then push them out with a single command. It is not
+ appropriate for pushing into a repository shared by multiple users,
+ since locally stalled branches will attempt a non-fast forward push
+ if other users updated the branch.
+ +
+ This is currently the default, but Git 2.0 will change the default
+ to `simple`.
* `upstream` - push the current branch to its upstream branch.
-* `tracking` - deprecated synonym for `upstream`.
+ With this, `git push` will update the same remote ref as the one which
+ is merged by `git pull`, making `push` and `pull` symmetrical.
+ See "branch.<name>.merge" for how to configure the upstream branch.
+* `simple` - like `upstream`, but refuses to push if the upstream
+ branch's name is different from the local one. This is the safest
+ option and is well-suited for beginners. It will become the default
+ in Git 2.0.
* `current` - push the current branch to a branch of the same name.
+ +
+ The `simple`, `current` and `upstream` modes are for those who want to
+ push out a single branch after finishing work, even when the other
+ branches are not yet ready to be pushed out. If you are working with
+ other people to push into the same shared repository, you would want
+ to use one of these.
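++
+For example, to adopt the upcoming default ahead of time, one could run
+`git config --global push.default simple` (an illustrative invocation).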
rebase.stat::
Whether to show a diffstat of what changed upstream since the last
remote.<name>.mirror::
If true, pushing to this remote will automatically behave
- as if the `\--mirror` option was given on the command line.
+ as if the `--mirror` option was given on the command line.
remote.<name>.skipDefaultUpdate::
If true, this remote will be skipped by default when updating
from both files (hence two `-` removals from both file1 and
file2, plus `++` to mean one line that was added does not appear
in either file1 nor file2). Also eight other lines are the same
-from file1 but do not appear in file2 (hence prefixed with `{plus}`).
+from file1 but do not appear in file2 (hence prefixed with `+`).
When shown by `git diff-tree -c`, it compares the parents of a
merge commit with the merge result (i.e. file1..fileN are the
`--stat-name-width=<name-width>` and `--stat-count=<count>`.
--numstat::
- Similar to `\--stat`, but shows number of added and
+ Similar to `--stat`, but shows number of added and
deleted lines in decimal notation and pathname without
abbreviation, to make it more machine friendly. For
binary files, outputs two `-` instead of saying
<9> switch to the master branch.
<10> merge a topic branch into your master branch.
<11> review commit logs; other forms to limit output can be
-combined and include `\--max-count=10` (show 10 commits),
-`\--until=2005-12-10`, etc.
+combined and include `--max-count=10` (show 10 commits),
+`--until=2005-12-10`, etc.
<12> view only the changes that touch what's in `curses/`
directory, since `v2.43` tag.
[--3way] [--interactive] [--committer-date-is-author-date]
[--ignore-date] [--ignore-space-change | --ignore-whitespace]
[--whitespace=<option>] [-C<n>] [-p<n>] [--directory=<dir>]
- [--exclude=<path>] [--reject] [-q | --quiet]
+ [--exclude=<path>] [--include=<path>] [--reject] [-q | --quiet]
[--scissors | --no-scissors]
[(<mbox> | <Maildir>)...]
'git am' (--continue | --skip | --abort)
-p<n>::
--directory=<dir>::
--exclude=<path>::
+--include=<path>::
--reject::
These flags are passed to the 'git apply' (see linkgit:git-apply[1])
program that applies
Same as above, but the format is inferred from the output file.
-`git archive --format=tar --prefix=git-1.4.0/ v1.4.0{caret}\{tree\} | gzip >git-1.4.0.tar.gz`::
+`git archive --format=tar --prefix=git-1.4.0/ v1.4.0^{tree} | gzip >git-1.4.0.tar.gz`::
Create a compressed tarball for v1.4.0 release, but without a
global extended pax header.
git log --diff-filter=A --pretty=short -- foo
and then annotate the change between the commit and its
-parents, using `commit{caret}!` notation:
+parents, using `commit^!` notation:
git blame -C -C -f $commit^! -- foo
[verse]
'git branch' [--color[=<when>] | --no-color] [-r | -a]
[--list] [-v [--abbrev=<length> | --no-abbrev]]
+ [--column[=<options>] | --no-column]
[(--merged | --no-merged | --contains) [<commit>]] [<pattern>...]
'git branch' [--set-upstream | --track | --no-track] [-l] [-f] <branchname> [<start-point>]
'git branch' (-m | -M) [<oldbranch>] <newbranch>
default to color output.
Same as `--color=never`.
+--column[=<options>]::
+--no-column::
+ Display branch listing in columns. See configuration variable
+	column.branch for option syntax. `--column` and `--no-column`
+ without options are equivalent to 'always' and 'never' respectively.
++
+This option is only applicable in non-verbose mode.
+
-r::
--remotes::
List or delete (if used with -d) the remote-tracking branches.
relationship to upstream branch (if any). If given twice, print
the name of the upstream branch, as well.
+-q::
+--quiet::
+ Be more quiet when creating or deleting a branch, suppressing
+ non-error messages.
+
--abbrev=<length>::
Alter the sha1's minimum display length in the output listing.
The default value is 7 and can be overridden by the `core.abbrev`
A list of arguments, acceptable to 'git rev-parse' and
'git rev-list' (and containing a named ref, see SPECIFYING REFERENCES
below), that specifies the specific objects and references
- to transport. For example, `master{tilde}10..master` causes the
+ to transport. For example, `master~10..master` causes the
current master reference to be packaged along with all objects
added since its 10th ancestor commit. There is no explicit
limit to the number of references and objects that may be
'git bundle' will only package references that are shown by
'git show-ref': this includes heads, tags, and remote heads. References
-such as `master{tilde}1` cannot be packaged, but are perfectly suitable for
+such as `master~1` cannot be packaged, but are perfectly suitable for
defining the basis. More than one reference may be packaged, and more
than one basis can be specified. The objects packaged are those not
contained in the union of the given bases. Each basis can be
-specified explicitly (e.g. `^master{tilde}10`), or implicitly (e.g.
-`master{tilde}10..master`, `--since=10.days.ago master`).
+specified explicitly (e.g. `^master~10`), or implicitly (e.g.
+`master~10..master`, `--since=10.days.ago master`).
It is very important that the basis used be held by the destination.
It is okay to err on the side of caution, causing the bundle file
. They cannot have ASCII control characters (i.e. bytes whose
values are lower than \040, or \177 `DEL`), space, tilde `~`,
- caret `{caret}`, or colon `:` anywhere.
+ caret `^`, or colon `:` anywhere.
-. They cannot have question-mark `?`, asterisk `{asterisk}`, or open
+. They cannot have question-mark `?`, asterisk `*`, or open
bracket `[` anywhere. See the `--refspec-pattern` option below for
an exception to this rule.
reference name expressions (see linkgit:gitrevisions[7]):
. A double-dot `..` is often used as in `ref1..ref2`, and in some
- contexts this notation means `{caret}ref1 ref2` (i.e. not in
+ contexts this notation means `^ref1 ref2` (i.e. not in
`ref1` and in `ref2`).
-. A tilde `~` and caret `{caret}` are used to introduce the postfix
+. A tilde `~` and caret `^` are used to introduce the postfix
'nth parent' and 'peel onion' operation.
. A colon `:` is used as in `srcref:dstref` to mean "use srcref\'s
--refspec-pattern::
Interpret <refname> as a reference name pattern for a refspec
(as used with remote repositories). If this option is
- enabled, <refname> is allowed to contain a single `{asterisk}`
+ enabled, <refname> is allowed to contain a single `*`
in place of a one full pathname component (e.g.,
- `foo/{asterisk}/bar` but not `foo/bar{asterisk}`).
+ `foo/*/bar` but not `foo/bar*`).
--normalize::
Normalize 'refname' by removing any leading slash (`/`)
+
This means that you can use `git checkout -p` to selectively discard
edits from your current working tree. See the ``Interactive Mode''
-section of linkgit:git-add[1] to learn how to operate the `\--patch` mode.
+section of linkgit:git-add[1] to learn how to operate the `--patch` mode.
<branch>::
Branch to checkout; if it refers to a branch (i.e., a name that,
commit, your HEAD becomes "detached" and you are no longer on
any branch (see below for details).
+
-As a special case, the `"@\{-N\}"` syntax for the N-th last branch
+As a special case, the `"@{-N}"` syntax for the N-th last branch
checks out the branch (instead of detaching). You may also specify
-`-` which is synonymous with `"@\{-1\}"`.
+`-` which is synonymous with `"@{-1}"`.
+
-As a further special case, you may use `"A\...B"` as a shortcut for the
+As a further special case, you may use `"A...B"` as a shortcut for the
merge base of `A` and `B` if there is exactly one merge base. You can
leave out at most one of `A` and `B`, in which case it defaults to `HEAD`.
cherry-pick'ed commit, then a fast forward to this commit will
be performed.
+--allow-empty::
+ By default, cherry-picking an empty commit will fail,
+ indicating that an explicit invocation of `git commit
+ --allow-empty` is required. This option overrides that
+ behavior, allowing empty commits to be preserved automatically
+ in a cherry-pick. Note that when "--ff" is in effect, empty
+ commits that meet the "fast-forward" requirement will be kept
+	even without this option. Note also that use of this option only
+ keeps commits that were initially empty (i.e. the commit recorded the
+ same tree as its parent). Commits which are made empty due to a
+ previous commit are dropped. To force the inclusion of those commits
+ use `--keep-redundant-commits`.
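++
+A minimal illustration (the range is hypothetical):
+`git cherry-pick --allow-empty maint..topic` replays the commits in
+that range while preserving any that were empty to begin with.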
+
+--keep-redundant-commits::
+	If a commit being cherry-picked duplicates a commit already in the
+ current history, it will become empty. By default these
+ redundant commits are ignored. This option overrides that behavior and
+ creates an empty commit object. Implies `--allow-empty`.
+
--strategy=<strategy>::
Use the given merge strategy. Should only be used once.
See the MERGE STRATEGIES section in linkgit:git-merge[1]
Apply the changes introduced by all commits that are ancestors
of master but not of HEAD to produce new commits.
-`git cherry-pick master{tilde}4 master{tilde}2`::
+`git cherry-pick master~4 master~2`::
Apply the changes introduced by the fifth and third last
commits pointed to by master and create 2 new commits with
are in next but not HEAD to the current branch, creating a new
commit for each new change.
-`git rev-list --reverse master \-- README | git cherry-pick -n --stdin`::
+`git rev-list --reverse master -- README | git cherry-pick -n --stdin`::
Apply the changes introduced by all commits on the master
branch that touched README to the working tree and index,
--- /dev/null
+git-column(1)
+=============
+
+NAME
+----
+git-column - Display data in columns
+
+SYNOPSIS
+--------
+[verse]
+'git column' [--command=<name>] [--[raw-]mode=<mode>] [--width=<width>]
+	[--indent=<string>] [--nl=<string>] [--padding=<n>]
+
+DESCRIPTION
+-----------
+This command formats its input into multiple columns.
+
+OPTIONS
+-------
+--command=<name>::
+	Look up the layout mode using the configuration variables
+	column.<name> and column.ui.
+
+--mode=<mode>::
+ Specify layout mode. See configuration variable column.ui for option
+ syntax.
+
+--raw-mode=<n>::
+	Same as --mode, but takes the mode encoded as a number. This is
+	mainly used by other commands that have already parsed the layout
+	mode.
+
+--width=<width>::
+ Specify the terminal width. By default 'git column' will detect the
+ terminal width, or fall back to 80 if it is unable to do so.
+
+--indent=<string>::
+ String to be printed at the beginning of each line.
+
+--nl=<string>::
+	String to be printed at the end of each line,
+	including the newline character.
+
+--padding=<n>::
+ The number of spaces between columns. One space by default.
+
+
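+EXAMPLES
+--------
+'git column' reads the lines to be laid out from its standard input.
+A minimal illustration (the input numbers are arbitrary):
+
+------------
+$ seq 1 24 | git column --mode=column --padding=5
+------------
+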
+Author
+------
+Written by Nguyen Thai Ngoc Duy <pclouds@gmail.com>
+
+GIT
+---
+Part of the linkgit:git[1] suite
5. by using the --interactive or --patch switches with the 'commit' command
to decide one by one which files or hunks should be part of the commit,
- before finalizing the operation. See the ``Interactive Mode`` section of
+ before finalizing the operation. See the ``Interactive Mode'' section of
linkgit:git-add[1] to learn how to operate these modes.
The `--dry-run` option can be used to obtain a
your working tree are temporarily stored to a staging area
called the "index" with 'git add'. A file can be
reverted back, only in the index but not in the working tree,
-to that of the last commit with `git reset HEAD \-- <file>`,
+to that of the last commit with `git reset HEAD -- <file>`,
which effectively reverts 'git add' and prevents the changes to
this file from participating in the next commit. After building
the state to be committed incrementally with these commands,
'git-cvsserver' uses the Perl DBI module. Please also read
its documentation if changing these variables, especially
-about `DBI\->connect()`.
+about `DBI->connect()`.
gitcvs.dbname::
Database name. The exact meaning depends on the
[<git-rev-list-args>...]::
A list of arguments, acceptable to 'git rev-parse' and
'git rev-list', that specifies the specific objects and references
- to export. For example, `master{tilde}10..master` causes the
+ to export. For example, `master~10..master` causes the
current master reference to be exported along with all objects
added since its 10th ancestor commit.
options.
--cat-blob-fd=<fd>::
- Specify the file descriptor that will be written to
- when the `cat-blob` command is encountered in the stream.
- The default behaviour is to write to `stdout`.
+ Write responses to `cat-blob` and `ls` queries to the
+ file descriptor <fd> instead of `stdout`. Allows `progress`
+ output intended for the end-user to be separated from other
+ output.
--done::
Require a `done` command at the end of the stream.
----
from refs/heads/branch^0
----
-The `{caret}0` suffix is necessary as fast-import does not permit a branch to
+The `^0` suffix is necessary as fast-import does not permit a branch to
start from itself, and the branch is created in memory before the
-`from` command is even read from the input. Adding `{caret}0` will force
+`from` command is even read from the input. Adding `^0` will force
fast-import to resolve the commit through Git's revision parsing library,
rather than its internal branch table, thereby loading in the
existing value of the branch.
accepted. In particular, the `cat-blob` command can be used in the
middle of a commit but not in the middle of a `data` command.
+See ``Responses To Commands'' below for details about how to read
+this output safely.
+
`ls`
~~~~
Prints information about the object at a path to a file descriptor
See `filemodify` above for a detailed description of `<path>`.
-Output uses the same format as `git ls-tree <tree> {litdd} <path>`:
+Output uses the same format as `git ls-tree <tree> -- <path>`:
====
<mode> SP ('blob' | 'tree' | 'commit') SP <dataref> HT <path> LF
missing SP <path> LF
====
+See ``Responses To Commands'' below for details about how to read
+this output safely.
+
`feature`
~~~~~~~~~
Require that fast-import supports the specified feature, or abort if
in use, the `done` command is mandatory and marks the end of the
stream.
+Responses To Commands
+---------------------
+New objects written by fast-import are not available immediately.
+Most fast-import commands have no visible effect until the next
+checkpoint (or completion). The frontend can send commands to
+fill fast-import's input pipe without worrying about how quickly
+they will take effect, which improves performance by simplifying
+scheduling.
+
+For some frontends, though, it is useful to be able to read back
+data from the current repository as it is being updated (for
+example when the source material describes objects in terms of
+patches to be applied to previously imported objects). This can
+be accomplished by connecting the frontend and fast-import via
+bidirectional pipes:
+
+====
+ mkfifo fast-import-output
+ frontend <fast-import-output |
+ git fast-import >fast-import-output
+====
+
+A frontend set up this way can use `progress`, `ls`, and `cat-blob`
+commands to read information from the import in progress.
+
+To avoid deadlock, such frontends must completely consume any
+pending output from `progress`, `ls`, and `cat-blob` before
+performing writes to fast-import that might block.
+
Crash Reports
-------------
If fast-import is supplied invalid input it will terminate with a
--index-filter <command>::
This is the filter for rewriting the index. It is similar to the
tree filter but does not check out the tree, which makes it much
- faster. Frequently used with `git rm \--cached
- \--ignore-unmatch ...`, see EXAMPLES below. For hairy
+ faster. Frequently used with `git rm --cached
+ --ignore-unmatch ...`, see EXAMPLES below. For hairy
cases, see linkgit:git-update-index[1].
--parent-filter <command>::
a simple `rm filename` will fail for that tree and commit.
Thus you may instead want to use `rm -f filename` as the script.
-Using `\--index-filter` with 'git rm' yields a significantly faster
+Using `--index-filter` with 'git rm' yields a significantly faster
version. Like with using `rm filename`, `git rm --cached filename`
will fail if the file is absent from the tree of a commit. If you
want to "completely forget" a file, it does not matter when it entered
-history, so we also add `\--ignore-unmatch`:
+history, so we also add `--ignore-unmatch`:
--------------------------------------------------------------------------
git filter-branch --index-filter 'git rm --cached --ignore-unmatch filename' HEAD
-------------------------------------------------------
Thus you can, e.g., turn a library subdirectory into a repository of
-its own. Note the `\--` that separates 'filter-branch' options from
-revision options, and the `\--all` to rewrite all branches and tags.
+its own. Note the `--` that separates 'filter-branch' options from
+revision options, and the `--all` to rewrite all branches and tags.
To set a commit (which typically is at the tip of another
history) to be the parent of the current initial commit, in
------------------------------------
git-filter-branch is often used to get rid of a subset of files,
-usually with some combination of `\--index-filter` and
-`\--subdirectory-filter`. People expect the resulting repository to
+usually with some combination of `--index-filter` and
+`--subdirectory-filter`. People expect the resulting repository to
be smaller than the original, but you need a few more steps to
actually make it smaller, because git tries hard not to lose your
objects until you tell it to. First make sure that:
* You really removed all variants of a filename, if a blob was moved
- over its lifetime. `git log \--name-only \--follow \--all \--
- filename` can help you find renames.
+ over its lifetime. `git log --name-only --follow --all -- filename`
+ can help you find renames.
-* You really filtered all refs: use `\--tag-name-filter cat \--
- \--all` when calling git-filter-branch.
+* You really filtered all refs: use `--tag-name-filter cat -- --all`
+ when calling git-filter-branch.
Then there are two ways to get a smaller repository. A safer way is
to clone, that keeps your original intact.
-* Clone it with `git clone +++file:///path/to/repo+++`. The clone
+* Clone it with `git clone file:///path/to/repo`. The clone
will not have the removed objects. See linkgit:git-clone[1]. (Note
that cloning with a plain path just hardlinks everything!)
warned.
* Remove the original refs backed up by git-filter-branch: say `git
- for-each-ref \--format="%(refname)" refs/original/ | xargs -n 1 git
+ for-each-ref --format="%(refname)" refs/original/ | xargs -n 1 git
update-ref -d`.
-* Expire all reflogs with `git reflog expire \--expire=now \--all`.
+* Expire all reflogs with `git reflog expire --expire=now --all`.
-* Garbage collect all unreferenced objects with `git gc \--prune=now`
+* Garbage collect all unreferenced objects with `git gc --prune=now`
(or if your git-gc is not new enough to support arguments to
- `\--prune`, use `git repack -ad; git prune` instead).
+ `--prune`, use `git repack -ad; git prune` instead).
GIT
---
The first rule takes precedence in the case of a single <commit>. To
apply the second rule, i.e., format everything since the beginning of
history up until <commit>, use the '\--root' option: `git format-patch
-\--root <commit>`. If you want to format only <commit> itself, you
+--root <commit>`. If you want to format only <commit> itself, you
can do this with `git format-patch -1 <commit>`.
By default, each output file is numbered sequentially from 1, and uses the
The optional <style> argument can be either `shallow` or `deep`.
'shallow' threading makes every mail a reply to the head of the
series, where the head is chosen from the cover letter, the
-`\--in-reply-to`, and the first patch mail, in this order. 'deep'
+`--in-reply-to`, and the first patch mail, in this order. 'deep'
threading makes every mail a reply to the previous one.
+
The default is `--no-thread`, unless the 'format.thread' configuration
can be set to indicate how long historical reflog entries which
are not part of the current branch should remain available in
this repository. These types of entries are generally created as
-a result of using `git commit \--amend` or `git rebase` and are the
+a result of using `git commit --amend` or `git rebase` and are the
commits prior to the amend or rebase occurring. Since these changes
are not part of the current project most users will want to expire
them sooner. This option defaults to '30 days'.
Examples
--------
-`git grep {apostrophe}time_t{apostrophe} \-- {apostrophe}*.[ch]{apostrophe}`::
+`git grep 'time_t' -- '*.[ch]'`::
Looks for `time_t` in all tracked .c and .h files in the working
directory and its subdirectories.
-`git grep -e {apostrophe}#define{apostrophe} --and \( -e MAX_PATH -e PATH_MAX \)`::
+`git grep -e '#define' --and \( -e MAX_PATH -e PATH_MAX \)`::
Looks for a line that has `#define` and either `MAX_PATH` or
`PATH_MAX`.
Show all commits since version 'v2.6.12' that changed any file
in the include/scsi or drivers/scsi subdirectories
-`git log --since="2 weeks ago" \-- gitk`::
+`git log --since="2 weeks ago" -- gitk`::
Show the changes during the last two weeks to the file 'gitk'.
The "--" is necessary to avoid confusion with the *branch* named
second object). This subcommand is equivalent to:
`git notes add [-f] -C $(git notes list <from-object>) <to-object>`
+
-In `\--stdin` mode, take lines in the format
+In `--stdin` mode, take lines in the format
+
----------
<from-object> SP <to-object> [ SP <rest> ] LF
EXAMPLE
-------
-* Create an alias for 'git p4', using the full path to the 'git-p4'
- script if needed:
-+
-------------
-$ git config --global alias.p4 '!git-p4'
-------------
-
* Clone a repository:
+
------------
General options
~~~~~~~~~~~~~~~
-All commands except clone accept this option.
+All commands except clone accept these options.
--git-dir <dir>::
Set the 'GIT_DIR' environment variable. See linkgit:git[1].
+--verbose::
+ Provide more progress information.
+
Sync options
~~~~~~~~~~~~
These options can be used in the initial 'clone' as well as in
--silent::
Do not print any progress information.
---verbose::
- Provide more progress information.
-
--detect-labels::
Query p4 for labels associated with the depot paths, and add
- them as tags in git.
+	them as tags in git. This is of limited usefulness, as it only
+	imports labels associated with new changelists. Deprecated.
+
+--import-labels::
+ Import labels from p4 into git.
--import-local::
By default, p4 branches are stored in 'refs/remotes/p4/',
~~~~~~~~~~~~~~
These options can be used to modify 'git p4 submit' behavior.
---verbose::
- Provide more progress information.
-
--origin <commit>::
Upstream location from which commits are identified to submit to
p4. By default, this is the most recent p4 commit reachable
Re-author p4 changes before submitting to p4. This option
requires p4 admin privileges.
+--export-labels::
+ Export tags from git as p4 labels. Tags found in git are applied
+ to the perforce working directory.
+
+Rebase options
+~~~~~~~~~~~~~~
+These options can be used to modify 'git p4 rebase' behavior.
+
+--import-labels::
+ Import p4 labels.
DEPOT PATH SYNTAX
-----------------
work properly; the submit command looks only at the variable and does
not have a command-line option.
-The full syntax for a p4 view is documented in 'p4 help views'. Git-p4
+The full syntax for a p4 view is documented in 'p4 help views'. 'Git p4'
knows only a subset of the view syntax. It understands multi-line
mappings, overlays with '+', exclusions with '-' and double-quotes
-around whitespace. Of the possible wildcards, git-p4 only handles
-'...', and only when it is at the end of the path. Git-p4 will complain
+around whitespace. Of the possible wildcards, 'git p4' only handles
+'...', and only when it is at the end of the path. 'Git p4' will complain
if it encounters an unhandled wildcard.
Bugs in the implementation of overlap mappings exist. If multiple depot
paths map through overlays to the same location in the repository,
-git-p4 can choose the wrong one. This is hard to solve without
-dedicating a client spec just for git-p4.
+'git p4' can choose the wrong one. This is hard to solve without
+dedicating a client spec just for 'git p4'.
-The name of the client can be given to git-p4 in multiple ways. The
+The name of the client can be given to 'git p4' in multiple ways. The
variable 'git-p4.client' takes precedence if it exists. Otherwise,
normal p4 mechanisms of determining the client are used: environment
variable P4CLIENT, a file referenced by P4CONFIG, or the local host name.
enabled. Each entry should be a pair of branch names separated
by a colon (:). This example declares that both branchA and
branchB were created from main:
+
-------------
git config git-p4.branchList main:branchA
git config --add git-p4.branchList main:branchB
-------------
+git-p4.ignoredP4Labels::
+ List of p4 labels to ignore. This is built automatically as
+ unimportable labels are discovered.
+
+git-p4.importLabels::
+ Import p4 labels into git, as per --import-labels.
+
+git-p4.labelImportRegexp::
+ Only p4 labels matching this regular expression will be imported. The
+ default value is '[a-zA-Z0-9_\-.]+$'.
+
git-p4.useClientSpec::
Specify that the p4 client spec should be used to identify p4
depot paths of interest. This is equivalent to specifying the
submission regardless.
git-p4.attemptRCSCleanup:
- If enabled, 'git p4 submit' will attempt to cleanup RCS keywords
- ($Header$, etc). These would otherwise cause merge conflicts and prevent
- the submit going ahead. This option should be considered experimental at
- present.
+	If enabled, 'git p4 submit' will attempt to clean up RCS keywords
+	($Header$, etc). These would otherwise cause merge conflicts and
+	prevent the submit from going ahead. This option should be
+	considered experimental at present.
+
+git-p4.exportLabels::
+ Export git tags to p4 labels, as per --export-labels.
+
+git-p4.labelExportRegexp::
+ Only p4 labels matching this regular expression will be exported. The
+ default value is '[a-zA-Z0-9_\-.]+$'.
IMPLEMENTATION DETAILS
----------------------
A recommended practice to deal with a repository with too many
refs is to pack its refs with `--all --prune` once, and
-occasionally run `git pack-refs \--prune`. Tags are by
+occasionally run `git pack-refs --prune`. Tags are by
definition stationary and are not expected to change. Branch
heads will be packed with the initial `pack-refs --all`, but
only the currently active branch heads will become unpacked,
+
See `pull.rebase`, `branch.<name>.rebase` and `branch.autosetuprebase` in
linkgit:git-config[1] if you want to make `git pull` always use
-`{litdd}rebase` instead of merging.
+`--rebase` instead of merging.
+
[NOTE]
This is a potentially _dangerous_ mode of operation.
<refspec>...::
The format of a <refspec> parameter is an optional plus
- `{plus}`, followed by the source ref <src>, followed
+ `+`, followed by the source ref <src>, followed
by a colon `:`, followed by the destination ref <dst>.
It is used to specify with what <src> object the <dst> ref
in the remote repository is to be updated.
+
The object referenced by <src> is used to update the <dst> reference
on the remote side, but by default this is only allowed if the
-update can fast-forward <dst>. By having the optional leading `{plus}`,
+update can fast-forward <dst>. By having the optional leading `+`,
you can tell git to update the <dst> ref even when the update is not a
fast-forward. This does *not* attempt to merge <src> into <dst>. See
EXAMPLES below for details.
Pushing an empty <src> allows you to delete the <dst> ref from
the remote repository.
+
-The special refspec `:` (or `{plus}:` to allow non-fast-forward updates)
+The special refspec `:` (or `+:` to allow non-fast-forward updates)
directs git to push "matching" branches: for every branch that exists on
the local side, the remote side is updated if a branch of the same name
already exists on the remote side. This is the default operation mode
Remove remote branches that don't have a local counterpart. For example
a remote branch `tmp` will be removed if a local branch with the same
name doesn't exist any more. This also respects refspecs, e.g.
- `git push --prune remote refs/heads/{asterisk}:refs/tmp/{asterisk}` would
+ `git push --prune remote refs/heads/*:refs/tmp/*` would
make sure that remote `refs/tmp/foo` will be removed if `refs/heads/foo`
doesn't exist.
is specified. This flag forces progress status even if the
standard error stream is not directed to a terminal.
---recurse-submodules=check::
- Check whether all submodule commits used by the revisions to be
- pushed are available on a remote tracking branch. Otherwise the
- push will be aborted and the command will exit with non-zero status.
+--recurse-submodules=check|on-demand::
+	Make sure all submodule commits used by the revisions to be
+	pushed are available on a remote-tracking branch. If 'check' is
+	used, git will verify that all submodule commits that changed in
+	the revisions to be pushed are available on at least one remote
+	of the submodule. If any commits are missing, the push will be
+	aborted and exit with a non-zero status. If 'on-demand' is used,
+	all submodules that changed in the revisions to be pushed will
+	be pushed. If on-demand was not able to push all necessary
+	revisions, it will also be aborted and exit with a non-zero status.
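++
+For example, `git push --recurse-submodules=on-demand` (an illustrative
+invocation) pushes the submodule commits needed by the superproject
+revisions being pushed, then pushes the superproject itself.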
include::urls-remotes.txt[]
flag::
A single character indicating the status of the ref:
(space);; for a successfully pushed fast-forward;
-`{plus}`;; for a successful forced update;
+`+`;; for a successful forced update;
`-`;; for a successfully deleted ref;
`*`;; for a successfully pushed new ref;
`!`;; for a ref that was rejected or failed to push; and
For a successfully pushed ref, the summary shows the old and new
values of the ref in a form suitable for using as an argument to
`git log` (this is `<old>..<new>` in most cases, and
- `<old>\...<new>` for forced non-fast-forward updates).
+ `<old>...<new>` for forced non-fast-forward updates).
+
For a failed update, more details are given:
+
Find a ref that matches `experimental` in the `origin` repository
(e.g. `refs/heads/experimental`), and delete it.
-`git push origin {plus}dev:master`::
+`git push origin +dev:master`::
Update the origin repository's master branch with the dev branch,
allowing non-fast-forward updates. *This can leave unreferenced
commits dangling in the origin repository.* Consider the
will be reset to where it was when the rebase operation was
started.
+--keep-empty::
+	Keep the commits that do not change anything from their
+	parents in the result.
+
--skip::
Restart the rebasing process by skipping the current patch.
-X <strategy-option>::
--strategy-option=<strategy-option>::
Pass the <strategy-option> through to the merge strategy.
- This implies `\--merge` and, if no strategy has been
+ This implies `--merge` and, if no strategy has been
specified, `-s recursive`. Note the reversal of 'ours' and
'theirs' as noted in above for the `-m` option.
Hard case: The changes are not the same.::
This happens if the 'subsystem' rebase had conflicts, or used
- `\--interactive` to omit, edit, squash, or fixup commits; or
- if the upstream used one of `commit \--amend`, `reset`, or
+ `--interactive` to omit, edit, squash, or fixup commits; or
+ if the upstream used one of `commit --amend`, `reset`, or
`filter-branch`.
NOTE: While an "easy case recovery" sometimes appears to be successful
even in the hard case, it may have unintended consequences. For
example, a commit that was removed via `git rebase
- \--interactive` will be **resurrected**!
+ --interactive` will be **resurrected**!
The idea is to manually tell 'git rebase' "where the old 'subsystem'
ended and your 'topic' began", that is, what the old merge-base
of the old 'subsystem', for example:
* With the 'subsystem' reflog: after 'git fetch', the old tip of
- 'subsystem' is at `subsystem@\{1}`. Subsequent fetches will
+ 'subsystem' is at `subsystem@{1}`. Subsequent fetches will
increase the number. (See linkgit:git-reflog[1].)
* Relative to the tip of 'topic': knowing that your 'topic' has three
see linkgit:git-log[1].
The reflog is useful in various git commands, to specify the old value
-of a reference. For example, `HEAD@\{2\}` means "where HEAD used to be
-two moves ago", `master@\{one.week.ago\}` means "where master used to
+of a reference. For example, `HEAD@{2}` means "where HEAD used to be
+two moves ago", `master@{one.week.ago}` means "where master used to
point to one week ago", and so on. See linkgit:gitrevisions[7] for
more details.
To delete single entries from the reflog, use the subcommand "delete"
-and specify the _exact_ entry (e.g. "`git reflog delete master@\{2\}`").
+and specify the _exact_ entry (e.g. "`git reflog delete master@{2}`").
OPTIONS
capability use this.
+
A helper advertising the capability
-`refspec refs/heads/{asterisk}:refs/svn/origin/branches/{asterisk}`
+`refspec refs/heads/*:refs/svn/origin/branches/*`
is saying that, when it is asked to `import refs/heads/topic`, the
stream it outputs will update the `refs/svn/origin/branches/topic`
ref.
applicable refspec takes precedence. The left-hand of refspecs
advertised with this capability must cover all refs reported by
the list command. If no 'refspec' capability is advertised,
-there is an implied `refspec {asterisk}:{asterisk}`.
+there is an implied `refspec *:*`.
Capabilities for Pushing
~~~~~~~~~~~~~~~~~~~~~~~~
This modifies the 'import' capability.
+
A helper advertising
-`refspec refs/heads/{asterisk}:refs/svn/origin/branches/{asterisk}`
+`refspec refs/heads/*:refs/svn/origin/branches/*`
in its capabilities is saying that, when it handles
`import refs/heads/topic`, the stream it outputs will update the
`refs/svn/origin/branches/topic` ref.
applicable refspec takes precedence. The left-hand of refspecs
advertised with this capability must cover all refs reported by
the list command. If no 'refspec' capability is advertised,
-there is an implied `refspec {asterisk}:{asterisk}`.
+there is an implied `refspec *:*`.
INVOCATION
----------
With `-m <master>` option, a symbolic-ref `refs/remotes/<name>/HEAD` is set
up to point at remote's `<master>` branch. See also the set-head command.
+
-When a fetch mirror is created with `\--mirror=fetch`, the refs will not
+When a fetch mirror is created with `--mirror=fetch`, the refs will not
be stored in the 'refs/remotes/' namespace, but rather everything in
'refs/' on the remote will be directly mirrored into 'refs/' in the
local repository. This option only makes sense in bare repositories,
because a fetch would overwrite any local commits.
+
-When a push mirror is created with `\--mirror=push`, then `git push`
-will always behave as if `\--mirror` was passed.
+When a push mirror is created with `--mirror=push`, then `git push`
+will always behave as if `--mirror` was passed.
'rename'::
The commits marked with `*` touch the same area in the same
file; you need to resolve the conflicts when creating the commit
-marked with `{plus}`. Then you can test the result to make sure your
+marked with `+`. Then you can test the result to make sure your
work-in-progress still works with what is in the latest master.
After this test merge, there are two ways to continue your work
on the topic. The easiest is to build on top of the test merge
-commit `{plus}`, and when your work in the topic branch is finally
+commit `+`, and when your work in the topic branch is finally
ready, pull the topic branch into master, and/or ask the
upstream to pull from you. By that time, however, the master or
-the upstream might have been advanced since the test merge `{plus}`,
+the upstream might have been advanced since the test merge `+`,
in which case the final commit graph would look like this:
------------
+
This means that `git reset -p` is the opposite of `git add -p`, i.e.
you can use it to selectively reset hunks. See the ``Interactive Mode''
-section of linkgit:git-add[1] to learn how to operate the `\--patch` mode.
+section of linkgit:git-add[1] to learn how to operate the `--patch` mode.
'git reset' --<mode> [<commit>]::
This form resets the current branch head to <commit> and
+
If a `pattern` is given, only refs matching the given shell glob are
shown. If the pattern does not contain a globbing character (`?`,
-`{asterisk}`, or `[`), it is turned into a prefix match by
-appending `/{asterisk}`.
+`*`, or `[`), it is turned into a prefix match by appending `/*`.
--glob=pattern::
Show all refs matching the shell glob pattern `pattern`. If
the pattern does not start with `refs/`, this is automatically
prepended. If the pattern does not contain a globbing
- character (`?`, `{asterisk}`, or `[`), it is turned into a prefix
- match by appending `/{asterisk}`.
+ character (`?`, `*`, or `[`), it is turned into a prefix
+ match by appending `/*`.
--show-toplevel::
Show the absolute path of the top-level directory.
should see linkgit:git-reset[1], particularly the '--hard' option. If
you want to extract specific files as they were in another commit, you
should see linkgit:git-checkout[1], specifically the `git checkout
-<commit> \-- <filename>` syntax. Take care with these alternatives as
+<commit> -- <filename>` syntax. Take care with these alternatives as
both will discard uncommitted changes in your working directory.
OPTIONS
Revert the changes specified by the fourth last commit in HEAD
and create a new commit with the reverted changes.
-`git revert -n master{tilde}5..master{tilde}2`::
+`git revert -n master~5..master~2`::
Revert the changes done by commits from the fifth last commit
in master (included) to the third last commit in master
File globbing matches across directory boundaries. Thus, given
two directories `d` and `d2`, there is a difference between
-using `git rm {apostrophe}d{asterisk}{apostrophe}` and
-`git rm {apostrophe}d/{asterisk}{apostrophe}`, as the former will
+using `git rm 'd*'` and `git rm 'd/*'`, as the former will
also remove all of directory `d2`.
REMOVING FILES THAT HAVE DISAPPEARED FROM THE FILESYSTEM
--format[=<format>]::
Instead of the commit subject, use some other information to
describe each commit. '<format>' can be any string accepted
- by the `--format` option of 'git log', such as '{asterisk} [%h] %s'.
+ by the `--format` option of 'git log', such as '* [%h] %s'.
(See the "PRETTY FORMATS" section of linkgit:git-log[1].)
Each pretty-printed commit will be rewrapped before it is shown.
--exclude-existing[=<pattern>]::
Make 'git show-ref' act as a filter that reads refs from stdin of the
- form "`{caret}(?:<anything>\s)?<refname>(?:{backslash}{caret}{})?$`"
+ form "`^(?:<anything>\s)?<refname>(?:\^{})?$`"
and performs the following actions on each:
(1) strip "{caret}{}" at the end of line if any;
(2) ignore if pattern is provided and does not head-match refname;
Shows the tag `v1.0.0`, along with the object the tags
points at.
-`git show v1.0.0^\{tree\}`::
+`git show v1.0.0^{tree}`::
Shows the tree pointed to by the tag `v1.0.0`.
-`git show -s --format=%s v1.0.0^\{commit\}`::
+`git show -s --format=%s v1.0.0^{commit}`::
Shows the subject of the commit pointed to by the
tag `v1.0.0`.
The latest stash you created is stored in `refs/stash`; older
stashes are found in the reflog of this reference and can be named using
-the usual reflog syntax (e.g. `stash@\{0}` is the most recently
-created stash, `stash@\{1}` is the one before it, `stash@\{2.hours.ago}`
+the usual reflog syntax (e.g. `stash@{0}` is the most recently
+created stash, `stash@{1}` is the one before it, `stash@{2.hours.ago}`
is also possible).
OPTIONS
of your repository, and its worktree contains only the changes you
selected interactively. The selected changes are then rolled back
from your worktree. See the ``Interactive Mode'' section of
-linkgit:git-add[1] to learn how to operate the `\--patch` mode.
+linkgit:git-add[1] to learn how to operate the `--patch` mode.
+
The `--patch` option implies `--keep-index`. You can use
`--no-keep-index` to override this.
list [<options>]::
List the stashes that you currently have. Each 'stash' is listed
- with its name (e.g. `stash@\{0}` is the latest stash, `stash@\{1}` is
+ with its name (e.g. `stash@{0}` is the latest stash, `stash@{1}` is
the one before, etc.), the name of the branch that was current when the
stash was made, and a short description of the commit the stash was
based on.
stashed state and its original parent. When no `<stash>` is given,
shows the latest one. By default, the command shows the diffstat, but
it will accept any format known to 'git diff' (e.g., `git stash show
- -p stash@\{1}` to view the second most recent stash in patch form).
+ -p stash@{1}` to view the second most recent stash in patch form).
pop [--index] [-q|--quiet] [<stash>]::
have conflicts (which are stored in the index, where you therefore can no
longer apply the changes as they were originally).
+
-When no `<stash>` is given, `stash@\{0}` is assumed, otherwise `<stash>` must
-be a reference of the form `stash@\{<revision>}`.
+When no `<stash>` is given, `stash@{0}` is assumed, otherwise `<stash>` must
+be a reference of the form `stash@{<revision>}`.
apply [--index] [-q|--quiet] [<stash>]::
drop [-q|--quiet] [<stash>]::
Remove a single stashed state from the stash list. When no `<stash>`
- is given, it removes the latest one. i.e. `stash@\{0}`, otherwise
+	is given, it removes the latest one, i.e. `stash@{0}`; otherwise
	`<stash>` must be a valid stash log reference of the form
- `stash@\{<revision>}`.
+ `stash@{<revision>}`.
create::
Terminate entries with NUL, instead of LF. This implies
the `--porcelain` output format if no other format is given.
+--column[=<options>]::
+--no-column::
+ Display untracked files in columns. See configuration variable
+	column.status for option syntax. `--column` and `--no-column`
+ without options are equivalent to 'always' and 'never'
+ respectively.
+
OUTPUT
------
XY PATH1 -> PATH2
-where `PATH1` is the path in the `HEAD`, and the ` \-> PATH2` part is
+where `PATH1` is the path in the `HEAD`, and the " `-> PATH2`" part is
shown only when `PATH1` corresponds to a different path in the
index/worktree (i.e. the file is renamed). The 'XY' is a two-letter
status code.
-The fields (including the `\->`) are separated from each other by a
+The fields (including the `->`) are separated from each other by a
single space. If a filename contains whitespace or other nonprintable
characters, that field will be quoted in the manner of a C string
literal: surrounded by ASCII double quote (34) characters, and with
<tagname> [<commit> | <object>]
'git tag' -d <tagname>...
'git tag' [-n[<num>]] -l [--contains <commit>] [--points-at <object>]
+ [--column[=<options>] | --no-column] [<pattern>...]
-	[<pattern>...]
'git tag' -v <tagname>...
using fnmatch(3)). Multiple patterns may be given; if any of
them matches, the tag is shown.
+--column[=<options>]::
+--no-column::
+ Display tag listing in columns. See configuration variable
+	column.tag for option syntax. `--column` and `--no-column`
+ without options are equivalent to 'always' and 'never' respectively.
++
+This option is only applicable when listing tags without annotation lines.
+
--contains <commit>::
Only list tags which contain the specified commit.
Create a tarball for v1.4.0 release.
-`git tar-tree v1.4.0{caret}\{tree\} git-1.4.0 | gzip >git-1.4.0.tar.gz`::
+`git tar-tree v1.4.0^{tree} git-1.4.0 | gzip >git-1.4.0.tar.gz`::
Create a tarball for v1.4.0 release, but without a
global extended pax header.
[--ignore-submodules]
[--really-refresh] [--unresolve] [--again | -g]
[--info-only] [--index-info]
- [-z] [--stdin]
+ [-z] [--stdin] [--index-version <n>]
[--verbose]
[--] [<file>...]
--verbose::
Report what is being added and removed from index.
+--index-version <n>::
+ Write the resulting index out in the named on-disk format version.
+ The current default version is 2.
+
-z::
Only meaningful with `--stdin` or `--index-info`; paths are
separated with NUL character instead of LF.
`$SOME_ENVIRONMENT_VARIABLE`, `"C:\Program Files\Vim\gvim.exe"
--nofork`. The order of preference is the `$GIT_EDITOR`
environment variable, then `core.editor` configuration, then
- `$VISUAL`, then `$EDITOR`, and then finally 'vi'.
+ `$VISUAL`, then `$EDITOR`, and then the default chosen at compile
+ time, which is usually 'vi'.
+ifdef::git-default-editor[]
+ The build you are using chose '{git-default-editor}' as the default.
+endif::git-default-editor[]
GIT_PAGER::
Text viewer for use by git commands (e.g., 'less'). The value
is meant to be interpreted by the shell. The order of preference
is the `$GIT_PAGER` environment variable, then `core.pager`
- configuration, then `$PAGER`, and then finally 'less'.
+ configuration, then `$PAGER`, and then the default chosen at
+ compile time (usually 'less').
+ifdef::git-default-pager[]
+ The build you are using chose '{git-default-pager}' as the default.
+endif::git-default-pager[]
Diagnostics
-----------
Show as patches the commits since version 'v2.6.12' that changed
any file in the include/scsi or drivers/scsi subdirectories
-`git whatchanged --since="2 weeks ago" \-- gitk`::
+`git whatchanged --since="2 weeks ago" -- gitk`::
Show the changes during the last two weeks to the file 'gitk'.
The "--" is necessary to avoid confusion with the *branch* named
are paths.
* When an argument can be misunderstood as either a revision or a path,
- they can be disambiguated by placing `\--` between them.
- E.g. `git diff \-- HEAD` is, "I have a file called HEAD in my work
+ they can be disambiguated by placing `--` between them.
+ E.g. `git diff -- HEAD` is, "I have a file called HEAD in my work
tree. Please show changes between the version I staged in the index
   and what I have in the work tree for that file", not "show the difference
between the HEAD commit and the work tree as a whole". You can say
- `git diff HEAD \--` to ask for the latter.
+ `git diff HEAD --` to ask for the latter.
- * Without disambiguating `\--`, git makes a reasonable guess, but errors
+ * Without disambiguating `--`, git makes a reasonable guess, but errors
   out and asks you to disambiguate when ambiguous. E.g. if you have a
file called HEAD in your work tree, `git diff HEAD` is ambiguous, and
- you have to say either `git diff HEAD \--` or `git diff \-- HEAD` to
+ you have to say either `git diff HEAD --` or `git diff -- HEAD` to
disambiguate.
When writing a script that is expected to handle random user-input, it is
a good practice to make it explicit which arguments are which by placing
-disambiguating `\--` at appropriate places.
+disambiguating `--` at appropriate places.
Here are the rules regarding the "flags" that you should follow when you are
scripting git:
program normally just takes a list of filenames you want to update, but
to avoid trivial mistakes, it refuses to add new entries to the index
(or remove existing ones) unless you explicitly tell it that you're
-adding a new entry with the `\--add` flag (or removing an entry with the
-`\--remove`) flag.
+adding a new entry with the `--add` flag (or removing an entry with the
+`--remove` flag).
So to populate the index with the two files you just created, you can do
which ends up doing the above for you.
In other words, 'git diff-index' normally compares a tree against the
-working tree, but when given the `\--cached` flag, it is told to
+working tree, but when given the `--cached` flag, it is told to
instead compare against just the index cache contents, and ignore the
current working tree state entirely. Since we just wrote the index
-file to HEAD, doing `git diff-index \--cached -p HEAD` should thus return
+file to HEAD, doing `git diff-index --cached -p HEAD` should thus return
an empty set of differences, and that's exactly what it does.
[NOTE]
comparisons, and saying that it compares a tree against the working
tree is thus not strictly accurate. In particular, the list of
files to compare (the "meta-data") *always* comes from the index file,
-regardless of whether the `\--cached` flag is used or not. The `\--cached`
+regardless of whether the `--cached` flag is used or not. The `--cached`
flag really only determines whether the file *contents* to be compared
come from the working tree or not.
$ git update-index hello
------------------------------------------------
-(note how we didn't need the `\--add` flag this time, since git knew
+(note how we didn't need the `--add` flag this time, since git knew
about the file already).
Note what happens to the different 'git diff-{asterisk}' versions here.
When using the above two commands, the initial commit will be shown.
If this is a problem because it is huge, you can hide it by setting
the log.showroot configuration variable to false. Having this, you
-can still show it for each command just adding the `\--root` option,
+can still show it for each command by just adding the `--root` option,
which is a flag for 'git diff-tree' accepted by both commands.
With that, you should now have some inkling of what git does, and
$ gitk --all
----------------
-will show you graphically both of your branches (that's what the `\--all`
+will show you graphically both of your branches (that's what the `--all`
means: normally it will just show you your current `HEAD`) and their
histories. You can also see exactly how they came to be from a common
source.
(which is correct, so never mind), and you can write a small merge
message about your adventures in 'git merge'-land.
-After you're done, start up `gitk \--all` to see graphically what the
+After you're done, start up `gitk --all` to see graphically what the
history looks like. Notice that `mybranch` still exists, and you can
switch to it, and continue to work with it if you want to. The
`mybranch` branch will not contain the merge, but next time you merge it
The first two lines indicate that it is showing the two branches
and the first line of the commit log message from their
top-of-the-tree commits, you are currently on `master` branch
-(notice the asterisk `{asterisk}` character), and the first column for
+(notice the asterisk `*` character), and the first column for
the later output lines is used to show commits contained in the
`master` branch, and the second column for the `mybranch`
branch. Three commits are shown along with their log messages.
-All of them have non blank characters in the first column (`{asterisk}`
+All of them have non-blank characters in the first column (`*`
shows an ordinary commit on the current branch, `-` is a merge commit), which
means they are now part of the `master` branch. Only the "Some
work" commit has the plus `+` character in the second column,
----------------
Updating from ae3a2da... to a80b4aa....
Fast-forward (no commit created; -m option ignored)
- example | 1 +
- hello | 1 +
+ example | 1 +
+ hello | 1 +
2 files changed, 2 insertions(+)
----------------
the tree of your branch to that of the `master` branch. This is
often called 'fast-forward' merge.
-You can run `gitk \--all` again to see how the commit ancestry
+You can run `gitk --all` again to see what the commit ancestry
looks like, or run 'show-branch', which tells you this.
------------------------------------------------
fairly quickly, leaving only a handful of real changes in non-zero
stages.
-To look at only non-zero stages, use `\--unmerged` flag:
+To look at only non-zero stages, use the `--unmerged` flag:
------------
$ git ls-files --unmerged
directory.
[NOTE]
-You will see two files, `pack-{asterisk}.pack` and `pack-{asterisk}.idx`,
+You will see two files, `pack-*.pack` and `pack-*.idx`,
in `.git/objects/pack` directory. They are closely related to
each other, and if you ever copy them by hand to a different
repository for whatever reason, you should make sure you copy
---------------------
Options for a credential context can be configured either in
-`credential.\*` (which applies to all credentials), or
-`credential.<url>.\*`, where <url> matches the context as described
+`credential.*` (which applies to all credentials), or
+`credential.<url>.*`, where <url> matches the context as described
above.
The following options are available in either location:
number after the "-M" or "-C" option (e.g. "-M8" to tell it to use
8/10 = 80%).
-Note. When the "-C" option is used with `\--find-copies-harder`
+Note. When the "-C" option is used with `--find-copies-harder`
option, 'git diff-{asterisk}' commands feed unmodified filepairs to
diffcore mechanism as well as modified ones. This lets the copy
detector consider unmodified files as copy source candidates at
-the expense of making it slower. Without `\--find-copies-harder`,
+the expense of making it slower. Without `--find-copies-harder`,
'git diff-{asterisk}' commands can detect copies only if the file that was
copied happened to have been modified in the same changeset.
This transformation is used to find filepairs that represent
changes that touch a specified string, and is controlled by the
--S option and the `\--pickaxe-all` option to the 'git diff-{asterisk}'
+-S option and the `--pickaxe-all` option to the 'git diff-*'
commands.
When diffcore-pickaxe is in use, it checks if there are
"the string appeared in this changeset". It also checks for the
opposite case that loses the specified string.
-When `\--pickaxe-all` is not in effect, diffcore-pickaxe leaves
+When `--pickaxe-all` is not in effect, diffcore-pickaxe leaves
only such filepairs that touch the specified string in its
-output. When `\--pickaxe-all` is used, diffcore-pickaxe leaves all
+output. When `--pickaxe-all` is used, diffcore-pickaxe leaves all
filepairs intact if there is such a filepair, or makes the
output empty otherwise. The latter behaviour is designed to
make reviewing of the changes in the context of the whole
~~~~~~~~~~
This hook is invoked by 'git commit', and can be bypassed
-with `\--no-verify` option. It takes no parameter, and is
+with the `--no-verify` option. It takes no parameters, and is
invoked before obtaining the proposed commit log message and
making a commit. Exiting with non-zero status from this script
causes the 'git commit' to abort.
configuration option `commit.template` is set); `merge` (if the
commit is a merge or a `.git/MERGE_MSG` file exists); `squash`
(if a `.git/SQUASH_MSG` file exists); or `commit`, followed by
-a commit SHA1 (if a `-c`, `-C` or `\--amend` option was given).
+a commit SHA1 (if a `-c`, `-C` or `--amend` option was given).
If the exit status is non-zero, 'git commit' will abort.
The purpose of the hook is to edit the message file in place, and
-it is not suppressed by the `\--no-verify` option. A non-zero exit
+it is not suppressed by the `--no-verify` option. A non-zero exit
means a failure of the hook and aborts the commit. It should not
be used as a replacement for the pre-commit hook.
~~~~~~~~~~
This hook is invoked by 'git commit', and can be bypassed
-with `\--no-verify` option. It takes a single parameter, the
+with the `--no-verify` option. It takes a single parameter, the
name of the file that holds the proposed commit log message.
Exiting with non-zero status causes the 'git commit' to
abort.
Set `$maxload` to undefined value (`undef`) to turn this feature off.
The default value is 300.
+$omit_age_column::
+	If true, omit the column with the date of the most recent commit on the
+ projects list page. It can save a bit of I/O and a fork per repository.
+
+$omit_owner::
+	If true, prevents displaying information about the repository owner.
+
$per_request_config::
	If this is set to a code reference, it will be run once for each request.
You can set parts of configuration that change per session this way.
forks::
If this feature is enabled, gitweb considers projects in
subdirectories of project root (basename) to be forks of existing
- projects. For each project `$projname.git`, projects in the
- `$projname/` directory and its subdirectories will not be
- shown in the main projects list. Instead, a \'+' mark is shown
- next to `$projname`, which links to a "forks" view that lists all
- the forks (all projects in `$projname/` subdirectory). Additionally
+ projects. For each project +$projname.git+, projects in the
+ +$projname/+ directory and its subdirectories will not be
+ shown in the main projects list. Instead, a \'\+' mark is shown
+ next to +$projname+, which links to a "forks" view that lists all
+ the forks (all projects in +$projname/+ subdirectory). Additionally
a "forks" view for a project is linked from project summary page.
+
-If the project list is taken from a file (`$projects_list` points to a
+If the project list is taken from a file (+$projects_list+ points to a
file), forks are only recognized if they are listed after the main project
in that file.
+
beginning. It is always easier to squash a few commits together than
to split one big commit into several. Don't be afraid of making too
small or imperfect steps along the way. You can always go back later
-and edit the commits with `git rebase \--interactive` before you
-publish them. You can use `git stash save \--keep-index` to run the
+and edit the commits with `git rebase --interactive` before you
+publish them. You can use `git stash save --keep-index` to run the
test suite independent of other uncommitted changes; see the EXAMPLES
section of linkgit:git-stash[1].
- '%b': body
- '%B': raw body (unwrapped subject and body)
- '%N': commit notes
-- '%gD': reflog selector, e.g., `refs/stash@\{1\}`
-- '%gd': shortened reflog selector, e.g., `stash@\{1\}`
+- '%gD': reflog selector, e.g., `refs/stash@{1}`
+- '%gd': shortened reflog selector, e.g., `stash@{1}`
- '%gn': reflog identity name
- '%gN': reflog identity name (respecting .mailmap, see linkgit:git-shortlog[1] or linkgit:git-blame[1])
- '%ge': reflog identity email
`git log -g`). The `%d` placeholder will use the "short" decoration
format if `--decorate` was not already provided on the command line.
-If you add a `{plus}` (plus sign) after '%' of a placeholder, a line-feed
+If you add a `+` (plus sign) after '%' of a placeholder, a line-feed
is inserted immediately before the expansion if and only if the
placeholder expands to a non-empty string.
<refspec>::
The format of a <refspec> parameter is an optional plus
- `{plus}`, followed by the source ref <src>, followed
+ `+`, followed by the source ref <src>, followed
by a colon `:`, followed by the destination ref <dst>.
+
The remote ref that matches <src>
+
For example, `--cherry-pick --right-only A...B` omits those
commits from `B` which are in `A` or are patch-equivalent to a commit in
-`A`. In other words, this lists the `{plus}` commits from `git cherry A B`.
+`A`. In other words, this lists the `+` commits from `git cherry A B`.
More precisely, `--cherry-pick --right-only --no-merges` gives the exact
list.
`---------'
-----------------------------------------------------------------------
+
-Note the major differences in `N` and `P` over '\--full-history':
+Note the major differences in `N` and `P` over '--full-history':
+
--
* `N`'s parent list had `I` removed, because it is an ancestor of the
When we want to find out what commits in `M` are contaminated with the
bug introduced by `D` and need fixing, however, we might want to view
only the subset of 'D..M' that are actually descendants of `D`, i.e.
-excluding `C` and `K`. This is exactly what the '\--ancestry-path'
+excluding `C` and `K`. This is exactly what the '--ancestry-path'
option does. Applied to the 'D..M' range, it results in:
+
-----------------------------------------------------------------------
`argv_array_push`::
Push a copy of a string onto the end of the array.
+`argv_array_pushl`::
+ Push a list of strings onto the end of the array. The arguments
+ should be a list of `const char *` strings, terminated by a NULL
+ argument.
+
`argv_array_pushf`::
Format a string and push it onto the end of the array. This is a
convenience wrapper combining `strbuf_addf` and `argv_array_push`.
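
For orientation, here is a minimal sketch (not part of the patch itself) of how a caller might combine `argv_array_pushl` with the rest of the API; the helper name and the command being assembled are made up for illustration.

------------
#include "cache.h"
#include "argv-array.h"
#include "run-command.h"

static int show_file_log(const char *path)
{
	struct argv_array args = ARGV_ARRAY_INIT;
	int ret;

	/* push several arguments at once; the list must end with NULL */
	argv_array_pushl(&args, "log", "--oneline", "--", path, NULL);

	/* hand the assembled vector to run-command, then release it */
	ret = run_command_v_opt(args.argv, RUN_GIT_CMD);
	argv_array_clear(&args);
	return ret;
}
------------

Forgetting the trailing NULL is the classic mistake with a variadic interface like this, since the callee has no other way to know where the list ends.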
* There are basically two forms of options:
'Short options' consist of one dash (`-`) and one alphanumeric
character.
- 'Long options' begin with two dashes (`\--`) and some
+ 'Long options' begin with two dashes (`--`) and some
alphanumeric characters.
* Options are case-sensitive.
* 'sticked' and 'separate form' of options with arguments.
`-oArg` is sticked, `-o Arg` is separate form.
- `\--option=Arg` is sticked, `\--option Arg` is separate form.
+ `--option=Arg` is sticked, `--option Arg` is separate form.
* Long options may be 'abbreviated', as long as the abbreviation
is unambiguous.
* Short options may be bundled, e.g. `-a -b` can be specified as `-ab`.
* Boolean long options can be 'negated' (or 'unset') by prepending
- `no-`, e.g. `\--no-abbrev` instead of `\--abbrev`. Conversely,
+ `no-`, e.g. `--no-abbrev` instead of `--abbrev`. Conversely,
options that begin with `no-` can be 'negated' by removing it.
-* Options and non-option arguments can clearly be separated using the `\--`
- option, e.g. `-a -b \--option \-- \--this-is-a-file` indicates that
- `\--this-is-a-file` must not be processed as an option.
+* Options and non-option arguments can clearly be separated using the `--`
+ option, e.g. `-a -b --option -- --this-is-a-file` indicates that
+ `--this-is-a-file` must not be processed as an option.
Steps to parse options
----------------------
Flags are the bitwise-or of:
`PARSE_OPT_KEEP_DASHDASH`::
- Keep the `\--` that usually separates options from
+ Keep the `--` that usually separates options from
non-option arguments.
`PARSE_OPT_STOP_AT_NON_OPTION`::
There are some macros to easily define options:
`OPT__ABBREV(&int_var)`::
- Add `\--abbrev[=<n>]`.
+ Add `--abbrev[=<n>]`.
`OPT__COLOR(&int_var, description)`::
- Add `\--color[=<when>]` and `--no-color`.
+ Add `--color[=<when>]` and `--no-color`.
`OPT__DRY_RUN(&int_var, description)`::
- Add `-n, \--dry-run`.
+ Add `-n, --dry-run`.
`OPT__FORCE(&int_var, description)`::
- Add `-f, \--force`.
+ Add `-f, --force`.
`OPT__QUIET(&int_var, description)`::
- Add `-q, \--quiet`.
+ Add `-q, --quiet`.
`OPT__VERBOSE(&int_var, description)`::
- Add `-v, \--verbose`.
+ Add `-v, --verbose`.
`OPT_GROUP(description)`::
Start an option group. `description` is a short string that
If not stated otherwise, interpret the arguments as follows:
* `short` is a character for the short option
- (e.g. `{apostrophe}e{apostrophe}` for `-e`, use `0` to omit),
+ (e.g. `'e'` for `-e`, use `0` to omit),
* `long` is a string for the long option
- (e.g. `"example"` for `\--example`, use `NULL` to omit),
+ (e.g. `"example"` for `--example`, use `NULL` to omit),
* `int_var` is an integer variable,
The callback mechanism is as follows:
* Inside `func`, the only interesting member of the structure
- given by `opt` is the void pointer `opt\->value`.
- `\*opt\->value` will be the value that is saved into `var`, if you
+ given by `opt` is the void pointer `opt->value`.
+ `*opt->value` will be the value that is saved into `var`, if you
use `OPT_CALLBACK()`.
- For example, do `*(unsigned long *)opt\->value = 42;` to get 42
+ For example, do `*(unsigned long *)opt->value = 42;` to get 42
into an `unsigned long` variable.
* Return value `0` indicates success and non-zero return
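
To tie the pieces above together, here is a minimal, hypothetical sketch of a builtin that mixes one of the OPT__* macros with an `OPT_CALLBACK` storing its result through `opt->value`; the command name, option names and messages are invented for illustration.

------------
#include "cache.h"
#include "parse-options.h"

static const char * const demo_usage[] = {
	"git demo [-v] [--limit=<n>] [<path>...]",
	NULL
};

/* store the parsed number through opt->value, as described above */
static int limit_callback(const struct option *opt, const char *arg, int unset)
{
	if (unset)
		*(unsigned long *)opt->value = 0;
	else
		*(unsigned long *)opt->value = strtoul(arg, NULL, 10);
	return 0;	/* non-zero would signal a parse error */
}

int cmd_demo(int argc, const char **argv, const char *prefix)
{
	static unsigned long limit = 42;
	static int verbose;
	struct option options[] = {
		OPT__VERBOSE(&verbose, "be verbose"),
		OPT_CALLBACK(0, "limit", &limit, "n",
			     "stop after <n> items", limit_callback),
		OPT_END()
	};

	argc = parse_options(argc, argv, prefix, options, demo_usage, 0);
	/* argv now holds only the non-option arguments */
	return 0;
}
------------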
returning a `struct commit *` each time you call it. The end of the
revision list is indicated by returning a NULL pointer.
+`reset_revision_walk`::
+
+ Reset the flags used by the revision walking api. You can use
+	this to do multiple sequential revision walks.
+
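
As an illustration of the walking sequence (init, setup, prepare, get) together with the new `reset_revision_walk`, here is a small hedged sketch; the helper name is hypothetical and error handling is reduced to a minimum.

------------
#include "cache.h"
#include "commit.h"
#include "revision.h"

/*
 * Count the commits reachable from "ref".  Calling this twice in the
 * same process relies on reset_revision_walk() clearing the object
 * flags left behind by the previous traversal.
 */
static int count_commits(const char *ref)
{
	struct rev_info revs;
	struct commit *commit;
	const char *argv[] = { "rev-list", ref, NULL };
	int count = 0;

	init_revisions(&revs, NULL);
	setup_revisions(2, argv, &revs, NULL);
	if (prepare_revision_walk(&revs))
		die("revision walk setup failed");
	while ((commit = get_revision(&revs)) != NULL)
		count++;

	reset_revision_walk();	/* allow another walk later in this process */
	return count;
}
------------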
Data structures
---------------
are encoded in 7-bit ASCII and the encoding cannot contain a NUL
byte (iow, this is a UNIX pathname).
+ (Version 4) In version 4, the entry path name is prefix-compressed
+ relative to the path name for the previous entry (the very first
+ entry is encoded as if the path name for the previous entry is an
+ empty string). At the beginning of an entry, an integer N in the
+ variable width encoding (the same encoding as the offset is encoded
+ for OFS_DELTA pack entries; see pack-format.txt) is stored, followed
+ by a NUL-terminated string S. Removing N bytes from the end of the
+ path name for the previous entry, and replacing it with the string S
+ yields the path name for this entry.
+
1-8 nul bytes as necessary to pad the entry to a multiple of eight bytes
while keeping the name NUL-terminated.
+ (Version 4) In version 4, the padding after the pathname does not
+ exist.
+
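
To make the prefix-compression rule concrete, here is a small sketch of how a reader could reconstruct one version-4 path name from the previous entry's path. It assumes well-formed input, and the helper names (as well as the simplified varint decoder, which mirrors the encoding described above) are illustrative rather than the actual read-cache.c code.

------------
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* variable-width integer, most significant groups first (OFS_DELTA style) */
static uintmax_t decode_varint(const unsigned char **bufp)
{
	const unsigned char *buf = *bufp;
	unsigned char c = *buf++;
	uintmax_t val = c & 127;

	while (c & 128) {
		val += 1;
		c = *buf++;
		val = (val << 7) + (c & 127);
	}
	*bufp = buf;
	return val;
}

/* previous path minus its last N bytes, with the suffix S appended */
static char *expand_v4_name(const char *prev_name, const unsigned char **p)
{
	size_t prev_len = strlen(prev_name);
	uintmax_t strip = decode_varint(p);	/* N: bytes to drop */
	const char *suffix = (const char *)*p;	/* S: NUL-terminated */
	size_t keep = prev_len - strip;
	char *name = malloc(keep + strlen(suffix) + 1);

	if (!name)
		return NULL;
	memcpy(name, prev_name, keep);
	strcpy(name + keep, suffix);
	*p += strlen(suffix) + 1;		/* step past S and its NUL */
	return name;
}
------------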
== Extensions
=== Cached tree
. They cannot have ASCII control characters (i.e. bytes whose
values are lower than \040, or \177 `DEL`), space, tilde `~`,
- caret `{caret}`, colon `:`, question-mark `?`, asterisk `*`,
+ caret `^`, colon `:`, question-mark `?`, asterisk `*`,
or open bracket `[` anywhere.
. They cannot end with a slash `/` nor a dot `.`.
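
A tiny illustrative predicate for the per-character rules above (the real validation lives in refs.c as `check_refname_format()`, which also enforces the component-level rules):

------------
#include <string.h>

/* true if "c" may never appear anywhere in a refname */
static int bad_refname_char(unsigned char c)
{
	if (c < 040 || c == 0177)		/* ASCII controls and DEL */
		return 1;
	return !!strchr(" ~^:?*[", c);		/* space ~ ^ : ? * [ */
}
------------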
Reflogs
^^^^^^^
-Say you modify a branch with `linkgit:git-reset[1] --hard`, and then
+Say you modify a branch with +linkgit:git-reset[1] \--hard+, and then
realize that the branch was the only reference you had to that point in
history.
If you are interested in more details of the revision walking process,
just have a look at the first implementation of `cmd_log()`; call
-`git show v1.3.0{tilde}155^2{tilde}4` and scroll down to that function (note that you
+`git show v1.3.0~155^2~4` and scroll down to that function (note that you
no longer need to call `setup_pager()` directly).
Nowadays, `git log` is a builtin, which means that it is _contained_ in the
negative numbers in case of different errors--and 0 on success.
- the variable `sha1` in the function signature of `get_sha1()` is `unsigned
- char {asterisk}`, but is actually expected to be a pointer to `unsigned
+ char *`, but is actually expected to be a pointer to `unsigned
char[20]`. This variable will contain the 160-bit SHA-1 of the given
- commit. Note that whenever a SHA-1 is passed as `unsigned char {asterisk}`, it
+ commit. Note that whenever a SHA-1 is passed as `unsigned char *`, it
is the binary representation, as opposed to the ASCII representation in
hex characters, which is passed as `char *`.
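
A minimal sketch of that convention, with an invented helper name: `get_sha1()` fills the 20-byte binary form, and `sha1_to_hex()` is used only when the 40-character ASCII form is wanted.

------------
#include "cache.h"

static void show_object_name(const char *name)
{
	unsigned char sha1[20];	/* binary SHA-1, not hex */

	if (get_sha1(name, sha1))	/* non-zero means the lookup failed */
		die("ambiguous or unknown name: %s", name);
	printf("%s -> %s\n", name, sha1_to_hex(sha1));
}
------------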
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.7.10.1
+DEF_VER=v1.7.10.GIT
LF='
'
use English. Under autoconf the configure script will do this
automatically if it can't find libintl on the system.
+ - Python version 2.6 or later is needed to use the git-p4
+ interface to Perforce.
+
- Some platform specific issues are dealt with Makefile rules,
but depending on your specific installation, you may not
have all the libraries/tools needed, or you may have
# dependency rules.
#
# Define NATIVE_CRLF if your platform uses CRLF for line endings.
+#
+# Define XDL_FAST_HASH to use an alternative line-hashing method in
+# the diff algorithm. It gives a nice speedup if your processor has
+# fast unaligned word loads. Does NOT work on big-endian systems!
+# Enabled by default on x86_64.
GIT-VERSION-FILE: FORCE
@$(SHELL_PATH) ./GIT-VERSION-GEN
VCSSVN_H =
VCSSVN_OBJS =
VCSSVN_TEST_OBJS =
+MISC_H =
EXTRA_CPPFLAGS =
LIB_H =
LIB_OBJS =
SCRIPT_PERL += git-svn.perl
SCRIPT_PYTHON += git-remote-testgit.py
+SCRIPT_PYTHON += git-p4.py
SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.perl,%,$(SCRIPT_PERL)) \
TEST_PROGRAMS_NEED_X += test-index-version
TEST_PROGRAMS_NEED_X += test-line-buffer
TEST_PROGRAMS_NEED_X += test-match-trees
+TEST_PROGRAMS_NEED_X += test-mergesort
TEST_PROGRAMS_NEED_X += test-mktemp
TEST_PROGRAMS_NEED_X += test-parse-options
TEST_PROGRAMS_NEED_X += test-path-utils
+TEST_PROGRAMS_NEED_X += test-revision-walking
TEST_PROGRAMS_NEED_X += test-run-command
TEST_PROGRAMS_NEED_X += test-sha1
TEST_PROGRAMS_NEED_X += test-sigchain
XDIFF_LIB=xdiff/lib.a
VCSSVN_LIB=vcs-svn/lib.a
+XDIFF_H += xdiff/xinclude.h
+XDIFF_H += xdiff/xmacros.h
+XDIFF_H += xdiff/xdiff.h
+XDIFF_H += xdiff/xtypes.h
+XDIFF_H += xdiff/xutils.h
+XDIFF_H += xdiff/xprepare.h
+XDIFF_H += xdiff/xdiffi.h
+XDIFF_H += xdiff/xemit.h
+
+VCSSVN_H += vcs-svn/line_buffer.h
+VCSSVN_H += vcs-svn/sliding_window.h
+VCSSVN_H += vcs-svn/repo_tree.h
+VCSSVN_H += vcs-svn/fast_export.h
+VCSSVN_H += vcs-svn/svndiff.h
+VCSSVN_H += vcs-svn/svndump.h
+
+MISC_H += branch.h
+MISC_H += bundle.h
+MISC_H += bisect.h
+MISC_H += common-cmds.h
+MISC_H += fetch-pack.h
+MISC_H += thread-utils.h
+MISC_H += send-pack.h
+MISC_H += shortlog.h
+MISC_H += reachable.h
+MISC_H += wt-status.h
+MISC_H += tar.h
+MISC_H += url.h
+MISC_H += walker.h
+
LIB_H += advice.h
LIB_H += archive.h
LIB_H += argv-array.h
LIB_H += mailmap.h
LIB_H += merge-file.h
LIB_H += merge-recursive.h
+LIB_H += mergesort.h
LIB_H += notes.h
LIB_H += notes-cache.h
LIB_H += notes-merge.h
LIB_H += unpack-trees.h
LIB_H += userdiff.h
LIB_H += utf8.h
+LIB_H += varint.h
LIB_H += xdiff-interface.h
LIB_H += xdiff/xdiff.h
LIB_OBJS += bundle.o
LIB_OBJS += cache-tree.o
LIB_OBJS += color.o
+LIB_OBJS += column.o
LIB_OBJS += combine-diff.o
LIB_OBJS += commit.o
LIB_OBJS += compat/obstack.o
LIB_OBJS += match-trees.o
LIB_OBJS += merge-file.o
LIB_OBJS += merge-recursive.o
+LIB_OBJS += mergesort.o
LIB_OBJS += name-hash.o
LIB_OBJS += notes.o
LIB_OBJS += notes-cache.o
LIB_OBJS += usage.o
LIB_OBJS += userdiff.o
LIB_OBJS += utf8.o
+LIB_OBJS += varint.o
LIB_OBJS += walker.o
LIB_OBJS += wrapper.o
LIB_OBJS += write_or_die.o
BUILTIN_OBJS += builtin/checkout.o
BUILTIN_OBJS += builtin/clean.o
BUILTIN_OBJS += builtin/clone.o
+BUILTIN_OBJS += builtin/column.o
BUILTIN_OBJS += builtin/commit-tree.o
BUILTIN_OBJS += builtin/commit.o
BUILTIN_OBJS += builtin/config.o
# because maintaining the nesting to match is a pain. If
# we had "elif" things would have been much nicer...
+ifeq ($(uname_M),x86_64)
+ XDL_FAST_HASH = YesPlease
+endif
ifeq ($(uname_S),OSF1)
# Need this for u_short definitions et al
BASIC_CFLAGS += -D_OSF_SOURCE
MSGFMT += --check --statistics
endif
+ifneq (,$(XDL_FAST_HASH))
+ BASIC_CFLAGS += -DXDL_FAST_HASH
+endif
+
ifeq ($(TCLTK_PATH),)
NO_TCLTK=NoThanks
endif
BASIC_CFLAGS += -DDEFAULT_PAGER='$(DEFAULT_PAGER_CQ_SQ)'
endif
+ifdef SHELL_PATH
+SHELL_PATH_CQ = "$(subst ",\",$(subst \,\\,$(SHELL_PATH)))"
+SHELL_PATH_CQ_SQ = $(subst ','\'',$(SHELL_PATH_CQ))
+
+BASIC_CFLAGS += -DSHELL_PATH='$(SHELL_PATH_CQ_SQ)'
+endif
+
ALL_CFLAGS += $(BASIC_CFLAGS)
ALL_LDFLAGS += $(BASIC_LDFLAGS)
builtin/commit.o builtin/revert.o wt-status.o: wt-status.h
builtin/tar-tree.o archive-tar.o: tar.h
connect.o transport.o url.o http-backend.o: url.h
+builtin/branch.o builtin/commit.o builtin/tag.o column.o help.o pager.o: column.h
http-fetch.o http-walker.o remote-curl.o transport.o walker.o: walker.h
http.o http-walker.o http-push.o http-fetch.o remote-curl.o: http.h url.h
-XDIFF_H += xdiff/xinclude.h
-XDIFF_H += xdiff/xmacros.h
-XDIFF_H += xdiff/xdiff.h
-XDIFF_H += xdiff/xtypes.h
-XDIFF_H += xdiff/xutils.h
-XDIFF_H += xdiff/xprepare.h
-XDIFF_H += xdiff/xdiffi.h
-XDIFF_H += xdiff/xemit.h
-
xdiff-interface.o $(XDIFF_OBJS): $(XDIFF_H)
-VCSSVN_H += vcs-svn/line_buffer.h
-VCSSVN_H += vcs-svn/sliding_window.h
-VCSSVN_H += vcs-svn/repo_tree.h
-VCSSVN_H += vcs-svn/fast_export.h
-VCSSVN_H += vcs-svn/svndiff.h
-VCSSVN_H += vcs-svn/svndump.h
-
$(VCSSVN_OBJS) $(VCSSVN_TEST_OBJS): $(LIB_H) $(VCSSVN_H)
endif
$(VCSSVN_LIB): $(VCSSVN_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(VCSSVN_OBJS)
+export DEFAULT_EDITOR DEFAULT_PAGER
+
doc:
$(MAKE) -C Documentation all
--keyword=_ --keyword=N_ --keyword="Q_:1,2"
XGETTEXT_FLAGS_SH = $(XGETTEXT_FLAGS) --language=Shell
XGETTEXT_FLAGS_PERL = $(XGETTEXT_FLAGS) --keyword=__ --language=Perl
-LOCALIZED_C := $(C_OBJ:o=c)
+LOCALIZED_C := $(C_OBJ:o=c) $(LIB_H) $(XDIFF_H) $(VCSSVN_H) $(MISC_H)
LOCALIZED_SH := $(SCRIPT_SH)
LOCALIZED_PERL := $(SCRIPT_PERL)
-Documentation/RelNotes/1.7.10.1.txt
\ No newline at end of file
+Documentation/RelNotes/1.7.11.txt
\ No newline at end of file
#include "cache.h"
int advice_push_nonfastforward = 1;
+int advice_push_non_ff_current = 1;
+int advice_push_non_ff_default = 1;
+int advice_push_non_ff_matching = 1;
int advice_status_hints = 1;
int advice_commit_before_merge = 1;
int advice_resolve_conflict = 1;
int *preference;
} advice_config[] = {
{ "pushnonfastforward", &advice_push_nonfastforward },
+ { "pushnonffcurrent", &advice_push_non_ff_current },
+ { "pushnonffdefault", &advice_push_non_ff_default },
+ { "pushnonffmatching", &advice_push_non_ff_matching },
{ "statushints", &advice_status_hints },
{ "commitbeforemerge", &advice_commit_before_merge },
{ "resolveconflict", &advice_resolve_conflict },
#include "git-compat-util.h"
extern int advice_push_nonfastforward;
+extern int advice_push_non_ff_current;
+extern int advice_push_non_ff_default;
+extern int advice_push_non_ff_matching;
extern int advice_status_hints;
extern int advice_commit_before_merge;
extern int advice_resolve_conflict;
#include "argv-array.h"
#include "strbuf.h"
-static const char *empty_argv_storage = NULL;
-const char **empty_argv = &empty_argv_storage;
+const char *empty_argv[] = { NULL };
void argv_array_init(struct argv_array *array)
{
argv_array_push_nodup(array, strbuf_detach(&v, NULL));
}
+void argv_array_pushl(struct argv_array *array, ...)
+{
+ va_list ap;
+ const char *arg;
+
+ va_start(ap, array);
+	while ((arg = va_arg(ap, const char *)))
+ argv_array_push(array, arg);
+ va_end(ap);
+}
+
void argv_array_clear(struct argv_array *array)
{
if (array->argv != empty_argv) {
#ifndef ARGV_ARRAY_H
#define ARGV_ARRAY_H
-extern const char **empty_argv;
+extern const char *empty_argv[];
struct argv_array {
const char **argv;
void argv_array_push(struct argv_array *, const char *);
__attribute__((format (printf,2,3)))
void argv_array_pushf(struct argv_array *, const char *fmt, ...);
+void argv_array_pushl(struct argv_array *, ...);
void argv_array_clear(struct argv_array *);
#endif /* ARGV_ARRAY_H */
* config.
*/
static int setup_tracking(const char *new_ref, const char *orig_ref,
- enum branch_track track)
+ enum branch_track track, int quiet)
{
struct tracking tracking;
+ int config_flags = quiet ? 0 : BRANCH_CONFIG_VERBOSE;
if (strlen(new_ref) > 1024 - 7 - 7 - 1)
return error("Tracking not set up: name too long: %s",
return error("Not tracking: ambiguous information for ref %s",
orig_ref);
- install_branch_config(BRANCH_CONFIG_VERBOSE, new_ref, tracking.remote,
+ install_branch_config(config_flags, new_ref, tracking.remote,
tracking.src ? tracking.src : orig_ref);
free(tracking.src);
void create_branch(const char *head,
const char *name, const char *start_name,
int force, int reflog, int clobber_head,
- enum branch_track track)
+ int quiet, enum branch_track track)
{
struct ref_lock *lock = NULL;
struct commit *commit;
start_name);
if (real_ref && track)
- setup_tracking(ref.buf+11, real_ref, track);
+ setup_tracking(ref.buf+11, real_ref, track, quiet);
if (!dont_change_ref)
if (write_ref_sha1(lock, sha1, msg) < 0)
*/
void create_branch(const char *head, const char *name, const char *start_name,
int force, int reflog,
- int clobber_head, enum branch_track track);
+ int clobber_head, int quiet, enum branch_track track);
/*
* Validates that the requested branch may be created, returning the
extern int cmd_cherry_pick(int argc, const char **argv, const char *prefix);
extern int cmd_clone(int argc, const char **argv, const char *prefix);
extern int cmd_clean(int argc, const char **argv, const char *prefix);
+extern int cmd_column(int argc, const char **argv, const char *prefix);
extern int cmd_commit(int argc, const char **argv, const char *prefix);
extern int cmd_commit_tree(int argc, const char **argv, const char *prefix);
extern int cmd_config(int argc, const char **argv, const char *prefix);
ws_error_action = correct_ws_error;
return;
}
- die("unrecognized whitespace option '%s'", option);
+ die(_("unrecognized whitespace option '%s'"), option);
}
static void parse_ignorewhitespace_option(const char *option)
ws_ignore_action = ignore_ws_change;
return;
}
- die("unrecognized whitespace ignore option '%s'", option);
+ die(_("unrecognized whitespace ignore option '%s'"), option);
}
static void set_default_whitespace_mode(const char *whitespace_option)
unsigned long leading, trailing;
unsigned long oldpos, oldlines;
unsigned long newpos, newlines;
+ /*
+ * 'patch' is usually borrowed from buf in apply_patch(),
+ * but some codepaths store an allocated buffer.
+ */
const char *patch;
+ unsigned free_patch:1,
+ rejected:1;
int size;
- int rejected;
int linenr;
struct fragment *next;
};
struct patch *next;
};
+static void free_fragment_list(struct fragment *list)
+{
+ while (list) {
+ struct fragment *next = list->next;
+ if (list->free_patch)
+ free((char *)list->patch);
+ free(list);
+ list = next;
+ }
+}
+
+static void free_patch(struct patch *patch)
+{
+ free_fragment_list(patch->fragments);
+ free(patch->def_name);
+ free(patch->old_name);
+ free(patch->new_name);
+ free(patch->result);
+ free(patch);
+}
+
+static void free_patch_list(struct patch *list)
+{
+ while (list) {
+ struct patch *next = list->next;
+ free_patch(list);
+ list = next;
+ }
+}
+
/*
* A line in a file, len-bytes long (includes the terminating LF,
* except for an incomplete line at the end if the file ends with
img->nr++;
}
+/*
+ * "buf" has the file contents to be patched (read from various sources).
+ * attach it to "image" and add line-based index to it.
+ * "image" now owns the "buf".
+ */
static void prepare_image(struct image *image, char *buf, size_t len,
int prepare_linetable)
{
image->len = 0;
}
-static void say_patch_name(FILE *output, const char *pre,
- struct patch *patch, const char *post)
+/* fmt must contain _one_ %s and no other substitution */
+static void say_patch_name(FILE *output, const char *fmt, struct patch *patch)
{
- fputs(pre, output);
+ struct strbuf sb = STRBUF_INIT;
+
if (patch->old_name && patch->new_name &&
strcmp(patch->old_name, patch->new_name)) {
- quote_c_style(patch->old_name, NULL, output, 0);
- fputs(" => ", output);
- quote_c_style(patch->new_name, NULL, output, 0);
+ quote_c_style(patch->old_name, &sb, NULL, 0);
+ strbuf_addstr(&sb, " => ");
+ quote_c_style(patch->new_name, &sb, NULL, 0);
} else {
const char *n = patch->new_name;
if (!n)
n = patch->old_name;
- quote_c_style(n, NULL, output, 0);
+ quote_c_style(n, &sb, NULL, 0);
}
- fputs(post, output);
+ fprintf(output, fmt, sb.buf);
+ fputc('\n', output);
+ strbuf_release(&sb);
}
-#define CHUNKSIZE (8192)
#define SLOP (16)
static void read_patch_file(struct strbuf *sb, int fd)
return name;
}
-static char *find_name_gnu(const char *line, char *def, int p_value)
+static char *find_name_gnu(const char *line, const char *def, int p_value)
{
struct strbuf name = STRBUF_INIT;
char *cp;
cp++;
}
- /* name can later be freed, so we need
- * to memmove, not just return cp
- */
strbuf_remove(&name, 0, cp - name.buf);
- free(def);
if (root)
strbuf_insert(&name, 0, root, root_len);
return squash_slash(strbuf_detach(&name, NULL));
return line + len - end;
}
-static char *find_name_common(const char *line, char *def, int p_value,
- const char *end, int terminate)
+static char *null_strdup(const char *s)
+{
+ return s ? xstrdup(s) : NULL;
+}
+
+static char *find_name_common(const char *line, const char *def,
+ int p_value, const char *end, int terminate)
{
int len;
const char *start = NULL;
start = line;
}
if (!start)
- return squash_slash(def);
+ return squash_slash(null_strdup(def));
len = line - start;
if (!len)
- return squash_slash(def);
+ return squash_slash(null_strdup(def));
/*
* Generally we prefer the shorter name, especially
if (def) {
int deflen = strlen(def);
if (deflen < len && !strncmp(start, def, deflen))
- return squash_slash(def);
- free(def);
+ return squash_slash(xstrdup(def));
}
if (root) {
if (!stamp) {
stamp = xmalloc(sizeof(*stamp));
if (regcomp(stamp, stamp_regexp, REG_EXTENDED)) {
- warning("Cannot prepare timestamp regexp %s",
+ warning(_("Cannot prepare timestamp regexp %s"),
stamp_regexp);
return 0;
}
status = regexec(stamp, timestamp, ARRAY_SIZE(m), m, 0);
if (status) {
if (status != REG_NOMATCH)
- warning("regexec returned %d for input: %s",
+ warning(_("regexec returned %d for input: %s"),
status, timestamp);
return 0;
}
name = find_name_traditional(first, NULL, p_value);
patch->old_name = name;
} else {
- name = find_name_traditional(first, NULL, p_value);
- name = find_name_traditional(second, name, p_value);
+ char *first_name;
+ first_name = find_name_traditional(first, NULL, p_value);
+ name = find_name_traditional(second, first_name, p_value);
+ free(first_name);
if (has_epoch_timestamp(first)) {
patch->is_new = 1;
patch->is_delete = 0;
patch->is_delete = 1;
patch->old_name = name;
} else {
- patch->old_name = patch->new_name = name;
+ patch->old_name = name;
+ patch->new_name = xstrdup(name);
}
}
if (!name)
- die("unable to find filename in patch at line %d", linenr);
+ die(_("unable to find filename in patch at line %d"), linenr);
}
static int gitdiff_hdrend(const char *line, struct patch *patch)
name = orig_name;
len = strlen(name);
if (isnull)
- die("git apply: bad git-diff - expected /dev/null, got %s on line %d", name, linenr);
+ die(_("git apply: bad git-diff - expected /dev/null, got %s on line %d"), name, linenr);
another = find_name(line, NULL, p_value, TERM_TAB);
if (!another || memcmp(another, name, len + 1))
- die("git apply: bad git-diff - inconsistent %s filename on line %d", oldnew, linenr);
+ die(_("git apply: bad git-diff - inconsistent %s filename on line %d"), oldnew, linenr);
free(another);
return orig_name;
}
else {
/* expect "/dev/null" */
if (memcmp("/dev/null", line, 9) || line[9] != '\n')
- die("git apply: bad git-diff - expected /dev/null on line %d", linenr);
+ die(_("git apply: bad git-diff - expected /dev/null on line %d"), linenr);
return NULL;
}
}
static int gitdiff_oldname(const char *line, struct patch *patch)
{
+ char *orig = patch->old_name;
patch->old_name = gitdiff_verify_name(line, patch->is_new, patch->old_name, "old");
+ if (orig != patch->old_name)
+ free(orig);
return 0;
}
static int gitdiff_newname(const char *line, struct patch *patch)
{
+ char *orig = patch->new_name;
patch->new_name = gitdiff_verify_name(line, patch->is_delete, patch->new_name, "new");
+ if (orig != patch->new_name)
+ free(orig);
return 0;
}
static int gitdiff_delete(const char *line, struct patch *patch)
{
patch->is_delete = 1;
- patch->old_name = patch->def_name;
+ free(patch->old_name);
+ patch->old_name = null_strdup(patch->def_name);
return gitdiff_oldmode(line, patch);
}
static int gitdiff_newfile(const char *line, struct patch *patch)
{
patch->is_new = 1;
- patch->new_name = patch->def_name;
+ free(patch->new_name);
+ patch->new_name = null_strdup(patch->def_name);
return gitdiff_newmode(line, patch);
}
static int gitdiff_copysrc(const char *line, struct patch *patch)
{
patch->is_copy = 1;
+ free(patch->old_name);
patch->old_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_copydst(const char *line, struct patch *patch)
{
patch->is_copy = 1;
+ free(patch->new_name);
patch->new_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_renamesrc(const char *line, struct patch *patch)
{
patch->is_rename = 1;
+ free(patch->old_name);
patch->old_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
static int gitdiff_renamedst(const char *line, struct patch *patch)
{
patch->is_rename = 1;
+ free(patch->new_name);
patch->new_name = find_name(line, NULL, p_value ? p_value - 1 : 0, 0);
return 0;
}
* creation or deletion of an empty file. In any of these cases,
* both sides are the same name under a/ and b/ respectively.
*/
-static char *git_header_name(char *line, int llen)
+static char *git_header_name(const char *line, int llen)
{
const char *name;
const char *second = NULL;
}
/* Verify that we recognize the lines following a git header */
-static int parse_git_header(char *line, int len, unsigned int size, struct patch *patch)
+static int parse_git_header(const char *line, int len, unsigned int size, struct patch *patch)
{
unsigned long offset;
return offset + ex;
}
-static void recount_diff(char *line, int size, struct fragment *fragment)
+static void recount_diff(const char *line, int size, struct fragment *fragment)
{
int oldlines = 0, newlines = 0, ret = 0;
break;
}
if (ret) {
- warning("recount: unexpected line: %.*s",
+ warning(_("recount: unexpected line: %.*s"),
(int)linelen(line, size), line);
return;
}
* Parse a unified diff fragment header of the
* form "@@ -a,b +c,d @@"
*/
-static int parse_fragment_header(char *line, int len, struct fragment *fragment)
+static int parse_fragment_header(const char *line, int len, struct fragment *fragment)
{
int offset;
return offset;
}
-static int find_header(char *line, unsigned long size, int *hdrsize, struct patch *patch)
+static int find_header(const char *line, unsigned long size, int *hdrsize, struct patch *patch)
{
unsigned long offset, len;
struct fragment dummy;
if (parse_fragment_header(line, len, &dummy) < 0)
continue;
- die("patch fragment without header at line %d: %.*s",
+ die(_("patch fragment without header at line %d: %.*s"),
linenr, (int)len-1, line);
}
continue;
if (!patch->old_name && !patch->new_name) {
if (!patch->def_name)
- die("git diff header lacks filename information when removing "
- "%d leading pathname components (line %d)" , p_value, linenr);
- patch->old_name = patch->new_name = patch->def_name;
+ die(Q_("git diff header lacks filename information when removing "
+ "%d leading pathname component (line %d)",
+ "git diff header lacks filename information when removing "
+ "%d leading pathname components (line %d)",
+ p_value),
+ p_value, linenr);
+ patch->old_name = xstrdup(patch->def_name);
+ patch->new_name = xstrdup(patch->def_name);
}
if (!patch->is_delete && !patch->new_name)
die("git diff header lacks filename information "
* between a "---" that is part of a patch, and a "---" that starts
* the next patch is to look at the line counts..
*/
-static int parse_fragment(char *line, unsigned long size,
+static int parse_fragment(const char *line, unsigned long size,
struct patch *patch, struct fragment *fragment)
{
int added, deleted;
patch->lines_deleted += deleted;
if (0 < patch->is_new && oldlines)
- return error("new file depends on old contents");
+ return error(_("new file depends on old contents"));
if (0 < patch->is_delete && newlines)
- return error("deleted file still has contents");
+ return error(_("deleted file still has contents"));
return offset;
}
-static int parse_single_patch(char *line, unsigned long size, struct patch *patch)
+/*
+ * We have seen "diff --git a/... b/..." header (or a traditional patch
+ * header). Read hunks that belong to this patch into fragments and hang
+ * them to the given patch structure.
+ *
+ * The (fragment->patch, fragment->size) pair points into the memory given
+ * by the caller, not a copy, when we return.
+ */
+static int parse_single_patch(const char *line, unsigned long size, struct patch *patch)
{
unsigned long offset = 0;
unsigned long oldlines = 0, newlines = 0, context = 0;
fragment->linenr = linenr;
len = parse_fragment(line, size, patch, fragment);
if (len <= 0)
- die("corrupt patch at line %d", linenr);
+ die(_("corrupt patch at line %d"), linenr);
fragment->patch = line;
fragment->size = len;
oldlines += fragment->oldlines;
patch->is_delete = 0;
if (0 < patch->is_new && oldlines)
- die("new file %s depends on old contents", patch->new_name);
+ die(_("new file %s depends on old contents"), patch->new_name);
if (0 < patch->is_delete && newlines)
- die("deleted file %s still has contents", patch->old_name);
+ die(_("deleted file %s still has contents"), patch->old_name);
if (!patch->is_delete && !newlines && context)
- fprintf(stderr, "** warning: file %s becomes empty but "
- "is not deleted\n", patch->new_name);
+ fprintf_ln(stderr,
+ _("** warning: "
+ "file %s becomes empty but is not deleted"),
+ patch->new_name);
return offset;
}
return out;
}
+/*
+ * Read a binary hunk and return a new fragment; fragment->patch
+ * points at an allocated memory that the caller must free, so
+ * it is marked as "->free_patch = 1".
+ */
static struct fragment *parse_binary_hunk(char **buf_p,
unsigned long *sz_p,
int *status_p,
frag = xcalloc(1, sizeof(*frag));
frag->patch = inflate_it(data, hunk_size, origlen);
+ frag->free_patch = 1;
if (!frag->patch)
goto corrupt;
free(data);
corrupt:
free(data);
*status_p = -1;
- error("corrupt binary patch at line %d: %.*s",
+ error(_("corrupt binary patch at line %d: %.*s"),
linenr-1, llen-1, buffer);
return NULL;
}
forward = parse_binary_hunk(&buffer, &size, &status, &used);
if (!forward && !status)
/* there has to be one hunk (forward hunk) */
- return error("unrecognized binary patch at line %d", linenr-1);
+ return error(_("unrecognized binary patch at line %d"), linenr-1);
if (status)
/* otherwise we already gave an error message */
return status;
return used;
}
+/*
+ * Read the patch text in "buffer" that extends for "size" bytes; stop
+ * reading after seeing a single patch (i.e. changes to a single file).
+ * Create fragments (i.e. patch hunks) and hang them to the given patch.
+ * Return the number of bytes consumed, so that the caller can call us
+ * again for the next patch.
+ */
static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
{
int hdrsize, patchsize;
*/
if ((apply || check) &&
(!patch->is_binary && !metadata_changes(patch)))
- die("patch with only garbage at line %d", linenr);
+ die(_("patch with only garbage at line %d"), linenr);
}
return offset + hdrsize + patchsize;
switch (st->st_mode & S_IFMT) {
case S_IFLNK:
if (strbuf_readlink(buf, path, st->st_size) < 0)
- return error("unable to read symlink %s", path);
+ return error(_("unable to read symlink %s"), path);
return 0;
case S_IFREG:
if (strbuf_read_file(buf, path, st->st_size) != st->st_size)
- return error("unable to open or read %s", path);
+ return error(_("unable to open or read %s"), path);
convert_to_git(path, buf->buf, buf->len, buf, 0);
return 0;
default:
ctx++;
}
if (preimage->nr <= ctx)
- die("oops");
+ die(_("oops"));
/* and copy it in, while fixing the line length */
len = preimage->line[ctx].len;
img->len -= img->line[--img->nr].len;
}
+/*
+ * The change from "preimage" to "postimage" has been found to
+ * apply at applied_pos (counts in line numbers) in "img".
+ * Update "img" to remove "preimage" and replace it with "postimage".
+ */
static void update_image(struct image *img,
int applied_pos,
struct image *preimage,
img->nr = nr;
}
+/*
+ * Use the patch-hunk text in "frag" to prepare two images (preimage and
+ * postimage) for the hunk. Find lines that match "preimage" in "img" and
+ * replace the part of "img" with "postimage" text.
+ */
static int apply_one_fragment(struct image *img, struct fragment *frag,
int inaccurate_eof, unsigned ws_rule,
int nth_fragment)
break;
default:
if (apply_verbosely)
- error("invalid start of line: '%c'", first);
+ error(_("invalid start of line: '%c'"), first);
return -1;
}
if (added_blank_line) {
int offset = applied_pos - pos;
if (apply_in_reverse)
offset = 0 - offset;
- fprintf(stderr,
- "Hunk #%d succeeded at %d (offset %d lines).\n",
- nth_fragment, applied_pos + 1, offset);
+ fprintf_ln(stderr,
+ Q_("Hunk #%d succeeded at %d (offset %d line).",
+ "Hunk #%d succeeded at %d (offset %d lines).",
+ offset),
+ nth_fragment, applied_pos + 1, offset);
}
/*
*/
if ((leading != frag->leading) ||
(trailing != frag->trailing))
- fprintf(stderr, "Context reduced to (%ld/%ld)"
- " to apply fragment at %d\n",
- leading, trailing, applied_pos+1);
+ fprintf_ln(stderr, _("Context reduced to (%ld/%ld)"
+ " to apply fragment at %d"),
+ leading, trailing, applied_pos+1);
update_image(img, applied_pos, &preimage, &postimage);
} else {
if (apply_verbosely)
- error("while searching for:\n%.*s",
+ error(_("while searching for:\n%.*s"),
(int)(old - oldlines), oldlines);
}
void *dst;
if (!fragment)
- return error("missing binary patch data for '%s'",
+ return error(_("missing binary patch data for '%s'"),
patch->new_name ?
patch->new_name :
patch->old_name);
return -1;
}
+/*
+ * Replace "img" with the result of applying the binary patch.
+ * The binary patch data itself in patch->fragment is still kept
+ * but the preimage prepared by the caller in "img" is freed here
+ * or in the helper function apply_binary_fragment() this calls.
+ */
static int apply_binary(struct image *img, struct patch *patch)
{
const char *name = patch->old_name ? patch->old_name : patch->new_name;
* in the patch->fragments->{patch,size}.
*/
if (apply_binary_fragment(img, patch))
- return error("binary patch does not apply to '%s'",
+ return error(_("binary patch does not apply to '%s'"),
name);
/* verify that the result matches */
hash_sha1_file(img->buf, img->len, blob_type, sha1);
if (strcmp(sha1_to_hex(sha1), patch->new_sha1_prefix))
- return error("binary patch to '%s' creates incorrect result (expecting %s, got %s)",
+ return error(_("binary patch to '%s' creates incorrect result (expecting %s, got %s)"),
name, patch->new_sha1_prefix, sha1_to_hex(sha1));
}
while (frag) {
nth++;
if (apply_one_fragment(img, frag, inaccurate_eof, ws_rule, nth)) {
- error("patch failed: %s:%ld", name, frag->oldpos);
+ error(_("patch failed: %s:%ld"), name, frag->oldpos);
if (!apply_with_reject)
return -1;
frag->rejected = 1;
if (!(patch->is_copy || patch->is_rename) &&
(tpatch = in_fn_table(patch->old_name)) != NULL && !to_be_deleted(tpatch)) {
if (was_deleted(tpatch)) {
- return error("patch %s has been renamed/deleted",
+ return error(_("patch %s has been renamed/deleted"),
patch->old_name);
}
- /* We have a patched copy in memory use that */
+ /* We have a patched copy in memory; use that. */
strbuf_add(&buf, tpatch->result, tpatch->resultsize);
} else if (cached) {
if (read_file_or_gitlink(ce, &buf))
- return error("read of %s failed", patch->old_name);
+ return error(_("read of %s failed"), patch->old_name);
} else if (patch->old_name) {
if (S_ISGITLINK(patch->old_mode)) {
if (ce) {
/*
* There is no way to apply subproject
* patch without looking at the index.
+ * NEEDSWORK: shouldn't this be flagged
+ * as an error???
*/
+ free_fragment_list(patch->fragments);
patch->fragments = NULL;
}
} else {
if (read_old_data(st, patch->old_name, &buf))
- return error("read of %s failed", patch->old_name);
+ return error(_("read of %s failed"), patch->old_name);
}
}
free(image.line_allocated);
if (0 < patch->is_delete && patch->resultsize)
- return error("removal patch leaves file contents");
+ return error(_("removal patch leaves file contents"));
return 0;
}
if (has_symlink_leading_path(new_name, strlen(new_name)))
return 0;
- return error("%s: already exists in working directory", new_name);
+ return error(_("%s: already exists in working directory"), new_name);
}
else if ((errno != ENOENT) && (errno != ENOTDIR))
return error("%s: %s", new_name, strerror(errno));
if (!(patch->is_copy || patch->is_rename) &&
(tpatch = in_fn_table(old_name)) != NULL && !to_be_deleted(tpatch)) {
if (was_deleted(tpatch))
- return error("%s: has been deleted/renamed", old_name);
+ return error(_("%s: has been deleted/renamed"), old_name);
st_mode = tpatch->new_mode;
} else if (!cached) {
stat_ret = lstat(old_name, st);
if (stat_ret && errno != ENOENT)
- return error("%s: %s", old_name, strerror(errno));
+ return error(_("%s: %s"), old_name, strerror(errno));
}
if (to_be_deleted(tpatch))
if (pos < 0) {
if (patch->is_new < 0)
goto is_new;
- return error("%s: does not exist in index", old_name);
+ return error(_("%s: does not exist in index"), old_name);
}
*ce = active_cache[pos];
if (stat_ret < 0) {
return -1;
}
if (!cached && verify_index_match(*ce, st))
- return error("%s: does not match index", old_name);
+ return error(_("%s: does not match index"), old_name);
if (cached)
st_mode = (*ce)->ce_mode;
} else if (stat_ret < 0) {
if (patch->is_new < 0)
goto is_new;
- return error("%s: %s", old_name, strerror(errno));
+ return error(_("%s: %s"), old_name, strerror(errno));
}
if (!cached && !tpatch)
if (!patch->old_mode)
patch->old_mode = st_mode;
if ((st_mode ^ patch->old_mode) & S_IFMT)
- return error("%s: wrong type", old_name);
+ return error(_("%s: wrong type"), old_name);
if (st_mode != patch->old_mode)
- warning("%s has type %o, expected %o",
+ warning(_("%s has type %o, expected %o"),
old_name, st_mode, patch->old_mode);
if (!patch->new_mode && !patch->is_delete)
patch->new_mode = st_mode;
is_new:
patch->is_new = 1;
patch->is_delete = 0;
+ free(patch->old_name);
patch->old_name = NULL;
return 0;
}
+/*
+ * Check and apply the patch in-core; leave the result in patch->result
+ * for the caller to write it out to the final destination.
+ */
static int check_patch(struct patch *patch)
{
struct stat st;
if (check_index &&
cache_name_pos(new_name, strlen(new_name)) >= 0 &&
!ok_if_exists)
- return error("%s: already exists in index", new_name);
+ return error(_("%s: already exists in index"), new_name);
if (!cached) {
int err = check_to_create_blob(new_name, ok_if_exists);
if (err)
if (!patch->new_mode)
patch->new_mode = patch->old_mode;
if ((patch->old_mode ^ patch->new_mode) & S_IFMT)
- return error("new mode (%o) of %s does not match old mode (%o)%s%s",
+ return error(_("new mode (%o) of %s does not match old mode (%o)%s%s"),
patch->new_mode, new_name, patch->old_mode,
same ? "" : " of ", same ? "" : old_name);
}
if (apply_data(patch, &st, ce) < 0)
- return error("%s: patch does not apply", name);
+ return error(_("%s: patch does not apply"), name);
patch->rejected = 0;
return 0;
}
while (patch) {
if (apply_verbosely)
say_patch_name(stderr,
- "Checking patch ", patch, "...\n");
+ _("Checking patch %s..."), patch);
err |= check_patch(patch);
patch = patch->next;
}
ce = make_cache_entry(patch->old_mode, sha1_ptr, name, 0, 0);
if (!ce)
- die("make_cache_entry failed for path '%s'", name);
+ die(_("make_cache_entry failed for path '%s'"), name);
if (add_index_entry(&result, ce, ADD_CACHE_OK_TO_ADD))
die ("Could not add %s to temporary index", name);
}
{
if (update_index) {
if (remove_file_from_cache(patch->old_name) < 0)
- die("unable to remove %s from index", patch->old_name);
+ die(_("unable to remove %s from index"), patch->old_name);
}
if (!cached) {
if (!remove_or_warn(patch->old_mode, patch->old_name) && rmdir_empty) {
const char *s = buf;
if (get_sha1_hex(s + strlen("Subproject commit "), ce->sha1))
- die("corrupt patch for subproject %s", path);
+ die(_("corrupt patch for subproject %s"), path);
} else {
if (!cached) {
if (lstat(path, &st) < 0)
- die_errno("unable to stat newly created file '%s'",
+ die_errno(_("unable to stat newly created file '%s'"),
path);
fill_stat_cache_info(ce, &st);
}
if (write_sha1_file(buf, size, blob_type, ce->sha1) < 0)
- die("unable to create backing store for newly created file %s", path);
+ die(_("unable to create backing store for newly created file %s"), path);
}
if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD) < 0)
- die("unable to add cache entry for %s", path);
+ die(_("unable to add cache entry for %s"), path);
}
static int try_create_file(const char *path, unsigned int mode, const char *buf, unsigned long size)
strbuf_release(&nbuf);
if (close(fd) < 0)
- die_errno("closing file '%s'", path);
+ die_errno(_("closing file '%s'"), path);
return 0;
}
++nr;
}
}
- die_errno("unable to write file '%s' mode %o", path, mode);
+ die_errno(_("unable to write file '%s' mode %o"), path, mode);
}
static void create_file(struct patch *patch)
char namebuf[PATH_MAX];
struct fragment *frag;
int cnt = 0;
+ struct strbuf sb = STRBUF_INIT;
for (cnt = 0, frag = patch->fragments; frag; frag = frag->next) {
if (!frag->rejected)
if (!cnt) {
if (apply_verbosely)
say_patch_name(stderr,
- "Applied patch ", patch, " cleanly.\n");
+ _("Applied patch %s cleanly."), patch);
return 0;
}
* contents are marked "rejected" at the patch level.
*/
if (!patch->new_name)
- die("internal error");
+ die(_("internal error"));
/* Say this even without --verbose */
- say_patch_name(stderr, "Applying patch ", patch, " with");
- fprintf(stderr, " %d rejects...\n", cnt);
+ strbuf_addf(&sb, Q_("Applying patch %%s with %d reject...",
+ "Applying patch %%s with %d rejects...",
+ cnt),
+ cnt);
+ say_patch_name(stderr, sb.buf, patch);
+ strbuf_release(&sb);
cnt = strlen(patch->new_name);
if (ARRAY_SIZE(namebuf) <= cnt + 5) {
cnt = ARRAY_SIZE(namebuf) - 5;
- warning("truncating .rej filename to %.*s.rej",
+ warning(_("truncating .rej filename to %.*s.rej"),
cnt - 1, patch->new_name);
}
memcpy(namebuf, patch->new_name, cnt);
rej = fopen(namebuf, "w");
if (!rej)
- return error("cannot open %s: %s", namebuf, strerror(errno));
+ return error(_("cannot open %s: %s"), namebuf, strerror(errno));
/* Normal git tools never deal with .rej, so do not pretend
* this is a git patch by saying --git nor give extended
frag;
cnt++, frag = frag->next) {
if (!frag->rejected) {
- fprintf(stderr, "Hunk #%d applied cleanly.\n", cnt);
+ fprintf_ln(stderr, _("Hunk #%d applied cleanly."), cnt);
continue;
}
- fprintf(stderr, "Rejected hunk #%d.\n", cnt);
+ fprintf_ln(stderr, _("Rejected hunk #%d."), cnt);
fprintf(rej, "%.*s", frag->size, frag->patch);
if (frag->patch[frag->size-1] != '\n')
fputc('\n', rej);
if (!prefix || p->is_toplevel_relative)
return;
for ( ; p; p = p->next) {
- if (p->new_name == p->old_name) {
- char *prefixed = p->new_name;
- prefix_one(&prefixed);
- p->new_name = p->old_name = prefixed;
- }
- else {
- prefix_one(&p->new_name);
- prefix_one(&p->old_name);
- }
+ prefix_one(&p->new_name);
+ prefix_one(&p->old_name);
}
}
static int apply_patch(int fd, const char *filename, int options)
{
size_t offset;
- struct strbuf buf = STRBUF_INIT;
+ struct strbuf buf = STRBUF_INIT; /* owns the patch text */
struct patch *list = NULL, **listp = &list;
int skipped_patch = 0;
- /* FIXME - memory leak when using multiple patch files as inputs */
- memset(&fn_table, 0, sizeof(struct string_list));
patch_input_file = filename;
read_patch_file(&buf, fd);
offset = 0;
listp = &patch->next;
}
else {
- /* perhaps free it a bit better? */
- free(patch);
+ free_patch(patch);
skipped_patch++;
}
offset += nr;
}
if (!list && !skipped_patch)
- die("unrecognized input");
+ die(_("unrecognized input"));
if (whitespace_error && (ws_error_action == die_on_ws_error))
apply = 0;
if (check_index) {
if (read_cache() < 0)
- die("unable to read index file");
+ die(_("unable to read index file"));
}
if ((check || apply) &&
if (summary)
summary_patch_list(list);
+ free_patch_list(list);
strbuf_release(&buf);
+ string_list_clear(&fn_table, 0);
return 0;
}
if (!force_apply && (diffstat || numstat || summary || check || fake_ancestor))
apply = 0;
if (check_index && is_not_gitdir)
- die("--index outside a repository");
+ die(_("--index outside a repository"));
if (cached) {
if (is_not_gitdir)
- die("--cached outside a repository");
+ die(_("--cached outside a repository"));
check_index = 1;
}
for (i = 0; i < argc; i++) {
fd = open(arg, O_RDONLY);
if (fd < 0)
- die_errno("can't open patch '%s'", arg);
+ die_errno(_("can't open patch '%s'"), arg);
read_stdin = 0;
set_default_whitespace_mode(whitespace_option);
errs |= apply_patch(fd, arg, options);
squelch_whitespace_errors < whitespace_error) {
int squelched =
whitespace_error - squelch_whitespace_errors;
- warning("squelched %d "
- "whitespace error%s",
- squelched,
- squelched == 1 ? "" : "s");
+ warning(Q_("squelched %d whitespace error",
+ "squelched %d whitespace errors",
+ squelched),
+ squelched);
}
if (ws_error_action == die_on_ws_error)
- die("%d line%s add%s whitespace errors.",
- whitespace_error,
- whitespace_error == 1 ? "" : "s",
- whitespace_error == 1 ? "s" : "");
+ die(Q_("%d line adds whitespace errors.",
+ "%d lines add whitespace errors.",
+ whitespace_error),
+ whitespace_error);
if (applied_after_fixing_ws && apply)
warning("%d line%s applied after"
" fixing whitespace errors.",
applied_after_fixing_ws,
applied_after_fixing_ws == 1 ? "" : "s");
else if (whitespace_error)
- warning("%d line%s add%s whitespace errors.",
- whitespace_error,
- whitespace_error == 1 ? "" : "s",
- whitespace_error == 1 ? "s" : "");
+ warning(Q_("%d line adds whitespace errors.",
+ "%d lines add whitespace errors.",
+ whitespace_error),
+ whitespace_error);
}
if (update_index) {
if (write_cache(newfd, active_cache, active_nr) ||
commit_locked_index(&lock_file))
- die("Unable to write new index file");
+ die(_("Unable to write new index file"));
}
return !!errs;
#include "branch.h"
#include "diff.h"
#include "revision.h"
+#include "string-list.h"
+#include "column.h"
static const char * const builtin_branch_usage[] = {
"git branch [options] [-r | -a] [--merged | --no-merged]",
} merge_filter;
static unsigned char merge_filter_ref[20];
+static struct string_list output = STRING_LIST_INIT_DUP;
+static unsigned int colopts;
+
static int parse_branch_color_slot(const char *var, int ofs)
{
if (!strcasecmp(var+ofs, "plain"))
static int git_branch_config(const char *var, const char *value, void *cb)
{
+ if (!prefixcmp(var, "column."))
+ return git_column_config(var, value, "branch", &colopts);
if (!strcmp(var, "color.branch")) {
branch_use_color = git_config_colorbool(var, value);
return 0;
return merged;
}
-static int delete_branches(int argc, const char **argv, int force, int kinds)
+static int delete_branches(int argc, const char **argv, int force, int kinds,
+ int quiet)
{
struct commit *rev, *head_rev = NULL;
unsigned char sha1[20];
char *name = NULL;
- const char *fmt, *remote;
+ const char *fmt;
int i;
int ret = 0;
+ int remote_branch = 0;
struct strbuf bname = STRBUF_INIT;
switch (kinds) {
case REF_REMOTE_BRANCH:
fmt = "refs/remotes/%s";
- /* TRANSLATORS: This is "remote " in "remote branch '%s' not found" */
- remote = _("remote ");
+ /* For subsequent UI messages */
+ remote_branch = 1;
+
force = 1;
break;
case REF_LOCAL_BRANCH:
fmt = "refs/heads/%s";
- remote = "";
break;
default:
die(_("cannot use -a with -d"));
name = xstrdup(mkpath(fmt, bname.buf));
if (read_ref(name, sha1)) {
- error(_("%sbranch '%s' not found."),
- remote, bname.buf);
+ error(remote_branch
+ ? _("remote branch '%s' not found.")
+ : _("branch '%s' not found."), bname.buf);
ret = 1;
continue;
}
}
if (delete_ref(name, sha1, 0)) {
- error(_("Error deleting %sbranch '%s'"), remote,
+ error(remote_branch
+ ? _("Error deleting remote branch '%s'")
+ : _("Error deleting branch '%s'"),
bname.buf);
ret = 1;
} else {
struct strbuf buf = STRBUF_INIT;
- printf(_("Deleted %sbranch %s (was %s).\n"), remote,
- bname.buf,
- find_unique_abbrev(sha1, DEFAULT_ABBREV));
+ if (!quiet)
+ printf(remote_branch
+ ? _("Deleted remote branch %s (was %s).\n")
+ : _("Deleted branch %s (was %s).\n"),
+ bname.buf,
+ find_unique_abbrev(sha1, DEFAULT_ABBREV));
strbuf_addf(&buf, "branch.%s", bname.buf);
if (git_config_rename_section(buf.buf, NULL) < 0)
warning(_("Update of config-file failed"));
else if (verbose)
/* " f7c0c00 [ahead 58, behind 197] vcs-svn: drop obj_pool.h" */
add_verbose_info(&out, item, verbose, abbrev);
- printf("%s\n", out.buf);
+ if (column_active(colopts)) {
+ assert(!verbose && "--column and --verbose are incompatible");
+ string_list_append(&output, out.buf);
+ } else {
+ printf("%s\n", out.buf);
+ }
strbuf_release(&name);
strbuf_release(&out);
}
fp = fopen(git_path(edit_description), "w");
if ((fwrite(buf.buf, 1, buf.len, fp) < buf.len) || fclose(fp)) {
strbuf_release(&buf);
- return error(_("could not write branch description template: %s\n"),
+ return error(_("could not write branch description template: %s"),
strerror(errno));
}
strbuf_reset(&buf);
int delete = 0, rename = 0, force_create = 0, list = 0;
int verbose = 0, abbrev = -1, detached = 0;
int reflog = 0, edit_description = 0;
+ int quiet = 0;
enum branch_track track;
int kinds = REF_LOCAL_BRANCH;
struct commit_list *with_commit = NULL;
OPT_GROUP("Generic options"),
OPT__VERBOSE(&verbose,
"show hash and subject, give twice for upstream branch"),
+ OPT__QUIET(&quiet, "suppress informational messages"),
OPT_SET_INT('t', "track", &track, "set up tracking mode (see git-pull(1))",
BRANCH_TRACK_EXPLICIT),
OPT_SET_INT( 0, "set-upstream", &track, "change upstream info",
PARSE_OPT_LASTARG_DEFAULT | PARSE_OPT_NONEG,
opt_parse_merge_filter, (intptr_t) "HEAD",
},
+ OPT_COLUMN(0, "column", &colopts, "list branches in columns"),
OPT_END(),
};
}
hashcpy(merge_filter_ref, head_sha1);
+
argc = parse_options(argc, argv, prefix, options, builtin_branch_usage,
0);
if (abbrev == -1)
abbrev = DEFAULT_ABBREV;
+ finalize_colopts(&colopts, -1);
+ if (verbose) {
+ if (explicitly_enable_column(colopts))
+ die(_("--column and --verbose are incompatible"));
+ colopts = 0;
+ }
if (delete)
- return delete_branches(argc, argv, delete > 1, kinds);
- else if (list)
- return print_ref_list(kinds, detached, verbose, abbrev,
- with_commit, argv);
+ return delete_branches(argc, argv, delete > 1, kinds, quiet);
+ else if (list) {
+ int ret = print_ref_list(kinds, detached, verbose, abbrev,
+ with_commit, argv);
+ print_columns(&output, colopts, NULL);
+ string_list_clear(&output, 0);
+ return ret;
+ }
else if (edit_description) {
const char *branch_name;
struct strbuf branch_ref = STRBUF_INIT;
if (kinds != REF_LOCAL_BRANCH)
die(_("-a and -r options to 'git branch' do not make sense with a branch name"));
create_branch(head, argv[0], (argc == 2) ? argv[1] : head,
- force_create, reflog, 0, track);
+ force_create, reflog, 0, quiet, track);
} else
usage_with_options(builtin_branch_usage, options);
#include "parse-options.h"
#include "diff.h"
#include "userdiff.h"
+#include "streaming.h"
#define BATCH 1
#define BATCH_CHECK 2
return cmd_ls_tree(2, ls_args, NULL);
}
+ if (type == OBJ_BLOB)
+ return stream_blob_to_fd(1, sha1, NULL, 0);
buf = read_sha1_file(sha1, &type, &size);
if (!buf)
die("Cannot read object %s", obj_name);
break;
case 0:
+ if (type_from_string(exp_type) == OBJ_BLOB) {
+ unsigned char blob_sha1[20];
+ if (sha1_object_info(sha1, NULL) == OBJ_TAG) {
+ enum object_type type;
+ unsigned long size;
+ char *buffer = read_sha1_file(sha1, &type, &size);
+ if (memcmp(buffer, "object ", 7) ||
+ get_sha1_hex(buffer + 7, blob_sha1))
+ die("%s not a valid tag", sha1_to_hex(sha1));
+ free(buffer);
+ } else
+ hashcpy(blob_sha1, sha1);
+
+ if (sha1_object_info(blob_sha1, NULL) == OBJ_BLOB)
+ return stream_blob_to_fd(1, blob_sha1, NULL, 0);
+ /*
+ * we attempted to dereference a tag to a blob
+ * and failed; there may be new dereference
+ * mechanisms this code is not aware of.
+ * fall-back to the usual case.
+ */
+ }
buf = read_object_with_reference(sha1, exp_type, &size, NULL);
break;
opts->new_branch_force ? 1 : 0,
opts->new_branch_log,
opts->new_branch_force ? 1 : 0,
+ opts->quiet,
opts->track);
new->name = opts->new_branch;
setup_branch_path(new);
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include "strbuf.h"
+#include "parse-options.h"
+#include "string-list.h"
+#include "column.h"
+
+static const char * const builtin_column_usage[] = {
+ "git column [options]",
+ NULL
+};
+static unsigned int colopts;
+
+static int column_config(const char *var, const char *value, void *cb)
+{
+ return git_column_config(var, value, cb, &colopts);
+}
+
+int cmd_column(int argc, const char **argv, const char *prefix)
+{
+ struct string_list list = STRING_LIST_INIT_DUP;
+ struct strbuf sb = STRBUF_INIT;
+ struct column_options copts;
+ const char *command = NULL, *real_command = NULL;
+ struct option options[] = {
+ OPT_STRING(0, "command", &real_command, "name", "lookup config vars"),
+ OPT_COLUMN(0, "mode", &colopts, "layout to use"),
+ OPT_INTEGER(0, "raw-mode", &colopts, "layout to use"),
+ OPT_INTEGER(0, "width", &copts.width, "Maximum width"),
+ OPT_STRING(0, "indent", &copts.indent, "string", "Padding space on left border"),
+ OPT_INTEGER(0, "nl", &copts.nl, "Padding space on right border"),
+ OPT_INTEGER(0, "padding", &copts.padding, "Padding space between columns"),
+ OPT_END()
+ };
+
+ /* This one is special and must be the first one */
+ if (argc > 1 && !prefixcmp(argv[1], "--command=")) {
+ command = argv[1] + 10;
+ git_config(column_config, (void *)command);
+ } else
+ git_config(column_config, NULL);
+
+ memset(&copts, 0, sizeof(copts));
+ copts.width = term_columns();
+ copts.padding = 1;
+ argc = parse_options(argc, argv, "", options, builtin_column_usage, 0);
+ if (argc)
+ usage_with_options(builtin_column_usage, options);
+ if (real_command || command) {
+ if (!real_command || !command || strcmp(real_command, command))
+ die(_("--command must be the first argument"));
+ }
+ finalize_colopts(&colopts, -1);
+ while (!strbuf_getline(&sb, stdin, '\n'))
+ string_list_append(&list, sb.buf);
+
+ print_columns(&list, colopts, &copts);
+ return 0;
+}
#include "quote.h"
#include "submodule.h"
#include "gpg-interface.h"
+#include "column.h"
static const char * const builtin_commit_usage[] = {
"git commit [options] [--] <filepattern>...",
static int no_post_rewrite, allow_empty_message;
static char *untracked_files_arg, *force_date, *ignore_submodule_arg;
static char *sign_commit;
+static unsigned int colopts;
/*
* The default commit message cleanup mode will remove the lines
s->whence = whence;
}
-static const char *whence_s(void)
-{
- const char *s = "";
-
- switch (whence) {
- case FROM_COMMIT:
- break;
- case FROM_MERGE:
- s = _("merge");
- break;
- case FROM_CHERRY_PICK:
- s = _("cherry-pick");
- break;
- }
-
- return s;
-}
-
static void rollback_index_files(void)
{
switch (commit_style) {
*/
commit_style = COMMIT_PARTIAL;
- if (whence != FROM_COMMIT)
- die(_("cannot do a partial commit during a %s."), whence_s());
+ if (whence != FROM_COMMIT) {
+ if (whence == FROM_MERGE)
+ die(_("cannot do a partial commit during a merge."));
+ else if (whence == FROM_CHERRY_PICK)
+ die(_("cannot do a partial commit during a cherry-pick."));
+ }
memset(&partial, 0, sizeof(partial));
partial.strdup_strings = 1;
char *ai_tmp, *ci_tmp;
if (whence != FROM_COMMIT)
status_printf_ln(s, GIT_COLOR_NORMAL,
- _("\n"
- "It looks like you may be committing a %s.\n"
- "If this is not correct, please remove the file\n"
- " %s\n"
- "and try again.\n"
- ""),
- whence_s(),
+ whence == FROM_MERGE
+ ? _("\n"
+ "It looks like you may be committing a merge.\n"
+ "If this is not correct, please remove the file\n"
+ " %s\n"
+ "and try again.\n")
+ : _("\n"
+ "It looks like you may be committing a cherry-pick.\n"
+ "If this is not correct, please remove the file\n"
+ " %s\n"
+ "and try again.\n"),
git_path(whence == FROM_MERGE
? "MERGE_HEAD"
: "CHERRY_PICK_HEAD"));
fprintf(s->fp, "\n");
- status_printf(s, GIT_COLOR_NORMAL,
- _("Please enter the commit message for your changes."));
if (cleanup_mode == CLEANUP_ALL)
- status_printf_more(s, GIT_COLOR_NORMAL,
- _(" Lines starting\n"
- "with '#' will be ignored, and an empty"
+ status_printf(s, GIT_COLOR_NORMAL,
+ _("Please enter the commit message for your changes."
+ " Lines starting\nwith '#' will be ignored, and an empty"
" message aborts the commit.\n"));
else /* CLEANUP_SPACE, that is. */
- status_printf_more(s, GIT_COLOR_NORMAL,
- _(" Lines starting\n"
+ status_printf(s, GIT_COLOR_NORMAL,
+ _("Please enter the commit message for your changes."
+ " Lines starting\n"
"with '#' will be kept; you may remove them"
" yourself if you want to.\n"
"An empty message aborts the commit.\n"));
/* Sanity check options */
if (amend && !current_head)
die(_("You have nothing to amend."));
- if (amend && whence != FROM_COMMIT)
- die(_("You are in the middle of a %s -- cannot amend."), whence_s());
+ if (amend && whence != FROM_COMMIT) {
+ if (whence == FROM_MERGE)
+ die(_("You are in the middle of a merge -- cannot amend."));
+ else if (whence == FROM_CHERRY_PICK)
+ die(_("You are in the middle of a cherry-pick -- cannot amend."));
+ }
if (fixup_message && squash_message)
die(_("Options --squash and --fixup cannot be used together"));
if (use_message)
{
struct wt_status *s = cb;
+ if (!prefixcmp(k, "column."))
+ return git_column_config(k, v, "status", &colopts);
if (!strcmp(k, "status.submodulesummary")) {
int is_bool;
s->submodule_summary = git_config_bool_or_int(k, v, &is_bool);
{ OPTION_STRING, 0, "ignore-submodules", &ignore_submodule_arg, "when",
"ignore changes to submodules, optional when: all, dirty, untracked. (Default: all)",
PARSE_OPT_OPTARG, NULL, (intptr_t)"all" },
+ OPT_COLUMN(0, "column", &colopts, "list untracked files in columns"),
OPT_END(),
};
argc = parse_options(argc, argv, prefix,
builtin_status_options,
builtin_status_usage, 0);
+ finalize_colopts(&colopts, -1);
+ s.colopts = colopts;
if (null_termination && status_format == STATUS_FORMAT_LONG)
status_format = STATUS_FORMAT_PORCELAIN;
int result = 0;
struct ref *ref, *stale_refs = get_stale_heads(refs, ref_count, ref_map);
const char *dangling_msg = dry_run
- ? _(" (%s will become dangling)\n")
- : _(" (%s has become dangling)\n");
+ ? _(" (%s will become dangling)")
+ : _(" (%s has become dangling)");
for (ref = stale_refs; ref; ref = ref->next) {
if (!dry_run)
merge_log_config = DEFAULT_MERGE_LOG_LEN;
} else if (!strcmp(key, "merge.branchdesc")) {
use_branch_desc = git_config_bool(key, value);
+ } else {
+ return git_default_config(key, value, cb);
}
return 0;
}
strbuf_release(&desc);
}
+#define util_as_integral(elem) ((intptr_t)((elem)->util))
+
+static void record_person(int which, struct string_list *people,
+ struct commit *commit)
+{
+ char name_buf[MAX_GITNAME], *name, *name_end;
+ struct string_list_item *elem;
+ const char *field = (which == 'a') ? "\nauthor " : "\ncommitter ";
+
+ name = strstr(commit->buffer, field);
+ if (!name)
+ return;
+ name += strlen(field);
+ name_end = strchrnul(name, '<');
+ if (*name_end)
+ name_end--;
+ while (isspace(*name_end) && name <= name_end)
+ name_end--;
+ if (name_end < name || name + MAX_GITNAME <= name_end)
+ return;
+ memcpy(name_buf, name, name_end - name + 1);
+ name_buf[name_end - name + 1] = '\0';
+
+ elem = string_list_lookup(people, name_buf);
+ if (!elem) {
+ elem = string_list_insert(people, name_buf);
+ elem->util = (void *)0;
+ }
+ elem->util = (void*)(util_as_integral(elem) + 1);
+}
+
+static int cmp_string_list_util_as_integral(const void *a_, const void *b_)
+{
+ const struct string_list_item *a = a_, *b = b_;
+ return util_as_integral(b) - util_as_integral(a);
+}
+
+static void add_people_count(struct strbuf *out, struct string_list *people)
+{
+ if (people->nr == 1)
+ strbuf_addf(out, "%s", people->items[0].string);
+ else if (people->nr == 2)
+ strbuf_addf(out, "%s (%d) and %s (%d)",
+ people->items[0].string,
+ (int)util_as_integral(&people->items[0]),
+ people->items[1].string,
+ (int)util_as_integral(&people->items[1]));
+ else if (people->nr)
+ strbuf_addf(out, "%s (%d) and others",
+ people->items[0].string,
+ (int)util_as_integral(&people->items[0]));
+}
+
+static void credit_people(struct strbuf *out,
+ struct string_list *them,
+ int kind)
+{
+ const char *label;
+ const char *me;
+
+ if (kind == 'a') {
+ label = "\nBy ";
+ me = git_author_info(IDENT_NO_DATE);
+ } else {
+ label = "\nvia ";
+ me = git_committer_info(IDENT_NO_DATE);
+ }
+
+ if (!them->nr ||
+ (them->nr == 1 &&
+ me &&
+ (me = skip_prefix(me, them->items->string)) != NULL &&
+ skip_prefix(me, " <")))
+ return;
+ strbuf_addstr(out, label);
+ add_people_count(out, them);
+}
+
+static void add_people_info(struct strbuf *out,
+ struct string_list *authors,
+ struct string_list *committers)
+{
+ if (authors->nr)
+ qsort(authors->items,
+ authors->nr, sizeof(authors->items[0]),
+ cmp_string_list_util_as_integral);
+ if (committers->nr)
+ qsort(committers->items,
+ committers->nr, sizeof(committers->items[0]),
+ cmp_string_list_util_as_integral);
+
+ credit_people(out, authors, 'a');
+ credit_people(out, committers, 'c');
+}
+
static void shortlog(const char *name,
struct origin_data *origin_data,
struct commit *head,
struct commit *commit;
struct object *branch;
struct string_list subjects = STRING_LIST_INIT_DUP;
+ struct string_list authors = STRING_LIST_INIT_DUP;
+ struct string_list committers = STRING_LIST_INIT_DUP;
int flags = UNINTERESTING | TREESAME | SEEN | SHOWN | ADDED;
struct strbuf sb = STRBUF_INIT;
const unsigned char *sha1 = origin_data->sha1;
return;
setup_revisions(0, NULL, rev, NULL);
- rev->ignore_merges = 1;
add_pending_object(rev, branch, name);
add_pending_object(rev, &head->object, "^HEAD");
head->object.flags |= UNINTERESTING;
while ((commit = get_revision(rev)) != NULL) {
struct pretty_print_context ctx = {0};
- /* ignore merges */
- if (commit->parents && commit->parents->next)
+ if (commit->parents && commit->parents->next) {
+ /* do not list a merge but count committer */
+ record_person('c', &committers, commit);
continue;
-
+ }
+ if (!count)
+ /* the 'tip' committer */
+ record_person('c', &committers, commit);
+ record_person('a', &authors, commit);
count++;
if (subjects.nr > limit)
continue;
string_list_append(&subjects, strbuf_detach(&sb, NULL));
}
+ add_people_info(out, &authors, &committers);
if (count > limit)
strbuf_addf(out, "\n* %s: (%d commits)\n", name, count);
else
rev->commits = NULL;
rev->pending.nr = 0;
+ string_list_clear(&authors, 0);
+ string_list_clear(&committers, 0);
string_list_clear(&subjects, 0);
}
#include "parse-options.h"
#include "dir.h"
#include "progress.h"
+#include "streaming.h"
#define REACHABLE 0x0001
#define SEEN 0x0002
if (!(f = fopen(filename, "w")))
die_errno("Could not open '%s'", filename);
if (obj->type == OBJ_BLOB) {
- enum object_type type;
- unsigned long size;
- char *buf = read_sha1_file(obj->sha1,
- &type, &size);
- if (buf && fwrite(buf, 1, size, f) != size)
+ if (stream_blob_to_fd(fileno(f), obj->sha1, NULL, 1))
die_errno("Could not write '%s'", filename);
- free(buf);
} else
fprintf(f, "%s\n", sha1_to_hex(obj->sha1));
if (fclose(f))
#include "cache.h"
#include "parse-options.h"
#include "run-command.h"
+#include "argv-array.h"
#define FAILED_RUN "failed to run %s"
static int gc_auto_pack_limit = 50;
static const char *prune_expire = "2.weeks.ago";
-#define MAX_ADD 10
-static const char *argv_pack_refs[] = {"pack-refs", "--all", "--prune", NULL};
-static const char *argv_reflog[] = {"reflog", "expire", "--all", NULL};
-static const char *argv_repack[MAX_ADD] = {"repack", "-d", "-l", NULL};
-static const char *argv_prune[] = {"prune", "--expire", NULL, NULL, NULL};
-static const char *argv_rerere[] = {"rerere", "gc", NULL};
+static struct argv_array pack_refs_cmd = ARGV_ARRAY_INIT;
+static struct argv_array reflog = ARGV_ARRAY_INIT;
+static struct argv_array repack = ARGV_ARRAY_INIT;
+static struct argv_array prune = ARGV_ARRAY_INIT;
+static struct argv_array rerere = ARGV_ARRAY_INIT;
static int gc_config(const char *var, const char *value, void *cb)
{
return git_default_config(var, value, cb);
}
-static void append_option(const char **cmd, const char *opt, int max_length)
-{
- int i;
-
- for (i = 0; cmd[i]; i++)
- ;
-
- if (i + 2 >= max_length)
- die(_("Too many options specified"));
- cmd[i++] = opt;
- cmd[i] = NULL;
-}
-
static int too_many_loose_objects(void)
{
/*
return gc_auto_pack_limit <= cnt;
}
+static void add_repack_all_option(void)
+{
+ if (prune_expire && !strcmp(prune_expire, "now"))
+ argv_array_push(&repack, "-a");
+ else {
+ argv_array_push(&repack, "-A");
+ if (prune_expire)
+ argv_array_pushf(&repack, "--unpack-unreachable=%s", prune_expire);
+ }
+}
+
static int need_to_gc(void)
{
/*
* there is no need.
*/
if (too_many_packs())
- append_option(argv_repack,
- prune_expire && !strcmp(prune_expire, "now") ?
- "-a" : "-A",
- MAX_ADD);
+ add_repack_all_option();
else if (!too_many_loose_objects())
return 0;
int aggressive = 0;
int auto_gc = 0;
int quiet = 0;
- char buf[80];
struct option builtin_gc_options[] = {
OPT__QUIET(&quiet, "suppress progress reporting"),
if (argc == 2 && !strcmp(argv[1], "-h"))
usage_with_options(builtin_gc_usage, builtin_gc_options);
+ argv_array_pushl(&pack_refs_cmd, "pack-refs", "--all", "--prune", NULL);
+ argv_array_pushl(&reflog, "reflog", "expire", "--all", NULL);
+ argv_array_pushl(&repack, "repack", "-d", "-l", NULL);
+ argv_array_pushl(&prune, "prune", "--expire", NULL);
+ argv_array_pushl(&rerere, "rerere", "gc", NULL);
+
git_config(gc_config, NULL);
if (pack_refs < 0)
usage_with_options(builtin_gc_usage, builtin_gc_options);
if (aggressive) {
- append_option(argv_repack, "-f", MAX_ADD);
- append_option(argv_repack, "--depth=250", MAX_ADD);
- if (aggressive_window > 0) {
- sprintf(buf, "--window=%d", aggressive_window);
- append_option(argv_repack, buf, MAX_ADD);
- }
+ argv_array_push(&repack, "-f");
+ argv_array_push(&repack, "--depth=250");
+ if (aggressive_window > 0)
+ argv_array_pushf(&repack, "--window=%d", aggressive_window);
}
if (quiet)
- append_option(argv_repack, "-q", MAX_ADD);
+ argv_array_push(&repack, "-q");
if (auto_gc) {
/*
"run \"git gc\" manually. See "
"\"git help gc\" for more information.\n"));
} else
- append_option(argv_repack,
- prune_expire && !strcmp(prune_expire, "now")
- ? "-a" : "-A",
- MAX_ADD);
+ add_repack_all_option();
- if (pack_refs && run_command_v_opt(argv_pack_refs, RUN_GIT_CMD))
- return error(FAILED_RUN, argv_pack_refs[0]);
+ if (pack_refs && run_command_v_opt(pack_refs_cmd.argv, RUN_GIT_CMD))
+ return error(FAILED_RUN, pack_refs_cmd.argv[0]);
- if (run_command_v_opt(argv_reflog, RUN_GIT_CMD))
- return error(FAILED_RUN, argv_reflog[0]);
+ if (run_command_v_opt(reflog.argv, RUN_GIT_CMD))
+ return error(FAILED_RUN, reflog.argv[0]);
- if (run_command_v_opt(argv_repack, RUN_GIT_CMD))
- return error(FAILED_RUN, argv_repack[0]);
+ if (run_command_v_opt(repack.argv, RUN_GIT_CMD))
+ return error(FAILED_RUN, repack.argv[0]);
if (prune_expire) {
- argv_prune[2] = prune_expire;
+ argv_array_push(&prune, prune_expire);
if (quiet)
- argv_prune[3] = "--no-progress";
- if (run_command_v_opt(argv_prune, RUN_GIT_CMD))
- return error(FAILED_RUN, argv_prune[0]);
+ argv_array_push(&prune, "--no-progress");
+ if (run_command_v_opt(prune.argv, RUN_GIT_CMD))
+ return error(FAILED_RUN, prune.argv[0]);
}
- if (run_command_v_opt(argv_rerere, RUN_GIT_CMD))
- return error(FAILED_RUN, argv_rerere[0]);
+ if (run_command_v_opt(rerere.argv, RUN_GIT_CMD))
+ return error(FAILED_RUN, rerere.argv[0]);
if (auto_gc && too_many_loose_objects())
warning(_("There are too many unreachable loose objects; "
#include "common-cmds.h"
#include "parse-options.h"
#include "run-command.h"
+#include "column.h"
#include "help.h"
static struct man_viewer_list {
};
static int show_all = 0;
+static unsigned int colopts;
static enum help_format help_format = HELP_FORMAT_NONE;
static struct option builtin_help_options[] = {
OPT_BOOLEAN('a', "all", &show_all, "print all available commands"),
return HELP_FORMAT_INFO;
if (!strcmp(format, "web") || !strcmp(format, "html"))
return HELP_FORMAT_WEB;
- die("unrecognized help format '%s'", format);
+ die(_("unrecognized help format '%s'"), format);
}
static const char *get_man_viewer_info(const char *name)
ec_process.err = -1;
ec_process.stdout_to_stderr = 1;
if (start_command(&ec_process))
- return error("Failed to start emacsclient.");
+ return error(_("Failed to start emacsclient."));
strbuf_read(&buffer, ec_process.err, 20);
close(ec_process.err);
if (prefixcmp(buffer.buf, "emacsclient")) {
strbuf_release(&buffer);
- return error("Failed to parse emacsclient version.");
+ return error(_("Failed to parse emacsclient version."));
}
strbuf_remove(&buffer, 0, strlen("emacsclient"));
if (version < 22) {
strbuf_release(&buffer);
- return error("emacsclient version '%d' too old (< 22).",
+ return error(_("emacsclient version '%d' too old (< 22)."),
version);
}
path = "emacsclient";
strbuf_addf(&man_page, "(woman \"%s\")", page);
execlp(path, "emacsclient", "-e", man_page.buf, (char *)NULL);
- warning("failed to exec '%s': %s", path, strerror(errno));
+ warning(_("failed to exec '%s': %s"), path, strerror(errno));
}
}
path = "kfmclient";
strbuf_addf(&man_page, "man:%s(1)", page);
execlp(path, filename, "newTab", man_page.buf, (char *)NULL);
- warning("failed to exec '%s': %s", path, strerror(errno));
+ warning(_("failed to exec '%s': %s"), path, strerror(errno));
}
}
if (!path)
path = "man";
execlp(path, "man", page, (char *)NULL);
- warning("failed to exec '%s': %s", path, strerror(errno));
+ warning(_("failed to exec '%s': %s"), path, strerror(errno));
}
static void exec_man_cmd(const char *cmd, const char *page)
struct strbuf shell_cmd = STRBUF_INIT;
strbuf_addf(&shell_cmd, "%s %s", cmd, page);
execl("/bin/sh", "sh", "-c", shell_cmd.buf, (char *)NULL);
- warning("failed to exec '%s': %s", cmd, strerror(errno));
+ warning(_("failed to exec '%s': %s"), cmd, strerror(errno));
}
static void add_man_viewer(const char *name)
if (supported_man_viewer(name, len))
do_add_man_viewer_info(name, len, value);
else
- warning("'%s': path for unsupported man viewer.\n"
- "Please consider using 'man.<tool>.cmd' instead.",
+ warning(_("'%s': path for unsupported man viewer.\n"
+ "Please consider using 'man.<tool>.cmd' instead."),
name);
return 0;
const char *value)
{
if (supported_man_viewer(name, len))
- warning("'%s': cmd for supported man viewer.\n"
- "Please consider using 'man.<tool>.path' instead.",
+ warning(_("'%s': cmd for supported man viewer.\n"
+ "Please consider using 'man.<tool>.path' instead."),
name);
else
do_add_man_viewer_info(name, len, value);
static int git_help_config(const char *var, const char *value, void *cb)
{
+ if (!prefixcmp(var, "column."))
+ return git_column_config(var, value, "help", &colopts);
if (!strcmp(var, "help.format")) {
if (!value)
return config_error_nonbool(var);
longest = strlen(common_cmds[i].name);
}
- puts("The most commonly used git commands are:");
+ puts(_("The most commonly used git commands are:"));
for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
printf(" %s ", common_cmds[i].name);
mput_char(' ', longest - strlen(common_cmds[i].name));
- puts(common_cmds[i].help);
+ puts(_(common_cmds[i].help));
}
}
else if (info)
exec_man_cmd(info, page);
else
- warning("'%s': unknown man viewer.", name);
+ warning(_("'%s': unknown man viewer."), name);
}
static void show_man_page(const char *git_cmd)
if (fallback)
exec_viewer(fallback, page);
exec_viewer("man", page);
- die("no man viewer handled the request");
+ die(_("no man viewer handled the request"));
}
static void show_info_page(const char *git_cmd)
const char *page = cmd_to_page(git_cmd);
setenv("INFOPATH", system_path(GIT_INFO_PATH), 1);
execlp("info", "info", "gitman", page, (char *)NULL);
- die("no info viewer handled the request");
+ die(_("no info viewer handled the request"));
}
static void get_html_page_path(struct strbuf *page_path, const char *page)
/* Check that we have a git documentation directory. */
if (stat(mkpath("%s/git.html", html_path), &st)
|| !S_ISREG(st.st_mode))
- die("'%s': not a documentation directory.", html_path);
+ die(_("'%s': not a documentation directory."), html_path);
strbuf_init(page_path, 0);
strbuf_addf(page_path, "%s/%s.html", html_path, page);
parsed_help_format = help_format;
if (show_all) {
- printf("usage: %s\n\n", git_usage_string);
- list_commands("git commands", &main_cmds, &other_cmds);
- printf("%s\n", git_more_info_string);
+ git_config(git_help_config, NULL);
+ printf(_("usage: %s%s"), _(git_usage_string), "\n\n");
+ list_commands(colopts, &main_cmds, &other_cmds);
+ printf("%s\n", _(git_more_info_string));
return 0;
}
if (!argv[0]) {
- printf("usage: %s\n\n", git_usage_string);
+ printf(_("usage: %s%s"), _(git_usage_string), "\n\n");
list_common_cmds_help();
- printf("\n%s\n", git_more_info_string);
+ printf("\n%s\n", _(git_more_info_string));
return 0;
}
alias = alias_lookup(argv[0]);
if (alias && !is_git_command(argv[0])) {
- printf("`git %s' is aliased to `%s'\n", argv[0], alias);
+ printf_ln(_("`git %s' is aliased to `%s'"), argv[0], alias);
return 0;
}
return -1;
if (type != OBJ_ANY && obj->type != type)
- die("object type mismatch at %s", sha1_to_hex(obj->sha1));
+ die(_("object type mismatch at %s"), sha1_to_hex(obj->sha1));
obj->flags |= FLAG_LINK;
return 0;
unsigned long size;
int type = sha1_object_info(obj->sha1, &size);
if (type != obj->type || type <= 0)
- die("object of unexpected type");
+ die(_("object of unexpected type"));
obj->flags |= FLAG_CHECKED;
return;
}
if (min <= input_len)
return input_buffer + input_offset;
if (min > sizeof(input_buffer))
- die("cannot fill %d bytes", min);
+ die(Q_("cannot fill %d byte",
+ "cannot fill %d bytes",
+ min),
+ min);
flush();
do {
ssize_t ret = xread(input_fd, input_buffer + input_len,
sizeof(input_buffer) - input_len);
if (ret <= 0) {
if (!ret)
- die("early EOF");
- die_errno("read error on input");
+ die(_("early EOF"));
+ die_errno(_("read error on input"));
}
input_len += ret;
if (from_stdin)
static void use(int bytes)
{
if (bytes > input_len)
- die("used more bytes than were available");
+ die(_("used more bytes than were available"));
input_crc32 = crc32(input_crc32, input_buffer + input_offset, bytes);
input_len -= bytes;
input_offset += bytes;
/* make sure off_t is sufficiently large not to wrap */
if (signed_add_overflows(consumed_bytes, bytes))
- die("pack too large for current definition of off_t");
+ die(_("pack too large for current definition of off_t"));
consumed_bytes += bytes;
}
} else
output_fd = open(pack_name, O_CREAT|O_EXCL|O_RDWR, 0600);
if (output_fd < 0)
- die_errno("unable to create '%s'", pack_name);
+ die_errno(_("unable to create '%s'"), pack_name);
pack_fd = output_fd;
} else {
input_fd = open(pack_name, O_RDONLY);
if (input_fd < 0)
- die_errno("cannot open packfile '%s'", pack_name);
+ die_errno(_("cannot open packfile '%s'"), pack_name);
output_fd = -1;
pack_fd = input_fd;
}
/* Header consistency check */
if (hdr->hdr_signature != htonl(PACK_SIGNATURE))
- die("pack signature mismatch");
+ die(_("pack signature mismatch"));
if (!pack_version_ok(hdr->hdr_version))
die("pack version %"PRIu32" unsupported",
ntohl(hdr->hdr_version));
va_start(params, format);
vsnprintf(buf, sizeof(buf), format, params);
va_end(params);
- die("pack has bad object at offset %lu: %s", offset, buf);
+ die(_("pack has bad object at offset %lu: %s"), offset, buf);
}
static struct base_data *alloc_base_data(void)
use(input_len - stream.avail_in);
} while (status == Z_OK);
if (stream.total_out != size || status != Z_STREAM_END)
- bad_object(offset, "inflate returned %d", status);
+ bad_object(offset, _("inflate returned %d"), status);
git_inflate_end(&stream);
return buf;
}
while (c & 128) {
base_offset += 1;
if (!base_offset || MSB(base_offset, 7))
- bad_object(obj->idx.offset, "offset value overflow for delta base object");
+ bad_object(obj->idx.offset, _("offset value overflow for delta base object"));
p = fill(1);
c = *p;
use(1);
}
delta_base->offset = obj->idx.offset - base_offset;
if (delta_base->offset <= 0 || delta_base->offset >= obj->idx.offset)
- bad_object(obj->idx.offset, "delta base offset is out of bound");
+ bad_object(obj->idx.offset, _("delta base offset is out of bound"));
break;
case OBJ_COMMIT:
case OBJ_TREE:
case OBJ_TAG:
break;
default:
- bad_object(obj->idx.offset, "unknown object type %d", obj->type);
+ bad_object(obj->idx.offset, _("unknown object type %d"), obj->type);
}
obj->hdr_size = consumed_bytes - obj->idx.offset;
ssize_t n = (len < 64*1024) ? len : 64*1024;
n = pread(pack_fd, inbuf, n, from);
if (n < 0)
- die_errno("cannot pread pack file");
+ die_errno(_("cannot pread pack file"));
if (!n)
- die("premature end of pack file, %lu bytes missing", len);
+ die(Q_("premature end of pack file, %lu byte missing",
+ "premature end of pack file, %lu bytes missing",
+ len),
+ len);
from += n;
len -= n;
stream.next_in = inbuf;
/* This has been inflated OK when first encountered, so... */
if (status != Z_STREAM_END || stream.total_out != obj->size)
- die("serious inflate inconsistency");
+ die(_("serious inflate inconsistency"));
git_inflate_end(&stream);
free(inbuf);
unsigned long has_size;
has_data = read_sha1_file(sha1, &has_type, &has_size);
if (!has_data)
- die("cannot read existing object %s", sha1_to_hex(sha1));
+ die(_("cannot read existing object %s"), sha1_to_hex(sha1));
if (size != has_size || type != has_type ||
memcmp(data, has_data, size) != 0)
- die("SHA1 COLLISION FOUND WITH %s !", sha1_to_hex(sha1));
+ die(_("SHA1 COLLISION FOUND WITH %s !"), sha1_to_hex(sha1));
free(has_data);
}
if (strict) {
if (blob)
blob->object.flags |= FLAG_CHECKED;
else
- die("invalid blob object %s", sha1_to_hex(sha1));
+ die(_("invalid blob object %s"), sha1_to_hex(sha1));
} else {
struct object *obj;
int eaten;
*/
obj = parse_object_buffer(sha1, type, size, buf, &eaten);
if (!obj)
- die("invalid %s", typename(type));
+ die(_("invalid %s"), typename(type));
if (fsck_object(obj, 1, fsck_error_function))
- die("Error in object");
+ die(_("Error in object"));
if (fsck_walk(obj, mark_link, NULL))
- die("Not all child objects of %s are reachable", sha1_to_hex(obj->sha1));
+ die(_("Not all child objects of %s are reachable"), sha1_to_hex(obj->sha1));
if (obj->type == OBJ_TREE) {
struct tree *item = (struct tree *) obj;
&c->size);
free(raw);
if (!c->data)
- bad_object(obj->idx.offset, "failed to apply delta");
+ bad_object(obj->idx.offset, _("failed to apply delta"));
base_cache_used += c->size;
prune_base_data(c);
}
delta_data, delta_obj->size, &result->size);
free(delta_data);
if (!result->data)
- bad_object(delta_obj->idx.offset, "failed to apply delta");
+ bad_object(delta_obj->idx.offset, _("failed to apply delta"));
sha1_object(result->data, result->size, delta_obj->real_type,
delta_obj->idx.sha1);
nr_resolved_deltas++;
*/
if (verbose)
progress = start_progress(
- from_stdin ? "Receiving objects" : "Indexing objects",
+ from_stdin ? _("Receiving objects") : _("Indexing objects"),
nr_objects);
for (i = 0; i < nr_objects; i++) {
struct object_entry *obj = &objects[i];
flush();
git_SHA1_Final(sha1, &input_ctx);
if (hashcmp(fill(20), sha1))
- die("pack is corrupted (SHA1 mismatch)");
+ die(_("pack is corrupted (SHA1 mismatch)"));
use(20);
/* If input_fd is a file, we should have reached its end now. */
if (fstat(input_fd, &st))
- die_errno("cannot fstat packfile");
+ die_errno(_("cannot fstat packfile"));
if (S_ISREG(st.st_mode) &&
lseek(input_fd, 0, SEEK_CUR) - input_len != st.st_size)
- die("pack has junk at the end");
+ die(_("pack has junk at the end"));
if (!nr_deltas)
return;
* for some more deltas.
*/
if (verbose)
- progress = start_progress("Resolving deltas", nr_deltas);
+ progress = start_progress(_("Resolving deltas"), nr_deltas);
for (i = 0; i < nr_objects; i++) {
struct object_entry *obj = &objects[i];
struct base_data *base_obj = alloc_base_data();
} while (status == Z_OK);
if (status != Z_STREAM_END)
- die("unable to deflate appended object (%d)", status);
+ die(_("unable to deflate appended object (%d)"), status);
size = stream.total_out;
git_deflate_end(&stream);
return size;
if (check_sha1_signature(d->base.sha1, base_obj->data,
base_obj->size, typename(type)))
- die("local object %s is corrupt", sha1_to_hex(d->base.sha1));
+ die(_("local object %s is corrupt"), sha1_to_hex(d->base.sha1));
base_obj->obj = append_obj_to_pack(f, d->base.sha1,
base_obj->data, base_obj->size, type);
find_unresolved_deltas(base_obj);
fsync_or_die(output_fd, curr_pack_name);
err = close(output_fd);
if (err)
- die_errno("error while closing pack file");
+ die_errno(_("error while closing pack file"));
}
if (keep_msg) {
if (keep_fd < 0) {
if (errno != EEXIST)
- die_errno("cannot write keep file '%s'",
+ die_errno(_("cannot write keep file '%s'"),
keep_name);
} else {
if (keep_msg_len > 0) {
write_or_die(keep_fd, "\n", 1);
}
if (close(keep_fd) != 0)
- die_errno("cannot close written keep file '%s'",
+ die_errno(_("cannot close written keep file '%s'"),
keep_name);
report = "keep";
}
final_pack_name = name;
}
if (move_temp_to_file(curr_pack_name, final_pack_name))
- die("cannot store pack file");
+ die(_("cannot store pack file"));
} else if (from_stdin)
chmod(final_pack_name, 0444);
final_index_name = name;
}
if (move_temp_to_file(curr_index_name, final_index_name))
- die("cannot store index file");
+ die(_("cannot store index file"));
} else
chmod(final_index_name, 0444);
struct packed_git *p = add_packed_git(pack_name, strlen(pack_name), 1);
if (!p)
- die("Cannot open existing pack file '%s'", pack_name);
+ die(_("Cannot open existing pack file '%s'"), pack_name);
if (open_pack_index(p))
- die("Cannot open existing pack idx file for '%s'", pack_name);
+ die(_("Cannot open existing pack idx file for '%s'"), pack_name);
/* Read the attributes from the existing idx file */
opts->version = p->index_version;
}
if (baseobjects)
- printf("non delta: %d object%s\n",
- baseobjects, baseobjects > 1 ? "s" : "");
+ printf_ln(Q_("non delta: %d object",
+ "non delta: %d objects",
+ baseobjects),
+ baseobjects);
for (i = 0; i < deepest_delta; i++) {
if (!chain_histogram[i])
continue;
- printf("chain length = %d: %lu object%s\n",
- i + 1,
- chain_histogram[i],
- chain_histogram[i] > 1 ? "s" : "");
+ printf_ln(Q_("chain length = %d: %lu object",
+ "chain length = %d: %lu objects",
+ chain_histogram[i]),
+ i + 1,
+ chain_histogram[i]);
}
}
reset_pack_idx_option(&opts);
git_config(git_index_pack_config, &opts);
if (prefix && chdir(prefix))
- die("Cannot come back to cwd");
+ die(_("Cannot come back to cwd"));
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
hdr->hdr_signature = htonl(PACK_SIGNATURE);
hdr->hdr_version = htonl(strtoul(arg + 14, &c, 10));
if (*c != ',')
- die("bad %s", arg);
+ die(_("bad %s"), arg);
hdr->hdr_entries = htonl(strtoul(c + 1, &c, 10));
if (*c)
- die("bad %s", arg);
+ die(_("bad %s"), arg);
input_len = sizeof(*hdr);
} else if (!strcmp(arg, "-v")) {
verbose = 1;
char *c;
opts.version = strtoul(arg + 16, &c, 10);
if (opts.version > 2)
- die("bad %s", arg);
+ die(_("bad %s"), arg);
if (*c == ',')
opts.off32_limit = strtoul(c+1, &c, 0);
if (*c || opts.off32_limit & 0x80000000)
- die("bad %s", arg);
+ die(_("bad %s"), arg);
} else
usage(index_pack_usage);
continue;
if (!pack_name && !from_stdin)
usage(index_pack_usage);
if (fix_thin_pack && !from_stdin)
- die("--fix-thin cannot be used without --stdin");
+ die(_("--fix-thin cannot be used without --stdin"));
if (!index_name && pack_name) {
int len = strlen(pack_name);
if (!has_extension(pack_name, ".pack"))
- die("packfile name '%s' does not end with '.pack'",
+ die(_("packfile name '%s' does not end with '.pack'"),
pack_name);
index_name_buf = xmalloc(len);
memcpy(index_name_buf, pack_name, len - 5);
if (keep_msg && !keep_name && pack_name) {
int len = strlen(pack_name);
if (!has_extension(pack_name, ".pack"))
- die("packfile name '%s' does not end with '.pack'",
+ die(_("packfile name '%s' does not end with '.pack'"),
pack_name);
keep_name_buf = xmalloc(len);
memcpy(keep_name_buf, pack_name, len - 5);
}
if (verify) {
if (!index_name)
- die("--verify with no packfile name given");
+ die(_("--verify with no packfile name given"));
read_idx_option(&opts, index_name);
opts.flags |= WRITE_IDX_VERIFY | WRITE_IDX_STRICT;
}
int nr_unresolved = nr_deltas - nr_resolved_deltas;
int nr_objects_initial = nr_objects;
if (nr_unresolved <= 0)
- die("confusion beyond insanity");
+ die(_("confusion beyond insanity"));
objects = xrealloc(objects,
(nr_objects + nr_unresolved + 1)
* sizeof(*objects));
"(disk corruption?)", curr_pack);
}
if (nr_deltas != nr_resolved_deltas)
- die("pack has %d unresolved deltas",
+ die(Q_("pack has %d unresolved delta",
+ "pack has %d unresolved deltas",
+ nr_deltas - nr_resolved_deltas),
nr_deltas - nr_resolved_deltas);
}
free(deltas);
#include "string-list.h"
#include "parse-options.h"
#include "branch.h"
+#include "streaming.h"
/* Set a default date-time format for git log ("log.date" config variable) */
static const char *default_date_mode = NULL;
strbuf_release(&out);
}
-static int show_object(const unsigned char *sha1, int show_tag_object,
- struct rev_info *rev)
+static int show_blob_object(const unsigned char *sha1, struct rev_info *rev)
+{
+ fflush(stdout);
+ return stream_blob_to_fd(1, sha1, NULL, 0);
+}
+
+static int show_tag_object(const unsigned char *sha1, struct rev_info *rev)
{
unsigned long size;
enum object_type type;
if (!buf)
return error(_("Could not read object %s"), sha1_to_hex(sha1));
- if (show_tag_object)
- while (offset < size && buf[offset] != '\n') {
- int new_offset = offset + 1;
- while (new_offset < size && buf[new_offset++] != '\n')
- ; /* do nothing */
- if (!prefixcmp(buf + offset, "tagger "))
- show_tagger(buf + offset + 7,
- new_offset - offset - 7, rev);
- offset = new_offset;
- }
+ assert(type == OBJ_TAG);
+ while (offset < size && buf[offset] != '\n') {
+ int new_offset = offset + 1;
+ while (new_offset < size && buf[new_offset++] != '\n')
+ ; /* do nothing */
+ if (!prefixcmp(buf + offset, "tagger "))
+ show_tagger(buf + offset + 7,
+ new_offset - offset - 7, rev);
+ offset = new_offset;
+ }
if (offset < size)
fwrite(buf + offset, size - offset, 1, stdout);
const char *name = objects[i].name;
switch (o->type) {
case OBJ_BLOB:
- ret = show_object(o->sha1, 0, NULL);
+ ret = show_blob_object(o->sha1, NULL);
break;
case OBJ_TAG: {
struct tag *t = (struct tag *)o;
diff_get_color_opt(&rev.diffopt, DIFF_COMMIT),
t->tag,
diff_get_color_opt(&rev.diffopt, DIFF_RESET));
- ret = show_object(o->sha1, 1, &rev);
+ ret = show_tag_object(o->sha1, &rev);
rev.shown_one = 1;
if (ret)
break;
if (quiet) {
if (!freopen("/dev/null", "w", stderr))
return error("failed to redirect stderr to /dev/null: "
- "%s\n", strerror(errno));
+ "%s", strerror(errno));
}
if (prefix)
if (read_mmfile(mmfs + i, fname))
return -1;
if (buffer_is_binary(mmfs[i].ptr, mmfs[i].size))
- return error("Cannot merge binary files: %s\n",
+ return error("Cannot merge binary files: %s",
argv[i]);
}
static int non_empty;
static int reuse_delta = 1, reuse_object = 1;
static int keep_unreachable, unpack_unreachable, include_tag;
+static unsigned long unpack_unreachable_expiration;
static int local;
static int incremental;
static int ignore_packed_keep;
if (!p->pack_local || p->pack_keep)
continue;
+ if (unpack_unreachable_expiration &&
+ p->mtime < unpack_unreachable_expiration)
+ continue;
+
if (open_pack_index(p))
die("cannot open pack index");
return 0;
}
+static int option_parse_unpack_unreachable(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (unset) {
+ unpack_unreachable = 0;
+ unpack_unreachable_expiration = 0;
+ }
+ else {
+ unpack_unreachable = 1;
+ if (arg)
+ unpack_unreachable_expiration = approxidate(arg);
+ }
+ return 0;
+}
+
static int option_parse_ulong(const struct option *opt,
const char *arg, int unset)
{
"include tag objects that refer to objects to be packed"),
OPT_BOOL(0, "keep-unreachable", &keep_unreachable,
"keep unreachable objects"),
- OPT_BOOL(0, "unpack-unreachable", &unpack_unreachable,
- "unpack unreachable objects"),
+ { OPTION_CALLBACK, 0, "unpack-unreachable", NULL, "time",
+ "unpack unreachable objects newer than <time>",
+ PARSE_OPT_OPTARG, option_parse_unpack_unreachable },
OPT_BOOL(0, "thin", &thin,
"create thin packs"),
OPT_BOOL(0, "honor-pack-keep", &ignore_packed_keep,
static const char **refspec;
static int refspec_nr;
static int refspec_alloc;
+static int default_matching_used;
static void add_refspec(const char *ref)
{
return remote->url_nr;
}
-static void setup_push_upstream(struct remote *remote)
+static NORETURN int die_push_simple(struct branch *branch, struct remote *remote) {
+ /*
+ * There's no point in using shorten_unambiguous_ref here,
+ * as the ambiguity would be on the remote side, not what
+ * we have locally. Plus, this is supposed to be the simple
+ * mode. If the user is doing something crazy like setting
+ * upstream to a non-branch, we should probably be showing
+ * them the big ugly fully qualified ref.
+ */
+ const char *advice_maybe = "";
+ const char *short_upstream =
+ skip_prefix(branch->merge[0]->src, "refs/heads/");
+
+ if (!short_upstream)
+ short_upstream = branch->merge[0]->src;
+ /*
+ * Don't show advice for people who explicitly set
+ * push.default.
+ */
+ if (push_default == PUSH_DEFAULT_UNSPECIFIED)
+ advice_maybe = _("\n"
+ "To choose either option permanently, "
+ "see push.default in 'git help config'.");
+ die(_("The upstream branch of your current branch does not match\n"
+ "the name of your current branch. To push to the upstream branch\n"
+ "on the remote, use\n"
+ "\n"
+ " git push %s HEAD:%s\n"
+ "\n"
+ "To push to the branch of the same name on the remote, use\n"
+ "\n"
+ " git push %s %s\n"
+ "%s"),
+ remote->name, short_upstream,
+ remote->name, branch->name, advice_maybe);
+}
+
+static void setup_push_upstream(struct remote *remote, int simple)
{
struct strbuf refspec = STRBUF_INIT;
struct branch *branch = branch_get(NULL);
"your current branch '%s', without telling me what to push\n"
"to update which remote branch."),
remote->name, branch->name);
+ if (simple && strcmp(branch->refname, branch->merge[0]->src))
+ die_push_simple(branch, remote);
strbuf_addf(&refspec, "%s:%s", branch->name, branch->merge[0]->src);
add_refspec(refspec.buf);
{
switch (push_default) {
default:
+ case PUSH_DEFAULT_UNSPECIFIED:
+ default_matching_used = 1;
+ /* fallthru */
case PUSH_DEFAULT_MATCHING:
add_refspec(":");
break;
+ case PUSH_DEFAULT_SIMPLE:
+ setup_push_upstream(remote, 1);
+ break;
+
case PUSH_DEFAULT_UPSTREAM:
- setup_push_upstream(remote);
+ setup_push_upstream(remote, 0);
break;
case PUSH_DEFAULT_CURRENT:
}
}
+static const char message_advice_pull_before_push[] =
+ N_("Updates were rejected because the tip of your current branch is behind\n"
+ "its remote counterpart. Merge the remote changes (e.g. 'git pull')\n"
+ "before pushing again.\n"
+ "See the 'Note about fast-forwards' in 'git push --help' for details.");
+
+static const char message_advice_use_upstream[] =
+ N_("Updates were rejected because a pushed branch tip is behind its remote\n"
+ "counterpart. If you did not intend to push that branch, you may want to\n"
+ "specify branches to push or set the 'push.default' configuration\n"
+ "variable to 'current' or 'upstream' to push only the current branch.");
+
+static const char message_advice_checkout_pull_push[] =
+ N_("Updates were rejected because a pushed branch tip is behind its remote\n"
+ "counterpart. Check out this branch and merge the remote changes\n"
+ "(e.g. 'git pull') before pushing again.\n"
+ "See the 'Note about fast-forwards' in 'git push --help' for details.");
+
+static void advise_pull_before_push(void)
+{
+ if (!advice_push_non_ff_current || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_pull_before_push));
+}
+
+static void advise_use_upstream(void)
+{
+ if (!advice_push_non_ff_default || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_use_upstream));
+}
+
+static void advise_checkout_pull_push(void)
+{
+ if (!advice_push_non_ff_matching || !advice_push_nonfastforward)
+ return;
+ advise(_(message_advice_checkout_pull_push));
+}
+
static int push_with_options(struct transport *transport, int flags)
{
int err;
error(_("failed to push some refs to '%s'"), transport->url);
err |= transport_disconnect(transport);
-
if (!err)
return 0;
- if (nonfastforward && advice_push_nonfastforward) {
- fprintf(stderr, _("To prevent you from losing history, non-fast-forward updates were rejected\n"
- "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"
- "'Note about fast-forwards' section of 'git push --help' for details.\n"));
+ switch (nonfastforward) {
+ default:
+ break;
+ case NON_FF_HEAD:
+ advise_pull_before_push();
+ break;
+ case NON_FF_OTHER:
+ if (default_matching_used)
+ advise_use_upstream();
+ else
+ advise_checkout_pull_push();
+ break;
}
return 1;
const char *arg, int unset)
{
int *flags = opt->value;
+
+ if (*flags & (TRANSPORT_RECURSE_SUBMODULES_CHECK |
+ TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND))
+ die("%s can only be used once.", opt->long_name);
+
if (arg) {
if (!strcmp(arg, "check"))
*flags |= TRANSPORT_RECURSE_SUBMODULES_CHECK;
+ else if (!strcmp(arg, "on-demand"))
+ *flags |= TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND;
else
die("bad %s argument: %s", opt->long_name, arg);
} else
- die("option %s needs an argument (check)", opt->long_name);
+ die("option %s needs an argument (check|on-demand)",
+ opt->long_name);
return 0;
}
static const char * const builtin_remote_usage[] = {
"git remote [-v | --verbose]",
- "git remote add [-t <branch>] [-m <master>] [-f] [--mirror=<fetch|push>] <name> <url>",
+ "git remote add [-t <branch>] [-m <master>] [-f] [--tags|--no-tags] [--mirror=<fetch|push>] <name> <url>",
"git remote rename <old> <new>",
"git remote rm <name>",
"git remote set-head <name> (-a | -d | <branch>)",
"git remote prune [-n | --dry-run] <name>",
"git remote [-v | --verbose] update [-p | --prune] [(<group> | <remote>)...]",
"git remote set-branches [--add] <name> <branch>...",
- "git remote set-url <name> <newurl> [<oldurl>]",
+ "git remote set-url [--push] <name> <newurl> [<oldurl>]",
"git remote set-url --add <name> <newurl>",
"git remote set-url --delete <name> <url>",
NULL
argv[1] = "-v";
argv[2] = name;
}
- printf("Updating %s\n", name);
+ printf_ln(_("Updating %s"), name);
if (run_command_v_opt(argv, RUN_GIT_CMD))
- return error("Could not fetch %s", name);
+ return error(_("Could not fetch %s"), name);
return 0;
}
}
static const char mirror_advice[] =
-"--mirror is dangerous and deprecated; please\n"
-"\t use --mirror=fetch or --mirror=push instead";
+N_("--mirror is dangerous and deprecated; please\n"
+ "\t use --mirror=fetch or --mirror=push instead");
static int parse_mirror_opt(const struct option *opt, const char *arg, int not)
{
if (not)
*mirror = MIRROR_NONE;
else if (!arg) {
- warning("%s", mirror_advice);
+ warning("%s", _(mirror_advice));
*mirror = MIRROR_BOTH;
}
else if (!strcmp(arg, "fetch"))
else if (!strcmp(arg, "push"))
*mirror = MIRROR_PUSH;
else
- return error("unknown mirror argument: %s", arg);
+ return error(_("unknown mirror argument: %s"), arg);
return 0;
}
usage_with_options(builtin_remote_add_usage, options);
if (mirror && master)
- die("specifying a master branch makes no sense with --mirror");
+ die(_("specifying a master branch makes no sense with --mirror"));
if (mirror && !(mirror & MIRROR_FETCH) && track.nr)
- die("specifying branches to track makes sense only with fetch mirrors");
+ die(_("specifying branches to track makes sense only with fetch mirrors"));
name = argv[0];
url = argv[1];
remote = remote_get(name);
if (remote && (remote->url_nr > 1 || strcmp(name, remote->url[0]) ||
remote->fetch_refspec_nr))
- die("remote %s already exists.", name);
+ die(_("remote %s already exists."), name);
strbuf_addf(&buf2, "refs/heads/test:refs/remotes/%s/test", name);
if (!valid_fetch_refspec(buf2.buf))
- die("'%s' is not a valid remote name", name);
+ die(_("'%s' is not a valid remote name"), name);
strbuf_addf(&buf, "remote.%s.url", name);
if (git_config_set(buf.buf, url))
strbuf_addf(&buf2, "refs/remotes/%s/%s", name, master);
if (create_symref(buf.buf, buf2.buf, "remote add"))
- return error("Could not setup master '%s'", master);
+ return error(_("Could not setup master '%s'"), master);
}
strbuf_release(&buf);
info = item->util;
if (type == REMOTE) {
if (info->remote_name)
- warning("more than one %s", orig_key);
+ warning(_("more than one %s"), orig_key);
info->remote_name = xstrdup(value);
} else if (type == MERGE) {
char *space = strchr(value, ' ');
for (i = 0; i < states->remote->fetch_refspec_nr; i++)
if (get_fetch_map(remote_refs, states->remote->fetch + i, &tail, 1))
- die("Could not get fetch map for refspec %s",
+ die(_("Could not get fetch map for refspec %s"),
states->remote->fetch_refspec[i]);
states->new.strdup_strings = 1;
states->push.strdup_strings = 1;
if (!remote->push_refspec_nr) {
- item = string_list_append(&states->push, "(matching)");
+ item = string_list_append(&states->push, _("(matching)"));
info = item->util = xcalloc(sizeof(struct push_info), 1);
info->status = PUSH_STATUS_NOTQUERIED;
info->dest = xstrdup(item->string);
for (i = 0; i < remote->push_refspec_nr; i++) {
struct refspec *spec = remote->push + i;
if (spec->matching)
- item = string_list_append(&states->push, "(matching)");
+ item = string_list_append(&states->push, _("(matching)"));
else if (strlen(spec->src))
item = string_list_append(&states->push, spec->src);
else
- item = string_list_append(&states->push, "(delete)");
+ item = string_list_append(&states->push, _("(delete)"));
info = item->util = xcalloc(sizeof(struct push_info), 1);
info->forced = spec->force;
strbuf_addf(&buf, "remote.%s.url", remote->name);
for (i = 0; i < remote->url_nr; i++)
if (git_config_set_multivar(buf.buf, remote->url[i], "^$", 0))
- return error("Could not append '%s' to '%s'",
+ return error(_("Could not append '%s' to '%s'"),
remote->url[i], buf.buf);
strbuf_reset(&buf);
strbuf_addf(&buf, "remote.%s.push", remote->name);
for (i = 0; i < remote->push_refspec_nr; i++)
if (git_config_set_multivar(buf.buf, remote->push_refspec[i], "^$", 0))
- return error("Could not append '%s' to '%s'",
+ return error(_("Could not append '%s' to '%s'"),
remote->push_refspec[i], buf.buf);
strbuf_reset(&buf);
strbuf_addf(&buf, "remote.%s.fetch", remote->name);
for (i = 0; i < remote->fetch_refspec_nr; i++)
if (git_config_set_multivar(buf.buf, remote->fetch_refspec[i], "^$", 0))
- return error("Could not append '%s' to '%s'",
+ return error(_("Could not append '%s' to '%s'"),
remote->fetch_refspec[i], buf.buf);
if (remote->origin == REMOTE_REMOTES)
path = git_path("remotes/%s", remote->name);
oldremote = remote_get(rename.old);
if (!oldremote)
- die("No such remote: %s", rename.old);
+ die(_("No such remote: %s"), rename.old);
if (!strcmp(rename.old, rename.new) && oldremote->origin != REMOTE_CONFIG)
return migrate_file(oldremote);
newremote = remote_get(rename.new);
if (newremote && (newremote->url_nr > 1 || newremote->fetch_refspec_nr))
- die("remote %s already exists.", rename.new);
+ die(_("remote %s already exists."), rename.new);
strbuf_addf(&buf, "refs/heads/test:refs/remotes/%s/test", rename.new);
if (!valid_fetch_refspec(buf.buf))
- die("'%s' is not a valid remote name", rename.new);
+ die(_("'%s' is not a valid remote name"), rename.new);
strbuf_reset(&buf);
strbuf_addf(&buf, "remote.%s", rename.old);
strbuf_addf(&buf2, "remote.%s", rename.new);
if (git_config_rename_section(buf.buf, buf2.buf) < 1)
- return error("Could not rename config section '%s' to '%s'",
+ return error(_("Could not rename config section '%s' to '%s'"),
buf.buf, buf2.buf);
strbuf_reset(&buf);
strbuf_addf(&buf, "remote.%s.fetch", rename.new);
if (git_config_set_multivar(buf.buf, NULL, NULL, 1))
- return error("Could not remove config section '%s'", buf.buf);
+ return error(_("Could not remove config section '%s'"), buf.buf);
strbuf_addf(&old_remote_context, ":refs/remotes/%s/", rename.old);
for (i = 0; i < oldremote->fetch_refspec_nr; i++) {
char *ptr;
strlen(rename.old), rename.new,
strlen(rename.new));
} else
- warning("Not updating non-default fetch respec\n"
- "\t%s\n"
- "\tPlease update the configuration manually if necessary.",
+ warning(_("Not updating non-default fetch respec\n"
+ "\t%s\n"
+ "\tPlease update the configuration manually if necessary."),
buf2.buf);
if (git_config_set_multivar(buf.buf, buf2.buf, "^$", 0))
- return error("Could not append '%s'", buf.buf);
+ return error(_("Could not append '%s'"), buf.buf);
}
read_branches();
strbuf_reset(&buf);
strbuf_addf(&buf, "branch.%s.remote", item->string);
if (git_config_set(buf.buf, rename.new)) {
- return error("Could not set '%s'", buf.buf);
+ return error(_("Could not set '%s'"), buf.buf);
}
}
}
if (!(flag & REF_ISSYMREF))
continue;
if (delete_ref(item->string, NULL, REF_NODEREF))
- die("deleting '%s' failed", item->string);
+ die(_("deleting '%s' failed"), item->string);
}
for (i = 0; i < remote_branches.nr; i++) {
struct string_list_item *item = remote_branches.items + i;
strbuf_addf(&buf2, "remote: renamed %s to %s",
item->string, buf.buf);
if (rename_ref(item->string, buf.buf, buf2.buf))
- die("renaming '%s' failed", item->string);
+ die(_("renaming '%s' failed"), item->string);
}
for (i = 0; i < remote_branches.nr; i++) {
struct string_list_item *item = remote_branches.items + i;
strbuf_addf(&buf3, "remote: renamed %s to %s",
item->string, buf.buf);
if (create_symref(buf.buf, buf2.buf, buf3.buf))
- die("creating '%s' failed", buf.buf);
+ die(_("creating '%s' failed"), buf.buf);
}
return 0;
}
unsigned char *sha1 = item->util;
if (delete_ref(refname, sha1, 0))
- result |= error("Could not remove branch %s", refname);
+ result |= error(_("Could not remove branch %s"), refname);
}
return result;
}
remote = remote_get(argv[1]);
if (!remote)
- die("No such remote: %s", argv[1]);
+ die(_("No such remote: %s"), argv[1]);
known_remotes.to_delete = remote;
for_each_remote(add_known_remote, &known_remotes);
strbuf_addf(&buf, "remote.%s", remote->name);
if (git_config_rename_section(buf.buf, NULL) < 1)
- return error("Could not remove config section '%s'", buf.buf);
+ return error(_("Could not remove config section '%s'"), buf.buf);
read_branches();
for (i = 0; i < branch_list.nr; i++) {
string_list_clear(&branches, 1);
if (skipped.nr) {
- fprintf(stderr, skipped.nr == 1 ?
- "Note: A branch outside the refs/remotes/ hierarchy was not removed;\n"
- "to delete it, use:\n" :
- "Note: Some branches outside the refs/remotes/ hierarchy were not removed;\n"
- "to delete them, use:\n");
+ fprintf_ln(stderr,
+ Q_("Note: A branch outside the refs/remotes/ hierarchy was not removed;\n"
+ "to delete it, use:",
+ "Note: Some branches outside the refs/remotes/ hierarchy were not removed;\n"
+ "to delete them, use:",
+ skipped.nr));
for (i = 0; i < skipped.nr; i++)
fprintf(stderr, " git branch -d %s\n",
skipped.items[i].string);
states->remote = remote_get(name);
if (!states->remote)
- return error("No such remote: %s", name);
+ return error(_("No such remote: %s"), name);
read_branches();
const char *fmt = "%s";
const char *arg = "";
if (string_list_has_string(&states->new, name)) {
- fmt = " new (next fetch will store in remotes/%s)";
+ fmt = _(" new (next fetch will store in remotes/%s)");
arg = states->remote->name;
} else if (string_list_has_string(&states->tracked, name))
- arg = " tracked";
+ arg = _(" tracked");
else if (string_list_has_string(&states->stale, name))
- arg = " stale (use 'git remote prune' to remove)";
+ arg = _(" stale (use 'git remote prune' to remove)");
else
- arg = " ???";
+ arg = _(" ???");
printf(" %-*s", info->width, name);
printf(fmt, arg);
printf("\n");
int i;
if (branch_info->rebase && branch_info->merge.nr > 1) {
- error("invalid branch.%s.merge; cannot rebase onto > 1 branch",
+ error(_("invalid branch.%s.merge; cannot rebase onto > 1 branch"),
item->string);
return 0;
}
printf(" %-*s ", show_info->width, item->string);
if (branch_info->rebase) {
- printf("rebases onto remote %s\n", merge->items[0].string);
+ printf_ln(_("rebases onto remote %s"), merge->items[0].string);
return 0;
} else if (show_info->any_rebase) {
- printf(" merges with remote %s\n", merge->items[0].string);
- also = " and with remote";
+ printf_ln(_(" merges with remote %s"), merge->items[0].string);
+ also = _(" and with remote");
} else {
- printf("merges with remote %s\n", merge->items[0].string);
- also = " and with remote";
+ printf_ln(_("merges with remote %s"), merge->items[0].string);
+ also = _(" and with remote");
}
for (i = 1; i < merge->nr; i++)
printf(" %-*s %s %s\n", show_info->width, "", also,
{
struct show_info *show_info = cb_data;
struct push_info *push_info = item->util;
- char *src = item->string, *status = NULL;
+ const char *src = item->string, *status = NULL;
switch (push_info->status) {
case PUSH_STATUS_CREATE:
- status = "create";
+ status = _("create");
break;
case PUSH_STATUS_DELETE:
- status = "delete";
- src = "(none)";
+ status = _("delete");
+ src = _("(none)");
break;
case PUSH_STATUS_UPTODATE:
- status = "up to date";
+ status = _("up to date");
break;
case PUSH_STATUS_FASTFORWARD:
- status = "fast-forwardable";
+ status = _("fast-forwardable");
break;
case PUSH_STATUS_OUTOFDATE:
- status = "local out of date";
+ status = _("local out of date");
break;
case PUSH_STATUS_NOTQUERIED:
break;
}
- if (status)
- printf(" %-*s %s to %-*s (%s)\n", show_info->width, src,
- push_info->forced ? "forces" : "pushes",
- show_info->width2, push_info->dest, status);
- else
- printf(" %-*s %s to %s\n", show_info->width, src,
- push_info->forced ? "forces" : "pushes",
- push_info->dest);
+ if (status) {
+ if (push_info->forced)
+ printf_ln(_(" %-*s forces to %-*s (%s)"), show_info->width, src,
+ show_info->width2, push_info->dest, status);
+ else
+ printf_ln(_(" %-*s pushes to %-*s (%s)"), show_info->width, src,
+ show_info->width2, push_info->dest, status);
+ } else {
+ if (push_info->forced)
+ printf_ln(_(" %-*s forces to %s"), show_info->width, src,
+ push_info->dest);
+ else
+ printf_ln(_(" %-*s pushes to %s"), show_info->width, src,
+ push_info->dest);
+ }
return 0;
}
get_remote_ref_states(*argv, &states, query_flag);
- printf("* remote %s\n", *argv);
- printf(" Fetch URL: %s\n", states.remote->url_nr > 0 ?
- states.remote->url[0] : "(no URL)");
+ printf_ln(_("* remote %s"), *argv);
+ printf_ln(_(" Fetch URL: %s"), states.remote->url_nr > 0 ?
+ states.remote->url[0] : _("(no URL)"));
if (states.remote->pushurl_nr) {
url = states.remote->pushurl;
url_nr = states.remote->pushurl_nr;
url_nr = states.remote->url_nr;
}
for (i = 0; i < url_nr; i++)
- printf(" Push URL: %s\n", url[i]);
+ printf_ln(_(" Push URL: %s"), url[i]);
if (!i)
- printf(" Push URL: %s\n", "(no URL)");
+ printf_ln(_(" Push URL: %s"), "(no URL)");
if (no_query)
- printf(" HEAD branch: (not queried)\n");
+ printf_ln(_(" HEAD branch: %s"), "(not queried)");
else if (!states.heads.nr)
- printf(" HEAD branch: (unknown)\n");
+ printf_ln(_(" HEAD branch: %s"), "(unknown)");
else if (states.heads.nr == 1)
- printf(" HEAD branch: %s\n", states.heads.items[0].string);
+ printf_ln(_(" HEAD branch: %s"), states.heads.items[0].string);
else {
- printf(" HEAD branch (remote HEAD is ambiguous,"
- " may be one of the following):\n");
+ printf(_(" HEAD branch (remote HEAD is ambiguous,"
+ " may be one of the following):\n"));
for (i = 0; i < states.heads.nr; i++)
printf(" %s\n", states.heads.items[i].string);
}
for_each_string_list(&states.tracked, add_remote_to_show_info, &info);
for_each_string_list(&states.stale, add_remote_to_show_info, &info);
if (info.list->nr)
- printf(" Remote branch%s:%s\n",
- info.list->nr > 1 ? "es" : "",
- no_query ? " (status not queried)" : "");
+ printf_ln(Q_(" Remote branch:%s",
+ " Remote branches:%s",
+ info.list->nr),
+ no_query ? _(" (status not queried)") : "");
for_each_string_list(info.list, show_remote_info_item, &info);
string_list_clear(info.list, 0);
info.any_rebase = 0;
for_each_string_list(&branch_list, add_local_to_show_info, &info);
if (info.list->nr)
- printf(" Local branch%s configured for 'git pull':\n",
- info.list->nr > 1 ? "es" : "");
+ printf_ln(Q_(" Local branch configured for 'git pull':",
+ " Local branches configured for 'git pull':",
+ info.list->nr));
for_each_string_list(info.list, show_local_info_item, &info);
string_list_clear(info.list, 0);
/* git push info */
if (states.remote->mirror)
- printf(" Local refs will be mirrored by 'git push'\n");
+ printf_ln(_(" Local refs will be mirrored by 'git push'"));
info.width = info.width2 = 0;
for_each_string_list(&states.push, add_push_to_show_info, &info);
qsort(info.list->items, info.list->nr,
sizeof(*info.list->items), cmp_string_with_push);
if (info.list->nr)
- printf(" Local ref%s configured for 'git push'%s:\n",
- info.list->nr > 1 ? "s" : "",
- no_query ? " (status not queried)" : "");
+ printf_ln(Q_(" Local ref configured for 'git push'%s:",
+ " Local refs configured for 'git push'%s:",
+ info.list->nr),
+ no_query ? _(" (status not queried)") : "");
for_each_string_list(info.list, show_push_info_item, &info);
string_list_clear(info.list, 0);
memset(&states, 0, sizeof(states));
get_remote_ref_states(argv[0], &states, GET_HEAD_NAMES);
if (!states.heads.nr)
- result |= error("Cannot determine remote HEAD");
+ result |= error(_("Cannot determine remote HEAD"));
else if (states.heads.nr > 1) {
- result |= error("Multiple remote HEAD branches. "
- "Please choose one explicitly with:");
+ result |= error(_("Multiple remote HEAD branches. "
+ "Please choose one explicitly with:"));
for (i = 0; i < states.heads.nr; i++)
fprintf(stderr, " git remote set-head %s %s\n",
argv[0], states.heads.items[i].string);
free_remote_ref_states(&states);
} else if (opt_d && !opt_a && argc == 1) {
if (delete_ref(buf.buf, NULL, REF_NODEREF))
- result |= error("Could not delete %s", buf.buf);
+ result |= error(_("Could not delete %s"), buf.buf);
} else
usage_with_options(builtin_remote_sethead_usage, options);
strbuf_addf(&buf2, "refs/remotes/%s/%s", argv[0], head_name);
/* make sure it's valid */
if (!ref_exists(buf2.buf))
- result |= error("Not a valid ref: %s", buf2.buf);
+ result |= error(_("Not a valid ref: %s"), buf2.buf);
else if (create_symref(buf.buf, buf2.buf, "remote set-head"))
- result |= error("Could not setup %s", buf.buf);
+ result |= error(_("Could not setup %s"), buf.buf);
if (opt_a)
printf("%s/HEAD set to %s\n", argv[0], head_name);
free(head_name);
int result = 0, i;
struct ref_states states;
const char *dangling_msg = dry_run
- ? " %s will become dangling!\n"
- : " %s has become dangling!\n";
+ ? _(" %s will become dangling!")
+ : _(" %s has become dangling!");
memset(&states, 0, sizeof(states));
get_remote_ref_states(remote, &states, GET_REF_STATES);
if (states.stale.nr) {
- printf("Pruning %s\n", remote);
- printf("URL: %s\n",
+ printf_ln(_("Pruning %s"), remote);
+ printf_ln(_("URL: %s"),
states.remote->url_nr
? states.remote->url[0]
- : "(no URL)");
+ : _("(no URL)"));
}
for (i = 0; i < states.stale.nr; i++) {
if (!dry_run)
result |= delete_ref(refname, NULL, 0);
- printf(" * [%s] %s\n", dry_run ? "would prune" : "pruned",
- abbrev_ref(refname, "refs/remotes/"));
+ if (dry_run)
+ printf_ln(_(" * [would prune] %s"),
+ abbrev_ref(refname, "refs/remotes/"));
+ else
+ printf_ln(_(" * [pruned] %s"),
+ abbrev_ref(refname, "refs/remotes/"));
warn_dangling_symref(stdout, dangling_msg, refname);
}
strbuf_addf(&key, "remote.%s.fetch", remotename);
if (!remote_is_configured(remotename))
- die("No such remote '%s'", remotename);
+ die(_("No such remote '%s'"), remotename);
remote = remote_get(remotename);
if (!add_mode && remove_all_fetch_refspecs(remotename, key.buf)) {
argc = parse_options(argc, argv, NULL, options,
builtin_remote_setbranches_usage, 0);
if (argc == 0) {
- error("no remote specified");
+ error(_("no remote specified"));
usage_with_options(builtin_remote_setbranches_usage, options);
}
argv[argc] = NULL;
PARSE_OPT_KEEP_ARGV0);
if (add_mode && delete_mode)
- die("--add --delete doesn't make sense");
+ die(_("--add --delete doesn't make sense"));
if (argc < 3 || argc > 4 || ((add_mode || delete_mode) && argc != 3))
usage_with_options(builtin_remote_seturl_usage, options);
oldurl = newurl;
if (!remote_is_configured(remotename))
- die("No such remote '%s'", remotename);
+ die(_("No such remote '%s'"), remotename);
remote = remote_get(remotename);
if (push_mode) {
/* Old URL specified. Demand that one matches. */
if (regcomp(&old_regex, oldurl, REG_EXTENDED))
- die("Invalid old URL pattern: %s", oldurl);
+ die(_("Invalid old URL pattern: %s"), oldurl);
for (i = 0; i < urlset_nr; i++)
if (!regexec(&old_regex, urlset[i], 0, NULL, 0))
else
negative_matches++;
if (!delete_mode && !matches)
- die("No such URL found: %s", oldurl);
+ die(_("No such URL found: %s"), oldurl);
if (delete_mode && !negative_matches && !push_mode)
- die("Will not delete all non-push URLs");
+ die(_("Will not delete all non-push URLs"));
regfree(&old_regex);
else if (!strcmp(argv[0], "update"))
result = update(argc, argv);
else {
- error("Unknown subcommand: %s", argv[0]);
+ error(_("Unknown subcommand: %s"), argv[0]);
usage_with_options(builtin_remote_usage, options);
}
if (!strcmp(arg, "--show-prefix")) {
if (prefix)
puts(prefix);
+ else
+ putchar('\n');
continue;
}
if (!strcmp(arg, "--show-cdup")) {
break;
}
}
+ va_end(ap);
if (opt1 && opt2)
die(_("%s: %s cannot be used with %s"), me, opt1, opt2);
OPT_END(),
OPT_END(),
OPT_END(),
+ OPT_END(),
+ OPT_END(),
};
if (opts->action == REPLAY_PICK) {
struct option cp_extra[] = {
OPT_BOOLEAN('x', NULL, &opts->record_origin, "append commit name"),
OPT_BOOLEAN(0, "ff", &opts->allow_ff, "allow fast-forward"),
+ OPT_BOOLEAN(0, "allow-empty", &opts->allow_empty, "preserve initially empty commits"),
+ OPT_BOOLEAN(0, "keep-redundant-commits", &opts->keep_redundant_commits, "keep redundant, empty commits"),
OPT_END(),
};
if (parse_options_concat(options, ARRAY_SIZE(options), cp_extra))
"--abort", rollback,
NULL);
+ /* implies allow_empty */
+ if (opts->keep_redundant_commits)
+ opts->allow_empty = 1;
+
/* Set the subcommand */
if (remove_state)
opts->subcommand = REPLAY_REMOVE_STATE;
const char *receivepack = "git-receive-pack";
int flags;
int nonfastforward = 0;
+ int progress = -1;
argv++;
for (i = 1; i < argc; i++, argv++) {
args.verbose = 1;
continue;
}
+ if (!strcmp(arg, "--progress")) {
+ progress = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--no-progress")) {
+ progress = 0;
+ continue;
+ }
if (!strcmp(arg, "--thin")) {
args.use_thin_pack = 1;
continue;
}
}
+ if (progress == -1)
+ progress = !args.quiet && isatty(2);
+ args.progress = progress;
+
if (args.stateless_rpc) {
conn = NULL;
fd[0] = 0;
#include "revision.h"
#include "gpg-interface.h"
#include "sha1-array.h"
+#include "column.h"
static const char * const git_tag_usage[] = {
"git tag [-a|-s|-u <key-id>] [-f] [-m <msg>|-F <file>] <tagname> [<head>]",
};
static struct sha1_array points_at;
+static unsigned int colopts;
static int match_pattern(const char **patterns, const char *ref)
{
int status = git_gpg_config(var, value, cb);
if (status)
return status;
+ if (!prefixcmp(var, "column."))
+ return git_column_config(var, value, "tag", &colopts);
return git_default_config(var, value, cb);
}
OPT_STRING('u', "local-user", &keyid, "key-id",
"use another key to sign the tag"),
OPT__FORCE(&force, "replace the tag if exists"),
+ OPT_COLUMN(0, "column", &colopts, "show tag list in columns"),
OPT_GROUP("Tag listing options"),
{
if (list + delete + verify > 1)
usage_with_options(git_tag_usage, options);
- if (list)
- return list_tags(argv, lines == -1 ? 0 : lines,
- with_commit);
+ finalize_colopts(&colopts, -1);
+ if (list && lines != -1) {
+ if (explicitly_enable_column(colopts))
+ die(_("--column and -n are incompatible"));
+ colopts = 0;
+ }
+ if (list) {
+ int ret;
+ if (column_active(colopts)) {
+ struct column_options copts;
+ memset(&copts, 0, sizeof(copts));
+ copts.padding = 2;
+ run_column_filter(colopts, &copts);
+ }
+ ret = list_tags(argv, lines == -1 ? 0 : lines, with_commit);
+ if (column_active(colopts))
+ stop_column_filter();
+ return ret;
+ }
if (lines != -1)
die(_("-n option is only allowed with -l."));
if (with_commit)
if (stream.total_out == size && ret == Z_STREAM_END)
break;
if (ret != Z_OK) {
- error("inflate returned %d\n", ret);
+ error("inflate returned %d", ret);
free(buf);
buf = NULL;
if (!recover)
int newfd, entries, has_errors = 0, line_termination = '\n';
int read_from_stdin = 0;
int prefix_length = prefix ? strlen(prefix) : 0;
+ int preferred_index_format = 0;
char set_executable_bit = 0;
struct refresh_params refresh_args = {0, &has_errors};
int lock_error = 0;
"(for porcelains) forget saved unresolved conflicts",
PARSE_OPT_NOARG | PARSE_OPT_NONEG,
resolve_undo_clear_callback},
+ OPT_INTEGER(0, "index-version", &preferred_index_format,
+ "write index in this format"),
OPT_END()
};
}
}
argc = parse_options_end(&ctx);
+ if (preferred_index_format) {
+ if (preferred_index_format < INDEX_FORMAT_LB ||
+ INDEX_FORMAT_UB < preferred_index_format)
+ die("index-version %d not in range: %d..%d",
+ preferred_index_format,
+ INDEX_FORMAT_LB, INDEX_FORMAT_UB);
+
+ if (the_index.version != preferred_index_format)
+ active_cache_changed = 1;
+ the_index.version = preferred_index_format;
+ }
if (read_from_stdin) {
struct strbuf buf = STRBUF_INIT, nbuf = STRBUF_INIT;
OPT_END()
};
+ git_config(git_default_config, NULL);
argc = parse_options(argc, argv, prefix, options,
update_server_info_usage, 0);
if (argc > 0)
if (strbuf_getwholeline_fd(&buf, fd, '\n') ||
strcmp(buf.buf, bundle_signature)) {
if (report_path)
- error("'%s' does not look like a v2 bundle file",
+ error(_("'%s' does not look like a v2 bundle file"),
report_path);
status = -1;
goto abort;
(40 <= buf.len && !isspace(buf.buf[40])) ||
(!is_prereq && buf.len <= 40)) {
if (report_path)
- error("unrecognized header: %s%s (%d)",
+ error(_("unrecognized header: %s%s (%d)"),
(is_prereq ? "-" : ""), buf.buf, (int)buf.len);
status = -1;
break;
int fd = open(path, O_RDONLY);
if (fd < 0)
- return error("could not open '%s'", path);
+ return error(_("could not open '%s'"), path);
return parse_bundle_header(fd, header, path);
}
struct object_array refs;
struct commit *commit;
int i, ret = 0, req_nr;
- const char *message = "Repository lacks these prerequisite commits:";
+ const char *message = _("Repository lacks these prerequisite commits:");
init_revisions(&revs, NULL);
for (i = 0; i < p->nr; i++) {
revs.leak_pending = 1;
if (prepare_revision_walk(&revs))
- die("revision walk setup failed");
+ die(_("revision walk setup failed"));
i = req_nr;
while (i && (commit = get_revision(&revs)))
struct ref_list *r;
r = &header->references;
- printf("The bundle contains %d ref%s\n",
- r->nr, (1 < r->nr) ? "s" : "");
+ printf_ln(Q_("The bundle contains %d ref",
+ "The bundle contains %d refs",
+ r->nr),
+ r->nr);
list_refs(r, 0, NULL);
r = &header->prerequisites;
- printf("The bundle requires these %d ref%s\n",
- r->nr, (1 < r->nr) ? "s" : "");
+ printf_ln(Q_("The bundle requires this ref",
+ "The bundle requires these %d refs",
+ r->nr),
+ r->nr);
list_refs(r, 0, NULL);
}
return ret;
strbuf_release(&buf);
fclose(rls_fout);
if (finish_command(&rls))
- return error("rev-list died");
+ return error(_("rev-list died"));
/* write references */
argc = setup_revisions(argc, argv, &revs, NULL);
if (argc > 1)
- return error("unrecognized argument: %s", argv[1]);
+ return error(_("unrecognized argument: %s"), argv[1]);
object_array_remove_duplicates(&revs.pending);
* constraints.
*/
if (!(e->item->flags & SHOWN) && e->item->type == OBJ_COMMIT) {
- warning("ref '%s' is excluded by the rev-list options",
+ warning(_("ref '%s' is excluded by the rev-list options"),
e->name);
free(ref);
continue;
free(ref);
}
if (!ref_count)
- die ("Refusing to create empty bundle.");
+ die(_("Refusing to create empty bundle."));
/* end header */
write_or_die(bundle_fd, "\n", 1);
rls.out = bundle_fd;
rls.git_cmd = 1;
if (start_command(&rls))
- return error("Could not spawn pack-objects");
+ return error(_("Could not spawn pack-objects"));
/*
* start_command closed bundle_fd if it was > 1
}
close(rls.in);
if (finish_command(&rls))
- return error ("pack-objects died");
+ return error(_("pack-objects died"));
if (!bundle_to_stdout) {
if (commit_lock_file(&lock))
- die_errno("cannot create '%s'", path);
+ die_errno(_("cannot create '%s'"), path);
}
return 0;
}
ip.no_stdout = 1;
ip.git_cmd = 1;
if (run_command(&ip))
- return error("index-pack died");
+ return error(_("index-pack died"));
return 0;
}
unsigned int hdr_entries;
};
+#define INDEX_FORMAT_LB 2
+#define INDEX_FORMAT_UB 4
+
/*
* The "cache_time" is just the low 32 bits of the
* time. It doesn't matter if it overflows - we only
unsigned int nsec;
};
-/*
- * dev/ino/uid/gid/size are also just tracked to the low 32 bits
- * Again - this is just a (very strong in practice) heuristic that
- * the inode hasn't changed.
- *
- * We save the fields in big-endian order to allow using the
- * index file over NFS transparently.
- */
-struct ondisk_cache_entry {
- struct cache_time ctime;
- struct cache_time mtime;
- unsigned int dev;
- unsigned int ino;
- unsigned int mode;
- unsigned int uid;
- unsigned int gid;
- unsigned int size;
- unsigned char sha1[20];
- unsigned short flags;
- char name[FLEX_ARRAY]; /* more */
-};
-
-/*
- * This struct is used when CE_EXTENDED bit is 1
- * The struct must match ondisk_cache_entry exactly from
- * ctime till flags
- */
-struct ondisk_cache_entry_extended {
- struct cache_time ctime;
- struct cache_time mtime;
- unsigned int dev;
- unsigned int ino;
- unsigned int mode;
- unsigned int uid;
- unsigned int gid;
- unsigned int size;
- unsigned char sha1[20];
- unsigned short flags;
- unsigned short flags2;
- char name[FLEX_ARRAY]; /* more */
-};
-
struct cache_entry {
struct cache_time ce_ctime;
struct cache_time ce_mtime;
}
#define ce_size(ce) cache_entry_size(ce_namelen(ce))
-#define ondisk_ce_size(ce) (((ce)->ce_flags & CE_EXTENDED) ? \
- ondisk_cache_entry_extended_size(ce_namelen(ce)) : \
- ondisk_cache_entry_size(ce_namelen(ce)))
#define ce_stage(ce) ((CE_STAGEMASK & (ce)->ce_flags) >> CE_STAGESHIFT)
#define ce_uptodate(ce) ((ce)->ce_flags & CE_UPTODATE)
#define ce_skip_worktree(ce) ((ce)->ce_flags & CE_SKIP_WORKTREE)
return S_IFGITLINK;
}
-#define flexible_size(STRUCT,len) ((offsetof(struct STRUCT,name) + (len) + 8) & ~7)
#define cache_entry_size(len) (offsetof(struct cache_entry,name) + (len) + 1)
-#define ondisk_cache_entry_size(len) flexible_size(ondisk_cache_entry,len)
-#define ondisk_cache_entry_extended_size(len) flexible_size(ondisk_cache_entry_extended,len)
struct index_state {
struct cache_entry **cache;
+ unsigned int version;
unsigned int cache_nr, cache_alloc, cache_changed;
struct string_list *resolve_undo;
struct cache_tree *cache_tree;
enum push_default_type {
PUSH_DEFAULT_NOTHING = 0,
PUSH_DEFAULT_MATCHING,
+ PUSH_DEFAULT_SIMPLE,
PUSH_DEFAULT_UPSTREAM,
- PUSH_DEFAULT_CURRENT
+ PUSH_DEFAULT_CURRENT,
+ PUSH_DEFAULT_UNSPECIFIED
};
extern enum branch_track git_branch_track;
};
const char *show_date(unsigned long time, int timezone, enum date_mode mode);
-const char *show_date_relative(unsigned long time, int tz,
- const struct timeval *now,
- char *timebuf,
- size_t timebuf_size);
+void show_date_relative(unsigned long time, int tz, const struct timeval *now,
+ struct strbuf *timebuf);
int parse_date(const char *date, char *buf, int bufsize);
int parse_date_basic(const char *date, unsigned long *timestamp, int *offset);
void datestamp(char *buf, int bufsize);
--- /dev/null
+#include "cache.h"
+#include "column.h"
+#include "string-list.h"
+#include "parse-options.h"
+#include "run-command.h"
+#include "utf8.h"
+
+#define XY2LINEAR(d, x, y) (COL_LAYOUT((d)->colopts) == COL_COLUMN ? \
+ (x) * (d)->rows + (y) : \
+ (y) * (d)->cols + (x))
+
+struct column_data {
+ const struct string_list *list;
+ unsigned int colopts;
+ struct column_options opts;
+
+ int rows, cols;
+ int *len; /* cell length */
+ int *width; /* index to the longest row in column */
+};
+
+/* return length of 's' in letters, ANSI escapes stripped */
+static int item_length(unsigned int colopts, const char *s)
+{
+ int len, i = 0;
+ struct strbuf str = STRBUF_INIT;
+
+ strbuf_addstr(&str, s);
+ while ((s = strstr(str.buf + i, "\033[")) != NULL) {
+ int len = strspn(s + 2, "0123456789;");
+ i = s - str.buf;
+ strbuf_remove(&str, i, len + 3); /* \033[<len><func char> */
+ }
+ len = utf8_strwidth(str.buf);
+ strbuf_release(&str);
+ return len;
+}
+
+/*
+ * Calculate cell width, rows and cols for a table of equal cells, given
+ * table width and how many spaces between cells.
+ */
+static void layout(struct column_data *data, int *width)
+{
+ int i;
+
+ *width = 0;
+ for (i = 0; i < data->list->nr; i++)
+ if (*width < data->len[i])
+ *width = data->len[i];
+
+ *width += data->opts.padding;
+
+ data->cols = (data->opts.width - strlen(data->opts.indent)) / *width;
+ if (data->cols == 0)
+ data->cols = 1;
+
+ data->rows = DIV_ROUND_UP(data->list->nr, data->cols);
+}
+
+static void compute_column_width(struct column_data *data)
+{
+ int i, x, y;
+ for (x = 0; x < data->cols; x++) {
+ data->width[x] = XY2LINEAR(data, x, 0);
+ for (y = 0; y < data->rows; y++) {
+ i = XY2LINEAR(data, x, y);
+ if (i < data->list->nr &&
+ data->len[data->width[x]] < data->len[i])
+ data->width[x] = i;
+ }
+ }
+}
+
+/*
+ * Shrink all columns by shortening them one row each time (and adding
+ * more columns along the way). Hopefully the longest cell will be
+ * moved to the next column, and the column it came from can then
+ * be shrunk, leaving more room for new columns. The process ends
+ * when the whole thing no longer fits in data->opts.width.
+ */
+static void shrink_columns(struct column_data *data)
+{
+ data->width = xrealloc(data->width,
+ sizeof(*data->width) * data->cols);
+ while (data->rows > 1) {
+ int x, total_width, cols, rows;
+ rows = data->rows;
+ cols = data->cols;
+
+ data->rows--;
+ data->cols = DIV_ROUND_UP(data->list->nr, data->rows);
+ if (data->cols != cols)
+ data->width = xrealloc(data->width,
+ sizeof(*data->width) * data->cols);
+ compute_column_width(data);
+
+ total_width = strlen(data->opts.indent);
+ for (x = 0; x < data->cols; x++) {
+ total_width += data->len[data->width[x]];
+ total_width += data->opts.padding;
+ }
+ if (total_width > data->opts.width) {
+ data->rows = rows;
+ data->cols = cols;
+ break;
+ }
+ }
+ compute_column_width(data);
+}
+
+/* Display without layout when not enabled */
+static void display_plain(const struct string_list *list,
+ const char *indent, const char *nl)
+{
+ int i;
+
+ for (i = 0; i < list->nr; i++)
+ printf("%s%s%s", indent, list->items[i].string, nl);
+}
+
+/* Print a cell to stdout with all necessary leading/trailing space */
+static int display_cell(struct column_data *data, int initial_width,
+ const char *empty_cell, int x, int y)
+{
+ int i, len, newline;
+
+ i = XY2LINEAR(data, x, y);
+ if (i >= data->list->nr)
+ return -1;
+
+ len = data->len[i];
+ if (data->width && data->len[data->width[x]] < initial_width) {
+ /*
+ * empty_cell has initial_width chars; if the real
+ * column is narrower, increase len a bit so we fill
+ * less space.
+ */
+ len += initial_width - data->len[data->width[x]];
+ len -= data->opts.padding;
+ }
+
+ if (COL_LAYOUT(data->colopts) == COL_COLUMN)
+ newline = i + data->rows >= data->list->nr;
+ else
+ newline = x == data->cols - 1 || i == data->list->nr - 1;
+
+ printf("%s%s%s",
+ x == 0 ? data->opts.indent : "",
+ data->list->items[i].string,
+ newline ? data->opts.nl : empty_cell + len);
+ return 0;
+}
+
+/* Display COL_COLUMN or COL_ROW */
+static void display_table(const struct string_list *list,
+ unsigned int colopts,
+ const struct column_options *opts)
+{
+ struct column_data data;
+ int x, y, i, initial_width;
+ char *empty_cell;
+
+ memset(&data, 0, sizeof(data));
+ data.list = list;
+ data.colopts = colopts;
+ data.opts = *opts;
+
+ data.len = xmalloc(sizeof(*data.len) * list->nr);
+ for (i = 0; i < list->nr; i++)
+ data.len[i] = item_length(colopts, list->items[i].string);
+
+ layout(&data, &initial_width);
+
+ if (colopts & COL_DENSE)
+ shrink_columns(&data);
+
+ empty_cell = xmalloc(initial_width + 1);
+ memset(empty_cell, ' ', initial_width);
+ empty_cell[initial_width] = '\0';
+ for (y = 0; y < data.rows; y++) {
+ for (x = 0; x < data.cols; x++)
+ if (display_cell(&data, initial_width, empty_cell, x, y))
+ break;
+ }
+
+ free(data.len);
+ free(data.width);
+ free(empty_cell);
+}
+
+void print_columns(const struct string_list *list, unsigned int colopts,
+ const struct column_options *opts)
+{
+ struct column_options nopts;
+
+ if (!list->nr)
+ return;
+ assert((colopts & COL_ENABLE_MASK) != COL_AUTO);
+
+ memset(&nopts, 0, sizeof(nopts));
+ nopts.indent = opts && opts->indent ? opts->indent : "";
+ nopts.nl = opts && opts->nl ? opts->nl : "\n";
+ nopts.padding = opts ? opts->padding : 1;
+ nopts.width = opts && opts->width ? opts->width : term_columns() - 1;
+ if (!column_active(colopts)) {
+ display_plain(list, "", "\n");
+ return;
+ }
+ switch (COL_LAYOUT(colopts)) {
+ case COL_PLAIN:
+ display_plain(list, nopts.indent, nopts.nl);
+ break;
+ case COL_ROW:
+ case COL_COLUMN:
+ display_table(list, colopts, &nopts);
+ break;
+ default:
+ die("BUG: invalid layout mode %d", COL_LAYOUT(colopts));
+ }
+}
+
+int finalize_colopts(unsigned int *colopts, int stdout_is_tty)
+{
+ if ((*colopts & COL_ENABLE_MASK) == COL_AUTO) {
+ if (stdout_is_tty < 0)
+ stdout_is_tty = isatty(1);
+ *colopts &= ~COL_ENABLE_MASK;
+ if (stdout_is_tty)
+ *colopts |= COL_ENABLED;
+ }
+ return 0;
+}
+
+struct colopt {
+ const char *name;
+ unsigned int value;
+ unsigned int mask;
+};
+
+#define LAYOUT_SET 1
+#define ENABLE_SET 2
+
+static int parse_option(const char *arg, int len, unsigned int *colopts,
+ int *group_set)
+{
+ struct colopt opts[] = {
+ { "always", COL_ENABLED, COL_ENABLE_MASK },
+ { "never", COL_DISABLED, COL_ENABLE_MASK },
+ { "auto", COL_AUTO, COL_ENABLE_MASK },
+ { "plain", COL_PLAIN, COL_LAYOUT_MASK },
+ { "column", COL_COLUMN, COL_LAYOUT_MASK },
+ { "row", COL_ROW, COL_LAYOUT_MASK },
+ { "dense", COL_DENSE, 0 },
+ };
+ int i;
+
+ for (i = 0; i < ARRAY_SIZE(opts); i++) {
+ int set = 1, arg_len = len, name_len;
+ const char *arg_str = arg;
+
+ if (!opts[i].mask) {
+ if (arg_len > 2 && !strncmp(arg_str, "no", 2)) {
+ arg_str += 2;
+ arg_len -= 2;
+ set = 0;
+ }
+ }
+
+ name_len = strlen(opts[i].name);
+ if (arg_len != name_len ||
+ strncmp(arg_str, opts[i].name, name_len))
+ continue;
+
+ switch (opts[i].mask) {
+ case COL_ENABLE_MASK:
+ *group_set |= ENABLE_SET;
+ break;
+ case COL_LAYOUT_MASK:
+ *group_set |= LAYOUT_SET;
+ break;
+ }
+
+ if (opts[i].mask)
+ *colopts = (*colopts & ~opts[i].mask) | opts[i].value;
+ else {
+ if (set)
+ *colopts |= opts[i].value;
+ else
+ *colopts &= ~opts[i].value;
+ }
+ return 0;
+ }
+
+ return error("unsupported option '%s'", arg);
+}
+
+static int parse_config(unsigned int *colopts, const char *value)
+{
+ const char *sep = " ,";
+ int group_set = 0;
+
+ while (*value) {
+ int len = strcspn(value, sep);
+ if (len) {
+ if (parse_option(value, len, colopts, &group_set))
+ return -1;
+
+ value += len;
+ }
+ value += strspn(value, sep);
+ }
+ /*
+ * Setting layout implies "always" if neither always, never
+ * nor auto is specified.
+ *
+ * Current value in COL_ENABLE_MASK is disregarded. This means if
+ * you set column.ui = auto and pass --column=row, then "auto"
+ * will become "always".
+ */
+ if ((group_set & LAYOUT_SET) && !(group_set & ENABLE_SET))
+ *colopts = (*colopts & ~COL_ENABLE_MASK) | COL_ENABLED;
+ return 0;
+}
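A quick illustration of the precedence described in the comment above (an editorial sketch, not part of the patch):

	unsigned int colopts = 0;	/* column.ui not set */

	parse_config(&colopts, "row");
	/*
	 * A layout alone implies "always": COL_LAYOUT(colopts) is now
	 * COL_ROW and (colopts & COL_ENABLE_MASK) == COL_ENABLED.
	 */

	colopts = COL_AUTO;		/* as if column.ui = auto */
	parse_config(&colopts, "row");
	/* the explicit layout again forces COL_ENABLED, not COL_AUTO */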
+
+static int column_config(const char *var, const char *value,
+ const char *key, unsigned int *colopts)
+{
+ if (!value)
+ return config_error_nonbool(var);
+ if (parse_config(colopts, value))
+ return error("invalid column.%s mode %s", key, value);
+ return 0;
+}
+
+int git_column_config(const char *var, const char *value,
+ const char *command, unsigned int *colopts)
+{
+ const char *it = skip_prefix(var, "column.");
+ if (!it)
+ return 0;
+
+ if (!strcmp(it, "ui"))
+ return column_config(var, value, "ui", colopts);
+
+ if (command && !strcmp(it, command))
+ return column_config(var, value, it, colopts);
+
+ return 0;
+}
+
+int parseopt_column_callback(const struct option *opt,
+ const char *arg, int unset)
+{
+ unsigned int *colopts = opt->value;
+ *colopts |= COL_PARSEOPT;
+ *colopts &= ~COL_ENABLE_MASK;
+ if (unset) /* --no-column == never */
+ return 0;
+ /* --column == always unless "arg" states otherwise */
+ *colopts |= COL_ENABLED;
+ if (arg)
+ return parse_config(colopts, arg);
+
+ return 0;
+}
+
+static int fd_out = -1;
+static struct child_process column_process;
+
+int run_column_filter(int colopts, const struct column_options *opts)
+{
+ const char *av[10];
+ int ret, ac = 0;
+ struct strbuf sb_colopt = STRBUF_INIT;
+ struct strbuf sb_width = STRBUF_INIT;
+ struct strbuf sb_padding = STRBUF_INIT;
+
+ if (fd_out != -1)
+ return -1;
+
+ av[ac++] = "column";
+ strbuf_addf(&sb_colopt, "--raw-mode=%d", colopts);
+ av[ac++] = sb_colopt.buf;
+ if (opts && opts->width) {
+ strbuf_addf(&sb_width, "--width=%d", opts->width);
+ av[ac++] = sb_width.buf;
+ }
+ if (opts && opts->indent) {
+ av[ac++] = "--indent";
+ av[ac++] = opts->indent;
+ }
+ if (opts && opts->padding) {
+ strbuf_addf(&sb_padding, "--padding=%d", opts->padding);
+ av[ac++] = sb_padding.buf;
+ }
+ av[ac] = NULL;
+
+ fflush(stdout);
+ memset(&column_process, 0, sizeof(column_process));
+ column_process.in = -1;
+ column_process.out = dup(1);
+ column_process.git_cmd = 1;
+ column_process.argv = av;
+
+ ret = start_command(&column_process);
+
+ strbuf_release(&sb_colopt);
+ strbuf_release(&sb_width);
+ strbuf_release(&sb_padding);
+
+ if (ret)
+ return -2;
+
+ fd_out = dup(1);
+ close(1);
+ dup2(column_process.in, 1);
+ close(column_process.in);
+ return 0;
+}
+
+int stop_column_filter(void)
+{
+ if (fd_out == -1)
+ return -1;
+
+ fflush(stdout);
+ close(1);
+ finish_command(&column_process);
+ dup2(fd_out, 1);
+ close(fd_out);
+ fd_out = -1;
+ return 0;
+}
--- /dev/null
+#ifndef COLUMN_H
+#define COLUMN_H
+
+#define COL_LAYOUT_MASK 0x000F
+#define COL_ENABLE_MASK 0x0030 /* always, never or auto */
+#define COL_PARSEOPT 0x0040 /* --column is given from cmdline */
+#define COL_DENSE 0x0080 /* Shrink columns when possible,
+ making space for more columns */
+
+#define COL_DISABLED 0x0000 /* must be zero */
+#define COL_ENABLED 0x0010
+#define COL_AUTO 0x0020
+
+#define COL_LAYOUT(c) ((c) & COL_LAYOUT_MASK)
+#define COL_COLUMN 0 /* Fill columns before rows */
+#define COL_ROW 1 /* Fill rows before columns */
+#define COL_PLAIN 15 /* one column */
+
+#define explicitly_enable_column(c) \
+ (((c) & COL_PARSEOPT) && column_active(c))
+
+struct column_options {
+ int width;
+ int padding;
+ const char *indent;
+ const char *nl;
+};
+
+struct option;
+extern int parseopt_column_callback(const struct option *, const char *, int);
+extern int git_column_config(const char *var, const char *value,
+ const char *command, unsigned int *colopts);
+extern int finalize_colopts(unsigned int *colopts, int stdout_is_tty);
+static inline int column_active(unsigned int colopts)
+{
+ return (colopts & COL_ENABLE_MASK) == COL_ENABLED;
+}
+
+extern void print_columns(const struct string_list *list, unsigned int colopts,
+ const struct column_options *opts);
+
+extern int run_column_filter(int colopts, const struct column_options *);
+extern int stop_column_filter(void);
+
+#endif
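For orientation, a minimal sketch of how a caller might drive the API declared above (editorial illustration, not part of the patch; the list contents are invented):

	#include "cache.h"
	#include "string-list.h"
	#include "column.h"

	static void show_names_in_columns(unsigned int colopts)
	{
		struct string_list names = STRING_LIST_INIT_DUP;
		struct column_options copts;

		string_list_append(&names, "alpha");
		string_list_append(&names, "beta");
		string_list_append(&names, "gamma");

		/* resolve COL_AUTO against isatty(1) before printing */
		finalize_colopts(&colopts, -1);

		memset(&copts, 0, sizeof(copts));
		copts.padding = 2;
		/* falls back to one item per line when columns are disabled */
		print_columns(&names, colopts, &copts);

		string_list_clear(&names, 0);
	}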
git-citool mainporcelain
git-clean mainporcelain
git-clone mainporcelain common
+git-column purehelpers
git-commit mainporcelain common
git-commit-tree plumbingmanipulators
git-config ancillarymanipulators
git-mv mainporcelain common
git-name-rev plumbinginterrogators
git-notes mainporcelain
+git-p4 foreignscminterface
git-pack-objects plumbingmanipulators
git-pack-redundant plumbinginterrogators
git-pack-refs ancillarymanipulators
#include "revision.h"
#include "notes.h"
#include "gpg-interface.h"
+#include "mergesort.h"
int save_commit_buffer = 1;
return commit_list_insert(item, pp);
}
+static int commit_list_compare_by_date(const void *a, const void *b)
+{
+ unsigned long a_date = ((const struct commit_list *)a)->item->date;
+ unsigned long b_date = ((const struct commit_list *)b)->item->date;
+ if (a_date < b_date)
+ return 1;
+ if (a_date > b_date)
+ return -1;
+ return 0;
+}
+
+static void *commit_list_get_next(const void *a)
+{
+ return ((const struct commit_list *)a)->next;
+}
+
+static void commit_list_set_next(void *a, void *next)
+{
+ ((struct commit_list *)a)->next = next;
+}
void commit_list_sort_by_date(struct commit_list **list)
{
- struct commit_list *ret = NULL;
- while (*list) {
- commit_list_insert_by_date((*list)->item, &ret);
- *list = (*list)->next;
- }
- *list = ret;
+ *list = llist_mergesort(*list, commit_list_get_next, commit_list_set_next,
+ commit_list_compare_by_date);
}
struct commit *pop_most_recent_commit(struct commit_list **list,
}
return commit;
}
+
+/*
+ * Append a commit to the end of the commit_list.
+ *
+ * next starts by pointing to the variable that holds the head of an
+ * empty commit_list, and is updated to point to the "next" field of
+ * the last item on the list as new commits are appended.
+ *
+ * Usage example:
+ *
+ * struct commit_list *list;
+ * struct commit_list **next = &list;
+ *
+ * next = commit_list_append(c1, next);
+ * next = commit_list_append(c2, next);
+ * assert(commit_list_count(list) == 2);
+ * return list;
+ */
+struct commit_list **commit_list_append(struct commit *commit,
+ struct commit_list **next)
+{
+ struct commit_list *new = xmalloc(sizeof(struct commit_list));
+ new->item = commit;
+ *next = new;
+ new->next = NULL;
+ return &new->next;
+}
struct commit_list *commit_list_insert(struct commit *item,
struct commit_list **list);
+struct commit_list **commit_list_append(struct commit *commit,
+ struct commit_list **next);
unsigned commit_list_count(const struct commit_list *l);
struct commit_list *commit_list_insert_by_date(struct commit *item,
struct commit_list **list);
#define S_IWOTH 0
#define S_IXOTH 0
#define S_IRWXO (S_IROTH | S_IWOTH | S_IXOTH)
-#define S_ISUID 0
-#define S_ISGID 0
-#define S_ISVTX 0
+
+#define S_ISUID 0004000
+#define S_ISGID 0002000
+#define S_ISVTX 0001000
#define WIFEXITED(x) 1
#define WIFSIGNALED(x) 0
temp = MapViewOfFileEx(hmap, FILE_MAP_COPY, h, l, length, start);
if (!CloseHandle(hmap))
- warning("unable to close file mapping handle\n");
+ warning("unable to close file mapping handle");
return temp ? temp : MAP_FAILED;
}
{
int ret = 0;
struct strbuf buf = STRBUF_INIT;
+ char *expanded = expand_user_path(path);
+
+ if (!expanded)
+ return error("Could not expand include path '%s'", path);
+ path = expanded;
/*
* Use an absolute path as-is, but interpret relative paths
inc->depth--;
}
strbuf_release(&buf);
+ free(expanded);
return ret;
}
push_default = PUSH_DEFAULT_NOTHING;
else if (!strcmp(value, "matching"))
push_default = PUSH_DEFAULT_MATCHING;
+ else if (!strcmp(value, "simple"))
+ push_default = PUSH_DEFAULT_SIMPLE;
else if (!strcmp(value, "upstream"))
push_default = PUSH_DEFAULT_UPSTREAM;
else if (!strcmp(value, "tracking")) /* deprecated */
push_default = PUSH_DEFAULT_CURRENT;
else {
error("Malformed value for %s: %s", var, value);
- return error("Must be one of nothing, matching, "
- "tracking or current.");
+ return error("Must be one of nothing, matching, simple, "
+ "upstream or current.");
}
return 0;
}
return 0;
}
+static int section_name_is_ok(const char *name)
+{
+ /* Empty section names are bogus. */
+ if (!*name)
+ return 0;
+
+ /*
+ * Before a dot, we must be alphanumeric or dash. After the first dot,
+ * anything goes, so we can stop checking.
+ */
+ for (; *name && *name != '.'; name++)
+ if (*name != '-' && !isalnum(*name))
+ return 0;
+ return 1;
+}
+
/* if new_name == NULL, the section is removed instead */
int git_config_rename_section_in_file(const char *config_filename,
const char *old_name, const char *new_name)
{
int ret = 0, remove = 0;
char *filename_buf = NULL;
- struct lock_file *lock = xcalloc(sizeof(struct lock_file), 1);
+ struct lock_file *lock;
int out_fd;
char buf[1024];
FILE *config_file;
+ if (new_name && !section_name_is_ok(new_name)) {
+ ret = error("invalid section name: %s", new_name);
+ goto out;
+ }
+
if (!config_filename)
config_filename = filename_buf = git_pathdup("config");
+ lock = xcalloc(sizeof(struct lock_file), 1);
out_fd = hold_lock_file_for_update(lock, config_filename, 0);
if (out_fd < 0) {
ret = error("could not lock config file %s", config_filename);
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
-AC_PREREQ(2.59)
-AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
-
-AC_CONFIG_SRCDIR([git.c])
-
-config_file=config.mak.autogen
-config_append=config.mak.append
-config_in=config.mak.in
-
-echo "# ${config_append}. Generated by configure." > "${config_append}"
-
+## Definitions of private macros.
-## Definitions of macros
# GIT_CONF_APPEND_LINE(LINE)
# --------------------------
# Append LINE to file ${config_append}
AC_DEFUN([GIT_CONF_APPEND_LINE],
-[echo "$1" >> "${config_append}"])# GIT_CONF_APPEND_LINE
-#
+ [echo "$1" >> "${config_append}"])
+
# GIT_ARG_SET_PATH(PROGRAM)
# -------------------------
# Provide --with-PROGRAM=PATH option to set PATH to PROGRAM
# Optional second argument allows setting NO_PROGRAM=YesPlease if
# --without-PROGRAM version used.
AC_DEFUN([GIT_ARG_SET_PATH],
-[AC_ARG_WITH([$1],
- [AS_HELP_STRING([--with-$1=PATH],
- [provide PATH to $1])],
- [GIT_CONF_APPEND_PATH($1,$2)],[])
-])# GIT_ARG_SET_PATH
-#
+ [AC_ARG_WITH([$1],
+ [AS_HELP_STRING([--with-$1=PATH],
+ [provide PATH to $1])],
+ [GIT_CONF_APPEND_PATH([$1], [$2])],
+ [])])
+
# GIT_CONF_APPEND_PATH(PROGRAM)
-# ------------------------------
+# -----------------------------
# Parse --with-PROGRAM=PATH option to set PROGRAM_PATH=PATH
# Used by GIT_ARG_SET_PATH(PROGRAM)
# Optional second argument allows setting NO_PROGRAM=YesPlease if
# --without-PROGRAM is used.
AC_DEFUN([GIT_CONF_APPEND_PATH],
-[PROGRAM=m4_toupper($1); \
-if test "$withval" = "no"; then \
- if test -n "$2"; then \
- m4_toupper($1)_PATH=$withval; \
- AC_MSG_NOTICE([Disabling use of ${PROGRAM}]); \
- GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease); \
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=); \
- else \
- AC_MSG_ERROR([You cannot use git without $1]); \
- fi; \
-else \
- if test "$withval" = "yes"; then \
- AC_MSG_WARN([You should provide path for --with-$1=PATH]); \
- else \
- m4_toupper($1)_PATH=$withval; \
- AC_MSG_NOTICE([Setting m4_toupper($1)_PATH to $withval]); \
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval); \
- fi; \
-fi; \
-]) # GIT_CONF_APPEND_PATH
-#
+ [m4_pushdef([GIT_UC_PROGRAM], m4_toupper([$1]))dnl
+ PROGRAM=GIT_UC_PROGRAM
+ if test "$withval" = "no"; then
+ if test -n "$2"; then
+ GIT_UC_PROGRAM[]_PATH=$withval
+ AC_MSG_NOTICE([Disabling use of ${PROGRAM}])
+ GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease)
+ GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=)
+ else
+ AC_MSG_ERROR([You cannot use git without $1])
+ fi
+ else
+ if test "$withval" = "yes"; then
+ AC_MSG_WARN([You should provide path for --with-$1=PATH])
+ else
+ GIT_UC_PROGRAM[]_PATH=$withval
+ AC_MSG_NOTICE([Setting GIT_UC_PROGRAM[]_PATH to $withval])
+ GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval)
+ fi
+ fi
+ m4_popdef([GIT_UC_PROGRAM])])
+
# GIT_PARSE_WITH(PACKAGE)
# -----------------------
# For use in AC_ARG_WITH action-if-found, for packages default ON.
# * Set PACKAGEDIR=PATH for --with-PACKAGE=PATH
# * Unset NO_PACKAGE for --with-PACKAGE without ARG
AC_DEFUN([GIT_PARSE_WITH],
-[PACKAGE=m4_toupper($1); \
-if test "$withval" = "no"; then \
- m4_toupper(NO_$1)=YesPlease; \
-elif test "$withval" = "yes"; then \
- m4_toupper(NO_$1)=; \
-else \
- m4_toupper(NO_$1)=; \
- m4_toupper($1)DIR=$withval; \
- AC_MSG_NOTICE([Setting m4_toupper($1)DIR to $withval]); \
- GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval); \
-fi \
-])# GIT_PARSE_WITH
-#
+ [m4_pushdef([GIT_UC_PACKAGE], m4_toupper([$1]))dnl
+ PACKAGE=GIT_UC_PACKAGE
+ if test "$withval" = "no"; then
+ NO_[]GIT_UC_PACKAGE=YesPlease
+ elif test "$withval" = "yes"; then
+ NO_[]GIT_UC_PACKAGE=
+ else
+ NO_[]GIT_UC_PACKAGE=
+ GIT_UC_PACKAGE[]DIR=$withval
+ AC_MSG_NOTICE([Setting GIT_UC_PACKAGE[]DIR to $withval])
+ GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval)
+ fi
+ m4_popdef([GIT_UC_PACKAGE])])
+
# GIT_PARSE_WITH_SET_MAKE_VAR(WITHNAME, VAR, HELP_TEXT)
-# ---------------------
+# -----------------------------------------------------
# Set VAR to the value specified by --with-WITHNAME.
# No verification of arguments is performed, but warnings are issued
# if either 'yes' or 'no' is specified.
AC_DEFUN([GIT_PARSE_WITH_SET_MAKE_VAR],
[AC_ARG_WITH([$1],
[AS_HELP_STRING([--with-$1=VALUE], $3)],
- if test -n "$withval"; then \
- if test "$withval" = "yes" -o "$withval" = "no"; then \
+ if test -n "$withval"; then
+ if test "$withval" = "yes" -o "$withval" = "no"; then
AC_MSG_WARN([You likely do not want either 'yes' or 'no' as]
- [a value for $1 ($2). Maybe you do...?]); \
- fi; \
- \
- AC_MSG_NOTICE([Setting $2 to $withval]); \
- GIT_CONF_APPEND_LINE($2=$withval); \
+ [a value for $1 ($2). Maybe you do...?])
+ fi
+ AC_MSG_NOTICE([Setting $2 to $withval])
+ GIT_CONF_APPEND_LINE($2=$withval)
fi)])# GIT_PARSE_WITH_SET_MAKE_VAR
-dnl
-dnl GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
-dnl -----------------------------------------
-dnl Similar to AC_CHECK_FUNC, but on systems that do not generate
-dnl warnings for missing prototypes (e.g. FreeBSD when compiling without
-dnl -Wall), it does not work. By looking for function definition in
-dnl libraries, this problem can be worked around.
+#
+# GIT_CHECK_FUNC(FUNCTION, IFTRUE, IFFALSE)
+# -----------------------------------------
+# Similar to AC_CHECK_FUNC, but on systems that do not generate
+# warnings for missing prototypes (e.g. FreeBSD when compiling without
+# -Wall), it does not work. By looking for function definition in
+# libraries, this problem can be worked around.
AC_DEFUN([GIT_CHECK_FUNC],[AC_CHECK_FUNC([$1],[
AC_SEARCH_LIBS([$1],,
[$2],[$3])
],[$3])])
-dnl
-dnl GIT_STASH_FLAGS(BASEPATH_VAR)
-dnl -----------------------------
-dnl Allow for easy stashing of LDFLAGS and CPPFLAGS before running
-dnl tests that may want to take user settings into account.
+#
+# GIT_STASH_FLAGS(BASEPATH_VAR)
+# -----------------------------
+# Allow for easy stashing of LDFLAGS and CPPFLAGS before running
+# tests that may want to take user settings into account.
AC_DEFUN([GIT_STASH_FLAGS],[
if test -n "$1"; then
old_CPPFLAGS="$CPPFLAGS"
fi
])
+## Configure body starts here.
+
+AC_PREREQ(2.59)
+AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
+
+AC_CONFIG_SRCDIR([git.c])
+
+config_file=config.mak.autogen
+config_append=config.mak.append
+config_in=config.mak.in
+
+echo "# ${config_append}. Generated by configure." > "${config_append}"
+
# Directories holding "saner" versions of common or POSIX binaries.
AC_ARG_WITH([sane-tool-path],
[AS_HELP_STRING(
AC_ARG_WITH([lib],
[AS_HELP_STRING([--with-lib=ARG],
[ARG specifies alternative name for lib directory])],
- [if test "$withval" = "no" || test "$withval" = "yes"; then \
- AC_MSG_WARN([You should provide name for --with-lib=ARG]); \
-else \
- lib=$withval; \
- AC_MSG_NOTICE([Setting lib to '$lib']); \
- GIT_CONF_APPEND_LINE(lib=$withval); \
-fi; \
-],[])
+ [if test "$withval" = "no" || test "$withval" = "yes"; then
+ AC_MSG_WARN([You should provide name for --with-lib=ARG])
+ else
+ lib=$withval
+ AC_MSG_NOTICE([Setting lib to '$lib'])
+ GIT_CONF_APPEND_LINE(lib=$withval)
+ fi])
if test -z "$lib"; then
AC_MSG_NOTICE([Setting lib to 'lib' (the default)])
# /foo/bar/include and /foo/bar/lib directories.
AC_ARG_WITH(openssl,
AS_HELP_STRING([--with-openssl],[use OpenSSL library (default is YES)])
-AS_HELP_STRING([], [ARG can be prefix for openssl library and headers]),\
-GIT_PARSE_WITH(openssl))
-#
+AS_HELP_STRING([], [ARG can be prefix for openssl library and headers]),
+GIT_PARSE_WITH([openssl]))
+
# Define USE_LIBPCRE if you have and want to use libpcre. git-grep will be
# able to use Perl-compatible regular expressions.
#
AC_ARG_WITH(libpcre,
AS_HELP_STRING([--with-libpcre],[support Perl-compatible regexes (default is NO)])
AS_HELP_STRING([], [ARG can be also prefix for libpcre library and headers]),
-if test "$withval" = "no"; then \
- USE_LIBPCRE=; \
-elif test "$withval" = "yes"; then \
- USE_LIBPCRE=YesPlease; \
-else
- USE_LIBPCRE=YesPlease; \
- LIBPCREDIR=$withval; \
- AC_MSG_NOTICE([Setting LIBPCREDIR to $withval]); \
- GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval); \
-fi \
-)
+ if test "$withval" = "no"; then
+ USE_LIBPCRE=
+ elif test "$withval" = "yes"; then
+ USE_LIBPCRE=YesPlease
+ else
+ USE_LIBPCRE=YesPlease
+ LIBPCREDIR=$withval
+ AC_MSG_NOTICE([Setting LIBPCREDIR to $withval])
+ GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval)
+ fi)
#
# Define NO_CURL if you do not have curl installed. git-http-pull and
# git-http-push are not built, and you cannot use http:// and https://
AS_HELP_STRING([--with-tcltk],[use Tcl/Tk GUI (default is YES)])
AS_HELP_STRING([],[ARG is the full path to the Tcl/Tk interpreter.])
AS_HELP_STRING([],[Bare --with-tcltk will make the GUI part only if])
-AS_HELP_STRING([],[Tcl/Tk interpreter will be found in a system.]),\
+AS_HELP_STRING([],[Tcl/Tk interpreter will be found in a system.]),
GIT_PARSE_WITH(tcltk))
#
fi
}
-# __gitcomp_1 requires 2 arguments
__gitcomp_1 ()
{
- local c IFS=' '$'\t'$'\n'
+ local c IFS=$' \t\n'
for c in $1; do
- case "$c$2" in
- --*=*) printf %s$'\n' "$c$2" ;;
- *.) printf %s$'\n' "$c$2" ;;
- *) printf %s$'\n' "$c$2 " ;;
+ c="$c$2"
+ case $c in
+ --*=*|*.) ;;
+ *) c="$c " ;;
esac
+ printf '%s\n' "$c"
done
}
__gitcomp '--ref'
;;
,*)
- case "${words[cword-1]}" in
+ case "$prev" in
--ref)
__gitcomp_nl "$(__git_refs)"
;;
prune,*)
;;
*)
- case "${words[cword-1]}" in
+ case "$prev" in
-m|-F)
;;
*)
case "$i" in
--git-dir=*) __git_dir="${i#--git-dir=}" ;;
--bare) __git_dir="." ;;
- --version|-p|--paginate) ;;
--help) command="help"; break ;;
+ -c) c=$((++c)) ;;
+ -*) ;;
*) command="$i"; break ;;
esac
((c++))
--bare
--version
--exec-path
+ --exec-path=
--html-path
+ --info-path
--work-tree=
--namespace=
+ --no-replace-objects
--help
"
;;
filename = git_path("FETCH_HEAD");
fp = fopen(filename, "a");
if (!fp)
- return error("cannot open %s: %s\n", filename, strerror(errno));
+ return error("cannot open %s: %s", filename, strerror(errno));
result = append_fetch_head(fp, argv[2], argv[3],
argv[4], argv[5],
argv[6], !!argv[7][0],
filename = git_path("FETCH_HEAD");
fp = fopen(filename, "a");
if (!fp)
- return error("cannot open %s: %s\n", filename, strerror(errno));
+ return error("cannot open %s: %s", filename, strerror(errno));
result = fetch_native_store(fp, argv[2], argv[3], argv[4],
verbose, force);
fclose(fp);
+++ /dev/null
-#!/usr/bin/env python
-#
-# git-p4.py -- A tool for bidirectional operation between a Perforce depot and git.
-#
-# Author: Simon Hausmann <simon@lst.de>
-# Copyright: 2007 Simon Hausmann <simon@lst.de>
-# 2007 Trolltech ASA
-# License: MIT <http://www.opensource.org/licenses/mit-license.php>
-#
-
-import optparse, sys, os, marshal, subprocess, shelve
-import tempfile, getopt, os.path, time, platform
-import re, shutil
-
-verbose = False
-
-
-def p4_build_cmd(cmd):
- """Build a suitable p4 command line.
-
- This consolidates building and returning a p4 command line into one
-    location. It means that hooking into the environment or other configuration
- can be done more easily.
- """
- real_cmd = ["p4"]
-
- user = gitConfig("git-p4.user")
- if len(user) > 0:
- real_cmd += ["-u",user]
-
- password = gitConfig("git-p4.password")
- if len(password) > 0:
- real_cmd += ["-P", password]
-
- port = gitConfig("git-p4.port")
- if len(port) > 0:
- real_cmd += ["-p", port]
-
- host = gitConfig("git-p4.host")
- if len(host) > 0:
- real_cmd += ["-H", host]
-
- client = gitConfig("git-p4.client")
- if len(client) > 0:
- real_cmd += ["-c", client]
-
-
- if isinstance(cmd,basestring):
- real_cmd = ' '.join(real_cmd) + ' ' + cmd
- else:
- real_cmd += cmd
- return real_cmd
-
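For illustration, a minimal standalone sketch (not part of git-p4 itself) of how the git-p4.* configuration keys above translate into global p4 flags; gitConfig is stubbed with a plain dictionary and the configuration values are invented:

    # Hypothetical configuration; in git-p4 these values come from "git config".
    config = {"git-p4.user": "alice", "git-p4.port": "perforce:1666"}

    def gitConfig(key):
        return config.get(key, "")

    real_cmd = ["p4"]
    for key, flag in [("git-p4.user", "-u"), ("git-p4.password", "-P"),
                      ("git-p4.port", "-p"), ("git-p4.host", "-H"),
                      ("git-p4.client", "-c")]:
        value = gitConfig(key)
        if len(value) > 0:
            real_cmd += [flag, value]

    assert real_cmd == ["p4", "-u", "alice", "-p", "perforce:1666"]
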
-def chdir(dir):
- # P4 uses the PWD environment variable rather than getcwd(). Since we're
- # not using the shell, we have to set it ourselves. This path could
- # be relative, so go there first, then figure out where we ended up.
- os.chdir(dir)
- os.environ['PWD'] = os.getcwd()
-
-def die(msg):
- if verbose:
- raise Exception(msg)
- else:
- sys.stderr.write(msg + "\n")
- sys.exit(1)
-
-def write_pipe(c, stdin):
- if verbose:
- sys.stderr.write('Writing pipe: %s\n' % str(c))
-
- expand = isinstance(c,basestring)
- p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand)
- pipe = p.stdin
- val = pipe.write(stdin)
- pipe.close()
- if p.wait():
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_write_pipe(c, stdin):
- real_cmd = p4_build_cmd(c)
- return write_pipe(real_cmd, stdin)
-
-def read_pipe(c, ignore_error=False):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % str(c))
-
- expand = isinstance(c,basestring)
- p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
- pipe = p.stdout
- val = pipe.read()
- if p.wait() and not ignore_error:
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_read_pipe(c, ignore_error=False):
- real_cmd = p4_build_cmd(c)
- return read_pipe(real_cmd, ignore_error)
-
-def read_pipe_lines(c):
- if verbose:
- sys.stderr.write('Reading pipe: %s\n' % str(c))
-
- expand = isinstance(c, basestring)
- p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
- pipe = p.stdout
- val = pipe.readlines()
- if pipe.close() or p.wait():
- die('Command failed: %s' % str(c))
-
- return val
-
-def p4_read_pipe_lines(c):
- """Specifically invoke p4 on the command supplied. """
- real_cmd = p4_build_cmd(c)
- return read_pipe_lines(real_cmd)
-
-def system(cmd):
- expand = isinstance(cmd,basestring)
- if verbose:
- sys.stderr.write("executing %s\n" % str(cmd))
- subprocess.check_call(cmd, shell=expand)
-
-def p4_system(cmd):
- """Specifically invoke p4 as the system command. """
- real_cmd = p4_build_cmd(cmd)
- expand = isinstance(real_cmd, basestring)
- subprocess.check_call(real_cmd, shell=expand)
-
-def p4_integrate(src, dest):
- p4_system(["integrate", "-Dt", src, dest])
-
-def p4_sync(path):
- p4_system(["sync", path])
-
-def p4_add(f):
- p4_system(["add", f])
-
-def p4_delete(f):
- p4_system(["delete", f])
-
-def p4_edit(f):
- p4_system(["edit", f])
-
-def p4_revert(f):
- p4_system(["revert", f])
-
-def p4_reopen(type, file):
- p4_system(["reopen", "-t", type, file])
-
-#
-# Canonicalize the p4 type and return a tuple of the
-# base type, plus any modifiers. See "p4 help filetypes"
-# for a list and explanation.
-#
-def split_p4_type(p4type):
-
- p4_filetypes_historical = {
- "ctempobj": "binary+Sw",
- "ctext": "text+C",
- "cxtext": "text+Cx",
- "ktext": "text+k",
- "kxtext": "text+kx",
- "ltext": "text+F",
- "tempobj": "binary+FSw",
- "ubinary": "binary+F",
- "uresource": "resource+F",
- "uxbinary": "binary+Fx",
- "xbinary": "binary+x",
- "xltext": "text+Fx",
- "xtempobj": "binary+Swx",
- "xtext": "text+x",
- "xunicode": "unicode+x",
- "xutf16": "utf16+x",
- }
- if p4type in p4_filetypes_historical:
- p4type = p4_filetypes_historical[p4type]
- s = p4type.split("+")
- base = s[0]
- mods = ""
- if len(s) > 1:
- mods = s[1]
- return (base, mods)
-
-#
-# return the raw p4 type of a file (text, text+ko, etc)
-#
-def p4_type(file):
- results = p4CmdList(["fstat", "-T", "headType", file])
- return results[0]['headType']
-
-#
-# Given a type base and modifier, return a regexp matching
-# the keywords that can be expanded in the file
-#
-def p4_keywords_regexp_for_type(base, type_mods):
- if base in ("text", "unicode", "binary"):
- kwords = None
- if "ko" in type_mods:
- kwords = 'Id|Header'
- elif "k" in type_mods:
- kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision'
- else:
- return None
- pattern = r"""
- \$ # Starts with a dollar, followed by...
- (%s) # one of the keywords, followed by...
- (:[^$]+)? # possibly an old expansion, followed by...
- \$ # another dollar
- """ % kwords
- return pattern
- else:
- return None
-
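As a quick illustration (a standalone sketch with an invented input line), the verbose-mode pattern returned for a "text+k" file matches an expanded RCS keyword and lets the caller collapse it back to its bare form, which is what patchRCSKeywords does further down:

    import re

    kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision'
    pattern = r"""
        \$              # Starts with a dollar, followed by...
        (%s)            # one of the keywords, followed by...
        (:[^$]+)?       # possibly an old expansion, followed by...
        \$              # another dollar
        """ % kwords

    regexp = re.compile(pattern, re.VERBOSE)
    line = 'version = "$Id: //depot/project/file.c#7 $"\n'
    # Collapse the expansion back to the bare keyword.
    assert regexp.sub(r'$\1$', line) == 'version = "$Id$"\n'
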
-#
-# Given a file, return a regexp matching the possible
-# RCS keywords that will be expanded, or None for files
-# with kw expansion turned off.
-#
-def p4_keywords_regexp_for_file(file):
- if not os.path.exists(file):
- return None
- else:
- (type_base, type_mods) = split_p4_type(p4_type(file))
- return p4_keywords_regexp_for_type(type_base, type_mods)
-
-def setP4ExecBit(file, mode):
- # Reopens an already open file and changes the execute bit to match
- # the execute bit setting in the passed in mode.
-
- p4Type = "+x"
-
- if not isModeExec(mode):
- p4Type = getP4OpenedType(file)
- p4Type = re.sub('^([cku]?)x(.*)', '\\1\\2', p4Type)
- p4Type = re.sub('(.*?\+.*?)x(.*?)', '\\1\\2', p4Type)
- if p4Type[-1] == "+":
- p4Type = p4Type[0:-1]
-
- p4_reopen(p4Type, file)
-
-def getP4OpenedType(file):
- # Returns the perforce file type for the given file.
-
- result = p4_read_pipe(["opened", file])
- match = re.match(".*\((.+)\)\r?$", result)
- if match:
- return match.group(1)
- else:
- die("Could not determine file type for %s (result: '%s')" % (file, result))
-
-def diffTreePattern():
- # This is a simple generator for the diff tree regex pattern. This could be
- # a class variable if this and parseDiffTreeEntry were a part of a class.
- pattern = re.compile(':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')
- while True:
- yield pattern
-
-def parseDiffTreeEntry(entry):
- """Parses a single diff tree entry into its component elements.
-
- See git-diff-tree(1) manpage for details about the format of the diff
- output. This method returns a dictionary with the following elements:
-
- src_mode - The mode of the source file
- dst_mode - The mode of the destination file
- src_sha1 - The sha1 for the source file
-    dst_sha1 - The sha1 for the destination file
- status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
- status_score - The score for the status (applicable for 'C' and 'R'
- statuses). This is None if there is no score.
- src - The path for the source file.
- dst - The path for the destination file. This is only present for
- copy or renames. If it is not present, this is None.
-
- If the pattern is not matched, None is returned."""
-
- match = diffTreePattern().next().match(entry)
- if match:
- return {
- 'src_mode': match.group(1),
- 'dst_mode': match.group(2),
- 'src_sha1': match.group(3),
- 'dst_sha1': match.group(4),
- 'status': match.group(5),
- 'status_score': match.group(6),
- 'src': match.group(7),
- 'dst': match.group(10)
- }
- return None
-
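For reference, a standalone sketch of the raw "git diff-tree -r" lines this parser consumes; the paths and abbreviated object names below are invented:

    import re

    pattern = re.compile(r':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')

    # An in-place modification: one path, no score.
    m = pattern.match(':100644 100755 ab04d88 9c1afe2 M\tbuild.sh')
    assert m.group(5) == 'M'          # status
    assert m.group(6) is None         # no status score
    assert m.group(7) == 'build.sh'   # src
    assert m.group(10) is None        # no dst

    # A rename: similarity score plus a second, tab-separated path.
    m = pattern.match(':100644 100644 ab04d88 ab04d88 R100\told/name.c\tnew/name.c')
    assert (m.group(5), m.group(6)) == ('R', '100')
    assert (m.group(7), m.group(10)) == ('old/name.c', 'new/name.c')
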
-def isModeExec(mode):
- # Returns True if the given git mode represents an executable file,
- # otherwise False.
- return mode[-3:] == "755"
-
-def isModeExecChanged(src_mode, dst_mode):
- return isModeExec(src_mode) != isModeExec(dst_mode)
-
-def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None):
-
- if isinstance(cmd,basestring):
- cmd = "-G " + cmd
- expand = True
- else:
- cmd = ["-G"] + cmd
- expand = False
-
- cmd = p4_build_cmd(cmd)
- if verbose:
- sys.stderr.write("Opening pipe: %s\n" % str(cmd))
-
- # Use a temporary file to avoid deadlocks without
- # subprocess.communicate(), which would put another copy
- # of stdout into memory.
- stdin_file = None
- if stdin is not None:
- stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode)
- if isinstance(stdin,basestring):
- stdin_file.write(stdin)
- else:
- for i in stdin:
- stdin_file.write(i + '\n')
- stdin_file.flush()
- stdin_file.seek(0)
-
- p4 = subprocess.Popen(cmd,
- shell=expand,
- stdin=stdin_file,
- stdout=subprocess.PIPE)
-
- result = []
- try:
- while True:
- entry = marshal.load(p4.stdout)
- if cb is not None:
- cb(entry)
- else:
- result.append(entry)
- except EOFError:
- pass
- exitCode = p4.wait()
- if exitCode != 0:
- entry = {}
- entry["p4ExitCode"] = exitCode
- result.append(entry)
-
- return result
-
-def p4Cmd(cmd):
- list = p4CmdList(cmd)
- result = {}
- for entry in list:
- result.update(entry)
- return result;
-
-def p4Where(depotPath):
- if not depotPath.endswith("/"):
- depotPath += "/"
- depotPath = depotPath + "..."
- outputList = p4CmdList(["where", depotPath])
- output = None
- for entry in outputList:
- if "depotFile" in entry:
- if entry["depotFile"] == depotPath:
- output = entry
- break
- elif "data" in entry:
- data = entry.get("data")
- space = data.find(" ")
- if data[:space] == depotPath:
- output = entry
- break
- if output == None:
- return ""
- if output["code"] == "error":
- return ""
- clientPath = ""
- if "path" in output:
- clientPath = output.get("path")
- elif "data" in output:
- data = output.get("data")
- lastSpace = data.rfind(" ")
- clientPath = data[lastSpace + 1:]
-
- if clientPath.endswith("..."):
- clientPath = clientPath[:-3]
- return clientPath
-
-def currentGitBranch():
- return read_pipe("git name-rev HEAD").split(" ")[1].strip()
-
-def isValidGitDir(path):
- if (os.path.exists(path + "/HEAD")
- and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
- return True;
- return False
-
-def parseRevision(ref):
- return read_pipe("git rev-parse %s" % ref).strip()
-
-def branchExists(ref):
- rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref],
- ignore_error=True)
- return len(rev) > 0
-
-def extractLogMessageFromGitCommit(commit):
- logMessage = ""
-
- ## fixme: title is first line of commit, not 1st paragraph.
- foundTitle = False
- for log in read_pipe_lines("git cat-file commit %s" % commit):
- if not foundTitle:
- if len(log) == 1:
- foundTitle = True
- continue
-
- logMessage += log
- return logMessage
-
-def extractSettingsGitLog(log):
- values = {}
- for line in log.split("\n"):
- line = line.strip()
- m = re.search (r"^ *\[git-p4: (.*)\]$", line)
- if not m:
- continue
-
- assignments = m.group(1).split (':')
- for a in assignments:
- vals = a.split ('=')
- key = vals[0].strip()
- val = ('='.join (vals[1:])).strip()
- if val.endswith ('\"') and val.startswith('"'):
- val = val[1:-1]
-
- values[key] = val
-
- paths = values.get("depot-paths")
- if not paths:
- paths = values.get("depot-path")
- if paths:
- values['depot-paths'] = paths.split(',')
- return values
-
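A short sketch of the commit-message trailer this function looks for; the depot path and change number are invented:

    import re

    log = """Import initial revision

    [git-p4: depot-paths = "//depot/project/main/": change = 12345]
    """

    m = re.search(r"^ *\[git-p4: (.*)\]$", log, re.M)
    # The bracketed body is a colon-separated list of key = value assignments,
    # which the loop above splits apart and strips of surrounding quotes.
    assert m.group(1) == 'depot-paths = "//depot/project/main/": change = 12345'
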
-def gitBranchExists(branch):
- proc = subprocess.Popen(["git", "rev-parse", branch],
- stderr=subprocess.PIPE, stdout=subprocess.PIPE);
- return proc.wait() == 0;
-
-_gitConfig = {}
-def gitConfig(key, args = None): # set args to "--bool", for instance
- if not _gitConfig.has_key(key):
- argsFilter = ""
- if args != None:
- argsFilter = "%s " % args
- cmd = "git config %s%s" % (argsFilter, key)
- _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip()
- return _gitConfig[key]
-
-def gitConfigList(key):
- if not _gitConfig.has_key(key):
- _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep)
- return _gitConfig[key]
-
-def p4BranchesInGit(branchesAreInRemotes = True):
- branches = {}
-
- cmdline = "git rev-parse --symbolic "
- if branchesAreInRemotes:
- cmdline += " --remotes"
- else:
- cmdline += " --branches"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- ## only import to p4/
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- # strip off p4
- branch = re.sub ("^p4/", "", line)
-
- branches[branch] = parseRevision(line)
- return branches
-
-def findUpstreamBranchPoint(head = "HEAD"):
- branches = p4BranchesInGit()
- # map from depot-path to branch name
- branchByDepotPath = {}
- for branch in branches.keys():
- tip = branches[branch]
- log = extractLogMessageFromGitCommit(tip)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- branchByDepotPath[paths] = "remotes/p4/" + branch
-
- settings = None
- parent = 0
- while parent < 65535:
- commit = head + "~%s" % parent
- log = extractLogMessageFromGitCommit(commit)
- settings = extractSettingsGitLog(log)
- if settings.has_key("depot-paths"):
- paths = ",".join(settings["depot-paths"])
- if branchByDepotPath.has_key(paths):
- return [branchByDepotPath[paths], settings]
-
- parent = parent + 1
-
- return ["", settings]
-
-def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
- if not silent:
- print ("Creating/updating branch(es) in %s based on origin branch(es)"
- % localRefPrefix)
-
- originPrefix = "origin/p4/"
-
- for line in read_pipe_lines("git rev-parse --symbolic --remotes"):
- line = line.strip()
- if (not line.startswith(originPrefix)) or line.endswith("HEAD"):
- continue
-
- headName = line[len(originPrefix):]
- remoteHead = localRefPrefix + headName
- originHead = line
-
- original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
- if (not original.has_key('depot-paths')
- or not original.has_key('change')):
- continue
-
- update = False
- if not gitBranchExists(remoteHead):
- if verbose:
- print "creating %s" % remoteHead
- update = True
- else:
- settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
-            if settings.has_key('change'):
- if settings['depot-paths'] == original['depot-paths']:
- originP4Change = int(original['change'])
- p4Change = int(settings['change'])
- if originP4Change > p4Change:
- print ("%s (%s) is newer than %s (%s). "
- "Updating p4 branch from origin."
- % (originHead, originP4Change,
- remoteHead, p4Change))
- update = True
- else:
- print ("Ignoring: %s was imported from %s while "
- "%s was imported from %s"
- % (originHead, ','.join(original['depot-paths']),
- remoteHead, ','.join(settings['depot-paths'])))
-
- if update:
- system("git update-ref %s %s" % (remoteHead, originHead))
-
-def originP4BranchesExist():
- return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
-
-def p4ChangesForPaths(depotPaths, changeRange):
- assert depotPaths
- cmd = ['changes']
- for p in depotPaths:
- cmd += ["%s...%s" % (p, changeRange)]
- output = p4_read_pipe_lines(cmd)
-
- changes = {}
- for line in output:
- changeNum = int(line.split(" ")[1])
- changes[changeNum] = True
-
- changelist = changes.keys()
- changelist.sort()
- return changelist
-
-def p4PathStartsWith(path, prefix):
- # This method tries to remedy a potential mixed-case issue:
- #
- # If UserA adds //depot/DirA/file1
- # and UserB adds //depot/dira/file2
- #
- # we may or may not have a problem. If you have core.ignorecase=true,
- # we treat DirA and dira as the same directory
- ignorecase = gitConfig("core.ignorecase", "--bool") == "true"
- if ignorecase:
- return path.lower().startswith(prefix.lower())
- return path.startswith(prefix)
-
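In other words (a sketch with invented paths), the prefix test is folded to lower case only when core.ignorecase is true:

    path, prefix = "//depot/DirA/file1", "//depot/dira/"
    assert path.lower().startswith(prefix.lower())   # core.ignorecase=true
    assert not path.startswith(prefix)               # default: exact match
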
-def getClientSpec():
- """Look at the p4 client spec, create a View() object that contains
- all the mappings, and return it."""
-
- specList = p4CmdList("client -o")
- if len(specList) != 1:
- die('Output from "client -o" is %d lines, expecting 1' %
- len(specList))
-
- # dictionary of all client parameters
- entry = specList[0]
-
- # just the keys that start with "View"
- view_keys = [ k for k in entry.keys() if k.startswith("View") ]
-
- # hold this new View
- view = View()
-
- # append the lines, in order, to the view
- for view_num in range(len(view_keys)):
- k = "View%d" % view_num
- if k not in view_keys:
- die("Expected view key %s missing" % k)
- view.append(entry[k])
-
- return view
-
-def getClientRoot():
- """Grab the client directory."""
-
- output = p4CmdList("client -o")
- if len(output) != 1:
- die('Output from "client -o" is %d lines, expecting 1' % len(output))
-
- entry = output[0]
- if "Root" not in entry:
- die('Client has no "Root"')
-
- return entry["Root"]
-
-class Command:
- def __init__(self):
- self.usage = "usage: %prog [options]"
- self.needsGit = True
-
-class P4UserMap:
- def __init__(self):
- self.userMapFromPerforceServer = False
- self.myP4UserId = None
-
- def p4UserId(self):
- if self.myP4UserId:
- return self.myP4UserId
-
- results = p4CmdList("user -o")
- for r in results:
- if r.has_key('User'):
- self.myP4UserId = r['User']
- return r['User']
- die("Could not find your p4 user id")
-
- def p4UserIsMe(self, p4User):
- # return True if the given p4 user is actually me
- me = self.p4UserId()
- if not p4User or p4User != me:
- return False
- else:
- return True
-
- def getUserCacheFilename(self):
- home = os.environ.get("HOME", os.environ.get("USERPROFILE"))
- return home + "/.gitp4-usercache.txt"
-
- def getUserMapFromPerforceServer(self):
- if self.userMapFromPerforceServer:
- return
- self.users = {}
- self.emails = {}
-
- for output in p4CmdList("users"):
- if not output.has_key("User"):
- continue
- self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
- self.emails[output["Email"]] = output["User"]
-
-
- s = ''
- for (key, val) in self.users.items():
- s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
-
- open(self.getUserCacheFilename(), "wb").write(s)
- self.userMapFromPerforceServer = True
-
- def loadUserMapFromCache(self):
- self.users = {}
- self.userMapFromPerforceServer = False
- try:
- cache = open(self.getUserCacheFilename(), "rb")
- lines = cache.readlines()
- cache.close()
- for line in lines:
- entry = line.strip().split("\t")
- self.users[entry[0]] = entry[1]
- except IOError:
- self.getUserMapFromPerforceServer()
-
-class P4Debug(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true",
- default=False),
- ]
- self.description = "A tool to debug the output of p4 -G."
- self.needsGit = False
- self.verbose = False
-
- def run(self, args):
- j = 0
- for output in p4CmdList(args):
- print 'Element: %d' % j
- j += 1
- print output
- return True
-
-class P4RollBack(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--local", dest="rollbackLocalBranches", action="store_true")
- ]
- self.description = "A tool to debug the multi-branch import. Don't use :)"
- self.verbose = False
- self.rollbackLocalBranches = False
-
- def run(self, args):
- if len(args) != 1:
- return False
- maxChange = int(args[0])
-
- if "p4ExitCode" in p4Cmd("changes -m 1"):
- die("Problems executing p4");
-
- if self.rollbackLocalBranches:
- refPrefix = "refs/heads/"
- lines = read_pipe_lines("git rev-parse --symbolic --branches")
- else:
- refPrefix = "refs/remotes/"
- lines = read_pipe_lines("git rev-parse --symbolic --remotes")
-
- for line in lines:
- if self.rollbackLocalBranches or (line.startswith("p4/") and line != "p4/HEAD\n"):
- line = line.strip()
- ref = refPrefix + line
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- changed = False
-
- if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
- for p in depotPaths]))) == 0:
- print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
- system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
- continue
-
- while change and int(change) > maxChange:
- changed = True
- if self.verbose:
- print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
- system("git update-ref %s \"%s^\"" % (ref, ref))
- log = extractLogMessageFromGitCommit(ref)
- settings = extractSettingsGitLog(log)
-
-
- depotPaths = settings['depot-paths']
- change = settings['change']
-
- if changed:
- print "%s rewound to %s" % (ref, change)
-
- return True
-
-class P4Submit(Command, P4UserMap):
- def __init__(self):
- Command.__init__(self)
- P4UserMap.__init__(self)
- self.options = [
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--origin", dest="origin"),
- optparse.make_option("-M", dest="detectRenames", action="store_true"),
- # preserve the user, requires relevant p4 permissions
- optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"),
- ]
- self.description = "Submit changes from git to the perforce depot."
- self.usage += " [name of git branch to submit into perforce depot]"
- self.interactive = True
- self.origin = ""
- self.detectRenames = False
- self.verbose = False
- self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
- self.isWindows = (platform.system() == "Windows")
-
- def check(self):
- if len(p4CmdList("opened ...")) > 0:
- die("You have files opened with perforce! Close them before starting the sync.")
-
- # replaces everything between 'Description:' and the next P4 submit template field with the
- # commit message
- def prepareLogMessage(self, template, message):
- result = ""
-
- inDescriptionSection = False
-
- for line in template.split("\n"):
- if line.startswith("#"):
- result += line + "\n"
- continue
-
- if inDescriptionSection:
- if line.startswith("Files:") or line.startswith("Jobs:"):
- inDescriptionSection = False
- else:
- continue
- else:
- if line.startswith("Description:"):
- inDescriptionSection = True
- line += "\n"
- for messageLine in message.split("\n"):
- line += "\t" + messageLine + "\n"
-
- result += line + "\n"
-
- return result
-
- def patchRCSKeywords(self, file, pattern):
- # Attempt to zap the RCS keywords in a p4 controlled file matching the given pattern
- (handle, outFileName) = tempfile.mkstemp(dir='.')
- try:
- outFile = os.fdopen(handle, "w+")
- inFile = open(file, "r")
- regexp = re.compile(pattern, re.VERBOSE)
- for line in inFile.readlines():
- line = regexp.sub(r'$\1$', line)
- outFile.write(line)
- inFile.close()
- outFile.close()
- # Forcibly overwrite the original file
- os.unlink(file)
- shutil.move(outFileName, file)
- except:
- # cleanup our temporary file
- os.unlink(outFileName)
- print "Failed to strip RCS keywords in %s" % file
- raise
-
- print "Patched up RCS keywords in %s" % file
-
- def p4UserForCommit(self,id):
- # Return the tuple (perforce user,git email) for a given git commit id
- self.getUserMapFromPerforceServer()
- gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id)
- gitEmail = gitEmail.strip()
- if not self.emails.has_key(gitEmail):
- return (None,gitEmail)
- else:
- return (self.emails[gitEmail],gitEmail)
-
- def checkValidP4Users(self,commits):
- # check if any git authors cannot be mapped to p4 users
- for id in commits:
- (user,email) = self.p4UserForCommit(id)
- if not user:
- msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
- if gitConfig('git-p4.allowMissingP4Users').lower() == "true":
- print "%s" % msg
- else:
- die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
-
- def lastP4Changelist(self):
- # Get back the last changelist number submitted in this client spec. This
- # then gets used to patch up the username in the change. If the same
- # client spec is being used by multiple processes then this might go
- # wrong.
- results = p4CmdList("client -o") # find the current client
- client = None
- for r in results:
- if r.has_key('Client'):
- client = r['Client']
- break
- if not client:
- die("could not get client spec")
- results = p4CmdList(["changes", "-c", client, "-m", "1"])
- for r in results:
- if r.has_key('change'):
- return r['change']
- die("Could not get changelist number for last submit - cannot patch up user details")
-
- def modifyChangelistUser(self, changelist, newUser):
- # fixup the user field of a changelist after it has been submitted.
- changes = p4CmdList("change -o %s" % changelist)
- if len(changes) != 1:
- die("Bad output from p4 change modifying %s to user %s" %
- (changelist, newUser))
-
- c = changes[0]
- if c['User'] == newUser: return # nothing to do
- c['User'] = newUser
- input = marshal.dumps(c)
-
- result = p4CmdList("change -f -i", stdin=input)
- for r in result:
- if r.has_key('code'):
- if r['code'] == 'error':
- die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data']))
- if r.has_key('data'):
- print("Updated user field for changelist %s to %s" % (changelist, newUser))
- return
- die("Could not modify user field of changelist %s to %s" % (changelist, newUser))
-
- def canChangeChangelists(self):
- # check to see if we have p4 admin or super-user permissions, either of
- # which are required to modify changelists.
- results = p4CmdList(["protects", self.depotPath])
- for r in results:
- if r.has_key('perm'):
- if r['perm'] == 'admin':
- return 1
- if r['perm'] == 'super':
- return 1
- return 0
-
- def prepareSubmitTemplate(self):
- # remove lines in the Files section that show changes to files outside the depot path we're committing into
- template = ""
- inFilesSection = False
- for line in p4_read_pipe_lines(['change', '-o']):
- if line.endswith("\r\n"):
- line = line[:-2] + "\n"
- if inFilesSection:
- if line.startswith("\t"):
- # path starts and ends with a tab
- path = line[1:]
- lastTab = path.rfind("\t")
- if lastTab != -1:
- path = path[:lastTab]
- if not p4PathStartsWith(path, self.depotPath):
- continue
- else:
- inFilesSection = False
- else:
- if line.startswith("Files:"):
- inFilesSection = True
-
- template += line
-
- return template
-
- def edit_template(self, template_file):
- """Invoke the editor to let the user change the submission
- message. Return true if okay to continue with the submit."""
-
- # if configured to skip the editing part, just submit
- if gitConfig("git-p4.skipSubmitEdit") == "true":
- return True
-
- # look at the modification time, to check later if the user saved
- # the file
- mtime = os.stat(template_file).st_mtime
-
- # invoke the editor
- if os.environ.has_key("P4EDITOR"):
- editor = os.environ.get("P4EDITOR")
- else:
- editor = read_pipe("git var GIT_EDITOR").strip()
- system(editor + " " + template_file)
-
- # If the file was not saved, prompt to see if this patch should
- # be skipped. But skip this verification step if configured so.
- if gitConfig("git-p4.skipSubmitEditCheck") == "true":
- return True
-
- # modification time updated means user saved the file
- if os.stat(template_file).st_mtime > mtime:
- return True
-
- while True:
- response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
- if response == 'y':
- return True
- if response == 'n':
- return False
-
- def applyCommit(self, id):
- print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
-
- (p4User, gitEmail) = self.p4UserForCommit(id)
-
- if not self.detectRenames:
- # If not explicitly set check the config variable
- self.detectRenames = gitConfig("git-p4.detectRenames")
-
- if self.detectRenames.lower() == "false" or self.detectRenames == "":
- diffOpts = ""
- elif self.detectRenames.lower() == "true":
- diffOpts = "-M"
- else:
- diffOpts = "-M%s" % self.detectRenames
-
- detectCopies = gitConfig("git-p4.detectCopies")
- if detectCopies.lower() == "true":
- diffOpts += " -C"
- elif detectCopies != "" and detectCopies.lower() != "false":
- diffOpts += " -C%s" % detectCopies
-
- if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
- diffOpts += " --find-copies-harder"
-
- diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
- filesToAdd = set()
- filesToDelete = set()
- editedFiles = set()
- filesToChangeExecBit = {}
-
- for line in diff:
- diff = parseDiffTreeEntry(line)
- modifier = diff['status']
- path = diff['src']
- if modifier == "M":
- p4_edit(path)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- filesToChangeExecBit[path] = diff['dst_mode']
- editedFiles.add(path)
- elif modifier == "A":
- filesToAdd.add(path)
- filesToChangeExecBit[path] = diff['dst_mode']
- if path in filesToDelete:
- filesToDelete.remove(path)
- elif modifier == "D":
- filesToDelete.add(path)
- if path in filesToAdd:
- filesToAdd.remove(path)
- elif modifier == "C":
- src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
- filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
- editedFiles.add(dest)
- elif modifier == "R":
- src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
- if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
- filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
- editedFiles.add(dest)
- filesToDelete.add(src)
- else:
- die("unknown modifier %s for %s" % (modifier, path))
-
- diffcmd = "git format-patch -k --stdout \"%s^\"..\"%s\"" % (id, id)
- patchcmd = diffcmd + " | git apply "
- tryPatchCmd = patchcmd + "--check -"
- applyPatchCmd = patchcmd + "--check --apply -"
- patch_succeeded = True
-
- if os.system(tryPatchCmd) != 0:
- fixed_rcs_keywords = False
- patch_succeeded = False
- print "Unfortunately applying the change failed!"
-
- # Patch failed, maybe it's just RCS keyword woes. Look through
- # the patch to see if that's possible.
- if gitConfig("git-p4.attemptRCSCleanup","--bool") == "true":
- file = None
- pattern = None
- kwfiles = {}
- for file in editedFiles | filesToDelete:
- # did this file's delta contain RCS keywords?
- pattern = p4_keywords_regexp_for_file(file)
-
- if pattern:
- # this file is a possibility...look for RCS keywords.
- regexp = re.compile(pattern, re.VERBOSE)
- for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]):
- if regexp.search(line):
- if verbose:
- print "got keyword match on %s in %s in %s" % (pattern, line, file)
- kwfiles[file] = pattern
- break
-
- for file in kwfiles:
- if verbose:
-                        print "zapping %s with %s" % (file, kwfiles[file])
- self.patchRCSKeywords(file, kwfiles[file])
- fixed_rcs_keywords = True
-
- if fixed_rcs_keywords:
- print "Retrying the patch with RCS keywords cleaned up"
- if os.system(tryPatchCmd) == 0:
- patch_succeeded = True
-
- if not patch_succeeded:
- print "What do you want to do?"
- response = "x"
- while response != "s" and response != "a" and response != "w":
- response = raw_input("[s]kip this patch / [a]pply the patch forcibly "
- "and with .rej files / [w]rite the patch to a file (patch.txt) ")
- if response == "s":
- print "Skipping! Good luck with the next patches..."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- os.remove(f)
- return
- elif response == "a":
- os.system(applyPatchCmd)
- if len(filesToAdd) > 0:
- print "You may also want to call p4 add on the following files:"
- print " ".join(filesToAdd)
- if len(filesToDelete):
- print "The following files should be scheduled for deletion with p4 delete:"
- print " ".join(filesToDelete)
- die("Please resolve and submit the conflict manually and "
- + "continue afterwards with git-p4 submit --continue")
- elif response == "w":
- system(diffcmd + " > patch.txt")
- print "Patch saved to patch.txt in %s !" % self.clientPath
- die("Please resolve and submit the conflict manually and "
- "continue afterwards with git-p4 submit --continue")
-
- system(applyPatchCmd)
-
- for f in filesToAdd:
- p4_add(f)
- for f in filesToDelete:
- p4_revert(f)
- p4_delete(f)
-
- # Set/clear executable bits
- for f in filesToChangeExecBit.keys():
- mode = filesToChangeExecBit[f]
- setP4ExecBit(f, mode)
-
- logMessage = extractLogMessageFromGitCommit(id)
- logMessage = logMessage.strip()
-
- template = self.prepareSubmitTemplate()
-
- if self.interactive:
- submitTemplate = self.prepareLogMessage(template, logMessage)
-
- if self.preserveUser:
- submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
-
- if os.environ.has_key("P4DIFF"):
- del(os.environ["P4DIFF"])
- diff = ""
- for editedFile in editedFiles:
- diff += p4_read_pipe(['diff', '-du', editedFile])
-
- newdiff = ""
- for newFile in filesToAdd:
- newdiff += "==== new file ====\n"
- newdiff += "--- /dev/null\n"
- newdiff += "+++ %s\n" % newFile
- f = open(newFile, "r")
- for line in f.readlines():
- newdiff += "+" + line
- f.close()
-
- if self.checkAuthorship and not self.p4UserIsMe(p4User):
- submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
- submitTemplate += "######## Use git-p4 option --preserve-user to modify authorship\n"
-                submitTemplate += "######## Setting git-p4 config git-p4.skipUserNameCheck hides this message.\n"
-
- separatorLine = "######## everything below this line is just the diff #######\n"
-
- (handle, fileName) = tempfile.mkstemp()
- tmpFile = os.fdopen(handle, "w+")
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\n", "\r\n")
- separatorLine = separatorLine.replace("\n", "\r\n")
- newdiff = newdiff.replace("\n", "\r\n")
- tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
- tmpFile.close()
-
- if self.edit_template(fileName):
- # read the edited message and submit
- tmpFile = open(fileName, "rb")
- message = tmpFile.read()
- tmpFile.close()
- submitTemplate = message[:message.index(separatorLine)]
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\r\n", "\n")
- p4_write_pipe(['submit', '-i'], submitTemplate)
-
- if self.preserveUser:
- if p4User:
- # Get last changelist number. Cannot easily get it from
- # the submit command output as the output is
- # unmarshalled.
- changelist = self.lastP4Changelist()
- self.modifyChangelistUser(changelist, p4User)
- else:
- # skip this patch
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- p4_revert(f)
- os.remove(f)
-
- os.remove(fileName)
- else:
- fileName = "submit.txt"
- file = open(fileName, "w+")
- file.write(self.prepareLogMessage(template, logMessage))
- file.close()
- print ("Perforce submit template written as %s. "
-                   "Please review/edit and then use p4 submit -i < %s to submit directly!"
- % (fileName, fileName))
-
- def run(self, args):
- if len(args) == 0:
- self.master = currentGitBranch()
- if len(self.master) == 0 or not gitBranchExists("refs/heads/%s" % self.master):
- die("Detecting current git branch failed!")
- elif len(args) == 1:
- self.master = args[0]
- if not branchExists(self.master):
- die("Branch %s does not exist" % self.master)
- else:
- return False
-
- allowSubmit = gitConfig("git-p4.allowSubmit")
- if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
- die("%s is not in git-p4.allowSubmit" % self.master)
-
- [upstream, settings] = findUpstreamBranchPoint()
- self.depotPath = settings['depot-paths'][0]
- if len(self.origin) == 0:
- self.origin = upstream
-
- if self.preserveUser:
- if not self.canChangeChangelists():
- die("Cannot preserve user names without p4 super-user or admin permissions")
-
- if self.verbose:
- print "Origin branch is " + self.origin
-
- if len(self.depotPath) == 0:
- print "Internal error: cannot locate perforce depot path from existing branches"
- sys.exit(128)
-
- self.useClientSpec = False
- if gitConfig("git-p4.useclientspec", "--bool") == "true":
- self.useClientSpec = True
- if self.useClientSpec:
- self.clientSpecDirs = getClientSpec()
-
- if self.useClientSpec:
- # all files are relative to the client spec
- self.clientPath = getClientRoot()
- else:
- self.clientPath = p4Where(self.depotPath)
-
- if self.clientPath == "":
- die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath)
-
- print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
- self.oldWorkingDirectory = os.getcwd()
-
- # ensure the clientPath exists
- if not os.path.exists(self.clientPath):
- os.makedirs(self.clientPath)
-
- chdir(self.clientPath)
- print "Synchronizing p4 checkout..."
- p4_sync("...")
- self.check()
-
- commits = []
- for line in read_pipe_lines("git rev-list --no-merges %s..%s" % (self.origin, self.master)):
- commits.append(line.strip())
- commits.reverse()
-
- if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"):
- self.checkAuthorship = False
- else:
- self.checkAuthorship = True
-
- if self.preserveUser:
- self.checkValidP4Users(commits)
-
- while len(commits) > 0:
- commit = commits[0]
- commits = commits[1:]
- self.applyCommit(commit)
- if not self.interactive:
- break
-
- if len(commits) == 0:
- print "All changes applied!"
- chdir(self.oldWorkingDirectory)
-
- sync = P4Sync()
- sync.run([])
-
- rebase = P4Rebase()
- rebase.rebase()
-
- return True
-
-class View(object):
- """Represent a p4 view ("p4 help views"), and map files in a
- repo according to the view."""
-
- class Path(object):
- """A depot or client path, possibly containing wildcards.
- The only one supported is ... at the end, currently.
- Initialize with the full path, with //depot or //client."""
-
- def __init__(self, path, is_depot):
- self.path = path
- self.is_depot = is_depot
- self.find_wildcards()
- # remember the prefix bit, useful for relative mappings
- m = re.match("(//[^/]+/)", self.path)
- if not m:
- die("Path %s does not start with //prefix/" % self.path)
- prefix = m.group(1)
- if not self.is_depot:
- # strip //client/ on client paths
- self.path = self.path[len(prefix):]
-
- def find_wildcards(self):
- """Make sure wildcards are valid, and set up internal
- variables."""
-
- self.ends_triple_dot = False
- # There are three wildcards allowed in p4 views
- # (see "p4 help views"). This code knows how to
- # handle "..." (only at the end), but cannot deal with
- # "%%n" or "*". Only check the depot_side, as p4 should
- # validate that the client_side matches too.
- if re.search(r'%%[1-9]', self.path):
- die("Can't handle %%n wildcards in view: %s" % self.path)
- if self.path.find("*") >= 0:
- die("Can't handle * wildcards in view: %s" % self.path)
- triple_dot_index = self.path.find("...")
- if triple_dot_index >= 0:
- if triple_dot_index != len(self.path) - 3:
- die("Can handle only single ... wildcard, at end: %s" %
- self.path)
- self.ends_triple_dot = True
-
- def ensure_compatible(self, other_path):
- """Make sure the wildcards agree."""
- if self.ends_triple_dot != other_path.ends_triple_dot:
- die("Both paths must end with ... if either does;\n" +
- "paths: %s %s" % (self.path, other_path.path))
-
- def match_wildcards(self, test_path):
- """See if this test_path matches us, and fill in the value
- of the wildcards if so. Returns a tuple of
- (True|False, wildcards[]). For now, only the ... at end
- is supported, so at most one wildcard."""
- if self.ends_triple_dot:
- dotless = self.path[:-3]
- if test_path.startswith(dotless):
- wildcard = test_path[len(dotless):]
- return (True, [ wildcard ])
- else:
- if test_path == self.path:
- return (True, [])
- return (False, [])
-
- def match(self, test_path):
- """Just return if it matches; don't bother with the wildcards."""
- b, _ = self.match_wildcards(test_path)
- return b
-
- def fill_in_wildcards(self, wildcards):
- """Return the relative path, with the wildcards filled in
- if there are any."""
- if self.ends_triple_dot:
- return self.path[:-3] + wildcards[0]
- else:
- return self.path
-
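A small standalone sketch of the single trailing "..." wildcard these helpers support (depot and client sides below are invented): the literal prefix is matched and the remainder of the tested path is filled back in on the other side.

    depot_side = "//depot/project/src/..."
    client_side = "project/src/..."          # //client/ prefix already stripped

    test_path = "//depot/project/src/lib/util.c"
    dotless = depot_side[:-3]
    assert test_path.startswith(dotless)
    wildcard = test_path[len(dotless):]      # "lib/util.c"
    assert client_side[:-3] + wildcard == "project/src/lib/util.c"
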
- class Mapping(object):
- def __init__(self, depot_side, client_side, overlay, exclude):
- # depot_side is without the trailing /... if it had one
- self.depot_side = View.Path(depot_side, is_depot=True)
- self.client_side = View.Path(client_side, is_depot=False)
- self.overlay = overlay # started with "+"
- self.exclude = exclude # started with "-"
- assert not (self.overlay and self.exclude)
- self.depot_side.ensure_compatible(self.client_side)
-
- def __str__(self):
- c = " "
- if self.overlay:
- c = "+"
- if self.exclude:
- c = "-"
- return "View.Mapping: %s%s -> %s" % \
- (c, self.depot_side.path, self.client_side.path)
-
- def map_depot_to_client(self, depot_path):
- """Calculate the client path if using this mapping on the
- given depot path; does not consider the effect of other
- mappings in a view. Even excluded mappings are returned."""
- matches, wildcards = self.depot_side.match_wildcards(depot_path)
- if not matches:
- return ""
- client_path = self.client_side.fill_in_wildcards(wildcards)
- return client_path
-
- #
- # View methods
- #
- def __init__(self):
- self.mappings = []
-
- def append(self, view_line):
- """Parse a view line, splitting it into depot and client
- sides. Append to self.mappings, preserving order."""
-
- # Split the view line into exactly two words. P4 enforces
- # structure on these lines that simplifies this quite a bit.
- #
- # Either or both words may be double-quoted.
- # Single quotes do not matter.
- # Double-quote marks cannot occur inside the words.
- # A + or - prefix is also inside the quotes.
- # There are no quotes unless they contain a space.
- # The line is already white-space stripped.
- # The two words are separated by a single space.
- #
- if view_line[0] == '"':
- # First word is double quoted. Find its end.
- close_quote_index = view_line.find('"', 1)
- if close_quote_index <= 0:
- die("No first-word closing quote found: %s" % view_line)
- depot_side = view_line[1:close_quote_index]
- # skip closing quote and space
- rhs_index = close_quote_index + 1 + 1
- else:
- space_index = view_line.find(" ")
- if space_index <= 0:
- die("No word-splitting space found: %s" % view_line)
- depot_side = view_line[0:space_index]
- rhs_index = space_index + 1
-
- if view_line[rhs_index] == '"':
- # Second word is double quoted. Make sure there is a
- # double quote at the end too.
- if not view_line.endswith('"'):
- die("View line with rhs quote should end with one: %s" %
- view_line)
- # skip the quotes
- client_side = view_line[rhs_index+1:-1]
- else:
- client_side = view_line[rhs_index:]
-
- # prefix + means overlay on previous mapping
- overlay = False
- if depot_side.startswith("+"):
- overlay = True
- depot_side = depot_side[1:]
-
- # prefix - means exclude this path
- exclude = False
- if depot_side.startswith("-"):
- exclude = True
- depot_side = depot_side[1:]
-
- m = View.Mapping(depot_side, client_side, overlay, exclude)
- self.mappings.append(m)
-
- def map_in_client(self, depot_path):
- """Return the relative location in the client where this
- depot file should live. Returns "" if the file should
- not be mapped in the client."""
-
- paths_filled = []
- client_path = ""
-
- # look at later entries first
- for m in self.mappings[::-1]:
-
- # see where will this path end up in the client
- p = m.map_depot_to_client(depot_path)
-
- if p == "":
- # Depot path does not belong in client. Must remember
- # this, as previous items should not cause files to
- # exist in this path either. Remember that the list is
- # being walked from the end, which has higher precedence.
- # Overlap mappings do not exclude previous mappings.
- if not m.overlay:
- paths_filled.append(m.client_side)
-
- else:
- # This mapping matched; no need to search any further.
- # But, the mapping could be rejected if the client path
- # has already been claimed by an earlier mapping (i.e.
- # one later in the list, which we are walking backwards).
- already_mapped_in_client = False
- for f in paths_filled:
- # this is View.Path.match
- if f.match(p):
- already_mapped_in_client = True
- break
- if not already_mapped_in_client:
- # Include this file, unless it is from a line that
- # explicitly said to exclude it.
- if not m.exclude:
- client_path = p
-
- # a match, even if rejected, always stops the search
- break
-
- return client_path
-
-class P4Sync(Command, P4UserMap):
- delete_actions = ( "delete", "move/delete", "purge" )
-
- def __init__(self):
- Command.__init__(self)
- P4UserMap.__init__(self)
- self.options = [
- optparse.make_option("--branch", dest="branch"),
- optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"),
- optparse.make_option("--changesfile", dest="changesFile"),
- optparse.make_option("--silent", dest="silent", action="store_true"),
- optparse.make_option("--detect-labels", dest="detectLabels", action="store_true"),
- optparse.make_option("--verbose", dest="verbose", action="store_true"),
- optparse.make_option("--import-local", dest="importIntoRemotes", action="store_false",
- help="Import into refs/heads/ , not refs/remotes"),
- optparse.make_option("--max-changes", dest="maxChanges"),
- optparse.make_option("--keep-path", dest="keepRepoPath", action='store_true',
- help="Keep entire BRANCH/DIR/SUBDIR prefix during import"),
- optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
- help="Only sync files that are included in the Perforce Client Spec")
- ]
- self.description = """Imports from Perforce into a git repository.\n
- example:
- //depot/my/project/ -- to import the current head
- //depot/my/project/@all -- to import everything
- //depot/my/project/@1,6 -- to import only from revision 1 to 6
-
-    (a ... is not needed in the p4 path specification; it is added implicitly)"""
-
- self.usage += " //depot/path[@revRange]"
- self.silent = False
- self.createdBranches = set()
- self.committedChanges = set()
- self.branch = ""
- self.detectBranches = False
- self.detectLabels = False
- self.changesFile = ""
- self.syncWithOrigin = True
- self.verbose = False
- self.importIntoRemotes = True
- self.maxChanges = ""
- self.isWindows = (platform.system() == "Windows")
- self.keepRepoPath = False
- self.depotPaths = None
- self.p4BranchesInGit = []
- self.cloneExclude = []
- self.useClientSpec = False
- self.useClientSpec_from_options = False
- self.clientSpecDirs = None
- self.tempBranches = []
- self.tempBranchLocation = "git-p4-tmp"
-
- if gitConfig("git-p4.syncFromOrigin") == "false":
- self.syncWithOrigin = False
-
- #
- # P4 wildcards are not allowed in filenames. P4 complains
- # if you simply add them, but you can force it with "-f", in
- # which case it translates them into %xx encoding internally.
- # Search for and fix just these four characters. Do % last so
- # that fixing it does not inadvertently create new %-escapes.
- #
- def wildcard_decode(self, path):
- # Cannot have * in a filename in windows; untested as to
- # what p4 would do in such a case.
- if not self.isWindows:
- path = path.replace("%2A", "*")
- path = path.replace("%23", "#") \
- .replace("%40", "@") \
- .replace("%25", "%")
- return path
-
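For example (a sketch with an invented file name), on a non-Windows host the decoding restores all four characters, leaving "%" for last so that no new escapes are created:

    path = "release%23notes%40v1%2A%25final.txt"
    path = path.replace("%2A", "*")
    path = path.replace("%23", "#") \
               .replace("%40", "@") \
               .replace("%25", "%")
    assert path == "release#notes@v1*%final.txt"
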
- # Force a checkpoint in fast-import and wait for it to finish
- def checkpoint(self):
- self.gitStream.write("checkpoint\n\n")
- self.gitStream.write("progress checkpoint\n\n")
- out = self.gitOutput.readline()
- if self.verbose:
- print "checkpoint finished: " + out
-
- def extractFilesFromCommit(self, commit):
- self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
- for path in self.cloneExclude]
- files = []
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
-
- if [p for p in self.cloneExclude
- if p4PathStartsWith(path, p)]:
- found = False
- else:
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- files.append(file)
- fnum = fnum + 1
- return files
-
- def stripRepoPath(self, path, prefixes):
- if self.useClientSpec:
- return self.clientSpecDirs.map_in_client(path)
-
- if self.keepRepoPath:
- prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])]
-
- for p in prefixes:
- if p4PathStartsWith(path, p):
- path = path[len(p):]
-
- return path
-
- def splitFilesIntoBranches(self, commit):
- branches = {}
- fnum = 0
- while commit.has_key("depotFile%s" % fnum):
- path = commit["depotFile%s" % fnum]
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
- if not found:
- fnum = fnum + 1
- continue
-
- file = {}
- file["path"] = path
- file["rev"] = commit["rev%s" % fnum]
- file["action"] = commit["action%s" % fnum]
- file["type"] = commit["type%s" % fnum]
- fnum = fnum + 1
-
- relPath = self.stripRepoPath(path, self.depotPaths)
-
- for branch in self.knownBranches.keys():
-
- # add a trailing slash so that a commit into qt/4.2foo doesn't end up in qt/4.2
- if relPath.startswith(branch + "/"):
- if branch not in branches:
- branches[branch] = []
- branches[branch].append(file)
- break
-
- return branches
-
- # output one file from the P4 stream
- # - helper for streamP4Files
-
- def streamOneP4File(self, file, contents):
- relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes)
- relPath = self.wildcard_decode(relPath)
- if verbose:
- sys.stderr.write("%s\n" % relPath)
-
- (type_base, type_mods) = split_p4_type(file["type"])
-
- git_mode = "100644"
- if "x" in type_mods:
- git_mode = "100755"
- if type_base == "symlink":
- git_mode = "120000"
- # p4 print on a symlink contains "target\n"; remove the newline
- data = ''.join(contents)
- contents = [data[:-1]]
-
- if type_base == "utf16":
- # p4 delivers different text in the python output to -G
- # than it does when using "print -o", or normal p4 client
- # operations. utf16 is converted to ascii or utf8, perhaps.
- # But ascii text saved as -t utf16 is completely mangled.
- # Invoke print -o to get the real contents.
- text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']])
- contents = [ text ]
-
- if type_base == "apple":
- # Apple filetype files will be streamed as a concatenation of
-            # the appledouble header and the contents. This is useless
- # on both macs and non-macs. If using "print -q -o xx", it
- # will create "xx" with the data, and "%xx" with the header.
- # This is also not very useful.
- #
- # Ideally, someday, this script can learn how to generate
- # appledouble files directly and import those to git, but
- # non-mac machines can never find a use for apple filetype.
- print "\nIgnoring apple filetype file %s" % file['depotFile']
- return
-
- # Perhaps windows wants unicode, utf16 newlines translated too;
- # but this is not doing it.
- if self.isWindows and type_base == "text":
- mangled = []
- for data in contents:
- data = data.replace("\r\n", "\n")
- mangled.append(data)
- contents = mangled
-
- # Note that we do not try to de-mangle keywords on utf16 files,
- # even though in theory somebody may want that.
- pattern = p4_keywords_regexp_for_type(type_base, type_mods)
- if pattern:
- regexp = re.compile(pattern, re.VERBOSE)
- text = ''.join(contents)
- text = regexp.sub(r'$\1$', text)
- contents = [ text ]
-
- self.gitStream.write("M %s inline %s\n" % (git_mode, relPath))
-
- # total length...
- length = 0
- for d in contents:
- length = length + len(d)
-
- self.gitStream.write("data %d\n" % length)
- for d in contents:
- self.gitStream.write(d)
- self.gitStream.write("\n")
-
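A compact sketch of the fast-import fragment this method emits for one file; the mode, path and contents are invented, and io.BytesIO stands in for the pipe to "git fast-import":

    from io import BytesIO

    git_mode, relPath = "100755", "tools/run.sh"
    contents = ["#!/bin/sh\n", "exec echo hello\n"]

    stream = BytesIO()
    stream.write(("M %s inline %s\n" % (git_mode, relPath)).encode())
    length = sum(len(d) for d in contents)        # total blob length
    stream.write(("data %d\n" % length).encode())
    for d in contents:
        stream.write(d.encode())
    stream.write(b"\n")

    assert stream.getvalue().decode().startswith("M 100755 inline tools/run.sh\ndata 26\n")
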
- def streamOneP4Deletion(self, file):
- relPath = self.stripRepoPath(file['path'], self.branchPrefixes)
- if verbose:
- sys.stderr.write("delete %s\n" % relPath)
- self.gitStream.write("D %s\n" % relPath)
-
- # handle another chunk of streaming data
- def streamP4FilesCb(self, marshalled):
-
- if marshalled.has_key('depotFile') and self.stream_have_file_info:
- # start of a new file - output the old one first
- self.streamOneP4File(self.stream_file, self.stream_contents)
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # pick up the new file information... for the
- # 'data' field we need to append to our array
- for k in marshalled.keys():
- if k == 'data':
- self.stream_contents.append(marshalled['data'])
- else:
- self.stream_file[k] = marshalled[k]
-
- self.stream_have_file_info = True
-
- # Stream directly from "p4 files" into "git fast-import"
- def streamP4Files(self, files):
- filesForCommit = []
- filesToRead = []
- filesToDelete = []
-
- for f in files:
- # if using a client spec, only add the files that have
- # a path in the client
- if self.clientSpecDirs:
- if self.clientSpecDirs.map_in_client(f['path']) == "":
- continue
-
- filesForCommit.append(f)
- if f['action'] in self.delete_actions:
- filesToDelete.append(f)
- else:
- filesToRead.append(f)
-
- # deleted files...
- for f in filesToDelete:
- self.streamOneP4Deletion(f)
-
- if len(filesToRead) > 0:
- self.stream_file = {}
- self.stream_contents = []
- self.stream_have_file_info = False
-
- # curry self argument
- def streamP4FilesCbSelf(entry):
- self.streamP4FilesCb(entry)
-
- fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead]
-
- p4CmdList(["-x", "-", "print"],
- stdin=fileArgs,
- cb=streamP4FilesCbSelf)
-
- # do the last chunk
- if self.stream_file.has_key('depotFile'):
- self.streamOneP4File(self.stream_file, self.stream_contents)
-
- def make_email(self, userid):
- if userid in self.users:
- return self.users[userid]
- else:
- return "%s <a@b>" % userid
-
- def commit(self, details, files, branch, branchPrefixes, parent = ""):
- epoch = details["time"]
- author = details["user"]
- self.branchPrefixes = branchPrefixes
-
- if self.verbose:
- print "commit into %s" % branch
-
- # start with reading files; if that fails, we should not
- # create a commit.
- new_files = []
- for f in files:
- if [p for p in branchPrefixes if p4PathStartsWith(f['path'], p)]:
- new_files.append (f)
- else:
- sys.stderr.write("Ignoring file outside of prefix: %s\n" % f['path'])
-
- self.gitStream.write("commit %s\n" % branch)
-# gitStream.write("mark :%s\n" % details["change"])
- self.committedChanges.add(int(details["change"]))
- committer = ""
- if author not in self.users:
- self.getUserMapFromPerforceServer()
- committer = "%s %s %s" % (self.make_email(author), epoch, self.tz)
-
- self.gitStream.write("committer %s\n" % committer)
-
- self.gitStream.write("data <<EOT\n")
- self.gitStream.write(details["desc"])
- self.gitStream.write("\n[git-p4: depot-paths = \"%s\": change = %s"
- % (','.join (branchPrefixes), details["change"]))
- if len(details['options']) > 0:
- self.gitStream.write(": options = %s" % details['options'])
- self.gitStream.write("]\nEOT\n\n")
-
- if len(parent) > 0:
- if self.verbose:
- print "parent %s" % parent
- self.gitStream.write("from %s\n" % parent)
-
- self.streamP4Files(new_files)
- self.gitStream.write("\n")
-
- change = int(details["change"])
-
- if self.labels.has_key(change):
- label = self.labels[change]
- labelDetails = label[0]
- labelRevisions = label[1]
- if self.verbose:
- print "Change %s is labelled %s" % (change, labelDetails)
-
- files = p4CmdList(["files"] + ["%s...@%s" % (p, change)
- for p in branchPrefixes])
-
- if len(files) == len(labelRevisions):
-
- cleanedFiles = {}
- for info in files:
- if info["action"] in self.delete_actions:
- continue
- cleanedFiles[info["depotFile"]] = info["rev"]
-
- if cleanedFiles == labelRevisions:
- self.gitStream.write("tag tag_%s\n" % labelDetails["label"])
- self.gitStream.write("from %s\n" % branch)
-
- owner = labelDetails["Owner"]
-
- # Try to use the owner of the p4 label, or failing that,
- # the current p4 user id.
- if owner:
- email = self.make_email(owner)
- else:
- email = self.make_email(self.p4UserId())
- tagger = "%s %s %s" % (email, epoch, self.tz)
-
- self.gitStream.write("tagger %s\n" % tagger)
-
- description = labelDetails["Description"]
- self.gitStream.write("data %d\n" % len(description))
- self.gitStream.write(description)
- self.gitStream.write("\n")
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: files do not match."
- % (labelDetails["label"], change))
-
- else:
- if not self.silent:
- print ("Tag %s does not match with change %s: file count is different."
- % (labelDetails["label"], change))
-
- def getLabels(self):
- self.labels = {}
-
- l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
- if len(l) > 0 and not self.silent:
- print "Finding files belonging to labels in %s" % `self.depotPaths`
-
- for output in l:
- label = output["label"]
- revisions = {}
- newestChange = 0
- if self.verbose:
- print "Querying files for label %s" % label
- for file in p4CmdList(["files"] +
- ["%s...@%s" % (p, label)
- for p in self.depotPaths]):
- revisions[file["depotFile"]] = file["rev"]
- change = int(file["change"])
- if change > newestChange:
- newestChange = change
-
- self.labels[newestChange] = [output, revisions]
-
- if self.verbose:
- print "Label changes: %s" % self.labels.keys()
-
- def guessProjectName(self):
- for p in self.depotPaths:
- if p.endswith("/"):
- p = p[:-1]
- p = p[p.strip().rfind("/") + 1:]
- if not p.endswith("/"):
- p += "/"
- return p
-
- def getBranchMapping(self):
- lostAndFoundBranches = set()
-
- user = gitConfig("git-p4.branchUser")
- if len(user) > 0:
- command = "branches -u %s" % user
- else:
- command = "branches"
-
- for info in p4CmdList(command):
- details = p4Cmd(["branch", "-o", info["branch"]])
- viewIdx = 0
- while details.has_key("View%s" % viewIdx):
- paths = details["View%s" % viewIdx].split(" ")
- viewIdx = viewIdx + 1
- # require standard //depot/foo/... //depot/bar/... mapping
- if len(paths) != 2 or not paths[0].endswith("/...") or not paths[1].endswith("/..."):
- continue
- source = paths[0]
- destination = paths[1]
- ## HACK
- if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]):
- source = source[len(self.depotPaths[0]):-4]
- destination = destination[len(self.depotPaths[0]):-4]
-
- if destination in self.knownBranches:
- if not self.silent:
- print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
- print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
- continue
-
- self.knownBranches[destination] = source
-
- lostAndFoundBranches.discard(destination)
-
- if source not in self.knownBranches:
- lostAndFoundBranches.add(source)
-
- # Perforce does not strictly require branches to be defined, so we also
- # check git config for a branch list.
- #
- # Example of branch definition in git config file:
- # [git-p4]
- # branchList=main:branchA
- # branchList=main:branchB
- # branchList=branchA:branchC
- configBranches = gitConfigList("git-p4.branchList")
- for branch in configBranches:
- if branch:
- (source, destination) = branch.split(":")
- self.knownBranches[destination] = source
-
- lostAndFoundBranches.discard(destination)
-
- if source not in self.knownBranches:
- lostAndFoundBranches.add(source)
-
-
- for branch in lostAndFoundBranches:
- self.knownBranches[branch] = branch
-
- def getBranchMappingFromGitBranches(self):
- branches = p4BranchesInGit(self.importIntoRemotes)
- for branch in branches.keys():
- if branch == "master":
- branch = "main"
- else:
- branch = branch[len(self.projectName):]
- self.knownBranches[branch] = branch
-
- def listExistingP4GitBranches(self):
- # branches holds mapping from name to commit
- branches = p4BranchesInGit(self.importIntoRemotes)
- self.p4BranchesInGit = branches.keys()
- for branch in branches.keys():
- self.initialParents[self.refPrefix + branch] = branches[branch]
-
- def updateOptionDict(self, d):
- option_keys = {}
- if self.keepRepoPath:
- option_keys['keepRepoPath'] = 1
-
- d["options"] = ' '.join(sorted(option_keys.keys()))
-
- def readOptions(self, d):
- self.keepRepoPath = (d.has_key('options')
- and ('keepRepoPath' in d['options']))
-
- def gitRefForBranch(self, branch):
- if branch == "main":
- return self.refPrefix + "master"
-
- if len(branch) <= 0:
- return branch
-
- return self.refPrefix + self.projectName + branch
-
- def gitCommitByP4Change(self, ref, change):
- if self.verbose:
- print "looking in ref " + ref + " for change %s using bisect..." % change
-
- earliestCommit = ""
- latestCommit = parseRevision(ref)
-
- while True:
- if self.verbose:
- print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
- next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
- if len(next) == 0:
- if self.verbose:
- print "argh"
- return ""
- log = extractLogMessageFromGitCommit(next)
- settings = extractSettingsGitLog(log)
- currentChange = int(settings['change'])
- if self.verbose:
- print "current change %s" % currentChange
-
- if currentChange == change:
- if self.verbose:
- print "found %s" % next
- return next
-
- if currentChange < change:
- earliestCommit = "^%s" % next
- else:
- latestCommit = "%s" % next
-
- return ""
-
- def importNewBranch(self, branch, maxChange):
- # make fast-import flush all changes to disk and update the refs using the checkpoint
- # command so that we can try to find the branch parent in the git history
- self.gitStream.write("checkpoint\n\n");
- self.gitStream.flush();
- branchPrefix = self.depotPaths[0] + branch + "/"
- range = "@1,%s" % maxChange
- #print "prefix" + branchPrefix
- changes = p4ChangesForPaths([branchPrefix], range)
- if len(changes) <= 0:
- return False
- firstChange = changes[0]
- #print "first change in branch: %s" % firstChange
- sourceBranch = self.knownBranches[branch]
- sourceDepotPath = self.depotPaths[0] + sourceBranch
- sourceRef = self.gitRefForBranch(sourceBranch)
- #print "source " + sourceBranch
-
- branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"])
- #print "branch parent: %s" % branchParentChange
- gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
- if len(gitParent) > 0:
- self.initialParents[self.gitRefForBranch(branch)] = gitParent
- #print "parent git commit: %s" % gitParent
-
- self.importChanges(changes)
- return True
-
- def searchParent(self, parent, branch, target):
- parentFound = False
- for blob in read_pipe_lines(["git", "rev-list", "--reverse", "--no-merges", parent]):
- blob = blob.strip()
- if len(read_pipe(["git", "diff-tree", blob, target])) == 0:
- parentFound = True
- if self.verbose:
- print "Found parent of %s in commit %s" % (branch, blob)
- break
- if parentFound:
- return blob
- else:
- return None
-
- def importChanges(self, changes):
- cnt = 1
- for change in changes:
- description = p4Cmd(["describe", str(change)])
- self.updateOptionDict(description)
-
- if not self.silent:
- sys.stdout.write("\rImporting revision %s (%s%%)" % (change, cnt * 100 / len(changes)))
- sys.stdout.flush()
- cnt = cnt + 1
-
- try:
- if self.detectBranches:
- branches = self.splitFilesIntoBranches(description)
- for branch in branches.keys():
- ## HACK --hwn
- branchPrefix = self.depotPaths[0] + branch + "/"
-
- parent = ""
-
- filesForCommit = branches[branch]
-
- if self.verbose:
- print "branch is %s" % branch
-
- self.updatedBranches.add(branch)
-
- if branch not in self.createdBranches:
- self.createdBranches.add(branch)
- parent = self.knownBranches[branch]
- if parent == branch:
- parent = ""
- else:
- fullBranch = self.projectName + branch
- if fullBranch not in self.p4BranchesInGit:
- if not self.silent:
- print("\n Importing new branch %s" % fullBranch);
- if self.importNewBranch(branch, change - 1):
- parent = ""
- self.p4BranchesInGit.append(fullBranch)
- if not self.silent:
- print("\n Resuming with change %s" % change);
-
- if self.verbose:
- print "parent determined through known branches: %s" % parent
-
- branch = self.gitRefForBranch(branch)
- parent = self.gitRefForBranch(parent)
-
- if self.verbose:
- print "looking for initial parent for %s; current parent is %s" % (branch, parent)
-
- if len(parent) == 0 and branch in self.initialParents:
- parent = self.initialParents[branch]
- del self.initialParents[branch]
-
- blob = None
- if len(parent) > 0:
- tempBranch = os.path.join(self.tempBranchLocation, "%d" % (change))
- if self.verbose:
- print "Creating temporary branch: " + tempBranch
- self.commit(description, filesForCommit, tempBranch, [branchPrefix])
- self.tempBranches.append(tempBranch)
- self.checkpoint()
- blob = self.searchParent(parent, branch, tempBranch)
- if blob:
- self.commit(description, filesForCommit, branch, [branchPrefix], blob)
- else:
- if self.verbose:
- print "Parent of %s not found. Committing into head of %s" % (branch, parent)
- self.commit(description, filesForCommit, branch, [branchPrefix], parent)
- else:
- files = self.extractFilesFromCommit(description)
- self.commit(description, files, self.branch, self.depotPaths,
- self.initialParent)
- self.initialParent = ""
- except IOError:
- print self.gitError.read()
- sys.exit(1)
-
- def importHeadRevision(self, revision):
- print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
-
- details = {}
- details["user"] = "git perforce import user"
- details["desc"] = ("Initial import of %s from the state at revision %s\n"
- % (' '.join(self.depotPaths), revision))
- details["change"] = revision
- newestRevision = 0
-
- fileCnt = 0
- fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths]
-
- for info in p4CmdList(["files"] + fileArgs):
-
- if 'code' in info and info['code'] == 'error':
- sys.stderr.write("p4 returned an error: %s\n"
- % info['data'])
- if info['data'].find("must refer to client") >= 0:
- sys.stderr.write("This particular p4 error is misleading.\n")
- sys.stderr.write("Perhaps the depot path was misspelled.\n");
- sys.stderr.write("Depot path: %s\n" % " ".join(self.depotPaths))
- sys.exit(1)
- if 'p4ExitCode' in info:
- sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode'])
- sys.exit(1)
-
-
- change = int(info["change"])
- if change > newestRevision:
- newestRevision = change
-
- if info["action"] in self.delete_actions:
- # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
- #fileCnt = fileCnt + 1
- continue
-
- for prop in ["depotFile", "rev", "action", "type" ]:
- details["%s%s" % (prop, fileCnt)] = info[prop]
-
- fileCnt = fileCnt + 1
-
- details["change"] = newestRevision
-
- # Use time from top-most change so that all git-p4 clones of
- # the same p4 repo have the same commit SHA1s.
- res = p4CmdList("describe -s %d" % newestRevision)
- newestTime = None
- for r in res:
- if r.has_key('time'):
- newestTime = int(r['time'])
- if newestTime is None:
- die("\"describe -s\" on newest change %d did not give a time")
- details["time"] = newestTime
-
- self.updateOptionDict(details)
- try:
- self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths)
- except IOError:
- print "IO error with git fast-import. Is your git version recent enough?"
- print self.gitError.read()
-
-
- def run(self, args):
- self.depotPaths = []
- self.changeRange = ""
- self.initialParent = ""
- self.previousDepotPaths = []
-
- # map from branch depot path to parent branch
- self.knownBranches = {}
- self.initialParents = {}
- self.hasOrigin = originP4BranchesExist()
- if not self.syncWithOrigin:
- self.hasOrigin = False
-
- if self.importIntoRemotes:
- self.refPrefix = "refs/remotes/p4/"
- else:
- self.refPrefix = "refs/heads/p4/"
-
- if self.syncWithOrigin and self.hasOrigin:
- if not self.silent:
- print "Syncing with origin first by calling git fetch origin"
- system("git fetch origin")
-
- if len(self.branch) == 0:
- self.branch = self.refPrefix + "master"
- if gitBranchExists("refs/heads/p4") and self.importIntoRemotes:
- system("git update-ref %s refs/heads/p4" % self.branch)
- system("git branch -D p4");
- # create it /after/ importing, when master exists
- if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch):
- system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch))
-
- # accept either the command-line option, or the configuration variable
- if self.useClientSpec:
- # will use this after clone to set the variable
- self.useClientSpec_from_options = True
- else:
- if gitConfig("git-p4.useclientspec", "--bool") == "true":
- self.useClientSpec = True
- if self.useClientSpec:
- self.clientSpecDirs = getClientSpec()
-
- # TODO: should always look at previous commits,
- # merge with previous imports, if possible.
- if args == []:
- if self.hasOrigin:
- createOrUpdateBranchesFromOrigin(self.refPrefix, self.silent)
- self.listExistingP4GitBranches()
-
- if len(self.p4BranchesInGit) > 1:
- if not self.silent:
- print "Importing from/into multiple branches"
- self.detectBranches = True
-
- if self.verbose:
- print "branches: %s" % self.p4BranchesInGit
-
- p4Change = 0
- for branch in self.p4BranchesInGit:
- logMsg = extractLogMessageFromGitCommit(self.refPrefix + branch)
-
- settings = extractSettingsGitLog(logMsg)
-
- self.readOptions(settings)
- if (settings.has_key('depot-paths')
- and settings.has_key ('change')):
- change = int(settings['change']) + 1
- p4Change = max(p4Change, change)
-
- depotPaths = sorted(settings['depot-paths'])
- if self.previousDepotPaths == []:
- self.previousDepotPaths = depotPaths
- else:
- paths = []
- for (prev, cur) in zip(self.previousDepotPaths, depotPaths):
- prev_list = prev.split("/")
- cur_list = cur.split("/")
- for i in range(0, min(len(cur_list), len(prev_list))):
- if cur_list[i] <> prev_list[i]:
- i = i - 1
- break
-
- paths.append ("/".join(cur_list[:i + 1]))
-
- self.previousDepotPaths = paths
-
- if p4Change > 0:
- self.depotPaths = sorted(self.previousDepotPaths)
- self.changeRange = "@%s,#head" % p4Change
- if not self.detectBranches:
- self.initialParent = parseRevision(self.branch)
- if not self.silent and not self.detectBranches:
- print "Performing incremental import into %s git branch" % self.branch
-
- if not self.branch.startswith("refs/"):
- self.branch = "refs/heads/" + self.branch
-
- if len(args) == 0 and self.depotPaths:
- if not self.silent:
- print "Depot paths: %s" % ' '.join(self.depotPaths)
- else:
- if self.depotPaths and self.depotPaths != args:
- print ("previous import used depot path %s and now %s was specified. "
- "This doesn't work!" % (' '.join (self.depotPaths),
- ' '.join (args)))
- sys.exit(1)
-
- self.depotPaths = sorted(args)
-
- revision = ""
- self.users = {}
-
- # Make sure no revision specifiers are used when --changesfile
- # is specified.
- bad_changesfile = False
- if len(self.changesFile) > 0:
- for p in self.depotPaths:
- if p.find("@") >= 0 or p.find("#") >= 0:
- bad_changesfile = True
- break
- if bad_changesfile:
- die("Option --changesfile is incompatible with revision specifiers")
-
- newPaths = []
- for p in self.depotPaths:
- if p.find("@") != -1:
- atIdx = p.index("@")
- self.changeRange = p[atIdx:]
- if self.changeRange == "@all":
- self.changeRange = ""
- elif ',' not in self.changeRange:
- revision = self.changeRange
- self.changeRange = ""
- p = p[:atIdx]
- elif p.find("#") != -1:
- hashIdx = p.index("#")
- revision = p[hashIdx:]
- p = p[:hashIdx]
- elif self.previousDepotPaths == []:
- # pay attention to changesfile, if given, else import
- # the entire p4 tree at the head revision
- if len(self.changesFile) == 0:
- revision = "#head"
-
- p = re.sub ("\.\.\.$", "", p)
- if not p.endswith("/"):
- p += "/"
-
- newPaths.append(p)
-
- self.depotPaths = newPaths
-
-
- self.loadUserMapFromCache()
- self.labels = {}
- if self.detectLabels:
- self.getLabels();
-
- if self.detectBranches:
- ## FIXME - what's a P4 projectName ?
- self.projectName = self.guessProjectName()
-
- if self.hasOrigin:
- self.getBranchMappingFromGitBranches()
- else:
- self.getBranchMapping()
- if self.verbose:
- print "p4-git branches: %s" % self.p4BranchesInGit
- print "initial parents: %s" % self.initialParents
- for b in self.p4BranchesInGit:
- if b != "master":
-
- ## FIXME
- b = b[len(self.projectName):]
- self.createdBranches.add(b)
-
- self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
-
- importProcess = subprocess.Popen(["git", "fast-import"],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE);
- self.gitOutput = importProcess.stdout
- self.gitStream = importProcess.stdin
- self.gitError = importProcess.stderr
-
- if revision:
- self.importHeadRevision(revision)
- else:
- changes = []
-
- if len(self.changesFile) > 0:
- output = open(self.changesFile).readlines()
- changeSet = set()
- for line in output:
- changeSet.add(int(line))
-
- for change in changeSet:
- changes.append(change)
-
- changes.sort()
- else:
- # catch "git-p4 sync" with no new branches, in a repo that
- # does not have any existing git-p4 branches
- if len(args) == 0 and not self.p4BranchesInGit:
- die("No remote p4 branches. Perhaps you never did \"git p4 clone\" in here.");
- if self.verbose:
- print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
- self.changeRange)
- changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
-
- if len(self.maxChanges) > 0:
- changes = changes[:min(int(self.maxChanges), len(changes))]
-
- if len(changes) == 0:
- if not self.silent:
- print "No changes to import!"
- return True
-
- if not self.silent and not self.detectBranches:
- print "Import destination: %s" % self.branch
-
- self.updatedBranches = set()
-
- self.importChanges(changes)
-
- if not self.silent:
- print ""
- if len(self.updatedBranches) > 0:
- sys.stdout.write("Updated branches: ")
- for b in self.updatedBranches:
- sys.stdout.write("%s " % b)
- sys.stdout.write("\n")
-
- self.gitStream.close()
- if importProcess.wait() != 0:
- die("fast-import failed: %s" % self.gitError.read())
- self.gitOutput.close()
- self.gitError.close()
-
- # Cleanup temporary branches created during import
- if self.tempBranches != []:
- for branch in self.tempBranches:
- read_pipe("git update-ref -d %s" % branch)
- os.rmdir(os.path.join(os.environ.get("GIT_DIR", ".git"), self.tempBranchLocation))
-
- return True
-
-class P4Rebase(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Fetches the latest revision from perforce and "
- + "rebases the current work (branch) against it")
- self.verbose = False
-
- def run(self, args):
- sync = P4Sync()
- sync.run([])
-
- return self.rebase()
-
- def rebase(self):
- if os.system("git update-index --refresh") != 0:
- die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
- if len(read_pipe("git diff-index HEAD --")) > 0:
- die("You have uncommited changes. Please commit them before rebasing or stash them away with git stash.");
-
- [upstream, settings] = findUpstreamBranchPoint()
- if len(upstream) == 0:
- die("Cannot find upstream branchpoint for rebase")
-
- # the branchpoint may be p4/foo~3, so strip off the parent
- upstream = re.sub("~[0-9]+$", "", upstream)
-
- print "Rebasing the current branch onto %s" % upstream
- oldHead = read_pipe("git rev-parse HEAD").strip()
- system("git rebase %s" % upstream)
- system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
- return True
-
-class P4Clone(P4Sync):
- def __init__(self):
- P4Sync.__init__(self)
- self.description = "Creates a new git repository and imports from Perforce into it"
- self.usage = "usage: %prog [options] //depot/path[@revRange]"
- self.options += [
- optparse.make_option("--destination", dest="cloneDestination",
- action='store', default=None,
- help="where to leave result of the clone"),
- optparse.make_option("-/", dest="cloneExclude",
- action="append", type="string",
- help="exclude depot path"),
- optparse.make_option("--bare", dest="cloneBare",
- action="store_true", default=False),
- ]
- self.cloneDestination = None
- self.needsGit = False
- self.cloneBare = False
-
- # This is required for the "append" cloneExclude action
- def ensure_value(self, attr, value):
- if not hasattr(self, attr) or getattr(self, attr) is None:
- setattr(self, attr, value)
- return getattr(self, attr)
-
- def defaultDestination(self, args):
- ## TODO: use common prefix of args?
- depotPath = args[0]
- depotDir = re.sub("(@[^@]*)$", "", depotPath)
- depotDir = re.sub("(#[^#]*)$", "", depotDir)
- depotDir = re.sub(r"\.\.\.$", "", depotDir)
- depotDir = re.sub(r"/$", "", depotDir)
- return os.path.split(depotDir)[1]
-
- def run(self, args):
- if len(args) < 1:
- return False
-
- if self.keepRepoPath and not self.cloneDestination:
- sys.stderr.write("Must specify destination for --keep-path\n")
- sys.exit(1)
-
- depotPaths = args
-
- if not self.cloneDestination and len(depotPaths) > 1:
- self.cloneDestination = depotPaths[-1]
- depotPaths = depotPaths[:-1]
-
- self.cloneExclude = ["/"+p for p in self.cloneExclude]
- for p in depotPaths:
- if not p.startswith("//"):
- return False
-
- if not self.cloneDestination:
- self.cloneDestination = self.defaultDestination(args)
-
- print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
-
- if not os.path.exists(self.cloneDestination):
- os.makedirs(self.cloneDestination)
- chdir(self.cloneDestination)
-
- init_cmd = [ "git", "init" ]
- if self.cloneBare:
- init_cmd.append("--bare")
- subprocess.check_call(init_cmd)
-
- if not P4Sync.run(self, depotPaths):
- return False
- if self.branch != "master":
- if self.importIntoRemotes:
- masterbranch = "refs/remotes/p4/master"
- else:
- masterbranch = "refs/heads/p4/master"
- if gitBranchExists(masterbranch):
- system("git branch master %s" % masterbranch)
- if not self.cloneBare:
- system("git checkout -f")
- else:
- print "Could not detect main branch. No checkout/master branch created."
-
- # auto-set this variable if invoked with --use-client-spec
- if self.useClientSpec_from_options:
- system("git config --bool git-p4.useclientspec true")
-
- return True
-
-class P4Branches(Command):
- def __init__(self):
- Command.__init__(self)
- self.options = [ ]
- self.description = ("Shows the git branches that hold imports and their "
- + "corresponding perforce depot paths")
- self.verbose = False
-
- def run(self, args):
- if originP4BranchesExist():
- createOrUpdateBranchesFromOrigin()
-
- cmdline = "git rev-parse --symbolic "
- cmdline += " --remotes"
-
- for line in read_pipe_lines(cmdline):
- line = line.strip()
-
- if not line.startswith('p4/') or line == "p4/HEAD":
- continue
- branch = line
-
- log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
- settings = extractSettingsGitLog(log)
-
- print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
- return True
-
-class HelpFormatter(optparse.IndentedHelpFormatter):
- def __init__(self):
- optparse.IndentedHelpFormatter.__init__(self)
-
- def format_description(self, description):
- if description:
- return description + "\n"
- else:
- return ""
-
-def printUsage(commands):
- print "usage: %s <command> [options]" % sys.argv[0]
- print ""
- print "valid commands: %s" % ", ".join(commands)
- print ""
- print "Try %s <command> --help for command specific help." % sys.argv[0]
- print ""
-
-commands = {
- "debug" : P4Debug,
- "submit" : P4Submit,
- "commit" : P4Submit,
- "sync" : P4Sync,
- "rebase" : P4Rebase,
- "clone" : P4Clone,
- "rollback" : P4RollBack,
- "branches" : P4Branches
-}
-
-
-def main():
- if len(sys.argv[1:]) == 0:
- printUsage(commands.keys())
- sys.exit(2)
-
- cmd = ""
- cmdName = sys.argv[1]
- try:
- klass = commands[cmdName]
- cmd = klass()
- except KeyError:
- print "unknown command %s" % cmdName
- print ""
- printUsage(commands.keys())
- sys.exit(2)
-
- options = cmd.options
- cmd.gitdir = os.environ.get("GIT_DIR", None)
-
- args = sys.argv[2:]
-
- if len(options) > 0:
- if cmd.needsGit:
- options.append(optparse.make_option("--git-dir", dest="gitdir"))
-
- parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
- options,
- description = cmd.description,
- formatter = HelpFormatter())
-
- (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
- global verbose
- verbose = cmd.verbose
- if cmd.needsGit:
- if cmd.gitdir == None:
- cmd.gitdir = os.path.abspath(".git")
- if not isValidGitDir(cmd.gitdir):
- cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
- if os.path.exists(cmd.gitdir):
- cdup = read_pipe("git rev-parse --show-cdup").strip()
- if len(cdup) > 0:
- chdir(cdup);
-
- if not isValidGitDir(cmd.gitdir):
- if isValidGitDir(cmd.gitdir + "/.git"):
- cmd.gitdir += "/.git"
- else:
- die("fatal: cannot locate git repository at %s" % cmd.gitdir)
-
- os.environ["GIT_DIR"] = cmd.gitdir
-
- if not cmd.run(args):
- parser.print_help()
- sys.exit(2)
-
-
-if __name__ == '__main__':
- main()
--- /dev/null
+The git-p4 script has moved to the top level of the git source directory.
+
+Invoke it like any other git command, for instance "git p4 clone".
+
+Note that the top-level git-p4.py script is now the source. It is
+built by make into git-p4, which is what gets installed.
+
+Windows users can copy the git-p4.py source script directly, possibly
+invoking it through a batch file called "git-p4.bat" in the same folder.
+It should contain just one line:
+
+ @python "%~d0%~p0git-p4.py" %*
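+
+A typical invocation, once git-p4 is built, might look like this (the
+depot path and destination directory are only examples):
+
+    git p4 clone //depot/project@all project
+    cd project
+    git p4 rebase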
+++ /dev/null
-@python "%~d0%~p0git-p4" %*
SUBDIRECTORY_OK=Yes
OPTIONS_SPEC=
-. git-sh-setup
+. $(git --exec-path)/git-sh-setup
require_work_tree
cd_to_toplevel
--- /dev/null
+*~
+git-subtree.xml
+git-subtree.1
+mainline
+subproj
--- /dev/null
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
--- /dev/null
+HOW TO INSTALL git-subtree
+==========================
+
+First, build from the top source directory.
+
+Then, in contrib/subtree, run:
+
+ make
+ make install
+ make install-doc
+
+If you used configure to do the main build, the git-subtree build will
+pick up those settings. Otherwise, you will likely have to provide a
+value for prefix:
+
+ make prefix=<some dir>
+ make prefix=<some dir> install
+ make prefix=<some dir> install-doc
+
+To run the tests, first copy git-subtree to the main build area so
+that the newly built git can find it:
+
+ cp git-subtree ../..
+
+Then:
+
+ make test
+
--- /dev/null
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+mandir ?= $(prefix)/share/man
+libexecdir ?= $(prefix)/libexec/git-core
+gitdir ?= $(shell git --exec-path)
+man1dir ?= $(mandir)/man1
+
+gitver ?= $(word 3,$(shell git --version))
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL ?= install
+
+ASCIIDOC_CONF = ../../Documentation/asciidoc.conf
+MANPAGE_NORMAL_XSL = ../../Documentation/manpage-normal.xsl
+
+GIT_SUBTREE_SH := git-subtree.sh
+GIT_SUBTREE := git-subtree
+
+GIT_SUBTREE_DOC := git-subtree.1
+GIT_SUBTREE_XML := git-subtree.xml
+GIT_SUBTREE_TXT := git-subtree.txt
+
+all: $(GIT_SUBTREE)
+
+$(GIT_SUBTREE): $(GIT_SUBTREE_SH)
+ cp $< $@ && chmod +x $@
+
+doc: $(GIT_SUBTREE_DOC)
+
+install: $(GIT_SUBTREE)
+ $(INSTALL) -m 755 $(GIT_SUBTREE) $(libexecdir)
+
+install-doc: install-man
+
+install-man: $(GIT_SUBTREE_DOC)
+ $(INSTALL) -m 644 $^ $(man1dir)
+
+$(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML)
+ xmlto -m $(MANPAGE_NORMAL_XSL) man $^
+
+$(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
+ asciidoc -b docbook -d manpage -f $(ASCIIDOC_CONF) \
+ -agit_version=$(gitver) $^
+
+test:
+ $(MAKE) -C t/ test
+
+clean:
+ rm -f *~ *.xml *.html *.1
+ rm -rf subproj mainline
--- /dev/null
+
+Please read git-subtree.txt for documentation.
+
+Please don't contact me using github mail; it's slow, ugly, and worst of
+all, redundant. Email me instead at apenwarr@gmail.com and I'll be happy to
+help.
+
+Avery
--- /dev/null
+#!/bin/bash
+#
+# git-subtree.sh: split/join git repositories in subdirectories of this one
+#
+# Copyright (C) 2009 Avery Pennarun <apenwarr@gmail.com>
+#
+if [ $# -eq 0 ]; then
+ set -- -h
+fi
+OPTS_SPEC="\
+git subtree add --prefix=<prefix> <commit>
+git subtree merge --prefix=<prefix> <commit>
+git subtree pull --prefix=<prefix> <repository> <refspec...>
+git subtree push --prefix=<prefix> <repository> <refspec...>
+git subtree split --prefix=<prefix> <commit...>
+--
+h,help show the help
+q quiet
+d show debug messages
+P,prefix= the name of the subdir to split out
+m,message= use the given message as the commit message for the merge commit
+ options for 'split'
+annotate= add a prefix to commit message of new commits
+b,branch= create a new branch from the split subtree
+ignore-joins ignore prior --rejoin commits
+onto= try connecting new tree to an existing one
+rejoin merge the new branch back into HEAD
+ options for 'add', 'merge', 'pull' and 'push'
+squash merge subtree changes as a single commit
+"
+eval "$(echo "$OPTS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+
+PATH=$PATH:$(git --exec-path)
+. git-sh-setup
+
+require_work_tree
+
+quiet=
+branch=
+debug=
+command=
+onto=
+rejoin=
+ignore_joins=
+annotate=
+squash=
+message=
+
+debug()
+{
+ if [ -n "$debug" ]; then
+ echo "$@" >&2
+ fi
+}
+
+say()
+{
+ if [ -z "$quiet" ]; then
+ echo "$@" >&2
+ fi
+}
+
+assert()
+{
+ if "$@"; then
+ :
+ else
+ die "assertion failed: " "$@"
+ fi
+}
+
+
+#echo "Options: $*"
+
+while [ $# -gt 0 ]; do
+ opt="$1"
+ shift
+ case "$opt" in
+ -q) quiet=1 ;;
+ -d) debug=1 ;;
+ --annotate) annotate="$1"; shift ;;
+ --no-annotate) annotate= ;;
+ -b) branch="$1"; shift ;;
+ -P) prefix="$1"; shift ;;
+ -m) message="$1"; shift ;;
+ --no-prefix) prefix= ;;
+ --onto) onto="$1"; shift ;;
+ --no-onto) onto= ;;
+ --rejoin) rejoin=1 ;;
+ --no-rejoin) rejoin= ;;
+ --ignore-joins) ignore_joins=1 ;;
+ --no-ignore-joins) ignore_joins= ;;
+ --squash) squash=1 ;;
+ --no-squash) squash= ;;
+ --) break ;;
+ *) die "Unexpected option: $opt" ;;
+ esac
+done
+
+command="$1"
+shift
+case "$command" in
+ add|merge|pull) default= ;;
+ split|push) default="--default HEAD" ;;
+ *) die "Unknown command '$command'" ;;
+esac
+
+if [ -z "$prefix" ]; then
+ die "You must provide the --prefix option."
+fi
+
+case "$command" in
+ add) [ -e "$prefix" ] &&
+ die "prefix '$prefix' already exists." ;;
+ *) [ -e "$prefix" ] ||
+ die "'$prefix' does not exist; use 'git subtree add'" ;;
+esac
+
+dir="$(dirname "$prefix/.")"
+
+if [ "$command" != "pull" -a "$command" != "add" -a "$command" != "push" ]; then
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ dirs="$(git rev-parse --no-revs --no-flags "$@")" || exit $?
+ if [ -n "$dirs" ]; then
+ die "Error: Use --prefix instead of bare filenames."
+ fi
+fi
+
+debug "command: {$command}"
+debug "quiet: {$quiet}"
+debug "revs: {$revs}"
+debug "dir: {$dir}"
+debug "opts: {$*}"
+debug
+
+cache_setup()
+{
+ cachedir="$GIT_DIR/subtree-cache/$$"
+ rm -rf "$cachedir" || die "Can't delete old cachedir: $cachedir"
+ mkdir -p "$cachedir" || die "Can't create new cachedir: $cachedir"
+ mkdir -p "$cachedir/notree" || die "Can't create new cachedir: $cachedir/notree"
+ debug "Using cachedir: $cachedir" >&2
+}
+
+cache_get()
+{
+ for oldrev in $*; do
+ if [ -r "$cachedir/$oldrev" ]; then
+ read newrev <"$cachedir/$oldrev"
+ echo $newrev
+ fi
+ done
+}
+
+cache_miss()
+{
+ for oldrev in $*; do
+ if [ ! -r "$cachedir/$oldrev" ]; then
+ echo $oldrev
+ fi
+ done
+}
+
+check_parents()
+{
+ missed=$(cache_miss $*)
+ for miss in $missed; do
+ if [ ! -r "$cachedir/notree/$miss" ]; then
+ debug " incorrect order: $miss"
+ fi
+ done
+}
+
+set_notree()
+{
+ echo "1" > "$cachedir/notree/$1"
+}
+
+cache_set()
+{
+ oldrev="$1"
+ newrev="$2"
+ if [ "$oldrev" != "latest_old" \
+ -a "$oldrev" != "latest_new" \
+ -a -e "$cachedir/$oldrev" ]; then
+ die "cache for $oldrev already exists!"
+ fi
+ echo "$newrev" >"$cachedir/$oldrev"
+}
+
+rev_exists()
+{
+ if git rev-parse "$1" >/dev/null 2>&1; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+rev_is_descendant_of_branch()
+{
+ newrev="$1"
+ branch="$2"
+ branch_hash=$(git rev-parse $branch)
+ match=$(git rev-list -1 $branch_hash ^$newrev)
+
+ if [ -z "$match" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# if a commit doesn't have a parent, this might not work. But we only want
+# to remove the parent from the rev-list, and since it doesn't exist, it won't
+# be there anyway, so do nothing in that case.
+try_remove_previous()
+{
+ if rev_exists "$1^"; then
+ echo "^$1^"
+ fi
+}
+
+find_latest_squash()
+{
+ debug "Looking for latest squash ($dir)..."
+ dir="$1"
+ sq=
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' HEAD |
+ while read a b junk; do
+ debug "$a $b $junk"
+ debug "{{$sq/$main/$sub}}"
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ if [ -n "$sub" ]; then
+ if [ -n "$main" ]; then
+ # a rejoin commit?
+ # Pretend its sub was a squash.
+ sq="$sub"
+ fi
+ debug "Squash found: $sq $sub"
+ echo "$sq" "$sub"
+ break
+ fi
+ sq=
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+find_existing_splits()
+{
+ debug "Looking for prior splits..."
+ dir="$1"
+ revs="$2"
+ main=
+ sub=
+ git log --grep="^git-subtree-dir: $dir/*\$" \
+ --pretty=format:'START %H%n%s%n%n%b%nEND%n' $revs |
+ while read a b junk; do
+ case "$a" in
+ START) sq="$b" ;;
+ git-subtree-mainline:) main="$b" ;;
+ git-subtree-split:) sub="$b" ;;
+ END)
+ debug " Main is: '$main'"
+ if [ -z "$main" -a -n "$sub" ]; then
+ # squash commits refer to a subtree
+ debug " Squash: $sq from $sub"
+ cache_set "$sq" "$sub"
+ fi
+ if [ -n "$main" -a -n "$sub" ]; then
+ debug " Prior: $main -> $sub"
+ cache_set $main $sub
+ cache_set $sub $sub
+ try_remove_previous "$main"
+ try_remove_previous "$sub"
+ fi
+ main=
+ sub=
+ ;;
+ esac
+ done
+}
+
+copy_commit()
+{
+ # We're going to set some environment vars here, so
+ # do it in a subshell to get rid of them safely later
+ debug copy_commit "{$1}" "{$2}" "{$3}"
+ git log -1 --pretty=format:'%an%n%ae%n%ad%n%cn%n%ce%n%cd%n%s%n%n%b' "$1" |
+ (
+ read GIT_AUTHOR_NAME
+ read GIT_AUTHOR_EMAIL
+ read GIT_AUTHOR_DATE
+ read GIT_COMMITTER_NAME
+ read GIT_COMMITTER_EMAIL
+ read GIT_COMMITTER_DATE
+ export GIT_AUTHOR_NAME \
+ GIT_AUTHOR_EMAIL \
+ GIT_AUTHOR_DATE \
+ GIT_COMMITTER_NAME \
+ GIT_COMMITTER_EMAIL \
+ GIT_COMMITTER_DATE
+ (echo -n "$annotate"; cat ) |
+ git commit-tree "$2" $3 # reads the rest of stdin
+ ) || die "Can't copy commit $1"
+}
+
+add_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Add '$dir/' from commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+add_squashed_msg()
+{
+ if [ -n "$message" ]; then
+ echo "$message"
+ else
+ echo "Merge commit '$1' as '$2'"
+ fi
+}
+
+rejoin_msg()
+{
+ dir="$1"
+ latest_old="$2"
+ latest_new="$3"
+ if [ -n "$message" ]; then
+ commit_message="$message"
+ else
+ commit_message="Split '$dir/' into commit '$latest_new'"
+ fi
+ cat <<-EOF
+ $commit_message
+
+ git-subtree-dir: $dir
+ git-subtree-mainline: $latest_old
+ git-subtree-split: $latest_new
+ EOF
+}
+
+squash_msg()
+{
+ dir="$1"
+ oldsub="$2"
+ newsub="$3"
+ newsub_short=$(git rev-parse --short "$newsub")
+
+ if [ -n "$oldsub" ]; then
+ oldsub_short=$(git rev-parse --short "$oldsub")
+ echo "Squashed '$dir/' changes from $oldsub_short..$newsub_short"
+ echo
+ git log --pretty=tformat:'%h %s' "$oldsub..$newsub"
+ git log --pretty=tformat:'REVERT: %h %s' "$newsub..$oldsub"
+ else
+ echo "Squashed '$dir/' content from commit $newsub_short"
+ fi
+
+ echo
+ echo "git-subtree-dir: $dir"
+ echo "git-subtree-split: $newsub"
+}
+
+toptree_for_commit()
+{
+ commit="$1"
+ git log -1 --pretty=format:'%T' "$commit" -- || exit $?
+}
+
+subtree_for_commit()
+{
+ commit="$1"
+ dir="$2"
+ git ls-tree "$commit" -- "$dir" |
+ while read mode type tree name; do
+ assert [ "$name" = "$dir" ]
+ assert [ "$type" = "tree" -o "$type" = "commit" ]
+ [ "$type" = "commit" ] && continue # ignore submodules
+ echo $tree
+ break
+ done
+}
+
+tree_changed()
+{
+ tree=$1
+ shift
+ if [ $# -ne 1 ]; then
+ return 0 # weird parents, consider it changed
+ else
+ ptree=$(toptree_for_commit $1)
+ if [ "$ptree" != "$tree" ]; then
+ return 0 # changed
+ else
+ return 1 # not changed
+ fi
+ fi
+}
+
+new_squash_commit()
+{
+ old="$1"
+ oldsub="$2"
+ newsub="$3"
+ tree=$(toptree_for_commit $newsub) || exit $?
+ if [ -n "$old" ]; then
+ squash_msg "$dir" "$oldsub" "$newsub" |
+ git commit-tree "$tree" -p "$old" || exit $?
+ else
+ squash_msg "$dir" "" "$newsub" |
+ git commit-tree "$tree" || exit $?
+ fi
+}
+
+copy_or_skip()
+{
+ rev="$1"
+ tree="$2"
+ newparents="$3"
+ assert [ -n "$tree" ]
+
+ identical=
+ nonidentical=
+ p=
+ gotparents=
+ for parent in $newparents; do
+ ptree=$(toptree_for_commit $parent) || exit $?
+ [ -z "$ptree" ] && continue
+ if [ "$ptree" = "$tree" ]; then
+ # an identical parent could be used in place of this rev.
+ identical="$parent"
+ else
+ nonidentical="$parent"
+ fi
+
+ # sometimes both old parents map to the same newparent;
+ # eliminate duplicates
+ is_new=1
+ for gp in $gotparents; do
+ if [ "$gp" = "$parent" ]; then
+ is_new=
+ break
+ fi
+ done
+ if [ -n "$is_new" ]; then
+ gotparents="$gotparents $parent"
+ p="$p -p $parent"
+ fi
+ done
+
+ if [ -n "$identical" ]; then
+ echo $identical
+ else
+ copy_commit $rev $tree "$p" || exit $?
+ fi
+}
+
+ensure_clean()
+{
+ if ! git diff-index HEAD --exit-code --quiet 2>&1; then
+ die "Working tree has modifications. Cannot add."
+ fi
+ if ! git diff-index --cached HEAD --exit-code --quiet 2>&1; then
+ die "Index has modifications. Cannot add."
+ fi
+}
+
+cmd_add()
+{
+ if [ -e "$dir" ]; then
+ die "'$dir' already exists. Cannot add."
+ fi
+
+ ensure_clean
+
+ if [ $# -eq 1 ]; then
+ "cmd_add_commit" "$@"
+ elif [ $# -eq 2 ]; then
+ "cmd_add_repository" "$@"
+ else
+ say "error: parameters were '$@'"
+ die "Provide either a refspec or a repository and refspec."
+ fi
+}
+
+cmd_add_repository()
+{
+ echo "git fetch" "$@"
+ repository=$1
+ refspec=$2
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_add_commit "$@"
+}
+
+cmd_add_commit()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ set -- $revs
+ rev="$1"
+
+ debug "Adding $dir as '$rev'..."
+ git read-tree --prefix="$dir" $rev || exit $?
+ git checkout -- "$dir" || exit $?
+ tree=$(git write-tree) || exit $?
+
+ headrev=$(git rev-parse HEAD) || exit $?
+ if [ -n "$headrev" -a "$headrev" != "$rev" ]; then
+ headp="-p $headrev"
+ else
+ headp=
+ fi
+
+ if [ -n "$squash" ]; then
+ rev=$(new_squash_commit "" "" "$rev") || exit $?
+ commit=$(add_squashed_msg "$rev" "$dir" |
+ git commit-tree $tree $headp -p "$rev") || exit $?
+ else
+ commit=$(add_msg "$dir" "$headrev" "$rev" |
+ git commit-tree $tree $headp -p "$rev") || exit $?
+ fi
+ git reset "$commit" || exit $?
+
+ say "Added dir '$dir'"
+}
+
+cmd_split()
+{
+ debug "Splitting $dir..."
+ cache_setup || exit $?
+
+ if [ -n "$onto" ]; then
+ debug "Reading history for --onto=$onto..."
+ git rev-list $onto |
+ while read rev; do
+ # the 'onto' history is already just the subdir, so
+ # any parent we find there can be used verbatim
+ debug " cache: $rev"
+ cache_set $rev $rev
+ done
+ fi
+
+ if [ -n "$ignore_joins" ]; then
+ unrevs=
+ else
+ unrevs="$(find_existing_splits "$dir" "$revs")"
+ fi
+
+ # We can't restrict rev-list to only $dir here, because some of our
+	# parents have the $dir contents at the root, and those won't match.
+ # (and rev-list --follow doesn't seem to solve this)
+ grl='git rev-list --topo-order --reverse --parents $revs $unrevs'
+ revmax=$(eval "$grl" | wc -l)
+ revcount=0
+ createcount=0
+ eval "$grl" |
+ while read rev parents; do
+ revcount=$(($revcount + 1))
+ say -n "$revcount/$revmax ($createcount)\r"
+ debug "Processing commit: $rev"
+ exists=$(cache_get $rev)
+ if [ -n "$exists" ]; then
+ debug " prior: $exists"
+ continue
+ fi
+ createcount=$(($createcount + 1))
+ debug " parents: $parents"
+ newparents=$(cache_get $parents)
+ debug " newparents: $newparents"
+
+ tree=$(subtree_for_commit $rev "$dir")
+ debug " tree is: $tree"
+
+ check_parents $parents
+
+ # ugly. is there no better way to tell if this is a subtree
+ # vs. a mainline commit? Does it matter?
+ if [ -z "$tree" ]; then
+ set_notree $rev
+ if [ -n "$newparents" ]; then
+ cache_set $rev $rev
+ fi
+ continue
+ fi
+
+ newrev=$(copy_or_skip "$rev" "$tree" "$newparents") || exit $?
+ debug " newrev is: $newrev"
+ cache_set $rev $newrev
+ cache_set latest_new $newrev
+ cache_set latest_old $rev
+ done || exit $?
+ latest_new=$(cache_get latest_new)
+ if [ -z "$latest_new" ]; then
+ die "No new revisions were found"
+ fi
+
+ if [ -n "$rejoin" ]; then
+ debug "Merging split branch into HEAD..."
+ latest_old=$(cache_get latest_old)
+ git merge -s ours \
+ -m "$(rejoin_msg $dir $latest_old $latest_new)" \
+ $latest_new >&2 || exit $?
+ fi
+ if [ -n "$branch" ]; then
+ if rev_exists "refs/heads/$branch"; then
+ if ! rev_is_descendant_of_branch $latest_new $branch; then
+ die "Branch '$branch' is not an ancestor of commit '$latest_new'."
+ fi
+ action='Updated'
+ else
+ action='Created'
+ fi
+ git update-ref -m 'subtree split' "refs/heads/$branch" $latest_new || exit $?
+ say "$action branch '$branch'"
+ fi
+ echo $latest_new
+ exit 0
+}
+
+cmd_merge()
+{
+ revs=$(git rev-parse $default --revs-only "$@") || exit $?
+ ensure_clean
+
+ set -- $revs
+ if [ $# -ne 1 ]; then
+ die "You must provide exactly one revision. Got: '$revs'"
+ fi
+ rev="$1"
+
+ if [ -n "$squash" ]; then
+ first_split="$(find_latest_squash "$dir")"
+ if [ -z "$first_split" ]; then
+ die "Can't squash-merge: '$dir' was never added."
+ fi
+ set $first_split
+ old=$1
+ sub=$2
+ if [ "$sub" = "$rev" ]; then
+ say "Subtree is already at commit $rev."
+ exit 0
+ fi
+ new=$(new_squash_commit "$old" "$sub" "$rev") || exit $?
+ debug "New squash commit: $new"
+ rev="$new"
+ fi
+
+ version=$(git version)
+ if [ "$version" \< "git version 1.7" ]; then
+ if [ -n "$message" ]; then
+ git merge -s subtree --message="$message" $rev
+ else
+ git merge -s subtree $rev
+ fi
+ else
+ if [ -n "$message" ]; then
+ git merge -Xsubtree="$prefix" --message="$message" $rev
+ else
+ git merge -Xsubtree="$prefix" $rev
+ fi
+ fi
+}
+
+cmd_pull()
+{
+ ensure_clean
+ git fetch "$@" || exit $?
+ revs=FETCH_HEAD
+ set -- $revs
+ cmd_merge "$@"
+}
+
+cmd_push()
+{
+ if [ $# -ne 2 ]; then
+ die "You must provide <repository> <refspec>"
+ fi
+ if [ -e "$dir" ]; then
+ repository=$1
+ refspec=$2
+ echo "git push using: " $repository $refspec
+ git push $repository $(git subtree split --prefix=$prefix):refs/heads/$refspec
+ else
+ die "'$dir' must already exist. Try 'git subtree add'."
+ fi
+}
+
+"cmd_$command" "$@"
--- /dev/null
+git-subtree(1)
+==============
+
+NAME
+----
+git-subtree - Merge subtrees together and split repository into subtrees
+
+
+SYNOPSIS
+--------
+[verse]
+'git subtree' add -P <prefix> <commit>
+'git subtree' pull -P <prefix> <repository> <refspec...>
+'git subtree' push -P <prefix> <repository> <refspec...>
+'git subtree' merge -P <prefix> <commit>
+'git subtree' split -P <prefix> [OPTIONS] [<commit>]
+
+
+DESCRIPTION
+-----------
+Subtrees allow subprojects to be included within a subdirectory
+of the main project, optionally including the subproject's
+entire history.
+
+For example, you could include the source code for a library
+as a subdirectory of your application.
+
+Subtrees are not to be confused with submodules, which are meant for
+the same task. Unlike submodules, subtrees do not need any special
+constructions (like .gitmodules files or gitlinks) to be present in
+your repository, and do not force end-users of your
+repository to do anything special or to understand how subtrees
+work. A subtree is just a subdirectory that can be
+committed to, branched, and merged along with your project in
+any way you want.
+
+They are also not to be confused with using the subtree merge
+strategy. The main difference is that, besides merging
+the other project as a subdirectory, you can also extract the
+entire history of a subdirectory from your project and make it
+into a standalone project. Unlike the subtree merge strategy,
+you can alternate back and forth between these
+two operations. If the standalone library gets updated, you can
+automatically merge the changes into your project; if you
+update the library inside your project, you can "split" the
+changes back out again and merge them back into the library
+project.
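+
+A rough sketch of that round trip (the prefix, repository path, and
+branch name here are purely illustrative):
+
+ $ git subtree pull --prefix=vendor/lib ../lib-repo master
+ $ git subtree split --prefix=vendor/lib --branch lib-export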
+
+For example, if a library you made for one application ends up being
+useful elsewhere, you can extract its entire history and publish
+that as its own git repository, without accidentally
+intermingling the history of your application project.
+
+[TIP]
+In order to keep your commit messages clean, we recommend that
+people split their commits between the subtrees and the main
+project as much as possible. That is, if you make a change that
+affects both the library and the main application, commit it in
+two pieces. That way, when you split the library commits out
+later, their descriptions will still make sense. But if this
+isn't important to you, it's not *necessary*. git subtree will
+simply leave out the non-library-related parts of the commit
+when it splits it out into the subproject later.
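+
+For instance, a change that touches both areas could be committed in
+two steps (the paths here are only an illustration):
+
+ $ git add lib/
+ $ git commit -m "lib: handle empty input"
+ $ git add app/
+ $ git commit -m "app: adapt to the new lib behaviour"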
+
+
+COMMANDS
+--------
+add::
+ Create the <prefix> subtree by importing its contents
+ from the given <refspec> or <repository> and remote <refspec>.
+ A new commit is created automatically, joining the imported
+ project's history with your own. With '--squash', imports
+ only a single commit from the subproject, rather than its
+ entire history.
+
+merge::
+ Merge recent changes up to <commit> into the <prefix>
+ subtree. As with normal 'git merge', this doesn't
+ remove your own local changes; it just merges those
+ changes into the latest <commit>. With '--squash',
+ creates only one commit that contains all the changes,
+ rather than merging in the entire history.
+
+ If you use '--squash', the merge direction doesn't
+ always have to be forward; you can use this command to
+ go back in time from v2.5 to v2.4, for example. If your
+ merge introduces a conflict, you can resolve it in the
+ usual ways.
+
+pull::
+ Exactly like 'merge', but parallels 'git pull' in that
+ it fetches the given commit from the specified remote
+ repository.
+
+push::
+ Does a 'split' (see above) using the <prefix> supplied
+ and then does a 'git push' to push the result to the
+ repository and refspec. This can be used to push your
+ subtree to different branches of the remote repository.
+
+split::
+ Extract a new, synthetic project history from the
+ history of the <prefix> subtree. The new history
+ includes only the commits (including merges) that
+ affected <prefix>, and each of those commits now has the
+ contents of <prefix> at the root of the project instead
+ of in a subdirectory. Thus, the newly created history
+ is suitable for export as a separate git repository.
+
+ After splitting successfully, a single commit id is
+ printed to stdout. This corresponds to the HEAD of the
+ newly created tree, which you can manipulate however you
+ want.
+
+ Repeated splits of exactly the same history are
+ guaranteed to be identical (ie. to produce the same
+ commit ids). Because of this, if you add new commits
+ and then re-split, the new commits will be attached as
+ commits on top of the history you generated last time,
+ so 'git merge' and friends will work as expected.
+
+ Note that if you use '--squash' when you merge, you
+ should usually not just '--rejoin' when you split.
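+
+As a quick sketch of 'split' and 'push' together (the prefix, URL and
+branch names below are hypothetical):
+
+ $ git subtree split --prefix=vendor/plugin --branch plugin-export
+ $ git subtree push --prefix=vendor/plugin git@example.com:plugin.git master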
+
+
+OPTIONS
+-------
+-q::
+--quiet::
+ Suppress unnecessary output messages on stderr.
+
+-d::
+--debug::
+ Produce even more unnecessary output messages on stderr.
+
+-P <prefix>::
+--prefix=<prefix>::
+ Specify the path in the repository to the subtree you
+ want to manipulate. This option is mandatory
+ for all commands.
+
+-m <message>::
+--message=<message>::
+ This option is only valid for add, merge and pull (unsure).
+ Specify <message> as the commit message for the merge commit.
+
+
+OPTIONS FOR add, merge, push, pull
+----------------------------------
+--squash::
+ This option is only valid for add, merge, push and pull
+ commands.
+
+ Instead of merging the entire history from the subtree
+ project, produce only a single commit that contains all
+ the differences you want to merge, and then merge that
+ new commit into your project.
+
+ Using this option helps to reduce log clutter. People
+ rarely want to see every change that happened between
+ v1.0 and v1.1 of the library they're using, since none of the
+ interim versions were ever included in their application.
+
+ Using '--squash' also helps avoid problems when the same
+ subproject is included multiple times in the same
+ project, or is removed and then re-added. In such a
+ case, it doesn't make sense to combine the histories
+ anyway, since it's unclear which part of the history
+ belongs to which subtree.
+
+ Furthermore, with '--squash', you can switch back and
+ forth between different versions of a subtree, rather
+ than strictly forward. 'git subtree merge --squash'
+ always adjusts the subtree to exactly match the
+ specified commit, even if getting to that commit would
+ require undoing some changes that were added earlier.
+
+ Whether or not you use '--squash', changes made in your
+ local repository remain intact and can be later split
+ and sent upstream to the subproject.
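+
+For example, assuming 'vendor/lib' is the subtree and 'lib-v1.0' and
+'lib-v1.1' are refs of the subproject that you have already fetched,
+you could move between versions like this:
+
+ $ git subtree merge --prefix=vendor/lib --squash lib-v1.1
+ $ git subtree merge --prefix=vendor/lib --squash lib-v1.0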
+
+
+OPTIONS FOR split
+-----------------
+--annotate=<annotation>::
+ This option is only valid for the split command.
+
+ When generating synthetic history, add <annotation> as a
+ prefix to each commit message. Since we're creating new
+ commits with the same commit message, but possibly
+ different content, from the original commits, this can help
+ to differentiate them and avoid confusion.
+
+ Whenever you split, you need to use the same
+ <annotation>, or else you don't have a guarantee that
+ the new re-created history will be identical to the old
+ one. That will prevent merging from working correctly.
+ git subtree tries to make it work anyway, particularly
+ if you use --rejoin, but it may not always be effective.
+
+-b <branch>::
+--branch=<branch>::
+ This option is only valid for the split command.
+
+ After generating the synthetic history, create a new
+ branch called <branch> that contains the new history.
+ This is suitable for immediate pushing upstream.
+ <branch> must not already exist.
+
+--ignore-joins::
+ This option is only valid for the split command.
+
+ If you use '--rejoin', git subtree attempts to optimize
+ its history reconstruction to generate only the new
+ commits since the last '--rejoin'. '--ignore-joins'
+ disables this behaviour, forcing it to regenerate the
+ entire history. In a large project, this can take a
+ long time.
+
+--onto=<onto>::
+ This option is only valid for the split command.
+
+ If your subtree was originally imported using something
+ other than git subtree, its history may not match what
+ git subtree is expecting. In that case, you can specify
+ the commit id <onto> that corresponds to the first
+ revision of the subproject's history that was imported
+ into your project, and git subtree will attempt to build
+ its history from there.
+
+ If you used 'git subtree add', you should never need
+ this option.
+
+--rejoin::
+ This option is only valid for the split command.
+
+ After splitting, merge the newly created synthetic
+ history back into your main project. That way, future
+ splits can search only the part of history that has
+ been added since the most recent --rejoin.
+
+ If your split commits end up merged into the upstream
+ subproject, and then you want to get the latest upstream
+ version, this will allow git's merge algorithm to more
+ intelligently avoid conflicts (since it knows these
+ synthetic commits are already part of the upstream
+ repository).
+
+ Unfortunately, using this option results in 'git log'
+ showing an extra copy of every new commit that was
+ created (the original, and the synthetic one).
+
+ If you do all your merges with '--squash', don't use
+ '--rejoin' when you split, because you don't want the
+ subproject's history to be part of your project anyway.
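+
+A periodic export with '--rejoin' might look like this (the prefix and
+branch name are placeholders):
+
+ $ git subtree split --prefix=vendor/lib --rejoin --branch lib-export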
+
+
+EXAMPLE 1. Add command
+----------------------
+Let's assume that you have a local repository that you would like
+to add an external vendor library to. In this case we will add the
+git-subtree repository as a subdirectory of your already existing
+git-extensions repository in ~/git-extensions/:
+
+ $ git subtree add --prefix=git-subtree --squash \
+ git://github.com/apenwarr/git-subtree.git master
+
+'master' needs to be a valid remote ref and can be a different branch
+name.
+
+You can omit the --squash flag, but doing so will increase the number
+of commits that are included in your local repository.
+
+We now have a ~/git-extensions/git-subtree directory containing code
+from the master branch of git://github.com/apenwarr/git-subtree.git
+in our git-extensions repository.
+
+EXAMPLE 2. Extract a subtree using commit, merge and pull
+---------------------------------------------------------
+Let's use the repository for the git source code as an example.
+First, get your own copy of the git.git repository:
+
+ $ git clone git://git.kernel.org/pub/scm/git/git.git test-git
+ $ cd test-git
+
+gitweb (commit 1130ef3) was merged into git as of commit
+0a8f4f0, after which it was no longer maintained separately.
+But imagine it had been maintained separately, and we wanted to
+extract git's changes to gitweb since that time, to share with
+the upstream. You could do this:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' \
+ 0a8f4f0^.. --onto=1130ef3 --rejoin \
+ --branch gitweb-latest
+ $ gitk gitweb-latest
+ $ git push git@github.com:whatever/gitweb.git gitweb-latest:master
+
+(We use '0a8f4f0^..' because that means "all the changes from
+0a8f4f0 to the current version, including 0a8f4f0 itself.")
+
+If gitweb had originally been merged using 'git subtree add' (or
+a previous split had already been done with --rejoin specified)
+then you can do all your splits without having to remember any
+weird commit ids:
+
+ $ git subtree split --prefix=gitweb --annotate='(split) ' --rejoin \
+ --branch gitweb-latest2
+
+And you can merge changes back in from the upstream project just
+as easily:
+
+ $ git subtree pull --prefix=gitweb \
+ git@github.com:whatever/gitweb.git master
+
+Or, using '--squash', you can actually rewind to an earlier
+version of gitweb:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest~10
+
+Then make some changes:
+
+ $ date >gitweb/myfile
+ $ git add gitweb/myfile
+ $ git commit -m 'created myfile'
+
+And fast forward again:
+
+ $ git subtree merge --prefix=gitweb --squash gitweb-latest
+
+And notice that your change is still intact:
+
+ $ ls -l gitweb/myfile
+
+And you can split it out and look at your changes versus
+the standard gitweb:
+
+ $ git log gitweb-latest..$(git subtree split --prefix=gitweb)
+
+EXAMPLE 3. Extract a subtree using branch
+-----------------------------------------
+Suppose you have a source directory with many files and
+subdirectories, and you want to extract the lib directory to its own
+git project. Here's a short way to do it:
+
+First, make the new repository wherever you want:
+
+ $ <go to the new location>
+ $ git init --bare
+
+Back in your original directory:
+
+ $ git subtree split --prefix=lib --annotate="(split)" -b split
+
+Then push the new branch onto the new empty repository:
+
+ $ git push <new-repo> split:master
+
+
+AUTHOR
+------
+Written by Avery Pennarun <apenwarr@gmail.com>
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
--- /dev/null
+# Run tests
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+#GIT_TEST_OPTS=--verbose --debug
+SHELL_PATH ?= $(SHELL)
+PERL_PATH ?= /usr/bin/perl
+TAR ?= $(TAR)
+RM ?= rm -f
+PROVE ?= prove
+DEFAULT_TEST_TARGET ?= test
+
+# Shell quote;
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+all: $(DEFAULT_TEST_TARGET)
+
+test: pre-clean $(TEST_LINT)
+ $(MAKE) aggregate-results-and-cleanup
+
+prove: pre-clean $(TEST_LINT)
+ @echo "*** prove ***"; GIT_CONFIG=.git/config $(PROVE) --exec '$(SHELL_PATH_SQ)' $(GIT_PROVE_OPTS) $(T) :: $(GIT_TEST_OPTS)
+ $(MAKE) clean
+
+$(T):
+ @echo "*** $@ ***"; GIT_CONFIG=.git/config '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
+
+pre-clean:
+ $(RM) -r test-results
+
+clean:
+ $(RM) -r 'trash directory'.* test-results
+ $(RM) -r valgrind/bin
+ $(RM) .prove
+
+test-lint: test-lint-duplicates test-lint-executable
+
+test-lint-duplicates:
+ @dups=`echo $(T) | tr ' ' '\n' | sed 's/-.*//' | sort | uniq -d` && \
+ test -z "$$dups" || { \
+ echo >&2 "duplicate test numbers:" $$dups; exit 1; }
+
+test-lint-executable:
+ @bad=`for i in $(T); do test -x "$$i" || echo $$i; done` && \
+ test -z "$$bad" || { \
+ echo >&2 "non-executable tests:" $$bad; exit 1; }
+
+aggregate-results-and-cleanup: $(T)
+ $(MAKE) aggregate-results
+ $(MAKE) clean
+
+aggregate-results:
+ for f in ../../../t/test-results/t*-*.counts; do \
+ echo "$$f"; \
+ done | '$(SHELL_PATH_SQ)' ../../../t/aggregate-results.sh
+
+valgrind:
+ $(MAKE) GIT_TEST_OPTS="$(GIT_TEST_OPTS) --valgrind"
+
+test-results:
+ mkdir -p test-results
+
+.PHONY: pre-clean $(T) aggregate-results clean valgrind
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2012 Avery Pennarun
+#
+test_description='Basic porcelain support for subtrees
+
+This test verifies the basic operation of the merge, pull, add
+and split subcommands of git subtree.
+'
+
+export TEST_DIRECTORY=$(pwd)/../../../t
+
+. ../../../t/test-lib.sh
+
+create()
+{
+ echo "$1" >"$1"
+ git add "$1"
+}
+
+
+check_equal()
+{
+ test_debug 'echo'
+ test_debug "echo \"check a:\" \"{$1}\""
+ test_debug "echo \" b:\" \"{$2}\""
+ if [ "$1" = "$2" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+fixnl()
+{
+ t=""
+ while read x; do
+ t="$t$x "
+ done
+ echo $t
+}
+
+multiline()
+{
+ while read x; do
+ set -- $x
+ for d in "$@"; do
+ echo "$d"
+ done
+ done
+}
+
+undo()
+{
+ git reset --hard HEAD~
+}
+
+last_commit_message()
+{
+ git log --pretty=format:%s -1
+}
+
+# 1
+test_expect_success 'init subproj' '
+ test_create_repo subproj
+'
+
+# To the subproject!
+cd subproj
+
+# 2
+test_expect_success 'add sub1' '
+ create sub1 &&
+ git commit -m "sub1" &&
+ git branch sub1 &&
+ git branch -m master subproj
+'
+
+# 3
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2" &&
+ git branch sub2
+'
+
+# 4
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3" &&
+ git branch sub3
+'
+
+# Back to mainline
+cd ..
+
+# 5
+test_expect_success 'add main4' '
+ create main4 &&
+ git commit -m "main4" &&
+ git branch -m master mainline &&
+ git branch subdir
+'
+
+# 6
+test_expect_success 'fetch subproj history' '
+ git fetch ./subproj sub1 &&
+ git branch sub1 FETCH_HEAD
+'
+
+# 7
+test_expect_success 'no subtree exists in main tree' '
+ test_must_fail git subtree merge --prefix=subdir sub1
+'
+
+# 8
+test_expect_success 'no pull from non-existent subtree' '
+ test_must_fail git subtree pull --prefix=subdir ./subproj sub1
+'
+
+# 9
+test_expect_success 'check if --message works for add' '
+ git subtree add --prefix=subdir --message="Added subproject" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject" &&
+ undo
+'
+
+# 10
+test_expect_success 'check if --message works as -m and --prefix as -P' '
+ git subtree add -P subdir -m "Added subproject using git subtree" sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject using git subtree" &&
+ undo
+'
+
+# 11
+test_expect_success 'check if --message works with squash too' '
+ git subtree add -P subdir -m "Added subproject with squash" --squash sub1 &&
+ check_equal ''"$(last_commit_message)"'' "Added subproject with squash" &&
+ undo
+'
+
+# 12
+test_expect_success 'add subproj to mainline' '
+ git subtree add --prefix=subdir/ FETCH_HEAD &&
+ check_equal ''"$(last_commit_message)"'' "Add '"'subdir/'"' from commit '"'"'''"$(git rev-parse sub1)"'''"'"'"
+'
+
+# 13
+# this shouldn't actually do anything, since FETCH_HEAD is already a parent
+test_expect_success 'merge fetched subproj' '
+ git merge -m "merge -s -ours" -s ours FETCH_HEAD
+'
+
+# 14
+test_expect_success 'add main-sub5' '
+ create subdir/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+# 15
+test_expect_success 'add main6' '
+ create main6 &&
+ git commit -m "main6 boring"
+'
+
+# 16
+test_expect_success 'add main-sub7' '
+ create subdir/main-sub7 &&
+ git commit -m "main-sub7"
+'
+
+# 17
+test_expect_success 'fetch new subproj history' '
+ git fetch ./subproj sub2 &&
+ git branch sub2 FETCH_HEAD
+'
+
+# 18
+test_expect_success 'check if --message works for merge' '
+ git subtree merge --prefix=subdir -m "Merged changes from subproject" sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject" &&
+ undo
+'
+
+# 19
+test_expect_success 'check if --message for merge works with squash too' '
+ git subtree merge --prefix subdir -m "Merged changes from subproject using squash" --squash sub2 &&
+ check_equal ''"$(last_commit_message)"'' "Merged changes from subproject using squash" &&
+ undo
+'
+
+# 20
+test_expect_success 'merge new subproj history into subdir' '
+ git subtree merge --prefix=subdir FETCH_HEAD &&
+ git branch pre-split &&
+ check_equal ''"$(last_commit_message)"'' "Merge commit '"'"'"$(git rev-parse sub2)"'"'"' into mainline"
+'
+
+# 21
+test_expect_success 'Check that prefix argument is required for split' '
+ echo "You must provide the --prefix option." > expected &&
+ test_must_fail git subtree split > actual 2>&1 &&
+ test_debug "echo -n expected: " &&
+ test_debug "cat expected" &&
+ test_debug "echo -n actual: " &&
+ test_debug "cat actual" &&
+ test_cmp expected actual &&
+ rm -f expected actual
+'
+
+# 22
+test_expect_success 'Check that the <prefix> exists for a split' '
+ echo "'"'"'non-existent-directory'"'"'" does not exist\; use "'"'"'git subtree add'"'"'" > expected &&
+ test_must_fail git subtree split --prefix=non-existent-directory > actual 2>&1 &&
+ test_debug "echo -n expected: " &&
+ test_debug "cat expected" &&
+ test_debug "echo -n actual: " &&
+ test_debug "cat actual" &&
+ test_cmp expected actual
+# rm -f expected actual
+'
+
+# 23
+test_expect_success 'check if --message works for split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ git branch spl1 "$spl1" &&
+ check_equal ''"$(last_commit_message)"'' "Split & rejoin" &&
+ undo
+'
+
+# 24
+test_expect_success 'check split with --branch' '
+ spl1=$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin) &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr1 &&
+ check_equal ''"$(git rev-parse splitbr1)"'' "$spl1"
+'
+
+# 25
+test_expect_success 'check split with --branch for an existing branch' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git branch splitbr2 sub1 &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --branch splitbr2 &&
+ check_equal ''"$(git rev-parse splitbr2)"'' "$spl1"
+'
+
+# 26
+test_expect_success 'check split with --branch for an incompatible branch' '
+ test_must_fail git subtree split --prefix subdir --onto FETCH_HEAD --branch subdir
+'
+
+
+# 27
+test_expect_success 'check split+rejoin' '
+ spl1=''"$(git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --message "Split & rejoin" --rejoin)"'' &&
+ undo &&
+ git subtree split --annotate='"'*'"' --prefix subdir --onto FETCH_HEAD --rejoin &&
+ check_equal ''"$(last_commit_message)"'' "Split '"'"'subdir/'"'"' into commit '"'"'"$spl1"'"'"'"
+'
+
+# 28
+test_expect_success 'add main-sub8' '
+ create subdir/main-sub8 &&
+ git commit -m "main-sub8"
+'
+
+# To the subproject!
+cd ./subproj
+
+# 29
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl1 &&
+ git branch spl1 FETCH_HEAD &&
+ git merge FETCH_HEAD
+'
+
+# 30
+test_expect_success 'add sub9' '
+ create sub9 &&
+ git commit -m "sub9"
+'
+
+# Back to mainline
+cd ..
+
+# 31
+test_expect_success 'split for sub8' '
+ split2=''"$(git subtree split --annotate='"'*'"' --prefix subdir/ --rejoin)"'' &&
+ git branch split2 "$split2"
+'
+
+# 32
+test_expect_success 'add main-sub10' '
+ create subdir/main-sub10 &&
+ git commit -m "main-sub10"
+'
+
+# 33
+test_expect_success 'split for sub10' '
+ spl3=''"$(git subtree split --annotate='"'*'"' --prefix subdir --rejoin)"'' &&
+ git branch spl3 "$spl3"
+'
+
+# To the subproject!
+cd ./subproj
+
+# 34
+test_expect_success 'merge split into subproj' '
+ git fetch .. spl3 &&
+ git branch spl3 FETCH_HEAD &&
+ git merge FETCH_HEAD &&
+ git branch subproj-merge-spl3
+'
+
+chkm="main4 main6"
+chkms="main-sub10 main-sub5 main-sub7 main-sub8"
+chkms_sub=$(echo $chkms | multiline | sed 's,^,subdir/,' | fixnl)
+chks="sub1 sub2 sub3 sub9"
+chks_sub=$(echo $chks | multiline | sed 's,^,subdir/,' | fixnl)
+
+# 35
+test_expect_success 'make sure exactly the right set of files ends up in the subproj' '
+ subfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$subfiles" "$chkms $chks"
+'
+
+# 36
+test_expect_success 'make sure the subproj history *only* contains commits that affect the subdir' '
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" "$chkms $chks"
+'
+
+# Back to mainline
+cd ..
+
+# 37
+test_expect_success 'pull from subproj' '
+ git fetch ./subproj subproj-merge-spl3 &&
+ git branch subproj-merge-spl3 FETCH_HEAD &&
+ git subtree pull --prefix=subdir ./subproj subproj-merge-spl3
+'
+
+# 38
+test_expect_success 'make sure exactly the right set of files ends up in the mainline' '
+ mainfiles=''"$(git ls-files | fixnl)"'' &&
+ check_equal "$mainfiles" "$chkm $chkms_sub $chks_sub"
+'
+
+# 39
+test_expect_success 'make sure each filename changed exactly once in the entire history' '
+ # main-sub?? and /subdir/main-sub?? both change, because those are the
+ # changes that were split into their own history. And subdir/sub?? never
+ # change, since they were *only* changed in the subtree branch.
+ allchanges=''"$(git log --name-only --pretty=format:'"''"' | sort | fixnl)"'' &&
+ check_equal "$allchanges" ''"$(echo $chkms $chkm $chks $chkms_sub | multiline | sort | fixnl)"''
+'
+
+# 40
+test_expect_success 'make sure the --rejoin commits never make it into subproj' '
+ check_equal ''"$(git log --pretty=format:'"'%s'"' HEAD^2 | grep -i split)"'' ""
+'
+
+# 41
+test_expect_success 'make sure no "git subtree" tagged commits make it into subproj' '
+ # They are meaningless to subproj since one side of the merge refers to the mainline
+ check_equal ''"$(git log --pretty=format:'"'%s%n%b'"' HEAD^2 | grep "git-subtree.*:")"'' ""
+'
+
+# prepare second pair of repositories
+mkdir test2
+cd test2
+
+# 42
+test_expect_success 'init main' '
+ test_create_repo main
+'
+
+cd main
+
+# 43
+test_expect_success 'add main1' '
+ create main1 &&
+ git commit -m "main1"
+'
+
+cd ..
+
+# 44
+test_expect_success 'init sub' '
+ test_create_repo sub
+'
+
+cd sub
+
+# 45
+test_expect_success 'add sub2' '
+ create sub2 &&
+ git commit -m "sub2"
+'
+
+cd ../main
+
+# check if split can find proper base without --onto
+
+# 46
+test_expect_success 'add sub as subdir in main' '
+ git fetch ../sub master &&
+ git branch sub2 FETCH_HEAD &&
+ git subtree add --prefix subdir sub2
+'
+
+cd ../sub
+
+# 47
+test_expect_success 'add sub3' '
+ create sub3 &&
+ git commit -m "sub3"
+'
+
+cd ../main
+
+# 48
+test_expect_success 'merge from sub' '
+ git fetch ../sub master &&
+ git branch sub3 FETCH_HEAD &&
+ git subtree merge --prefix subdir sub3
+'
+
+# 49
+test_expect_success 'add main-sub4' '
+ create subdir/main-sub4 &&
+ git commit -m "main-sub4"
+'
+
+# 50
+test_expect_success 'split for main-sub4 without --onto' '
+ git subtree split --prefix subdir --branch mainsub4
+'
+
+# At this point, the new commit's parent should be sub3. If it is not,
+# something went wrong (the "newparent" of the "master~" commit should
+# have been sub3, but it was not, because its cache was not set to
+# itself).
+
+# 51
+test_expect_success 'check that the commit parent is sub3' '
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub4)"'' ''"$(git rev-parse sub3)"''
+'
+
+# 52
+test_expect_success 'add main-sub5' '
+ mkdir subdir2 &&
+ create subdir2/main-sub5 &&
+ git commit -m "main-sub5"
+'
+
+# 53
+test_expect_success 'split for main-sub5 without --onto' '
+ # also test that we still can split out an entirely new subtree;
+ # if the parent of the first commit in the tree is not empty,
+ # then the new subtree has accidentally been attached to something
+ git subtree split --prefix subdir2 --branch mainsub5 &&
+ check_equal ''"$(git log --pretty=format:%P -1 mainsub5)"'' ""
+'
+
+# make sure no patch changes more than one file. The original set of commits
+# changed only one file each. A multi-file change would imply that we pruned
+# commits too aggressively.
+joincommits()
+{
+ commit=
+ all=
+ while read x y; do
+ #echo "{$x}" >&2
+ if [ -z "$x" ]; then
+ continue
+ elif [ "$x" = "commit:" ]; then
+ if [ -n "$commit" ]; then
+ echo "$commit $all"
+ all=
+ fi
+ commit="$y"
+ else
+ all="$all $y"
+ fi
+ done
+ echo "$commit $all"
+}
+
+# 54
+test_expect_success 'verify one file change per commit' '
+ x= &&
+ list=''"$(git log --pretty=format:'"'commit: %H'"' | joincommits)"'' &&
+# test_debug "echo HERE" &&
+# test_debug "echo ''"$list"''" &&
+ (git log --pretty=format:'"'commit: %H'"' | joincommits |
+ ( while read commit a b; do
+ test_debug "echo Verifying commit "''"$commit"''
+ test_debug "echo a: "''"$a"''
+ test_debug "echo b: "''"$b"''
+ check_equal "$b" ""
+ x=1
+ done
+ check_equal "$x" 1
+ ))
+'
+
+test_done
--- /dev/null
+
+ delete tempdir
+
+ 'git subtree rejoin' option to do the same as --rejoin, eg. after a
+ rebase
+
+ --prefix doesn't force the subtree correctly in merge/pull:
+ "-s subtree" should be given an explicit subtree option?
+ There doesn't seem to be a way to do this. We'd have to
+ patch git-merge-subtree. Ugh.
+ (but we could avoid this problem by generating squashes with
+ exactly the right subtree structure, rather than using
+ subtree merge...)
+
+ add a 'push' subcommand to parallel 'pull'
+
+ add a 'log' subcommand to see what's new in a subtree?
+
+ add to-submodule and from-submodule commands
+
+ automated tests for --squash stuff
+
+ "add" command non-obviously requires a commitid; would be easier if
+ it had a "pull" sort of mode instead
+
+ "pull" and "merge" commands should fail if you've never merged
+ that --prefix before
+
+ docs should provide an example of "add"
+
+ note that the initial split doesn't *have* to have a commitid
+ specified... that's just an optimization
+
+ if you try to add (or maybe merge?) with an invalid commitid, you
+ get a misleading "prefix must end with /" message from
+ one of the other git tools that git-subtree calls. Should
+ detect this situation and print the *real* problem.
+
+ "pull --squash" should do fetch-synthesize-merge, but instead just
+ does "pull" directly, which doesn't work at all.
+
+ make a 'force-update' that does what 'add' does even if the subtree
+ already exists. That way we can help people who imported
+ subtrees "incorrectly" (eg. by just copying in the files) in
+ the past.
+
+ guess --prefix automatically if possible based on pwd
+
+ make a 'git subtree grafts' that automatically expands --squash'd
+ commits so you can see the full history if you want it.
return offset * eastwest;
}
-const char *show_date_relative(unsigned long time, int tz,
+void show_date_relative(unsigned long time, int tz,
const struct timeval *now,
- char *timebuf,
- size_t timebuf_size)
+ struct strbuf *timebuf)
{
unsigned long diff;
- if (now->tv_sec < time)
- return "in the future";
+ if (now->tv_sec < time) {
+ strbuf_addstr(timebuf, _("in the future"));
+ return;
+ }
diff = now->tv_sec - time;
if (diff < 90) {
- snprintf(timebuf, timebuf_size, "%lu seconds ago", diff);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu second ago", "%lu seconds ago", diff), diff);
+ return;
}
/* Turn it into minutes */
diff = (diff + 30) / 60;
if (diff < 90) {
- snprintf(timebuf, timebuf_size, "%lu minutes ago", diff);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu minute ago", "%lu minutes ago", diff), diff);
+ return;
}
/* Turn it into hours */
diff = (diff + 30) / 60;
if (diff < 36) {
- snprintf(timebuf, timebuf_size, "%lu hours ago", diff);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu hour ago", "%lu hours ago", diff), diff);
+ return;
}
/* We deal with number of days from here on */
diff = (diff + 12) / 24;
if (diff < 14) {
- snprintf(timebuf, timebuf_size, "%lu days ago", diff);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu day ago", "%lu days ago", diff), diff);
+ return;
}
/* Say weeks for the past 10 weeks or so */
if (diff < 70) {
- snprintf(timebuf, timebuf_size, "%lu weeks ago", (diff + 3) / 7);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu week ago", "%lu weeks ago", (diff + 3) / 7),
+ (diff + 3) / 7);
+ return;
}
/* Say months for the past 12 months or so */
if (diff < 365) {
- snprintf(timebuf, timebuf_size, "%lu months ago", (diff + 15) / 30);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu month ago", "%lu months ago", (diff + 15) / 30),
+ (diff + 15) / 30);
+ return;
}
/* Give years and months for 5 years or so */
if (diff < 1825) {
unsigned long totalmonths = (diff * 12 * 2 + 365) / (365 * 2);
unsigned long years = totalmonths / 12;
unsigned long months = totalmonths % 12;
- int n;
- n = snprintf(timebuf, timebuf_size, "%lu year%s",
- years, (years > 1 ? "s" : ""));
- if (months)
- snprintf(timebuf + n, timebuf_size - n,
- ", %lu month%s ago",
- months, (months > 1 ? "s" : ""));
- else
- snprintf(timebuf + n, timebuf_size - n, " ago");
- return timebuf;
+ if (months) {
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_addf(&sb, Q_("%lu year", "%lu years", years), years);
+ /* TRANSLATORS: "%s" is "<n> years" */
+ strbuf_addf(timebuf,
+ Q_("%s, %lu month ago", "%s, %lu months ago", months),
+ sb.buf, months);
+ strbuf_release(&sb);
+ } else
+ strbuf_addf(timebuf,
+ Q_("%lu year ago", "%lu years ago", years), years);
+ return;
}
/* Otherwise, just years. Centuries is probably overkill. */
- snprintf(timebuf, timebuf_size, "%lu years ago", (diff + 183) / 365);
- return timebuf;
+ strbuf_addf(timebuf,
+ Q_("%lu year ago", "%lu years ago", (diff + 183) / 365),
+ (diff + 183) / 365);
}
const char *show_date(unsigned long time, int tz, enum date_mode mode)
{
struct tm *tm;
- static char timebuf[200];
+ static struct strbuf timebuf = STRBUF_INIT;
if (mode == DATE_RAW) {
- snprintf(timebuf, sizeof(timebuf), "%lu %+05d", time, tz);
- return timebuf;
+ strbuf_reset(&timebuf);
+ strbuf_addf(&timebuf, "%lu %+05d", time, tz);
+ return timebuf.buf;
}
if (mode == DATE_RELATIVE) {
struct timeval now;
+
+ strbuf_reset(&timebuf);
gettimeofday(&now, NULL);
- return show_date_relative(time, tz, &now,
- timebuf, sizeof(timebuf));
+ show_date_relative(time, tz, &now, &timebuf);
+ return timebuf.buf;
}
if (mode == DATE_LOCAL)
tm = time_to_tm(time, tz);
if (!tm)
return NULL;
+
+ strbuf_reset(&timebuf);
if (mode == DATE_SHORT)
- sprintf(timebuf, "%04d-%02d-%02d", tm->tm_year + 1900,
+ strbuf_addf(&timebuf, "%04d-%02d-%02d", tm->tm_year + 1900,
tm->tm_mon + 1, tm->tm_mday);
else if (mode == DATE_ISO8601)
- sprintf(timebuf, "%04d-%02d-%02d %02d:%02d:%02d %+05d",
+ strbuf_addf(&timebuf, "%04d-%02d-%02d %02d:%02d:%02d %+05d",
tm->tm_year + 1900,
tm->tm_mon + 1,
tm->tm_mday,
tm->tm_hour, tm->tm_min, tm->tm_sec,
tz);
else if (mode == DATE_RFC2822)
- sprintf(timebuf, "%.3s, %d %.3s %d %02d:%02d:%02d %+05d",
+ strbuf_addf(&timebuf, "%.3s, %d %.3s %d %02d:%02d:%02d %+05d",
weekday_names[tm->tm_wday], tm->tm_mday,
month_names[tm->tm_mon], tm->tm_year + 1900,
tm->tm_hour, tm->tm_min, tm->tm_sec, tz);
else
- sprintf(timebuf, "%.3s %.3s %d %02d:%02d:%02d %d%c%+05d",
+ strbuf_addf(&timebuf, "%.3s %.3s %d %02d:%02d:%02d %d%c%+05d",
weekday_names[tm->tm_wday],
month_names[tm->tm_mon],
tm->tm_mday,
tm->tm_year + 1900,
(mode == DATE_LOCAL) ? 0 : ' ',
tz);
- return timebuf;
+ return timebuf.buf;
}
/*
{
int i, len, add, del, adds = 0, dels = 0;
uintmax_t max_change = 0, max_len = 0;
- int total_files = data->nr;
- int width, name_width, graph_width, number_width = 4, count;
+ int total_files = data->nr, count;
+ int width, name_width, graph_width, number_width = 0, bin_width = 0;
const char *reset, *add_c, *del_c;
const char *line_prefix = "";
int extra_shown = 0;
if (max_len < len)
max_len = len;
- if (file->is_binary || file->is_unmerged)
+ if (file->is_unmerged) {
+ /* "Unmerged" is 8 characters */
+ bin_width = bin_width < 8 ? 8 : bin_width;
continue;
+ }
+ if (file->is_binary) {
+ /* "Bin XXX -> YYY bytes" */
+ int w = 14 + decimal_width(file->added)
+ + decimal_width(file->deleted);
+ bin_width = bin_width < w ? w : bin_width;
+ /* Display change counts aligned with "Bin" */
+ number_width = 3;
+ continue;
+ }
+
if (max_change < change)
max_change = change;
}
* stat_name_width fixes the maximum width of the filename,
* and is also used to divide available columns if there
* aren't enough.
+ *
+ * Binary files are displayed with "Bin XXX -> YYY bytes"
+ * instead of the change count and graph. This part is treated
+ * similarly to the graph part, except that it is not
+ * "scaled". If total width is too small to accomodate the
+ * guaranteed minimum width of the filename part and the
+ * separators and this message, this message will "overflow"
+ * making the line longer than the maximum width.
*/
if (options->stat_width == -1)
- width = term_columns();
+ width = term_columns() - options->output_prefix_length;
else
width = options->stat_width ? options->stat_width : 80;
+ number_width = decimal_width(max_change) > number_width ?
+ decimal_width(max_change) : number_width;
if (options->stat_graph_width == -1)
options->stat_graph_width = diff_stat_graph_width;
/*
* First assign sizes that are wanted, ignoring available width.
+ * strlen("Bin XXX -> YYY bytes") == bin_width, and the part
+ * starting from "XXX" should fit in graph_width.
*/
- graph_width = (options->stat_graph_width &&
- options->stat_graph_width < max_change) ?
- options->stat_graph_width : max_change;
+ graph_width = max_change + 4 > bin_width ? max_change : bin_width - 4;
+ if (options->stat_graph_width &&
+ options->stat_graph_width < graph_width)
+ graph_width = options->stat_graph_width;
+
name_width = (options->stat_name_width > 0 &&
options->stat_name_width < max_len) ?
options->stat_name_width : max_len;
* Adjust adjustable widths not to exceed maximum width
*/
if (name_width + number_width + 6 + graph_width > width) {
- if (graph_width > width * 3/8 - number_width - 6)
+ if (graph_width > width * 3/8 - number_width - 6) {
graph_width = width * 3/8 - number_width - 6;
+ if (graph_width < 6)
+ graph_width = 6;
+ }
+
if (options->stat_graph_width &&
graph_width > options->stat_graph_width)
graph_width = options->stat_graph_width;
if (data->files[i]->is_binary) {
fprintf(options->file, "%s", line_prefix);
show_name(options->file, prefix, name, len);
- fprintf(options->file, " Bin ");
+ fprintf(options->file, " %*s ", number_width, "Bin");
fprintf(options->file, "%s%"PRIuMAX"%s",
del_c, deleted, reset);
fprintf(options->file, " -> ");
else if (data->files[i]->is_unmerged) {
fprintf(options->file, "%s", line_prefix);
show_name(options->file, prefix, name, len);
- fprintf(options->file, " Unmerged\n");
+ fprintf(options->file, " Unmerged\n");
continue;
}
}
fprintf(options->file, "%s", line_prefix);
show_name(options->file, prefix, name, len);
- fprintf(options->file, "%5"PRIuMAX"%s", added + deleted,
- added + deleted ? " " : "");
+ fprintf(options->file, " %*"PRIuMAX"%s",
+ number_width, added + deleted,
+ added + deleted ? " " : "");
show_graph(options->file, '+', add, add_c, reset);
show_graph(options->file, '-', del, del_c, reset);
fprintf(options->file, "\n");
else if (!strcmp(arg, "--ignore-space-at-eol"))
DIFF_XDL_SET(options, IGNORE_WHITESPACE_AT_EOL);
else if (!strcmp(arg, "--patience"))
- DIFF_XDL_SET(options, PATIENCE_DIFF);
+ options->xdl_opts = DIFF_WITH_ALG(options, PATIENCE_DIFF);
else if (!strcmp(arg, "--histogram"))
- DIFF_XDL_SET(options, HISTOGRAM_DIFF);
+ options->xdl_opts = DIFF_WITH_ALG(options, HISTOGRAM_DIFF);
/* flags options */
else if (!strcmp(arg, "--binary")) {
#define DIFF_XDL_SET(opts, flag) ((opts)->xdl_opts |= XDF_##flag)
#define DIFF_XDL_CLR(opts, flag) ((opts)->xdl_opts &= ~XDF_##flag)
+#define DIFF_WITH_ALG(opts, flag) (((opts)->xdl_opts & ~XDF_DIFF_ALGORITHM_MASK) | XDF_##flag)
+
enum diff_words_type {
DIFF_WORDS_NONE = 0,
DIFF_WORDS_PORCELAIN,
diff_format_fn_t format_callback;
void *format_callback_data;
diff_prefix_fn_t output_prefix;
+ int output_prefix_length;
void *output_prefix_data;
};
const struct checkout *state, int to_tempfile,
int *fstat_done, struct stat *statbuf)
{
- struct git_istream *st;
- enum object_type type;
- unsigned long sz;
int result = -1;
- ssize_t kept = 0;
- int fd = -1;
-
- st = open_istream(ce->sha1, &type, &sz, filter);
- if (!st)
- return -1;
- if (type != OBJ_BLOB)
- goto close_and_exit;
+ int fd;
fd = open_output_fd(path, ce, to_tempfile);
- if (fd < 0)
- goto close_and_exit;
-
- for (;;) {
- char buf[1024 * 16];
- ssize_t wrote, holeto;
- ssize_t readlen = read_istream(st, buf, sizeof(buf));
-
- if (!readlen)
- break;
- if (sizeof(buf) == readlen) {
- for (holeto = 0; holeto < readlen; holeto++)
- if (buf[holeto])
- break;
- if (readlen == holeto) {
- kept += holeto;
- continue;
- }
- }
-
- if (kept && lseek(fd, kept, SEEK_CUR) == (off_t) -1)
- goto close_and_exit;
- else
- kept = 0;
- wrote = write_in_full(fd, buf, readlen);
-
- if (wrote != readlen)
- goto close_and_exit;
- }
- if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
- write(fd, "", 1) != 1))
- goto close_and_exit;
- *fstat_done = fstat_output(fd, state, statbuf);
-
-close_and_exit:
- close_istream(st);
- if (0 <= fd)
+ if (0 <= fd) {
+ result = stream_blob_to_fd(fd, ce->sha1, filter, 1);
+ *fstat_done = fstat_output(fd, state, statbuf);
result = close(fd);
+ }
if (result && 0 <= fd)
unlink(path);
return result;
unsigned whitespace_rule_cfg = WS_DEFAULT_RULE;
enum branch_track git_branch_track = BRANCH_TRACK_REMOTE;
enum rebase_setup_type autorebase = AUTOREBASE_NEVER;
-enum push_default_type push_default = PUSH_DEFAULT_MATCHING;
+enum push_default_type push_default = PUSH_DEFAULT_UNSPECIFIED;
#ifndef OBJECT_CREATION_MODE
#define OBJECT_CREATION_MODE OBJECT_CREATION_USES_HARDLINKS
#endif
else if (S_ISREG(entry.mode) || S_ISLNK(entry.mode))
result = walk(&lookup_blob(entry.sha1)->object, OBJ_BLOB, data);
else {
- result = error("in tree %s: entry %s has bad mode %.6o\n",
+ result = error("in tree %s: entry %s has bad mode %.6o",
sha1_to_hex(tree->object.sha1), entry.path, entry.mode);
}
if (result < 0)
/^NAME/,/git-'"$cmd"'/H
${
x
- s/.*git-'"$cmd"' - \(.*\)/ {"'"$cmd"'", "\1"},/
+ s/.*git-'"$cmd"' - \(.*\)/ {"'"$cmd"'", N_("\1")},/
p
}' "Documentation/git-$cmd.txt"
done
ignore-whitespace pass it through git-apply
directory= pass it through git-apply
exclude= pass it through git-apply
+include= pass it through git-apply
C= pass it through git-apply
p= pass it through git-apply
patch-format= format the patch(es) are in
say Using index info to reconstruct a base tree...
cmd='GIT_INDEX_FILE="$dotest/patch-merge-tmp-index"'
+
+ if test -z "$GIT_QUIET"
+ then
+ eval "$cmd git diff-index --cached --diff-filter=AM --name-status HEAD"
+ fi
+
cmd="$cmd git apply --cached $git_apply_opt"' <"$dotest/patch"'
if eval "$cmd"
then
;;
--resolvemsg)
shift; resolvemsg=$1 ;;
- --whitespace|--directory|--exclude)
+ --whitespace|--directory|--exclude|--include)
git_apply_opt="$git_apply_opt $(sq "$1=$2")"; shift ;;
-C|-p)
git_apply_opt="$git_apply_opt $(sq "$1$2")"; shift ;;
--- /dev/null
+#!/usr/bin/env python
+#
+# git-p4.py -- A tool for bidirectional operation between a Perforce depot and git.
+#
+# Author: Simon Hausmann <simon@lst.de>
+# Copyright: 2007 Simon Hausmann <simon@lst.de>
+# 2007 Trolltech ASA
+# License: MIT <http://www.opensource.org/licenses/mit-license.php>
+#
+
+import optparse, sys, os, marshal, subprocess, shelve
+import tempfile, getopt, os.path, time, platform
+import re, shutil
+
+verbose = False
+
+# Only labels/tags matching this will be imported/exported
+defaultLabelRegexp = r'[a-zA-Z0-9_\-.]+$'
+
+def p4_build_cmd(cmd):
+ """Build a suitable p4 command line.
+
+ This consolidates building and returning a p4 command line into one
+ location. It means that hooking into the environment, or other configuration
+ can be done more easily.
+ """
+ real_cmd = ["p4"]
+
+ user = gitConfig("git-p4.user")
+ if len(user) > 0:
+ real_cmd += ["-u",user]
+
+ password = gitConfig("git-p4.password")
+ if len(password) > 0:
+ real_cmd += ["-P", password]
+
+ port = gitConfig("git-p4.port")
+ if len(port) > 0:
+ real_cmd += ["-p", port]
+
+ host = gitConfig("git-p4.host")
+ if len(host) > 0:
+ real_cmd += ["-H", host]
+
+ client = gitConfig("git-p4.client")
+ if len(client) > 0:
+ real_cmd += ["-c", client]
+
+
+ if isinstance(cmd,basestring):
+ real_cmd = ' '.join(real_cmd) + ' ' + cmd
+ else:
+ real_cmd += cmd
+ return real_cmd
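+
+# Illustrative sketch, not part of the original script: with
+# git-p4.user=alice and git-p4.port=perforce:1666 configured,
+# p4_build_cmd(["changes", "-m", "1"]) would return
+# ["p4", "-u", "alice", "-p", "perforce:1666", "changes", "-m", "1"].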
+
+def chdir(dir):
+ # P4 uses the PWD environment variable rather than getcwd(). Since we're
+ # not using the shell, we have to set it ourselves. This path could
+ # be relative, so go there first, then figure out where we ended up.
+ os.chdir(dir)
+ os.environ['PWD'] = os.getcwd()
+
+def die(msg):
+ if verbose:
+ raise Exception(msg)
+ else:
+ sys.stderr.write(msg + "\n")
+ sys.exit(1)
+
+def write_pipe(c, stdin):
+ if verbose:
+ sys.stderr.write('Writing pipe: %s\n' % str(c))
+
+ expand = isinstance(c,basestring)
+ p = subprocess.Popen(c, stdin=subprocess.PIPE, shell=expand)
+ pipe = p.stdin
+ val = pipe.write(stdin)
+ pipe.close()
+ if p.wait():
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_write_pipe(c, stdin):
+ real_cmd = p4_build_cmd(c)
+ return write_pipe(real_cmd, stdin)
+
+def read_pipe(c, ignore_error=False):
+ if verbose:
+ sys.stderr.write('Reading pipe: %s\n' % str(c))
+
+ expand = isinstance(c,basestring)
+ p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
+ pipe = p.stdout
+ val = pipe.read()
+ if p.wait() and not ignore_error:
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_read_pipe(c, ignore_error=False):
+ real_cmd = p4_build_cmd(c)
+ return read_pipe(real_cmd, ignore_error)
+
+def read_pipe_lines(c):
+ if verbose:
+ sys.stderr.write('Reading pipe: %s\n' % str(c))
+
+ expand = isinstance(c, basestring)
+ p = subprocess.Popen(c, stdout=subprocess.PIPE, shell=expand)
+ pipe = p.stdout
+ val = pipe.readlines()
+ if pipe.close() or p.wait():
+ die('Command failed: %s' % str(c))
+
+ return val
+
+def p4_read_pipe_lines(c):
+ """Specifically invoke p4 on the command supplied. """
+ real_cmd = p4_build_cmd(c)
+ return read_pipe_lines(real_cmd)
+
+def system(cmd):
+ expand = isinstance(cmd,basestring)
+ if verbose:
+ sys.stderr.write("executing %s\n" % str(cmd))
+ subprocess.check_call(cmd, shell=expand)
+
+def p4_system(cmd):
+ """Specifically invoke p4 as the system command. """
+ real_cmd = p4_build_cmd(cmd)
+ expand = isinstance(real_cmd, basestring)
+ subprocess.check_call(real_cmd, shell=expand)
+
+def p4_integrate(src, dest):
+ p4_system(["integrate", "-Dt", wildcard_encode(src), wildcard_encode(dest)])
+
+def p4_sync(f, *options):
+ p4_system(["sync"] + list(options) + [wildcard_encode(f)])
+
+def p4_add(f):
+ # forcibly add file names with wildcards
+ if wildcard_present(f):
+ p4_system(["add", "-f", f])
+ else:
+ p4_system(["add", f])
+
+def p4_delete(f):
+ p4_system(["delete", wildcard_encode(f)])
+
+def p4_edit(f):
+ p4_system(["edit", wildcard_encode(f)])
+
+def p4_revert(f):
+ p4_system(["revert", wildcard_encode(f)])
+
+def p4_reopen(type, f):
+ p4_system(["reopen", "-t", type, wildcard_encode(f)])
+
+#
+# Canonicalize the p4 type and return a tuple of the
+# base type, plus any modifiers. See "p4 help filetypes"
+# for a list and explanation.
+#
+def split_p4_type(p4type):
+
+ p4_filetypes_historical = {
+ "ctempobj": "binary+Sw",
+ "ctext": "text+C",
+ "cxtext": "text+Cx",
+ "ktext": "text+k",
+ "kxtext": "text+kx",
+ "ltext": "text+F",
+ "tempobj": "binary+FSw",
+ "ubinary": "binary+F",
+ "uresource": "resource+F",
+ "uxbinary": "binary+Fx",
+ "xbinary": "binary+x",
+ "xltext": "text+Fx",
+ "xtempobj": "binary+Swx",
+ "xtext": "text+x",
+ "xunicode": "unicode+x",
+ "xutf16": "utf16+x",
+ }
+ if p4type in p4_filetypes_historical:
+ p4type = p4_filetypes_historical[p4type]
+ mods = ""
+ s = p4type.split("+")
+ base = s[0]
+ mods = ""
+ if len(s) > 1:
+ mods = s[1]
+ return (base, mods)
+
+#
+# return the raw p4 type of a file (text, text+ko, etc)
+#
+def p4_type(file):
+ results = p4CmdList(["fstat", "-T", "headType", file])
+ return results[0]['headType']
+
+#
+# Given a type base and modifier, return a regexp matching
+# the keywords that can be expanded in the file
+#
+def p4_keywords_regexp_for_type(base, type_mods):
+ if base in ("text", "unicode", "binary"):
+ kwords = None
+ if "ko" in type_mods:
+ kwords = 'Id|Header'
+ elif "k" in type_mods:
+ kwords = 'Id|Header|Author|Date|DateTime|Change|File|Revision'
+ else:
+ return None
+ pattern = r"""
+ \$ # Starts with a dollar, followed by...
+ (%s) # one of the keywords, followed by...
+ (:[^$]+)? # possibly an old expansion, followed by...
+ \$ # another dollar
+ """ % kwords
+ return pattern
+ else:
+ return None
+
+#
+# Given a file, return a regexp matching the possible
+# RCS keywords that will be expanded, or None for files
+# with kw expansion turned off.
+#
+def p4_keywords_regexp_for_file(file):
+ if not os.path.exists(file):
+ return None
+ else:
+ (type_base, type_mods) = split_p4_type(p4_type(file))
+ return p4_keywords_regexp_for_type(type_base, type_mods)
+
+def setP4ExecBit(file, mode):
+ # Reopens an already open file and changes the execute bit to match
+ # the execute bit setting in the passed in mode.
+
+ p4Type = "+x"
+
+ if not isModeExec(mode):
+ p4Type = getP4OpenedType(file)
+ p4Type = re.sub('^([cku]?)x(.*)', '\\1\\2', p4Type)
+ p4Type = re.sub('(.*?\+.*?)x(.*?)', '\\1\\2', p4Type)
+ if p4Type[-1] == "+":
+ p4Type = p4Type[0:-1]
+
+ p4_reopen(p4Type, file)
+
+def getP4OpenedType(file):
+ # Returns the perforce file type for the given file.
+
+ result = p4_read_pipe(["opened", wildcard_encode(file)])
+ match = re.match(".*\((.+)\)\r?$", result)
+ if match:
+ return match.group(1)
+ else:
+ die("Could not determine file type for %s (result: '%s')" % (file, result))
+
+# Return the set of all p4 labels
+def getP4Labels(depotPaths):
+ labels = set()
+ if isinstance(depotPaths,basestring):
+ depotPaths = [depotPaths]
+
+ for l in p4CmdList(["labels"] + ["%s..." % p for p in depotPaths]):
+ label = l['label']
+ labels.add(label)
+
+ return labels
+
+# Return the set of all git tags
+def getGitTags():
+ gitTags = set()
+ for line in read_pipe_lines(["git", "tag"]):
+ tag = line.strip()
+ gitTags.add(tag)
+ return gitTags
+
+def diffTreePattern():
+ # This is a simple generator for the diff tree regex pattern. This could be
+ # a class variable if this and parseDiffTreeEntry were a part of a class.
+ pattern = re.compile(':(\d+) (\d+) (\w+) (\w+) ([A-Z])(\d+)?\t(.*?)((\t(.*))|$)')
+ while True:
+ yield pattern
+
+def parseDiffTreeEntry(entry):
+ """Parses a single diff tree entry into its component elements.
+
+ See git-diff-tree(1) manpage for details about the format of the diff
+ output. This method returns a dictionary with the following elements:
+
+ src_mode - The mode of the source file
+ dst_mode - The mode of the destination file
+ src_sha1 - The sha1 for the source file
+ dst_sha1 - The sha1 for the destination file
+ status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
+ status_score - The score for the status (applicable for 'C' and 'R'
+ statuses). This is None if there is no score.
+ src - The path for the source file.
+ dst - The path for the destination file. This is only present for
+ copy or renames. If it is not present, this is None.
+
+ If the pattern is not matched, None is returned."""
+
+ match = diffTreePattern().next().match(entry)
+ if match:
+ return {
+ 'src_mode': match.group(1),
+ 'dst_mode': match.group(2),
+ 'src_sha1': match.group(3),
+ 'dst_sha1': match.group(4),
+ 'status': match.group(5),
+ 'status_score': match.group(6),
+ 'src': match.group(7),
+ 'dst': match.group(10)
+ }
+ return None
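+
+# Illustrative sketch, not part of the original script: a raw
+# "git diff-tree -r" line such as
+#   ":100644 100755 a23b6ef 0f1d2c3 M\tfoo.sh"
+# would parse to src_mode "100644", dst_mode "100755", status "M",
+# status_score None, src "foo.sh", and dst None.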
+
+def isModeExec(mode):
+ # Returns True if the given git mode represents an executable file,
+ # otherwise False.
+ return mode[-3:] == "755"
+
+def isModeExecChanged(src_mode, dst_mode):
+ return isModeExec(src_mode) != isModeExec(dst_mode)
+
+def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None):
+
+ if isinstance(cmd,basestring):
+ cmd = "-G " + cmd
+ expand = True
+ else:
+ cmd = ["-G"] + cmd
+ expand = False
+
+ cmd = p4_build_cmd(cmd)
+ if verbose:
+ sys.stderr.write("Opening pipe: %s\n" % str(cmd))
+
+ # Use a temporary file to avoid deadlocks without
+ # subprocess.communicate(), which would put another copy
+ # of stdout into memory.
+ stdin_file = None
+ if stdin is not None:
+ stdin_file = tempfile.TemporaryFile(prefix='p4-stdin', mode=stdin_mode)
+ if isinstance(stdin,basestring):
+ stdin_file.write(stdin)
+ else:
+ for i in stdin:
+ stdin_file.write(i + '\n')
+ stdin_file.flush()
+ stdin_file.seek(0)
+
+ p4 = subprocess.Popen(cmd,
+ shell=expand,
+ stdin=stdin_file,
+ stdout=subprocess.PIPE)
+
+ result = []
+ try:
+ while True:
+ entry = marshal.load(p4.stdout)
+ if cb is not None:
+ cb(entry)
+ else:
+ result.append(entry)
+ except EOFError:
+ pass
+ exitCode = p4.wait()
+ if exitCode != 0:
+ entry = {}
+ entry["p4ExitCode"] = exitCode
+ result.append(entry)
+
+ return result
+
+def p4Cmd(cmd):
+ list = p4CmdList(cmd)
+ result = {}
+ for entry in list:
+ result.update(entry)
+ return result
+
+def p4Where(depotPath):
+ if not depotPath.endswith("/"):
+ depotPath += "/"
+ depotPath = depotPath + "..."
+ outputList = p4CmdList(["where", depotPath])
+ output = None
+ for entry in outputList:
+ if "depotFile" in entry:
+ if entry["depotFile"] == depotPath:
+ output = entry
+ break
+ elif "data" in entry:
+ data = entry.get("data")
+ space = data.find(" ")
+ if data[:space] == depotPath:
+ output = entry
+ break
+ if output == None:
+ return ""
+ if output["code"] == "error":
+ return ""
+ clientPath = ""
+ if "path" in output:
+ clientPath = output.get("path")
+ elif "data" in output:
+ data = output.get("data")
+ lastSpace = data.rfind(" ")
+ clientPath = data[lastSpace + 1:]
+
+ if clientPath.endswith("..."):
+ clientPath = clientPath[:-3]
+ return clientPath
+
+def currentGitBranch():
+ return read_pipe("git name-rev HEAD").split(" ")[1].strip()
+
+def isValidGitDir(path):
+ if (os.path.exists(path + "/HEAD")
+ and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
+ return True
+ return False
+
+def parseRevision(ref):
+ return read_pipe("git rev-parse %s" % ref).strip()
+
+def branchExists(ref):
+ rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref],
+ ignore_error=True)
+ return len(rev) > 0
+
+def extractLogMessageFromGitCommit(commit):
+ logMessage = ""
+
+ ## fixme: title is first line of commit, not 1st paragraph.
+ foundTitle = False
+ for log in read_pipe_lines("git cat-file commit %s" % commit):
+ if not foundTitle:
+ if len(log) == 1:
+ foundTitle = True
+ continue
+
+ logMessage += log
+ return logMessage
+
+def extractSettingsGitLog(log):
+ values = {}
+ for line in log.split("\n"):
+ line = line.strip()
+ m = re.search (r"^ *\[git-p4: (.*)\]$", line)
+ if not m:
+ continue
+
+ assignments = m.group(1).split (':')
+ for a in assignments:
+ vals = a.split ('=')
+ key = vals[0].strip()
+ val = ('='.join (vals[1:])).strip()
+ if val.endswith ('\"') and val.startswith('"'):
+ val = val[1:-1]
+
+ values[key] = val
+
+ paths = values.get("depot-paths")
+ if not paths:
+ paths = values.get("depot-path")
+ if paths:
+ values['depot-paths'] = paths.split(',')
+ return values
+
+def gitBranchExists(branch):
+ proc = subprocess.Popen(["git", "rev-parse", branch],
+ stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ return proc.wait() == 0
+
+_gitConfig = {}
+def gitConfig(key, args = None): # set args to "--bool", for instance
+ if not _gitConfig.has_key(key):
+ argsFilter = ""
+ if args != None:
+ argsFilter = "%s " % args
+ cmd = "git config %s%s" % (argsFilter, key)
+ _gitConfig[key] = read_pipe(cmd, ignore_error=True).strip()
+ return _gitConfig[key]
+
+def gitConfigList(key):
+ if not _gitConfig.has_key(key):
+ _gitConfig[key] = read_pipe("git config --get-all %s" % key, ignore_error=True).strip().split(os.linesep)
+ return _gitConfig[key]
+
+def p4BranchesInGit(branchesAreInRemotes = True):
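+    # Return a dict mapping each imported p4 branch name (with the leading
+    # "p4/" stripped) to the commit its ref currently points at.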
+ branches = {}
+
+ cmdline = "git rev-parse --symbolic "
+ if branchesAreInRemotes:
+ cmdline += " --remotes"
+ else:
+ cmdline += " --branches"
+
+ for line in read_pipe_lines(cmdline):
+ line = line.strip()
+
+ ## only import to p4/
+ if not line.startswith('p4/') or line == "p4/HEAD":
+ continue
+ branch = line
+
+ # strip off p4
+ branch = re.sub ("^p4/", "", line)
+
+ branches[branch] = parseRevision(line)
+ return branches
+
+def findUpstreamBranchPoint(head = "HEAD"):
+ branches = p4BranchesInGit()
+ # map from depot-path to branch name
+ branchByDepotPath = {}
+ for branch in branches.keys():
+ tip = branches[branch]
+ log = extractLogMessageFromGitCommit(tip)
+ settings = extractSettingsGitLog(log)
+ if settings.has_key("depot-paths"):
+ paths = ",".join(settings["depot-paths"])
+ branchByDepotPath[paths] = "remotes/p4/" + branch
+
+ settings = None
+ parent = 0
+ while parent < 65535:
+ commit = head + "~%s" % parent
+ log = extractLogMessageFromGitCommit(commit)
+ settings = extractSettingsGitLog(log)
+ if settings.has_key("depot-paths"):
+ paths = ",".join(settings["depot-paths"])
+ if branchByDepotPath.has_key(paths):
+ return [branchByDepotPath[paths], settings]
+
+ parent = parent + 1
+
+ return ["", settings]
+
+def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
+ if not silent:
+ print ("Creating/updating branch(es) in %s based on origin branch(es)"
+ % localRefPrefix)
+
+ originPrefix = "origin/p4/"
+
+ for line in read_pipe_lines("git rev-parse --symbolic --remotes"):
+ line = line.strip()
+ if (not line.startswith(originPrefix)) or line.endswith("HEAD"):
+ continue
+
+ headName = line[len(originPrefix):]
+ remoteHead = localRefPrefix + headName
+ originHead = line
+
+ original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
+ if (not original.has_key('depot-paths')
+ or not original.has_key('change')):
+ continue
+
+ update = False
+ if not gitBranchExists(remoteHead):
+ if verbose:
+ print "creating %s" % remoteHead
+ update = True
+ else:
+ settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
+ if settings.has_key('change') > 0:
+ if settings['depot-paths'] == original['depot-paths']:
+ originP4Change = int(original['change'])
+ p4Change = int(settings['change'])
+ if originP4Change > p4Change:
+ print ("%s (%s) is newer than %s (%s). "
+ "Updating p4 branch from origin."
+ % (originHead, originP4Change,
+ remoteHead, p4Change))
+ update = True
+ else:
+ print ("Ignoring: %s was imported from %s while "
+ "%s was imported from %s"
+ % (originHead, ','.join(original['depot-paths']),
+ remoteHead, ','.join(settings['depot-paths'])))
+
+ if update:
+ system("git update-ref %s %s" % (remoteHead, originHead))
+
+def originP4BranchesExist():
+ return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
+
+def p4ChangesForPaths(depotPaths, changeRange):
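+    # Run "p4 changes" over every depot path with the requested range,
+    # e.g. "p4 changes //depot/project/...@1,6" (illustrative path), and
+    # return the sorted list of changelist numbers.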
+ assert depotPaths
+ cmd = ['changes']
+ for p in depotPaths:
+ cmd += ["%s...%s" % (p, changeRange)]
+ output = p4_read_pipe_lines(cmd)
+
+ changes = {}
+ for line in output:
+ changeNum = int(line.split(" ")[1])
+ changes[changeNum] = True
+
+ changelist = changes.keys()
+ changelist.sort()
+ return changelist
+
+def p4PathStartsWith(path, prefix):
+ # This method tries to remedy a potential mixed-case issue:
+ #
+ # If UserA adds //depot/DirA/file1
+ # and UserB adds //depot/dira/file2
+ #
+ # we may or may not have a problem. If you have core.ignorecase=true,
+ # we treat DirA and dira as the same directory
+ ignorecase = gitConfig("core.ignorecase", "--bool") == "true"
+ if ignorecase:
+ return path.lower().startswith(prefix.lower())
+ return path.startswith(prefix)
+
+def getClientSpec():
+ """Look at the p4 client spec, create a View() object that contains
+ all the mappings, and return it."""
+
+ specList = p4CmdList("client -o")
+ if len(specList) != 1:
+ die('Output from "client -o" is %d lines, expecting 1' %
+ len(specList))
+
+ # dictionary of all client parameters
+ entry = specList[0]
+
+ # just the keys that start with "View"
+ view_keys = [ k for k in entry.keys() if k.startswith("View") ]
+
+ # hold this new View
+ view = View()
+
+ # append the lines, in order, to the view
+ for view_num in range(len(view_keys)):
+ k = "View%d" % view_num
+ if k not in view_keys:
+ die("Expected view key %s missing" % k)
+ view.append(entry[k])
+
+ return view
+
+def getClientRoot():
+ """Grab the client directory."""
+
+ output = p4CmdList("client -o")
+ if len(output) != 1:
+ die('Output from "client -o" is %d lines, expecting 1' % len(output))
+
+ entry = output[0]
+ if "Root" not in entry:
+ die('Client has no "Root"')
+
+ return entry["Root"]
+
+#
+# P4 wildcards are not allowed in filenames. P4 complains
+# if you simply add them, but you can force it with "-f", in
+# which case it translates them into %xx encoding internally.
+#
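+# For example, a depot file stored as "foo%40bar.txt" decodes to
+# "foo@bar.txt", and wildcard_encode("foo@bar.txt") produces
+# "foo%40bar.txt" again.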
+def wildcard_decode(path):
+ # Search for and fix just these four characters. Do % last so
+ # that fixing it does not inadvertently create new %-escapes.
+    # Cannot have * in a filename in Windows; untested as to
+ # what p4 would do in such a case.
+ if not platform.system() == "Windows":
+ path = path.replace("%2A", "*")
+ path = path.replace("%23", "#") \
+ .replace("%40", "@") \
+ .replace("%25", "%")
+ return path
+
+def wildcard_encode(path):
+ # do % first to avoid double-encoding the %s introduced here
+ path = path.replace("%", "%25") \
+ .replace("*", "%2A") \
+ .replace("#", "%23") \
+ .replace("@", "%40")
+ return path
+
+def wildcard_present(path):
+ return path.translate(None, "*#@%") != path
+
+class Command:
+ def __init__(self):
+ self.usage = "usage: %prog [options]"
+ self.needsGit = True
+ self.verbose = False
+
+class P4UserMap:
+ def __init__(self):
+ self.userMapFromPerforceServer = False
+ self.myP4UserId = None
+
+ def p4UserId(self):
+ if self.myP4UserId:
+ return self.myP4UserId
+
+ results = p4CmdList("user -o")
+ for r in results:
+ if r.has_key('User'):
+ self.myP4UserId = r['User']
+ return r['User']
+ die("Could not find your p4 user id")
+
+ def p4UserIsMe(self, p4User):
+ # return True if the given p4 user is actually me
+ me = self.p4UserId()
+ if not p4User or p4User != me:
+ return False
+ else:
+ return True
+
+ def getUserCacheFilename(self):
+ home = os.environ.get("HOME", os.environ.get("USERPROFILE"))
+ return home + "/.gitp4-usercache.txt"
+
+ def getUserMapFromPerforceServer(self):
+ if self.userMapFromPerforceServer:
+ return
+ self.users = {}
+ self.emails = {}
+
+ for output in p4CmdList("users"):
+ if not output.has_key("User"):
+ continue
+ self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
+ self.emails[output["Email"]] = output["User"]
+
+
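+        # The cache holds one "user<TAB>Full Name <email>" entry per line.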
+ s = ''
+ for (key, val) in self.users.items():
+ s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
+
+ open(self.getUserCacheFilename(), "wb").write(s)
+ self.userMapFromPerforceServer = True
+
+ def loadUserMapFromCache(self):
+ self.users = {}
+ self.userMapFromPerforceServer = False
+ try:
+ cache = open(self.getUserCacheFilename(), "rb")
+ lines = cache.readlines()
+ cache.close()
+ for line in lines:
+ entry = line.strip().split("\t")
+ self.users[entry[0]] = entry[1]
+ except IOError:
+ self.getUserMapFromPerforceServer()
+
+class P4Debug(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = []
+ self.description = "A tool to debug the output of p4 -G."
+ self.needsGit = False
+
+ def run(self, args):
+ j = 0
+ for output in p4CmdList(args):
+ print 'Element: %d' % j
+ j += 1
+ print output
+ return True
+
+class P4RollBack(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [
+ optparse.make_option("--local", dest="rollbackLocalBranches", action="store_true")
+ ]
+ self.description = "A tool to debug the multi-branch import. Don't use :)"
+ self.rollbackLocalBranches = False
+
+ def run(self, args):
+ if len(args) != 1:
+ return False
+ maxChange = int(args[0])
+
+ if "p4ExitCode" in p4Cmd("changes -m 1"):
+ die("Problems executing p4");
+
+ if self.rollbackLocalBranches:
+ refPrefix = "refs/heads/"
+ lines = read_pipe_lines("git rev-parse --symbolic --branches")
+ else:
+ refPrefix = "refs/remotes/"
+ lines = read_pipe_lines("git rev-parse --symbolic --remotes")
+
+ for line in lines:
+ if self.rollbackLocalBranches or (line.startswith("p4/") and line != "p4/HEAD\n"):
+ line = line.strip()
+ ref = refPrefix + line
+ log = extractLogMessageFromGitCommit(ref)
+ settings = extractSettingsGitLog(log)
+
+ depotPaths = settings['depot-paths']
+ change = settings['change']
+
+ changed = False
+
+ if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
+ for p in depotPaths]))) == 0:
+ print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
+ system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
+ continue
+
+ while change and int(change) > maxChange:
+ changed = True
+ if self.verbose:
+ print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
+ system("git update-ref %s \"%s^\"" % (ref, ref))
+ log = extractLogMessageFromGitCommit(ref)
+ settings = extractSettingsGitLog(log)
+
+
+ depotPaths = settings['depot-paths']
+ change = settings['change']
+
+ if changed:
+ print "%s rewound to %s" % (ref, change)
+
+ return True
+
+class P4Submit(Command, P4UserMap):
+ def __init__(self):
+ Command.__init__(self)
+ P4UserMap.__init__(self)
+ self.options = [
+ optparse.make_option("--origin", dest="origin"),
+ optparse.make_option("-M", dest="detectRenames", action="store_true"),
+ # preserve the user, requires relevant p4 permissions
+ optparse.make_option("--preserve-user", dest="preserveUser", action="store_true"),
+ optparse.make_option("--export-labels", dest="exportLabels", action="store_true"),
+ ]
+ self.description = "Submit changes from git to the perforce depot."
+ self.usage += " [name of git branch to submit into perforce depot]"
+ self.interactive = True
+ self.origin = ""
+ self.detectRenames = False
+ self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
+ self.isWindows = (platform.system() == "Windows")
+ self.exportLabels = False
+
+ def check(self):
+ if len(p4CmdList("opened ...")) > 0:
+ die("You have files opened with perforce! Close them before starting the sync.")
+
+ # replaces everything between 'Description:' and the next P4 submit template field with the
+ # commit message
+ def prepareLogMessage(self, template, message):
+ result = ""
+
+ inDescriptionSection = False
+
+ for line in template.split("\n"):
+ if line.startswith("#"):
+ result += line + "\n"
+ continue
+
+ if inDescriptionSection:
+ if line.startswith("Files:") or line.startswith("Jobs:"):
+ inDescriptionSection = False
+ else:
+ continue
+ else:
+ if line.startswith("Description:"):
+ inDescriptionSection = True
+ line += "\n"
+ for messageLine in message.split("\n"):
+ line += "\t" + messageLine + "\n"
+
+ result += line + "\n"
+
+ return result
+
+ def patchRCSKeywords(self, file, pattern):
+ # Attempt to zap the RCS keywords in a p4 controlled file matching the given pattern
+ (handle, outFileName) = tempfile.mkstemp(dir='.')
+ try:
+ outFile = os.fdopen(handle, "w+")
+ inFile = open(file, "r")
+ regexp = re.compile(pattern, re.VERBOSE)
+ for line in inFile.readlines():
+ line = regexp.sub(r'$\1$', line)
+ outFile.write(line)
+ inFile.close()
+ outFile.close()
+ # Forcibly overwrite the original file
+ os.unlink(file)
+ shutil.move(outFileName, file)
+ except:
+ # cleanup our temporary file
+ os.unlink(outFileName)
+ print "Failed to strip RCS keywords in %s" % file
+ raise
+
+ print "Patched up RCS keywords in %s" % file
+
+ def p4UserForCommit(self,id):
+ # Return the tuple (perforce user,git email) for a given git commit id
+ self.getUserMapFromPerforceServer()
+ gitEmail = read_pipe("git log --max-count=1 --format='%%ae' %s" % id)
+ gitEmail = gitEmail.strip()
+ if not self.emails.has_key(gitEmail):
+ return (None,gitEmail)
+ else:
+ return (self.emails[gitEmail],gitEmail)
+
+ def checkValidP4Users(self,commits):
+ # check if any git authors cannot be mapped to p4 users
+ for id in commits:
+ (user,email) = self.p4UserForCommit(id)
+ if not user:
+ msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
+ if gitConfig('git-p4.allowMissingP4Users').lower() == "true":
+ print "%s" % msg
+ else:
+ die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
+
+ def lastP4Changelist(self):
+ # Get back the last changelist number submitted in this client spec. This
+ # then gets used to patch up the username in the change. If the same
+ # client spec is being used by multiple processes then this might go
+ # wrong.
+ results = p4CmdList("client -o") # find the current client
+ client = None
+ for r in results:
+ if r.has_key('Client'):
+ client = r['Client']
+ break
+ if not client:
+ die("could not get client spec")
+ results = p4CmdList(["changes", "-c", client, "-m", "1"])
+ for r in results:
+ if r.has_key('change'):
+ return r['change']
+ die("Could not get changelist number for last submit - cannot patch up user details")
+
+ def modifyChangelistUser(self, changelist, newUser):
+ # fixup the user field of a changelist after it has been submitted.
+ changes = p4CmdList("change -o %s" % changelist)
+ if len(changes) != 1:
+ die("Bad output from p4 change modifying %s to user %s" %
+ (changelist, newUser))
+
+ c = changes[0]
+ if c['User'] == newUser: return # nothing to do
+ c['User'] = newUser
+ input = marshal.dumps(c)
+
+ result = p4CmdList("change -f -i", stdin=input)
+ for r in result:
+ if r.has_key('code'):
+ if r['code'] == 'error':
+ die("Could not modify user field of changelist %s to %s:%s" % (changelist, newUser, r['data']))
+ if r.has_key('data'):
+ print("Updated user field for changelist %s to %s" % (changelist, newUser))
+ return
+ die("Could not modify user field of changelist %s to %s" % (changelist, newUser))
+
+ def canChangeChangelists(self):
+ # check to see if we have p4 admin or super-user permissions, either of
+ # which are required to modify changelists.
+ results = p4CmdList(["protects", self.depotPath])
+ for r in results:
+ if r.has_key('perm'):
+ if r['perm'] == 'admin':
+ return 1
+ if r['perm'] == 'super':
+ return 1
+ return 0
+
+ def prepareSubmitTemplate(self):
+ # remove lines in the Files section that show changes to files outside the depot path we're committing into
+ template = ""
+ inFilesSection = False
+ for line in p4_read_pipe_lines(['change', '-o']):
+ if line.endswith("\r\n"):
+ line = line[:-2] + "\n"
+ if inFilesSection:
+ if line.startswith("\t"):
+ # path starts and ends with a tab
+ path = line[1:]
+ lastTab = path.rfind("\t")
+ if lastTab != -1:
+ path = path[:lastTab]
+ if not p4PathStartsWith(path, self.depotPath):
+ continue
+ else:
+ inFilesSection = False
+ else:
+ if line.startswith("Files:"):
+ inFilesSection = True
+
+ template += line
+
+ return template
+
+ def edit_template(self, template_file):
+ """Invoke the editor to let the user change the submission
+ message. Return true if okay to continue with the submit."""
+
+ # if configured to skip the editing part, just submit
+ if gitConfig("git-p4.skipSubmitEdit") == "true":
+ return True
+
+ # look at the modification time, to check later if the user saved
+ # the file
+ mtime = os.stat(template_file).st_mtime
+
+ # invoke the editor
+ if os.environ.has_key("P4EDITOR") and (os.environ.get("P4EDITOR") != ""):
+ editor = os.environ.get("P4EDITOR")
+ else:
+ editor = read_pipe("git var GIT_EDITOR").strip()
+ system(editor + " " + template_file)
+
+ # If the file was not saved, prompt to see if this patch should
+ # be skipped. But skip this verification step if configured so.
+ if gitConfig("git-p4.skipSubmitEditCheck") == "true":
+ return True
+
+ # modification time updated means user saved the file
+ if os.stat(template_file).st_mtime > mtime:
+ return True
+
+ while True:
+ response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
+ if response == 'y':
+ return True
+ if response == 'n':
+ return False
+
+ def applyCommit(self, id):
+ print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
+
+ (p4User, gitEmail) = self.p4UserForCommit(id)
+
+ if not self.detectRenames:
+ # If not explicitly set check the config variable
+ self.detectRenames = gitConfig("git-p4.detectRenames")
+
+ if self.detectRenames.lower() == "false" or self.detectRenames == "":
+ diffOpts = ""
+ elif self.detectRenames.lower() == "true":
+ diffOpts = "-M"
+ else:
+ diffOpts = "-M%s" % self.detectRenames
+
+ detectCopies = gitConfig("git-p4.detectCopies")
+ if detectCopies.lower() == "true":
+ diffOpts += " -C"
+ elif detectCopies != "" and detectCopies.lower() != "false":
+ diffOpts += " -C%s" % detectCopies
+
+ if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
+ diffOpts += " --find-copies-harder"
+
+ diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
+ filesToAdd = set()
+ filesToDelete = set()
+ editedFiles = set()
+ pureRenameCopy = set()
+ filesToChangeExecBit = {}
+
+ for line in diff:
+ diff = parseDiffTreeEntry(line)
+ modifier = diff['status']
+ path = diff['src']
+ if modifier == "M":
+ p4_edit(path)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ filesToChangeExecBit[path] = diff['dst_mode']
+ editedFiles.add(path)
+ elif modifier == "A":
+ filesToAdd.add(path)
+ filesToChangeExecBit[path] = diff['dst_mode']
+ if path in filesToDelete:
+ filesToDelete.remove(path)
+ elif modifier == "D":
+ filesToDelete.add(path)
+ if path in filesToAdd:
+ filesToAdd.remove(path)
+ elif modifier == "C":
+ src, dest = diff['src'], diff['dst']
+ p4_integrate(src, dest)
+ pureRenameCopy.add(dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ pureRenameCopy.discard(dest)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ p4_edit(dest)
+ pureRenameCopy.discard(dest)
+ filesToChangeExecBit[dest] = diff['dst_mode']
+ os.unlink(dest)
+ editedFiles.add(dest)
+ elif modifier == "R":
+ src, dest = diff['src'], diff['dst']
+ p4_integrate(src, dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ else:
+ pureRenameCopy.add(dest)
+ if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
+ p4_edit(dest)
+ filesToChangeExecBit[dest] = diff['dst_mode']
+ os.unlink(dest)
+ editedFiles.add(dest)
+ filesToDelete.add(src)
+ else:
+ die("unknown modifier %s for %s" % (modifier, path))
+
+ diffcmd = "git format-patch -k --stdout \"%s^\"..\"%s\"" % (id, id)
+ patchcmd = diffcmd + " | git apply "
+ tryPatchCmd = patchcmd + "--check -"
+ applyPatchCmd = patchcmd + "--check --apply -"
+ patch_succeeded = True
+
+ if os.system(tryPatchCmd) != 0:
+ fixed_rcs_keywords = False
+ patch_succeeded = False
+ print "Unfortunately applying the change failed!"
+
+ # Patch failed, maybe it's just RCS keyword woes. Look through
+ # the patch to see if that's possible.
+ if gitConfig("git-p4.attemptRCSCleanup","--bool") == "true":
+ file = None
+ pattern = None
+ kwfiles = {}
+ for file in editedFiles | filesToDelete:
+ # did this file's delta contain RCS keywords?
+ pattern = p4_keywords_regexp_for_file(file)
+
+ if pattern:
+ # this file is a possibility...look for RCS keywords.
+ regexp = re.compile(pattern, re.VERBOSE)
+ for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]):
+ if regexp.search(line):
+ if verbose:
+ print "got keyword match on %s in %s in %s" % (pattern, line, file)
+ kwfiles[file] = pattern
+ break
+
+ for file in kwfiles:
+ if verbose:
+ print "zapping %s with %s" % (line,pattern)
+ self.patchRCSKeywords(file, kwfiles[file])
+ fixed_rcs_keywords = True
+
+ if fixed_rcs_keywords:
+ print "Retrying the patch with RCS keywords cleaned up"
+ if os.system(tryPatchCmd) == 0:
+ patch_succeeded = True
+
+ if not patch_succeeded:
+ print "What do you want to do?"
+ response = "x"
+ while response != "s" and response != "a" and response != "w":
+ response = raw_input("[s]kip this patch / [a]pply the patch forcibly "
+ "and with .rej files / [w]rite the patch to a file (patch.txt) ")
+ if response == "s":
+ print "Skipping! Good luck with the next patches..."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ os.remove(f)
+ return
+ elif response == "a":
+ os.system(applyPatchCmd)
+ if len(filesToAdd) > 0:
+ print "You may also want to call p4 add on the following files:"
+ print " ".join(filesToAdd)
+ if len(filesToDelete):
+ print "The following files should be scheduled for deletion with p4 delete:"
+ print " ".join(filesToDelete)
+ die("Please resolve and submit the conflict manually and "
+ + "continue afterwards with git p4 submit --continue")
+ elif response == "w":
+ system(diffcmd + " > patch.txt")
+ print "Patch saved to patch.txt in %s !" % self.clientPath
+ die("Please resolve and submit the conflict manually and "
+ "continue afterwards with git p4 submit --continue")
+
+ system(applyPatchCmd)
+
+ for f in filesToAdd:
+ p4_add(f)
+ for f in filesToDelete:
+ p4_revert(f)
+ p4_delete(f)
+
+ # Set/clear executable bits
+ for f in filesToChangeExecBit.keys():
+ mode = filesToChangeExecBit[f]
+ setP4ExecBit(f, mode)
+
+ logMessage = extractLogMessageFromGitCommit(id)
+ logMessage = logMessage.strip()
+
+ template = self.prepareSubmitTemplate()
+
+ if self.interactive:
+ submitTemplate = self.prepareLogMessage(template, logMessage)
+
+ if self.preserveUser:
+ submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
+
+ if os.environ.has_key("P4DIFF"):
+ del(os.environ["P4DIFF"])
+ diff = ""
+ for editedFile in editedFiles:
+ diff += p4_read_pipe(['diff', '-du',
+ wildcard_encode(editedFile)])
+
+ newdiff = ""
+ for newFile in filesToAdd:
+ newdiff += "==== new file ====\n"
+ newdiff += "--- /dev/null\n"
+ newdiff += "+++ %s\n" % newFile
+ f = open(newFile, "r")
+ for line in f.readlines():
+ newdiff += "+" + line
+ f.close()
+
+ if self.checkAuthorship and not self.p4UserIsMe(p4User):
+ submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
+ submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
+ submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
+
+ separatorLine = "######## everything below this line is just the diff #######\n"
+
+ (handle, fileName) = tempfile.mkstemp()
+ tmpFile = os.fdopen(handle, "w+")
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\n", "\r\n")
+ separatorLine = separatorLine.replace("\n", "\r\n")
+ newdiff = newdiff.replace("\n", "\r\n")
+ tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ tmpFile.close()
+
+ if self.edit_template(fileName):
+ # read the edited message and submit
+ tmpFile = open(fileName, "rb")
+ message = tmpFile.read()
+ tmpFile.close()
+ submitTemplate = message[:message.index(separatorLine)]
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\r\n", "\n")
+ p4_write_pipe(['submit', '-i'], submitTemplate)
+
+ if self.preserveUser:
+ if p4User:
+ # Get last changelist number. Cannot easily get it from
+ # the submit command output as the output is
+ # unmarshalled.
+ changelist = self.lastP4Changelist()
+ self.modifyChangelistUser(changelist, p4User)
+
+ # The rename/copy happened by applying a patch that created a
+ # new file. This leaves it writable, which confuses p4.
+ for f in pureRenameCopy:
+ p4_sync(f, "-f")
+
+ else:
+ # skip this patch
+ print "Submission cancelled, undoing p4 changes."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ p4_revert(f)
+ os.remove(f)
+
+ os.remove(fileName)
+ else:
+ fileName = "submit.txt"
+ file = open(fileName, "w+")
+ file.write(self.prepareLogMessage(template, logMessage))
+ file.close()
+ print ("Perforce submit template written as %s. "
+ + "Please review/edit and then use p4 submit -i < %s to submit directly!"
+ % (fileName, fileName))
+
+ # Export git tags as p4 labels. Create a p4 label and then tag
+ # with that.
+ def exportGitTags(self, gitTags):
+ validLabelRegexp = gitConfig("git-p4.labelExportRegexp")
+ if len(validLabelRegexp) == 0:
+ validLabelRegexp = defaultLabelRegexp
+ m = re.compile(validLabelRegexp)
+
+ for name in gitTags:
+
+ if not m.match(name):
+ if verbose:
+ print "tag %s does not match regexp %s" % (name, validTagRegexp)
+ continue
+
+ # Get the p4 commit this corresponds to
+ logMessage = extractLogMessageFromGitCommit(name)
+ values = extractSettingsGitLog(logMessage)
+
+ if not values.has_key('change'):
+ # a tag pointing to something not sent to p4; ignore
+ if verbose:
+ print "git tag %s does not give a p4 commit" % name
+ continue
+ else:
+ changelist = values['change']
+
+ # Get the tag details.
+ inHeader = True
+ isAnnotated = False
+ body = []
+ for l in read_pipe_lines(["git", "cat-file", "-p", name]):
+ l = l.strip()
+ if inHeader:
+ if re.match(r'tag\s+', l):
+ isAnnotated = True
+ elif re.match(r'\s*$', l):
+ inHeader = False
+ continue
+ else:
+ body.append(l)
+
+ if not isAnnotated:
+ body = ["lightweight tag imported by git p4\n"]
+
+ # Create the label - use the same view as the client spec we are using
+ clientSpec = getClientSpec()
+
+ labelTemplate = "Label: %s\n" % name
+ labelTemplate += "Description:\n"
+ for b in body:
+ labelTemplate += "\t" + b + "\n"
+ labelTemplate += "View:\n"
+ for mapping in clientSpec.mappings:
+ labelTemplate += "\t%s\n" % mapping.depot_side.path
+
+ p4_write_pipe(["label", "-i"], labelTemplate)
+
+ # Use the label
+ p4_system(["tag", "-l", name] +
+ ["%s@%s" % (mapping.depot_side.path, changelist) for mapping in clientSpec.mappings])
+
+ if verbose:
+ print "created p4 label for tag %s" % name
+
+ def run(self, args):
+ if len(args) == 0:
+ self.master = currentGitBranch()
+ if len(self.master) == 0 or not gitBranchExists("refs/heads/%s" % self.master):
+ die("Detecting current git branch failed!")
+ elif len(args) == 1:
+ self.master = args[0]
+ if not branchExists(self.master):
+ die("Branch %s does not exist" % self.master)
+ else:
+ return False
+
+ allowSubmit = gitConfig("git-p4.allowSubmit")
+ if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
+ die("%s is not in git-p4.allowSubmit" % self.master)
+
+ [upstream, settings] = findUpstreamBranchPoint()
+ self.depotPath = settings['depot-paths'][0]
+ if len(self.origin) == 0:
+ self.origin = upstream
+
+ if self.preserveUser:
+ if not self.canChangeChangelists():
+ die("Cannot preserve user names without p4 super-user or admin permissions")
+
+ if self.verbose:
+ print "Origin branch is " + self.origin
+
+ if len(self.depotPath) == 0:
+ print "Internal error: cannot locate perforce depot path from existing branches"
+ sys.exit(128)
+
+ self.useClientSpec = False
+ if gitConfig("git-p4.useclientspec", "--bool") == "true":
+ self.useClientSpec = True
+ if self.useClientSpec:
+ self.clientSpecDirs = getClientSpec()
+
+ if self.useClientSpec:
+ # all files are relative to the client spec
+ self.clientPath = getClientRoot()
+ else:
+ self.clientPath = p4Where(self.depotPath)
+
+ if self.clientPath == "":
+ die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath)
+
+ print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
+ self.oldWorkingDirectory = os.getcwd()
+
+ # ensure the clientPath exists
+ new_client_dir = False
+ if not os.path.exists(self.clientPath):
+ new_client_dir = True
+ os.makedirs(self.clientPath)
+
+ chdir(self.clientPath)
+ print "Synchronizing p4 checkout..."
+ if new_client_dir:
+ # old one was destroyed, and maybe nobody told p4
+ p4_sync("...", "-f")
+ else:
+ p4_sync("...")
+ self.check()
+
+ commits = []
+ for line in read_pipe_lines("git rev-list --no-merges %s..%s" % (self.origin, self.master)):
+ commits.append(line.strip())
+ commits.reverse()
+
+ if self.preserveUser or (gitConfig("git-p4.skipUserNameCheck") == "true"):
+ self.checkAuthorship = False
+ else:
+ self.checkAuthorship = True
+
+ if self.preserveUser:
+ self.checkValidP4Users(commits)
+
+ while len(commits) > 0:
+ commit = commits[0]
+ commits = commits[1:]
+ self.applyCommit(commit)
+ if not self.interactive:
+ break
+
+ if len(commits) == 0:
+ print "All changes applied!"
+ chdir(self.oldWorkingDirectory)
+
+ sync = P4Sync()
+ sync.run([])
+
+ rebase = P4Rebase()
+ rebase.rebase()
+
+ if gitConfig("git-p4.exportLabels", "--bool") == "true":
+            self.exportLabels = True
+
+ if self.exportLabels:
+ p4Labels = getP4Labels(self.depotPath)
+ gitTags = getGitTags()
+
+ missingGitTags = gitTags - p4Labels
+ self.exportGitTags(missingGitTags)
+
+ return True
+
+class View(object):
+ """Represent a p4 view ("p4 help views"), and map files in a
+ repo according to the view."""
+
+ class Path(object):
+ """A depot or client path, possibly containing wildcards.
+ The only one supported is ... at the end, currently.
+ Initialize with the full path, with //depot or //client."""
+
+ def __init__(self, path, is_depot):
+ self.path = path
+ self.is_depot = is_depot
+ self.find_wildcards()
+ # remember the prefix bit, useful for relative mappings
+ m = re.match("(//[^/]+/)", self.path)
+ if not m:
+ die("Path %s does not start with //prefix/" % self.path)
+ prefix = m.group(1)
+ if not self.is_depot:
+ # strip //client/ on client paths
+ self.path = self.path[len(prefix):]
+
+ def find_wildcards(self):
+ """Make sure wildcards are valid, and set up internal
+ variables."""
+
+ self.ends_triple_dot = False
+ # There are three wildcards allowed in p4 views
+ # (see "p4 help views"). This code knows how to
+ # handle "..." (only at the end), but cannot deal with
+ # "%%n" or "*". Only check the depot_side, as p4 should
+ # validate that the client_side matches too.
+ if re.search(r'%%[1-9]', self.path):
+ die("Can't handle %%n wildcards in view: %s" % self.path)
+ if self.path.find("*") >= 0:
+ die("Can't handle * wildcards in view: %s" % self.path)
+ triple_dot_index = self.path.find("...")
+ if triple_dot_index >= 0:
+ if triple_dot_index != len(self.path) - 3:
+ die("Can handle only single ... wildcard, at end: %s" %
+ self.path)
+ self.ends_triple_dot = True
+
+ def ensure_compatible(self, other_path):
+ """Make sure the wildcards agree."""
+ if self.ends_triple_dot != other_path.ends_triple_dot:
+ die("Both paths must end with ... if either does;\n" +
+ "paths: %s %s" % (self.path, other_path.path))
+
+ def match_wildcards(self, test_path):
+ """See if this test_path matches us, and fill in the value
+ of the wildcards if so. Returns a tuple of
+ (True|False, wildcards[]). For now, only the ... at end
+ is supported, so at most one wildcard."""
+ if self.ends_triple_dot:
+ dotless = self.path[:-3]
+ if test_path.startswith(dotless):
+ wildcard = test_path[len(dotless):]
+ return (True, [ wildcard ])
+ else:
+ if test_path == self.path:
+ return (True, [])
+ return (False, [])
+
+ def match(self, test_path):
+ """Just return if it matches; don't bother with the wildcards."""
+ b, _ = self.match_wildcards(test_path)
+ return b
+
+ def fill_in_wildcards(self, wildcards):
+ """Return the relative path, with the wildcards filled in
+ if there are any."""
+ if self.ends_triple_dot:
+ return self.path[:-3] + wildcards[0]
+ else:
+ return self.path
+
+ class Mapping(object):
+ def __init__(self, depot_side, client_side, overlay, exclude):
+ # depot_side is without the trailing /... if it had one
+ self.depot_side = View.Path(depot_side, is_depot=True)
+ self.client_side = View.Path(client_side, is_depot=False)
+ self.overlay = overlay # started with "+"
+ self.exclude = exclude # started with "-"
+ assert not (self.overlay and self.exclude)
+ self.depot_side.ensure_compatible(self.client_side)
+
+ def __str__(self):
+ c = " "
+ if self.overlay:
+ c = "+"
+ if self.exclude:
+ c = "-"
+ return "View.Mapping: %s%s -> %s" % \
+ (c, self.depot_side.path, self.client_side.path)
+
+ def map_depot_to_client(self, depot_path):
+ """Calculate the client path if using this mapping on the
+ given depot path; does not consider the effect of other
+ mappings in a view. Even excluded mappings are returned."""
+ matches, wildcards = self.depot_side.match_wildcards(depot_path)
+ if not matches:
+ return ""
+ client_path = self.client_side.fill_in_wildcards(wildcards)
+ return client_path
+
+ #
+ # View methods
+ #
+ def __init__(self):
+ self.mappings = []
+
+ def append(self, view_line):
+ """Parse a view line, splitting it into depot and client
+ sides. Append to self.mappings, preserving order."""
+
+ # Split the view line into exactly two words. P4 enforces
+ # structure on these lines that simplifies this quite a bit.
+ #
+ # Either or both words may be double-quoted.
+ # Single quotes do not matter.
+ # Double-quote marks cannot occur inside the words.
+ # A + or - prefix is also inside the quotes.
+ # There are no quotes unless they contain a space.
+ # The line is already white-space stripped.
+ # The two words are separated by a single space.
+ #
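+        # Typical lines (illustrative):
+        #   //depot/proj/... //client/proj/...
+        #   "-//depot/proj/a dir/..." "//client/proj/a dir/..."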
+ if view_line[0] == '"':
+ # First word is double quoted. Find its end.
+ close_quote_index = view_line.find('"', 1)
+ if close_quote_index <= 0:
+ die("No first-word closing quote found: %s" % view_line)
+ depot_side = view_line[1:close_quote_index]
+ # skip closing quote and space
+ rhs_index = close_quote_index + 1 + 1
+ else:
+ space_index = view_line.find(" ")
+ if space_index <= 0:
+ die("No word-splitting space found: %s" % view_line)
+ depot_side = view_line[0:space_index]
+ rhs_index = space_index + 1
+
+ if view_line[rhs_index] == '"':
+ # Second word is double quoted. Make sure there is a
+ # double quote at the end too.
+ if not view_line.endswith('"'):
+ die("View line with rhs quote should end with one: %s" %
+ view_line)
+ # skip the quotes
+ client_side = view_line[rhs_index+1:-1]
+ else:
+ client_side = view_line[rhs_index:]
+
+ # prefix + means overlay on previous mapping
+ overlay = False
+ if depot_side.startswith("+"):
+ overlay = True
+ depot_side = depot_side[1:]
+
+ # prefix - means exclude this path
+ exclude = False
+ if depot_side.startswith("-"):
+ exclude = True
+ depot_side = depot_side[1:]
+
+ m = View.Mapping(depot_side, client_side, overlay, exclude)
+ self.mappings.append(m)
+
+ def map_in_client(self, depot_path):
+ """Return the relative location in the client where this
+ depot file should live. Returns "" if the file should
+ not be mapped in the client."""
+
+ paths_filled = []
+ client_path = ""
+
+ # look at later entries first
+ for m in self.mappings[::-1]:
+
+ # see where will this path end up in the client
+ p = m.map_depot_to_client(depot_path)
+
+ if p == "":
+ # Depot path does not belong in client. Must remember
+ # this, as previous items should not cause files to
+ # exist in this path either. Remember that the list is
+ # being walked from the end, which has higher precedence.
+ # Overlap mappings do not exclude previous mappings.
+ if not m.overlay:
+ paths_filled.append(m.client_side)
+
+ else:
+ # This mapping matched; no need to search any further.
+ # But, the mapping could be rejected if the client path
+ # has already been claimed by an earlier mapping (i.e.
+ # one later in the list, which we are walking backwards).
+ already_mapped_in_client = False
+ for f in paths_filled:
+ # this is View.Path.match
+ if f.match(p):
+ already_mapped_in_client = True
+ break
+ if not already_mapped_in_client:
+ # Include this file, unless it is from a line that
+ # explicitly said to exclude it.
+ if not m.exclude:
+ client_path = p
+
+ # a match, even if rejected, always stops the search
+ break
+
+ return client_path
+
+class P4Sync(Command, P4UserMap):
+ delete_actions = ( "delete", "move/delete", "purge" )
+
+ def __init__(self):
+ Command.__init__(self)
+ P4UserMap.__init__(self)
+ self.options = [
+ optparse.make_option("--branch", dest="branch"),
+ optparse.make_option("--detect-branches", dest="detectBranches", action="store_true"),
+ optparse.make_option("--changesfile", dest="changesFile"),
+ optparse.make_option("--silent", dest="silent", action="store_true"),
+ optparse.make_option("--detect-labels", dest="detectLabels", action="store_true"),
+ optparse.make_option("--import-labels", dest="importLabels", action="store_true"),
+ optparse.make_option("--import-local", dest="importIntoRemotes", action="store_false",
+ help="Import into refs/heads/ , not refs/remotes"),
+ optparse.make_option("--max-changes", dest="maxChanges"),
+ optparse.make_option("--keep-path", dest="keepRepoPath", action='store_true',
+ help="Keep entire BRANCH/DIR/SUBDIR prefix during import"),
+ optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
+ help="Only sync files that are included in the Perforce Client Spec")
+ ]
+ self.description = """Imports from Perforce into a git repository.\n
+ example:
+ //depot/my/project/ -- to import the current head
+ //depot/my/project/@all -- to import everything
+ //depot/my/project/@1,6 -- to import only from revision 1 to 6
+
+    (a ... is not needed in the p4 path specification; it is added implicitly)"""
+
+ self.usage += " //depot/path[@revRange]"
+ self.silent = False
+ self.createdBranches = set()
+ self.committedChanges = set()
+ self.branch = ""
+ self.detectBranches = False
+ self.detectLabels = False
+ self.importLabels = False
+ self.changesFile = ""
+ self.syncWithOrigin = True
+ self.importIntoRemotes = True
+ self.maxChanges = ""
+ self.isWindows = (platform.system() == "Windows")
+ self.keepRepoPath = False
+ self.depotPaths = None
+ self.p4BranchesInGit = []
+ self.cloneExclude = []
+ self.useClientSpec = False
+ self.useClientSpec_from_options = False
+ self.clientSpecDirs = None
+ self.tempBranches = []
+ self.tempBranchLocation = "git-p4-tmp"
+
+ if gitConfig("git-p4.syncFromOrigin") == "false":
+ self.syncWithOrigin = False
+
+ # Force a checkpoint in fast-import and wait for it to finish
+ def checkpoint(self):
+ self.gitStream.write("checkpoint\n\n")
+ self.gitStream.write("progress checkpoint\n\n")
+ out = self.gitOutput.readline()
+ if self.verbose:
+ print "checkpoint finished: " + out
+
+ def extractFilesFromCommit(self, commit):
+ self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
+ for path in self.cloneExclude]
+ files = []
+ fnum = 0
+ while commit.has_key("depotFile%s" % fnum):
+ path = commit["depotFile%s" % fnum]
+
+ if [p for p in self.cloneExclude
+ if p4PathStartsWith(path, p)]:
+ found = False
+ else:
+ found = [p for p in self.depotPaths
+ if p4PathStartsWith(path, p)]
+ if not found:
+ fnum = fnum + 1
+ continue
+
+ file = {}
+ file["path"] = path
+ file["rev"] = commit["rev%s" % fnum]
+ file["action"] = commit["action%s" % fnum]
+ file["type"] = commit["type%s" % fnum]
+ files.append(file)
+ fnum = fnum + 1
+ return files
+
+ def stripRepoPath(self, path, prefixes):
+ if self.useClientSpec:
+ return self.clientSpecDirs.map_in_client(path)
+
+ if self.keepRepoPath:
+ prefixes = [re.sub("^(//[^/]+/).*", r'\1', prefixes[0])]
+
+ for p in prefixes:
+ if p4PathStartsWith(path, p):
+ path = path[len(p):]
+
+ return path
+
+ def splitFilesIntoBranches(self, commit):
+ branches = {}
+ fnum = 0
+ while commit.has_key("depotFile%s" % fnum):
+ path = commit["depotFile%s" % fnum]
+ found = [p for p in self.depotPaths
+ if p4PathStartsWith(path, p)]
+ if not found:
+ fnum = fnum + 1
+ continue
+
+ file = {}
+ file["path"] = path
+ file["rev"] = commit["rev%s" % fnum]
+ file["action"] = commit["action%s" % fnum]
+ file["type"] = commit["type%s" % fnum]
+ fnum = fnum + 1
+
+ relPath = self.stripRepoPath(path, self.depotPaths)
+ relPath = wildcard_decode(relPath)
+
+ for branch in self.knownBranches.keys():
+
+ # add a trailing slash so that a commit into qt/4.2foo doesn't end up in qt/4.2
+ if relPath.startswith(branch + "/"):
+ if branch not in branches:
+ branches[branch] = []
+ branches[branch].append(file)
+ break
+
+ return branches
+
+ # output one file from the P4 stream
+ # - helper for streamP4Files
+
+ def streamOneP4File(self, file, contents):
+ relPath = self.stripRepoPath(file['depotFile'], self.branchPrefixes)
+ relPath = wildcard_decode(relPath)
+ if verbose:
+ sys.stderr.write("%s\n" % relPath)
+
+ (type_base, type_mods) = split_p4_type(file["type"])
+
+ git_mode = "100644"
+ if "x" in type_mods:
+ git_mode = "100755"
+ if type_base == "symlink":
+ git_mode = "120000"
+ # p4 print on a symlink contains "target\n"; remove the newline
+ data = ''.join(contents)
+ contents = [data[:-1]]
+
+ if type_base == "utf16":
+ # p4 delivers different text in the python output to -G
+ # than it does when using "print -o", or normal p4 client
+ # operations. utf16 is converted to ascii or utf8, perhaps.
+ # But ascii text saved as -t utf16 is completely mangled.
+ # Invoke print -o to get the real contents.
+ text = p4_read_pipe(['print', '-q', '-o', '-', file['depotFile']])
+ contents = [ text ]
+
+ if type_base == "apple":
+ # Apple filetype files will be streamed as a concatenation of
+ # its appledouble header and the contents. This is useless
+ # on both macs and non-macs. If using "print -q -o xx", it
+ # will create "xx" with the data, and "%xx" with the header.
+ # This is also not very useful.
+ #
+ # Ideally, someday, this script can learn how to generate
+ # appledouble files directly and import those to git, but
+ # non-mac machines can never find a use for apple filetype.
+ print "\nIgnoring apple filetype file %s" % file['depotFile']
+ return
+
+ # Perhaps windows wants unicode, utf16 newlines translated too;
+ # but this is not doing it.
+ if self.isWindows and type_base == "text":
+ mangled = []
+ for data in contents:
+ data = data.replace("\r\n", "\n")
+ mangled.append(data)
+ contents = mangled
+
+ # Note that we do not try to de-mangle keywords on utf16 files,
+ # even though in theory somebody may want that.
+ pattern = p4_keywords_regexp_for_type(type_base, type_mods)
+ if pattern:
+ regexp = re.compile(pattern, re.VERBOSE)
+ text = ''.join(contents)
+ text = regexp.sub(r'$\1$', text)
+ contents = [ text ]
+
+ self.gitStream.write("M %s inline %s\n" % (git_mode, relPath))
+
+ # total length...
+ length = 0
+ for d in contents:
+ length = length + len(d)
+
+ self.gitStream.write("data %d\n" % length)
+ for d in contents:
+ self.gitStream.write(d)
+ self.gitStream.write("\n")
+
+ def streamOneP4Deletion(self, file):
+ relPath = self.stripRepoPath(file['path'], self.branchPrefixes)
+ relPath = wildcard_decode(relPath)
+ if verbose:
+ sys.stderr.write("delete %s\n" % relPath)
+ self.gitStream.write("D %s\n" % relPath)
+
+ # handle another chunk of streaming data
+ def streamP4FilesCb(self, marshalled):
+
+ if marshalled.has_key('depotFile') and self.stream_have_file_info:
+ # start of a new file - output the old one first
+ self.streamOneP4File(self.stream_file, self.stream_contents)
+ self.stream_file = {}
+ self.stream_contents = []
+ self.stream_have_file_info = False
+
+ # pick up the new file information... for the
+ # 'data' field we need to append to our array
+ for k in marshalled.keys():
+ if k == 'data':
+ self.stream_contents.append(marshalled['data'])
+ else:
+ self.stream_file[k] = marshalled[k]
+
+ self.stream_have_file_info = True
+
+ # Stream directly from "p4 files" into "git fast-import"
+ def streamP4Files(self, files):
+ filesForCommit = []
+ filesToRead = []
+ filesToDelete = []
+
+ for f in files:
+ # if using a client spec, only add the files that have
+ # a path in the client
+ if self.clientSpecDirs:
+ if self.clientSpecDirs.map_in_client(f['path']) == "":
+ continue
+
+ filesForCommit.append(f)
+ if f['action'] in self.delete_actions:
+ filesToDelete.append(f)
+ else:
+ filesToRead.append(f)
+
+ # deleted files...
+ for f in filesToDelete:
+ self.streamOneP4Deletion(f)
+
+ if len(filesToRead) > 0:
+ self.stream_file = {}
+ self.stream_contents = []
+ self.stream_have_file_info = False
+
+ # curry self argument
+ def streamP4FilesCbSelf(entry):
+ self.streamP4FilesCb(entry)
+
+ fileArgs = ['%s#%s' % (f['path'], f['rev']) for f in filesToRead]
+
+ p4CmdList(["-x", "-", "print"],
+ stdin=fileArgs,
+ cb=streamP4FilesCbSelf)
+
+ # do the last chunk
+ if self.stream_file.has_key('depotFile'):
+ self.streamOneP4File(self.stream_file, self.stream_contents)
+
+ def make_email(self, userid):
+ if userid in self.users:
+ return self.users[userid]
+ else:
+ return "%s <a@b>" % userid
+
+ # Stream a p4 tag
+ def streamTag(self, gitStream, labelName, labelDetails, commit, epoch):
+ if verbose:
+ print "writing tag %s for commit %s" % (labelName, commit)
+ gitStream.write("tag %s\n" % labelName)
+ gitStream.write("from %s\n" % commit)
+
+ if labelDetails.has_key('Owner'):
+ owner = labelDetails["Owner"]
+ else:
+ owner = None
+
+ # Try to use the owner of the p4 label, or failing that,
+ # the current p4 user id.
+ if owner:
+ email = self.make_email(owner)
+ else:
+ email = self.make_email(self.p4UserId())
+ tagger = "%s %s %s" % (email, epoch, self.tz)
+
+ gitStream.write("tagger %s\n" % tagger)
+
+ print "labelDetails=",labelDetails
+ if labelDetails.has_key('Description'):
+ description = labelDetails['Description']
+ else:
+ description = 'Label from git p4'
+
+ gitStream.write("data %d\n" % len(description))
+ gitStream.write(description)
+ gitStream.write("\n")
+
+ def commit(self, details, files, branch, branchPrefixes, parent = ""):
+ epoch = details["time"]
+ author = details["user"]
+ self.branchPrefixes = branchPrefixes
+
+ if self.verbose:
+ print "commit into %s" % branch
+
+ # start with reading files; if that fails, we should not
+ # create a commit.
+ new_files = []
+ for f in files:
+ if [p for p in branchPrefixes if p4PathStartsWith(f['path'], p)]:
+ new_files.append (f)
+ else:
+ sys.stderr.write("Ignoring file outside of prefix: %s\n" % f['path'])
+
+ self.gitStream.write("commit %s\n" % branch)
+# gitStream.write("mark :%s\n" % details["change"])
+ self.committedChanges.add(int(details["change"]))
+ committer = ""
+ if author not in self.users:
+ self.getUserMapFromPerforceServer()
+ committer = "%s %s %s" % (self.make_email(author), epoch, self.tz)
+
+ self.gitStream.write("committer %s\n" % committer)
+
+ self.gitStream.write("data <<EOT\n")
+ self.gitStream.write(details["desc"])
+ self.gitStream.write("\n[git-p4: depot-paths = \"%s\": change = %s"
+ % (','.join (branchPrefixes), details["change"]))
+ if len(details['options']) > 0:
+ self.gitStream.write(": options = %s" % details['options'])
+ self.gitStream.write("]\nEOT\n\n")
+
+ if len(parent) > 0:
+ if self.verbose:
+ print "parent %s" % parent
+ self.gitStream.write("from %s\n" % parent)
+
+ self.streamP4Files(new_files)
+ self.gitStream.write("\n")
+
+ change = int(details["change"])
+
+ if self.labels.has_key(change):
+ label = self.labels[change]
+ labelDetails = label[0]
+ labelRevisions = label[1]
+ if self.verbose:
+ print "Change %s is labelled %s" % (change, labelDetails)
+
+ files = p4CmdList(["files"] + ["%s...@%s" % (p, change)
+ for p in branchPrefixes])
+
+ if len(files) == len(labelRevisions):
+
+ cleanedFiles = {}
+ for info in files:
+ if info["action"] in self.delete_actions:
+ continue
+ cleanedFiles[info["depotFile"]] = info["rev"]
+
+ if cleanedFiles == labelRevisions:
+ self.streamTag(self.gitStream, 'tag_%s' % labelDetails['label'], labelDetails, branch, epoch)
+
+ else:
+ if not self.silent:
+ print ("Tag %s does not match with change %s: files do not match."
+ % (labelDetails["label"], change))
+
+ else:
+ if not self.silent:
+ print ("Tag %s does not match with change %s: file count is different."
+ % (labelDetails["label"], change))
+
+ # Build a dictionary of changelists and labels, for "detect-labels" option.
+ def getLabels(self):
+ self.labels = {}
+
+ l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
+ if len(l) > 0 and not self.silent:
+ print "Finding files belonging to labels in %s" % `self.depotPaths`
+
+ for output in l:
+ label = output["label"]
+ revisions = {}
+ newestChange = 0
+ if self.verbose:
+ print "Querying files for label %s" % label
+ for file in p4CmdList(["files"] +
+ ["%s...@%s" % (p, label)
+ for p in self.depotPaths]):
+ revisions[file["depotFile"]] = file["rev"]
+ change = int(file["change"])
+ if change > newestChange:
+ newestChange = change
+
+ self.labels[newestChange] = [output, revisions]
+
+ if self.verbose:
+ print "Label changes: %s" % self.labels.keys()
+
+ # Import p4 labels as git tags. A direct mapping does not
+ # exist, so assume that if all the files are at the same revision
+ # then we can use that, or it's something more complicated we should
+ # just ignore.
+ def importP4Labels(self, stream, p4Labels):
+ if verbose:
+ print "import p4 labels: " + ' '.join(p4Labels)
+
+ ignoredP4Labels = gitConfigList("git-p4.ignoredP4Labels")
+ validLabelRegexp = gitConfig("git-p4.labelImportRegexp")
+ if len(validLabelRegexp) == 0:
+ validLabelRegexp = defaultLabelRegexp
+ m = re.compile(validLabelRegexp)
+
+ for name in p4Labels:
+ commitFound = False
+
+ if not m.match(name):
+ if verbose:
+ print "label %s does not match regexp %s" % (name,validLabelRegexp)
+ continue
+
+ if name in ignoredP4Labels:
+ continue
+
+ labelDetails = p4CmdList(['label', "-o", name])[0]
+
+ # get the most recent changelist for each file in this label
+ change = p4Cmd(["changes", "-m", "1"] + ["%s...@%s" % (p, name)
+ for p in self.depotPaths])
+
+ if change.has_key('change'):
+ # find the corresponding git commit; take the oldest commit
+ changelist = int(change['change'])
+ gitCommit = read_pipe(["git", "rev-list", "--max-count=1",
+ "--reverse", ":/\[git-p4:.*change = %d\]" % changelist])
+ if len(gitCommit) == 0:
+ print "could not find git commit for changelist %d" % changelist
+ else:
+ gitCommit = gitCommit.strip()
+ commitFound = True
+ # Convert from p4 time format
+ try:
+ tmwhen = time.strptime(labelDetails['Update'], "%Y/%m/%d %H:%M:%S")
+ except ValueError:
+ print "Could not convert label time %s" % labelDetail['Update']
+ tmwhen = 1
+
+ when = int(time.mktime(tmwhen))
+ self.streamTag(stream, name, labelDetails, gitCommit, when)
+ if verbose:
+ print "p4 label %s mapped to git commit %s" % (name, gitCommit)
+ else:
+ if verbose:
+ print "Label %s has no changelists - possibly deleted?" % name
+
+ if not commitFound:
+ # We can't import this label; don't try again as it will get very
+ # expensive repeatedly fetching all the files for labels that will
+ # never be imported. If the label is moved in the future, the
+ # ignore will need to be removed manually.
+ system(["git", "config", "--add", "git-p4.ignoredP4Labels", name])
+
+ def guessProjectName(self):
+ for p in self.depotPaths:
+ if p.endswith("/"):
+ p = p[:-1]
+ p = p[p.strip().rfind("/") + 1:]
+ if not p.endswith("/"):
+ p += "/"
+ return p
+
+ def getBranchMapping(self):
+ lostAndFoundBranches = set()
+
+ user = gitConfig("git-p4.branchUser")
+ if len(user) > 0:
+ command = "branches -u %s" % user
+ else:
+ command = "branches"
+
+ for info in p4CmdList(command):
+ details = p4Cmd(["branch", "-o", info["branch"]])
+ viewIdx = 0
+ while details.has_key("View%s" % viewIdx):
+ paths = details["View%s" % viewIdx].split(" ")
+ viewIdx = viewIdx + 1
+ # require standard //depot/foo/... //depot/bar/... mapping
+ if len(paths) != 2 or not paths[0].endswith("/...") or not paths[1].endswith("/..."):
+ continue
+ source = paths[0]
+ destination = paths[1]
+ ## HACK
+ if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]):
+ source = source[len(self.depotPaths[0]):-4]
+ destination = destination[len(self.depotPaths[0]):-4]
+
+ if destination in self.knownBranches:
+ if not self.silent:
+ print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
+ print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
+ continue
+
+ self.knownBranches[destination] = source
+
+ lostAndFoundBranches.discard(destination)
+
+ if source not in self.knownBranches:
+ lostAndFoundBranches.add(source)
+
+ # Perforce does not strictly require branches to be defined, so we also
+ # check git config for a branch list.
+ #
+ # Example of branch definition in git config file:
+ # [git-p4]
+ # branchList=main:branchA
+ # branchList=main:branchB
+ # branchList=branchA:branchC
+ configBranches = gitConfigList("git-p4.branchList")
+ for branch in configBranches:
+ if branch:
+ (source, destination) = branch.split(":")
+ self.knownBranches[destination] = source
+
+ lostAndFoundBranches.discard(destination)
+
+ if source not in self.knownBranches:
+ lostAndFoundBranches.add(source)
+
+
+ for branch in lostAndFoundBranches:
+ self.knownBranches[branch] = branch
+
+ def getBranchMappingFromGitBranches(self):
+ branches = p4BranchesInGit(self.importIntoRemotes)
+ for branch in branches.keys():
+ if branch == "master":
+ branch = "main"
+ else:
+ branch = branch[len(self.projectName):]
+ self.knownBranches[branch] = branch
+
+ def listExistingP4GitBranches(self):
+ # branches holds mapping from name to commit
+ branches = p4BranchesInGit(self.importIntoRemotes)
+ self.p4BranchesInGit = branches.keys()
+ for branch in branches.keys():
+ self.initialParents[self.refPrefix + branch] = branches[branch]
+
+ def updateOptionDict(self, d):
+ option_keys = {}
+ if self.keepRepoPath:
+ option_keys['keepRepoPath'] = 1
+
+ d["options"] = ' '.join(sorted(option_keys.keys()))
+
+ def readOptions(self, d):
+ self.keepRepoPath = (d.has_key('options')
+ and ('keepRepoPath' in d['options']))
+
+ def gitRefForBranch(self, branch):
+ if branch == "main":
+ return self.refPrefix + "master"
+
+ if len(branch) <= 0:
+ return branch
+
+ return self.refPrefix + self.projectName + branch
+
+ def gitCommitByP4Change(self, ref, change):
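+        # Binary-search the history of 'ref' with "git rev-list --bisect",
+        # comparing the change number recorded in each commit's
+        # "[git-p4: ...]" line against the one we are looking for.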
+ if self.verbose:
+ print "looking in ref " + ref + " for change %s using bisect..." % change
+
+ earliestCommit = ""
+ latestCommit = parseRevision(ref)
+
+ while True:
+ if self.verbose:
+ print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
+ next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
+ if len(next) == 0:
+ if self.verbose:
+ print "argh"
+ return ""
+ log = extractLogMessageFromGitCommit(next)
+ settings = extractSettingsGitLog(log)
+ currentChange = int(settings['change'])
+ if self.verbose:
+ print "current change %s" % currentChange
+
+ if currentChange == change:
+ if self.verbose:
+ print "found %s" % next
+ return next
+
+ if currentChange < change:
+ earliestCommit = "^%s" % next
+ else:
+ latestCommit = "%s" % next
+
+ return ""
+
+ def importNewBranch(self, branch, maxChange):
+ # make fast-import flush all changes to disk and update the refs using the checkpoint
+ # command so that we can try to find the branch parent in the git history
+ self.gitStream.write("checkpoint\n\n");
+ self.gitStream.flush();
+ branchPrefix = self.depotPaths[0] + branch + "/"
+ range = "@1,%s" % maxChange
+ #print "prefix" + branchPrefix
+ changes = p4ChangesForPaths([branchPrefix], range)
+ if len(changes) <= 0:
+ return False
+ firstChange = changes[0]
+ #print "first change in branch: %s" % firstChange
+ sourceBranch = self.knownBranches[branch]
+ sourceDepotPath = self.depotPaths[0] + sourceBranch
+ sourceRef = self.gitRefForBranch(sourceBranch)
+ #print "source " + sourceBranch
+
+ branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"])
+ #print "branch parent: %s" % branchParentChange
+ gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
+ if len(gitParent) > 0:
+ self.initialParents[self.gitRefForBranch(branch)] = gitParent
+ #print "parent git commit: %s" % gitParent
+
+ self.importChanges(changes)
+ return True
+
+ def searchParent(self, parent, branch, target):
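+        # Walk the history of 'parent' oldest-first and return the first
+        # commit whose tree is identical to 'target' (i.e. "git diff-tree"
+        # prints nothing); return None if there is no such commit.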
+ parentFound = False
+ for blob in read_pipe_lines(["git", "rev-list", "--reverse", "--no-merges", parent]):
+ blob = blob.strip()
+ if len(read_pipe(["git", "diff-tree", blob, target])) == 0:
+ parentFound = True
+ if self.verbose:
+ print "Found parent of %s in commit %s" % (branch, blob)
+ break
+ if parentFound:
+ return blob
+ else:
+ return None
+
+ def importChanges(self, changes):
+ cnt = 1
+ for change in changes:
+ description = p4Cmd(["describe", str(change)])
+ self.updateOptionDict(description)
+
+ if not self.silent:
+ sys.stdout.write("\rImporting revision %s (%s%%)" % (change, cnt * 100 / len(changes)))
+ sys.stdout.flush()
+ cnt = cnt + 1
+
+ try:
+ if self.detectBranches:
+ branches = self.splitFilesIntoBranches(description)
+ for branch in branches.keys():
+ ## HACK --hwn
+ branchPrefix = self.depotPaths[0] + branch + "/"
+
+ parent = ""
+
+ filesForCommit = branches[branch]
+
+ if self.verbose:
+ print "branch is %s" % branch
+
+ self.updatedBranches.add(branch)
+
+ if branch not in self.createdBranches:
+ self.createdBranches.add(branch)
+ parent = self.knownBranches[branch]
+ if parent == branch:
+ parent = ""
+ else:
+ fullBranch = self.projectName + branch
+ if fullBranch not in self.p4BranchesInGit:
+ if not self.silent:
+ print("\n Importing new branch %s" % fullBranch);
+ if self.importNewBranch(branch, change - 1):
+ parent = ""
+ self.p4BranchesInGit.append(fullBranch)
+ if not self.silent:
+ print("\n Resuming with change %s" % change);
+
+ if self.verbose:
+ print "parent determined through known branches: %s" % parent
+
+ branch = self.gitRefForBranch(branch)
+ parent = self.gitRefForBranch(parent)
+
+ if self.verbose:
+ print "looking for initial parent for %s; current parent is %s" % (branch, parent)
+
+ if len(parent) == 0 and branch in self.initialParents:
+ parent = self.initialParents[branch]
+ del self.initialParents[branch]
+
+ blob = None
+ if len(parent) > 0:
+ tempBranch = os.path.join(self.tempBranchLocation, "%d" % (change))
+ if self.verbose:
+ print "Creating temporary branch: " + tempBranch
+ self.commit(description, filesForCommit, tempBranch, [branchPrefix])
+ self.tempBranches.append(tempBranch)
+ self.checkpoint()
+ blob = self.searchParent(parent, branch, tempBranch)
+ if blob:
+ self.commit(description, filesForCommit, branch, [branchPrefix], blob)
+ else:
+ if self.verbose:
+ print "Parent of %s not found. Committing into head of %s" % (branch, parent)
+ self.commit(description, filesForCommit, branch, [branchPrefix], parent)
+ else:
+ files = self.extractFilesFromCommit(description)
+ self.commit(description, files, self.branch, self.depotPaths,
+ self.initialParent)
+ self.initialParent = ""
+ except IOError:
+ print self.gitError.read()
+ sys.exit(1)
+
+ def importHeadRevision(self, revision):
+ print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
+
+ details = {}
+ details["user"] = "git perforce import user"
+ details["desc"] = ("Initial import of %s from the state at revision %s\n"
+ % (' '.join(self.depotPaths), revision))
+ details["change"] = revision
+ newestRevision = 0
+
+ fileCnt = 0
+ fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths]
+
+ for info in p4CmdList(["files"] + fileArgs):
+
+ if 'code' in info and info['code'] == 'error':
+ sys.stderr.write("p4 returned an error: %s\n"
+ % info['data'])
+ if info['data'].find("must refer to client") >= 0:
+ sys.stderr.write("This particular p4 error is misleading.\n")
+ sys.stderr.write("Perhaps the depot path was misspelled.\n");
+ sys.stderr.write("Depot path: %s\n" % " ".join(self.depotPaths))
+ sys.exit(1)
+ if 'p4ExitCode' in info:
+ sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode'])
+ sys.exit(1)
+
+
+ change = int(info["change"])
+ if change > newestRevision:
+ newestRevision = change
+
+ if info["action"] in self.delete_actions:
+ # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
+ #fileCnt = fileCnt + 1
+ continue
+
+ for prop in ["depotFile", "rev", "action", "type" ]:
+ details["%s%s" % (prop, fileCnt)] = info[prop]
+
+ fileCnt = fileCnt + 1
+
+ details["change"] = newestRevision
+
+ # Use time from top-most change so that all git p4 clones of
+ # the same p4 repo have the same commit SHA1s.
+ res = p4CmdList("describe -s %d" % newestRevision)
+ newestTime = None
+ for r in res:
+ if r.has_key('time'):
+ newestTime = int(r['time'])
+ if newestTime is None:
+ die("\"describe -s\" on newest change %d did not give a time" % newestRevision)
+ details["time"] = newestTime
+
+ self.updateOptionDict(details)
+ try:
+ self.commit(details, self.extractFilesFromCommit(details), self.branch, self.depotPaths)
+ except IOError:
+ print "IO error with git fast-import. Is your git version recent enough?"
+ print self.gitError.read()
+
+
+ def run(self, args):
+ self.depotPaths = []
+ self.changeRange = ""
+ self.initialParent = ""
+ self.previousDepotPaths = []
+
+ # map from branch depot path to parent branch
+ self.knownBranches = {}
+ self.initialParents = {}
+ self.hasOrigin = originP4BranchesExist()
+ if not self.syncWithOrigin:
+ self.hasOrigin = False
+
+ if self.importIntoRemotes:
+ self.refPrefix = "refs/remotes/p4/"
+ else:
+ self.refPrefix = "refs/heads/p4/"
+
+ if self.syncWithOrigin and self.hasOrigin:
+ if not self.silent:
+ print "Syncing with origin first by calling git fetch origin"
+ system("git fetch origin")
+
+ if len(self.branch) == 0:
+ self.branch = self.refPrefix + "master"
+ if gitBranchExists("refs/heads/p4") and self.importIntoRemotes:
+ system("git update-ref %s refs/heads/p4" % self.branch)
+ system("git branch -D p4");
+ # create it /after/ importing, when master exists
+ if not gitBranchExists(self.refPrefix + "HEAD") and self.importIntoRemotes and gitBranchExists(self.branch):
+ system("git symbolic-ref %sHEAD %s" % (self.refPrefix, self.branch))
+
+ # accept either the command-line option, or the configuration variable
+ if self.useClientSpec:
+ # will use this after clone to set the variable
+ self.useClientSpec_from_options = True
+ else:
+ if gitConfig("git-p4.useclientspec", "--bool") == "true":
+ self.useClientSpec = True
+ if self.useClientSpec:
+ self.clientSpecDirs = getClientSpec()
+
+ # TODO: should always look at previous commits,
+ # merge with previous imports, if possible.
+ if args == []:
+ if self.hasOrigin:
+ createOrUpdateBranchesFromOrigin(self.refPrefix, self.silent)
+ self.listExistingP4GitBranches()
+
+ if len(self.p4BranchesInGit) > 1:
+ if not self.silent:
+ print "Importing from/into multiple branches"
+ self.detectBranches = True
+
+ if self.verbose:
+ print "branches: %s" % self.p4BranchesInGit
+
+ p4Change = 0
+ for branch in self.p4BranchesInGit:
+ logMsg = extractLogMessageFromGitCommit(self.refPrefix + branch)
+
+ settings = extractSettingsGitLog(logMsg)
+
+ self.readOptions(settings)
+ if (settings.has_key('depot-paths')
+ and settings.has_key ('change')):
+ change = int(settings['change']) + 1
+ p4Change = max(p4Change, change)
+
+ depotPaths = sorted(settings['depot-paths'])
+ if self.previousDepotPaths == []:
+ self.previousDepotPaths = depotPaths
+ else:
+ paths = []
+ for (prev, cur) in zip(self.previousDepotPaths, depotPaths):
+ prev_list = prev.split("/")
+ cur_list = cur.split("/")
+ for i in range(0, min(len(cur_list), len(prev_list))):
+ if cur_list[i] <> prev_list[i]:
+ i = i - 1
+ break
+
+ paths.append ("/".join(cur_list[:i + 1]))
+
+ self.previousDepotPaths = paths
+
+ if p4Change > 0:
+ self.depotPaths = sorted(self.previousDepotPaths)
+ self.changeRange = "@%s,#head" % p4Change
+ if not self.detectBranches:
+ self.initialParent = parseRevision(self.branch)
+ if not self.silent and not self.detectBranches:
+ print "Performing incremental import into %s git branch" % self.branch
+
+ if not self.branch.startswith("refs/"):
+ self.branch = "refs/heads/" + self.branch
+
+ if len(args) == 0 and self.depotPaths:
+ if not self.silent:
+ print "Depot paths: %s" % ' '.join(self.depotPaths)
+ else:
+ if self.depotPaths and self.depotPaths != args:
+ print ("previous import used depot path %s and now %s was specified. "
+ "This doesn't work!" % (' '.join (self.depotPaths),
+ ' '.join (args)))
+ sys.exit(1)
+
+ self.depotPaths = sorted(args)
+
+ revision = ""
+ self.users = {}
+
+ # Make sure no revision specifiers are used when --changesfile
+ # is specified.
+ bad_changesfile = False
+ if len(self.changesFile) > 0:
+ for p in self.depotPaths:
+ if p.find("@") >= 0 or p.find("#") >= 0:
+ bad_changesfile = True
+ break
+ if bad_changesfile:
+ die("Option --changesfile is incompatible with revision specifiers")
+
+ newPaths = []
+ for p in self.depotPaths:
+ if p.find("@") != -1:
+ atIdx = p.index("@")
+ self.changeRange = p[atIdx:]
+ if self.changeRange == "@all":
+ self.changeRange = ""
+ elif ',' not in self.changeRange:
+ revision = self.changeRange
+ self.changeRange = ""
+ p = p[:atIdx]
+ elif p.find("#") != -1:
+ hashIdx = p.index("#")
+ revision = p[hashIdx:]
+ p = p[:hashIdx]
+ elif self.previousDepotPaths == []:
+ # pay attention to changesfile, if given, else import
+ # the entire p4 tree at the head revision
+ if len(self.changesFile) == 0:
+ revision = "#head"
+
+ p = re.sub ("\.\.\.$", "", p)
+ if not p.endswith("/"):
+ p += "/"
+
+ newPaths.append(p)
+
+ self.depotPaths = newPaths
+
+ self.loadUserMapFromCache()
+ self.labels = {}
+ if self.detectLabels:
+ self.getLabels();
+
+ if self.detectBranches:
+ ## FIXME - what's a P4 projectName ?
+ self.projectName = self.guessProjectName()
+
+ if self.hasOrigin:
+ self.getBranchMappingFromGitBranches()
+ else:
+ self.getBranchMapping()
+ if self.verbose:
+ print "p4-git branches: %s" % self.p4BranchesInGit
+ print "initial parents: %s" % self.initialParents
+ for b in self.p4BranchesInGit:
+ if b != "master":
+
+ ## FIXME
+ b = b[len(self.projectName):]
+ self.createdBranches.add(b)
+
+ self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
+
+ importProcess = subprocess.Popen(["git", "fast-import"],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE);
+ self.gitOutput = importProcess.stdout
+ self.gitStream = importProcess.stdin
+ self.gitError = importProcess.stderr
+
+ if revision:
+ self.importHeadRevision(revision)
+ else:
+ changes = []
+
+ if len(self.changesFile) > 0:
+ output = open(self.changesFile).readlines()
+ changeSet = set()
+ for line in output:
+ changeSet.add(int(line))
+
+ for change in changeSet:
+ changes.append(change)
+
+ changes.sort()
+ else:
+ # catch "git p4 sync" with no new branches, in a repo that
+ # does not have any existing p4 branches
+ if len(args) == 0 and not self.p4BranchesInGit:
+ die("No remote p4 branches. Perhaps you never did \"git p4 clone\" in here.");
+ if self.verbose:
+ print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
+ self.changeRange)
+ changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
+
+ if len(self.maxChanges) > 0:
+ changes = changes[:min(int(self.maxChanges), len(changes))]
+
+ if len(changes) == 0:
+ if not self.silent:
+ print "No changes to import!"
+ else:
+ if not self.silent and not self.detectBranches:
+ print "Import destination: %s" % self.branch
+
+ self.updatedBranches = set()
+
+ self.importChanges(changes)
+
+ if not self.silent:
+ print ""
+ if len(self.updatedBranches) > 0:
+ sys.stdout.write("Updated branches: ")
+ for b in self.updatedBranches:
+ sys.stdout.write("%s " % b)
+ sys.stdout.write("\n")
+
+ if gitConfig("git-p4.importLabels", "--bool") == "true":
+ self.importLabels = True
+
+ if self.importLabels:
+ p4Labels = getP4Labels(self.depotPaths)
+ gitTags = getGitTags()
+
+ missingP4Labels = p4Labels - gitTags
+ self.importP4Labels(self.gitStream, missingP4Labels)
+
+ self.gitStream.close()
+ if importProcess.wait() != 0:
+ die("fast-import failed: %s" % self.gitError.read())
+ self.gitOutput.close()
+ self.gitError.close()
+
+ # Cleanup temporary branches created during import
+ if self.tempBranches != []:
+ for branch in self.tempBranches:
+ read_pipe("git update-ref -d %s" % branch)
+ os.rmdir(os.path.join(os.environ.get("GIT_DIR", ".git"), self.tempBranchLocation))
+
+ return True
+
+class P4Rebase(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [
+ optparse.make_option("--import-labels", dest="importLabels", action="store_true"),
+ ]
+ self.importLabels = False
+ self.description = ("Fetches the latest revision from perforce and "
+ + "rebases the current work (branch) against it")
+
+ def run(self, args):
+ sync = P4Sync()
+ sync.importLabels = self.importLabels
+ sync.run([])
+
+ return self.rebase()
+
+ def rebase(self):
+ if os.system("git update-index --refresh") != 0:
+ die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up-to-date or stash away all your changes with git stash.");
+ if len(read_pipe("git diff-index HEAD --")) > 0:
+ die("You have uncommitted changes. Please commit them before rebasing or stash them away with git stash.");
+
+ [upstream, settings] = findUpstreamBranchPoint()
+ if len(upstream) == 0:
+ die("Cannot find upstream branchpoint for rebase")
+
+ # the branchpoint may be p4/foo~3, so strip off the parent
+ upstream = re.sub("~[0-9]+$", "", upstream)
+
+ print "Rebasing the current branch onto %s" % upstream
+ oldHead = read_pipe("git rev-parse HEAD").strip()
+ system("git rebase %s" % upstream)
+ system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
+ return True
+
+class P4Clone(P4Sync):
+ def __init__(self):
+ P4Sync.__init__(self)
+ self.description = "Creates a new git repository and imports from Perforce into it"
+ self.usage = "usage: %prog [options] //depot/path[@revRange]"
+ self.options += [
+ optparse.make_option("--destination", dest="cloneDestination",
+ action='store', default=None,
+ help="where to leave result of the clone"),
+ optparse.make_option("-/", dest="cloneExclude",
+ action="append", type="string",
+ help="exclude depot path"),
+ optparse.make_option("--bare", dest="cloneBare",
+ action="store_true", default=False),
+ ]
+ self.cloneDestination = None
+ self.needsGit = False
+ self.cloneBare = False
+
+ # This is required for the "append" cloneExclude action
+ def ensure_value(self, attr, value):
+ if not hasattr(self, attr) or getattr(self, attr) is None:
+ setattr(self, attr, value)
+ return getattr(self, attr)
+
+ def defaultDestination(self, args):
+ ## TODO: use common prefix of args?
+ depotPath = args[0]
+ depotDir = re.sub("(@[^@]*)$", "", depotPath)
+ depotDir = re.sub("(#[^#]*)$", "", depotDir)
+ depotDir = re.sub(r"\.\.\.$", "", depotDir)
+ depotDir = re.sub(r"/$", "", depotDir)
+ return os.path.split(depotDir)[1]
+
+ def run(self, args):
+ if len(args) < 1:
+ return False
+
+ if self.keepRepoPath and not self.cloneDestination:
+ sys.stderr.write("Must specify destination for --keep-path\n")
+ sys.exit(1)
+
+ depotPaths = args
+
+ if not self.cloneDestination and len(depotPaths) > 1:
+ self.cloneDestination = depotPaths[-1]
+ depotPaths = depotPaths[:-1]
+
+ self.cloneExclude = ["/"+p for p in self.cloneExclude]
+ for p in depotPaths:
+ if not p.startswith("//"):
+ return False
+
+ if not self.cloneDestination:
+ self.cloneDestination = self.defaultDestination(args)
+
+ print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
+
+ if not os.path.exists(self.cloneDestination):
+ os.makedirs(self.cloneDestination)
+ chdir(self.cloneDestination)
+
+ init_cmd = [ "git", "init" ]
+ if self.cloneBare:
+ init_cmd.append("--bare")
+ subprocess.check_call(init_cmd)
+
+ if not P4Sync.run(self, depotPaths):
+ return False
+ if self.branch != "master":
+ if self.importIntoRemotes:
+ masterbranch = "refs/remotes/p4/master"
+ else:
+ masterbranch = "refs/heads/p4/master"
+ if gitBranchExists(masterbranch):
+ system("git branch master %s" % masterbranch)
+ if not self.cloneBare:
+ system("git checkout -f")
+ else:
+ print "Could not detect main branch. No checkout/master branch created."
+
+ # auto-set this variable if invoked with --use-client-spec
+ if self.useClientSpec_from_options:
+ system("git config --bool git-p4.useclientspec true")
+
+ return True
+
+class P4Branches(Command):
+ def __init__(self):
+ Command.__init__(self)
+ self.options = [ ]
+ self.description = ("Shows the git branches that hold imports and their "
+ + "corresponding perforce depot paths")
+ self.verbose = False
+
+ def run(self, args):
+ if originP4BranchesExist():
+ createOrUpdateBranchesFromOrigin()
+
+ cmdline = "git rev-parse --symbolic "
+ cmdline += " --remotes"
+
+ for line in read_pipe_lines(cmdline):
+ line = line.strip()
+
+ if not line.startswith('p4/') or line == "p4/HEAD":
+ continue
+ branch = line
+
+ log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
+ settings = extractSettingsGitLog(log)
+
+ print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
+ return True
+
+class HelpFormatter(optparse.IndentedHelpFormatter):
+ def __init__(self):
+ optparse.IndentedHelpFormatter.__init__(self)
+
+ def format_description(self, description):
+ if description:
+ return description + "\n"
+ else:
+ return ""
+
+def printUsage(commands):
+ print "usage: %s <command> [options]" % sys.argv[0]
+ print ""
+ print "valid commands: %s" % ", ".join(commands)
+ print ""
+ print "Try %s <command> --help for command specific help." % sys.argv[0]
+ print ""
+
+commands = {
+ "debug" : P4Debug,
+ "submit" : P4Submit,
+ "commit" : P4Submit,
+ "sync" : P4Sync,
+ "rebase" : P4Rebase,
+ "clone" : P4Clone,
+ "rollback" : P4RollBack,
+ "branches" : P4Branches
+}
+
+
+def main():
+ if len(sys.argv[1:]) == 0:
+ printUsage(commands.keys())
+ sys.exit(2)
+
+ cmd = ""
+ cmdName = sys.argv[1]
+ try:
+ klass = commands[cmdName]
+ cmd = klass()
+ except KeyError:
+ print "unknown command %s" % cmdName
+ print ""
+ printUsage(commands.keys())
+ sys.exit(2)
+
+ options = cmd.options
+ cmd.gitdir = os.environ.get("GIT_DIR", None)
+
+ args = sys.argv[2:]
+
+ options.append(optparse.make_option("--verbose", dest="verbose", action="store_true"))
+ if cmd.needsGit:
+ options.append(optparse.make_option("--git-dir", dest="gitdir"))
+
+ parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
+ options,
+ description = cmd.description,
+ formatter = HelpFormatter())
+
+ (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
+ global verbose
+ verbose = cmd.verbose
+ if cmd.needsGit:
+ if cmd.gitdir == None:
+ cmd.gitdir = os.path.abspath(".git")
+ if not isValidGitDir(cmd.gitdir):
+ cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
+ if os.path.exists(cmd.gitdir):
+ cdup = read_pipe("git rev-parse --show-cdup").strip()
+ if len(cdup) > 0:
+ chdir(cdup);
+
+ if not isValidGitDir(cmd.gitdir):
+ if isValidGitDir(cmd.gitdir + "/.git"):
+ cmd.gitdir += "/.git"
+ else:
+ die("fatal: cannot locate git repository at %s" % cmd.gitdir)
+
+ os.environ["GIT_DIR"] = cmd.gitdir
+
+ if not cmd.run(args):
+ parser.print_help()
+ sys.exit(2)
+
+
+if __name__ == '__main__':
+ main()
test -n "$rebase_root" && root_flag=--root
-git format-patch -k --stdout --full-index --ignore-if-in-upstream \
- --src-prefix=a/ --dst-prefix=b/ \
- --no-renames $root_flag "$revisions" |
-git am $git_am_opt --rebasing --resolvemsg="$resolvemsg" &&
-move_to_original_branch
+if test -n "$keep_empty"
+then
+	# We have to do this the hard way: git format-patch completely squashes
+	# empty commits, and even if it didn't, the format doesn't really lend
+	# itself well to recording empty patches. Fortunately, cherry-pick
+	# makes this easy.
+ git cherry-pick --allow-empty "$revisions"
+else
+ git format-patch -k --stdout --full-index --ignore-if-in-upstream \
+ --src-prefix=a/ --dst-prefix=b/ \
+ --no-renames $root_flag "$revisions" |
+ git am $git_am_opt --rebasing --resolvemsg="$resolvemsg"
+fi && move_to_original_branch
+
ret=$?
test 0 != $ret -a -d "$state_dir" && write_basic_state
exit $ret
sane_grep '^[^#]' "$1" >/dev/null
}
+is_empty_commit() {
+ tree=$(git rev-parse -q --verify "$1"^{tree} 2>/dev/null ||
+ die "$1: not a commit that can be picked")
+ ptree=$(git rev-parse -q --verify "$1"^^{tree} 2>/dev/null ||
+ ptree=4b825dc642cb6eb9a060e54bf8d69288fbee4904)
+ test "$tree" = "$ptree"
+}
+
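is_empty_commit() above treats a commit as empty when its tree matches its parent's tree, falling back to git's well-known empty-tree object for root commits. A hedged Python 3 sketch of the same check (it assumes a git work tree in the current directory; the helper names are illustrative):

import subprocess

EMPTY_TREE = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"  # hash of git's empty tree

def _rev_parse(rev):
    # Return the resolved object name, or None if the revision does not exist.
    out = subprocess.run(["git", "rev-parse", "-q", "--verify", rev],
                         capture_output=True, text=True)
    return out.stdout.strip() if out.returncode == 0 else None

def is_empty_commit(commit):
    tree = _rev_parse("%s^{tree}" % commit)
    if tree is None:
        raise ValueError("%s: not a commit that can be picked" % commit)
    # Root commits have no parent tree; compare against the empty tree instead.
    ptree = _rev_parse("%s^^{tree}" % commit) or EMPTY_TREE
    return tree == ptree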
# Run command with GIT_AUTHOR_NAME, GIT_AUTHOR_EMAIL, and
# GIT_AUTHOR_DATE exported from the current environment.
do_with_author () {
pick_one () {
ff=--ff
+
case "$1" in -n) sha1=$2; ff= ;; *) sha1=$1 ;; esac
case "$force_rebase" in '') ;; ?*) ff= ;; esac
output git rev-parse --verify $sha1 || die "Invalid commit name: $sha1"
+
+	# reset for each call; pick_one runs many times in one shell
+	empty_args=
+	if is_empty_commit "$sha1"
+	then
+		empty_args="--allow-empty"
+	fi
+
test -d "$rewritten" &&
pick_one_preserving_merges "$@" && return
- output git cherry-pick $ff "$@"
+ output git cherry-pick $empty_args $ff "$@"
}
pick_one_preserving_merges () {
sed -n "s/^>//p" |
while read -r shortsha1 rest
do
+
+ if test -z "$keep_empty" && is_empty_commit $shortsha1
+ then
+ comment_out="# "
+ else
+ comment_out=
+ fi
+
if test t != "$preserve_merges"
then
- printf '%s\n' "pick $shortsha1 $rest" >> "$todo"
+ printf '%s\n' "${comment_out}pick $shortsha1 $rest" >>"$todo"
else
sha1=$(git rev-parse $shortsha1)
if test -z "$rebase_root"
if test f = "$preserve"
then
touch "$rewritten"/$sha1
- printf '%s\n' "pick $shortsha1 $rest" >> "$todo"
+ printf '%s\n' "${comment_out}pick $shortsha1 $rest" >>"$todo"
fi
fi
done
# f, fixup = like "squash", but discard this commit's log message
# x, exec = run command (the rest of the line) using shell
#
+# These lines can be re-ordered; they are executed from top to bottom.
+#
# If you remove a line here THAT COMMIT WILL BE LOST.
# However, if you remove everything, the rebase will be aborted.
#
EOF
+if test -z "$keep_empty"
+then
+ echo "# Note that empty commits are commented out" >>"$todo"
+fi
+
+
has_action "$todo" ||
die_abort "Nothing to do"
no-ff! cherry-pick all commits, even if unchanged
m,merge! use merging strategies to rebase
i,interactive! let the user edit the list of commits to rebase
+k,keep-empty preserve empty commits during rebase
f,force-rebase! force rebase even if branch is up to date
X,strategy-option=! pass the argument through to the merge strategy
stat! display a diffstat of what changed upstream
action=
preserve_merges=
autosquash=
+keep_empty=
test "$(git config --bool rebase.autosquash)" = "true" && autosquash=t
read_basic_state () {
-i)
interactive_rebase=explicit
;;
+ -k)
+ keep_empty=yes
+ ;;
-p)
preserve_merges=t
test -z "$interactive_rebase" && interactive_rebase=implied
-#!/usr/bin/env perl
+#!/usr/bin/perl
# Copyright 2005, Ryan Anderson <ryan@michonline.com>
# Distribution permitted under the GPL v2, as distributed
# by the Free Software Foundation.
_digest = sha.new
import sys
import os
+import time
sys.path.insert(0, os.getenv("GITPYTHONLIB","."))
from git_remote_helpers.util import die, debug, warn
"""Reads and processes one command.
"""
+ sleepy = os.environ.get("GIT_REMOTE_TESTGIT_SLEEPY")
+ if sleepy:
+ debug("Sleeping %d sec before readline" % int(sleepy))
+ time.sleep(int(sleepy))
+
line = sys.stdin.readline()
cmdline = line
more = True
+ sys.stdin = os.fdopen(sys.stdin.fileno(), 'r', 0)
while (more):
more = read_one_line(repo)
n do not run git-update-server-info
q,quiet be quiet
l pass --local to git-pack-objects
+unpack-unreachable= with -A, do not loosen objects older than this
Packing constraints
window= size of the window used for delta compression
window-memory= same as the above, but limit memory size instead of entries count
-a) all_into_one=t ;;
-A) all_into_one=t
unpack_unreachable=--unpack-unreachable ;;
+ --unpack-unreachable)
+ unpack_unreachable="--unpack-unreachable=$2"; shift ;;
-d) remove_redundant=t ;;
-q) GIT_QUIET=t ;;
-f) no_reuse=--no-reuse-delta ;;
if test -n "$existing" -a -n "$unpack_unreachable" -a \
-n "$remove_redundant"
then
- args="$args $unpack_unreachable"
+ # This may have arbitrary user arguments, so we
+ # have to protect it against whitespace splitting
+ # when it gets run as "pack-objects $args" later.
+ # Fortunately, we know it's an approxidate, so we
+ # can just use dots instead.
+ args="$args $(echo "$unpack_unreachable" | tr ' ' .)"
fi
fi
;;
find () {
/usr/bin/find "$@"
}
+ # git sees Windows-style pwd
+ pwd () {
+ builtin pwd -W
+ }
is_absolute_path () {
case "$1" in
[/\\]* | [A-Za-z]:*)
module_name()
{
# Do we have "submodule.<something>.path = $1" defined in .gitmodules file?
+ sm_path="$1"
re=$(printf '%s\n' "$1" | sed -e 's/[].[^$\\*]/\\&/g')
name=$( git config -f .gitmodules --get-regexp '^submodule\..*\.path$' |
sed -n -e 's|^submodule\.\(.*\)\.path '"$re"'$|\1|p' )
test -z "$name" &&
- die "$(eval_gettext "No submodule mapping found in .gitmodules for path '\$path'")"
+ die "$(eval_gettext "No submodule mapping found in .gitmodules for path '\$sm_path'")"
echo "$name"
}
#
module_clone()
{
- path=$1
+ sm_path=$1
url=$2
reference="$3"
quiet=
gitdir=
gitdir_base=
- name=$(module_name "$path" 2>/dev/null)
- test -n "$name" || name="$path"
+ name=$(module_name "$sm_path" 2>/dev/null)
+ test -n "$name" || name="$sm_path"
base_name=$(dirname "$name")
gitdir=$(git rev-parse --git-dir)
if test -d "$gitdir"
then
- mkdir -p "$path"
+ mkdir -p "$sm_path"
rm -f "$gitdir/index"
else
mkdir -p "$gitdir_base"
git clone $quiet -n ${reference:+"$reference"} \
- --separate-git-dir "$gitdir" "$url" "$path" ||
- die "$(eval_gettext "Clone of '\$url' into submodule path '\$path' failed")"
+ --separate-git-dir "$gitdir" "$url" "$sm_path" ||
+ die "$(eval_gettext "Clone of '\$url' into submodule path '\$sm_path' failed")"
fi
a=$(cd "$gitdir" && pwd)/
- b=$(cd "$path" && pwd)/
+ b=$(cd "$sm_path" && pwd)/
# normalize Windows-style absolute paths to POSIX-style absolute paths
case $a in [a-zA-Z]:/*) a=/${a%%:*}${a#*:} ;; esac
case $b in [a-zA-Z]:/*) b=/${b%%:*}${b#*:} ;; esac
a=${a%/}
b=${b%/}
- rel=$(echo $b | sed -e 's|[^/]*|..|g')
- echo "gitdir: $rel/$a" >"$path/.git"
+ # Turn each leading "*/" component into "../"
+ rel=$(echo $b | sed -e 's|[^/][^/]*|..|g')
+ echo "gitdir: $rel/$a" >"$sm_path/.git"
- rel=$(echo $a | sed -e 's|[^/]*|..|g')
- (clear_local_git_env; cd "$path" && GIT_WORK_TREE=. git config core.worktree "$rel/$b")
+ rel=$(echo $a | sed -e 's|[^/][^/]*|..|g')
+ (clear_local_git_env; cd "$sm_path" && GIT_WORK_TREE=. git config core.worktree "$rel/$b")
}
#
done
repo=$1
- path=$2
+ sm_path=$2
- if test -z "$path"; then
- path=$(echo "$repo" |
+ if test -z "$sm_path"; then
+ sm_path=$(echo "$repo" |
sed -e 's|/$||' -e 's|:*/*\.git$||' -e 's|.*[/:]||g')
fi
- if test -z "$repo" -o -z "$path"; then
+ if test -z "$repo" -o -z "$sm_path"; then
usage
fi
# normalize path:
# multiple //; leading ./; /./; /../; trailing /
- path=$(printf '%s/\n' "$path" |
+ sm_path=$(printf '%s/\n' "$sm_path" |
sed -e '
s|//*|/|g
s|^\(\./\)*||
tstart
s|/*$||
')
- git ls-files --error-unmatch "$path" > /dev/null 2>&1 &&
- die "$(eval_gettext "'\$path' already exists in the index")"
+ git ls-files --error-unmatch "$sm_path" > /dev/null 2>&1 &&
+ die "$(eval_gettext "'\$sm_path' already exists in the index")"
- if test -z "$force" && ! git add --dry-run --ignore-missing "$path" > /dev/null 2>&1
+ if test -z "$force" && ! git add --dry-run --ignore-missing "$sm_path" > /dev/null 2>&1
then
eval_gettextln "The following path is ignored by one of your .gitignore files:
-\$path
+\$sm_path
Use -f if you really want to add it." >&2
exit 1
fi
# perhaps the path exists and is already a git repo, else clone it
- if test -e "$path"
+ if test -e "$sm_path"
then
- if test -d "$path"/.git -o -f "$path"/.git
+ if test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
- eval_gettextln "Adding existing repo at '\$path' to the index"
+ eval_gettextln "Adding existing repo at '\$sm_path' to the index"
else
- die "$(eval_gettext "'\$path' already exists and is not a valid git repo")"
+ die "$(eval_gettext "'\$sm_path' already exists and is not a valid git repo")"
fi
else
- module_clone "$path" "$realrepo" "$reference" || exit
+ module_clone "$sm_path" "$realrepo" "$reference" || exit
(
clear_local_git_env
- cd "$path" &&
+ cd "$sm_path" &&
# ash fails to wordsplit ${branch:+-b "$branch"...}
case "$branch" in
'') git checkout -f -q ;;
?*) git checkout -f -q -B "$branch" "origin/$branch" ;;
esac
- ) || die "$(eval_gettext "Unable to checkout submodule '\$path'")"
+ ) || die "$(eval_gettext "Unable to checkout submodule '\$sm_path'")"
fi
- git config submodule."$path".url "$realrepo"
+ git config submodule."$sm_path".url "$realrepo"
- git add $force "$path" ||
- die "$(eval_gettext "Failed to add submodule '\$path'")"
+ git add $force "$sm_path" ||
+ die "$(eval_gettext "Failed to add submodule '\$sm_path'")"
- git config -f .gitmodules submodule."$path".path "$path" &&
- git config -f .gitmodules submodule."$path".url "$repo" &&
+ git config -f .gitmodules submodule."$sm_path".path "$sm_path" &&
+ git config -f .gitmodules submodule."$sm_path".url "$repo" &&
git add --force .gitmodules ||
- die "$(eval_gettext "Failed to register submodule '\$path'")"
+ die "$(eval_gettext "Failed to register submodule '\$sm_path'")"
}
#
exec 3<&0
module_list |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- if test -e "$path"/.git
+ if test -e "$sm_path"/.git
then
- say "$(eval_gettext "Entering '\$prefix\$path'")"
- name=$(module_name "$path")
+ say "$(eval_gettext "Entering '\$prefix\$sm_path'")"
+ name=$(module_name "$sm_path")
(
- prefix="$prefix$path/"
+ prefix="$prefix$sm_path/"
clear_local_git_env
- cd "$path" &&
+ # we make $path available to scripts ...
+ path=$sm_path
+ cd "$sm_path" &&
eval "$@" &&
if test -n "$recursive"
then
cmd_foreach "--recursive" "$@"
fi
) <&3 3<&- ||
- die "$(eval_gettext "Stopping at '\$path'; script returned non-zero status.")"
+ die "$(eval_gettext "Stopping at '\$sm_path'; script returned non-zero status.")"
fi
done
}
done
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
# Skip already registered paths
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
if test -z "$(git config "submodule.$name.url")"
then
url=$(git config -f .gitmodules submodule."$name".url)
test -z "$url" &&
- die "$(eval_gettext "No url found for submodule path '\$path' in .gitmodules")"
+ die "$(eval_gettext "No url found for submodule path '\$sm_path' in .gitmodules")"
# Possibly a url relative to parent
case "$url" in
;;
esac
git config submodule."$name".url "$url" ||
- die "$(eval_gettext "Failed to register url for submodule path '\$path'")"
+ die "$(eval_gettext "Failed to register url for submodule path '\$sm_path'")"
fi
# Copy "update" setting when it is not set yet
test -z "$upd" ||
test -n "$(git config submodule."$name".update)" ||
git config submodule."$name".update "$upd" ||
- die "$(eval_gettext "Failed to register update mode for submodule path '\$path'")"
+ die "$(eval_gettext "Failed to register update mode for submodule path '\$sm_path'")"
- say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$path'")"
+ say "$(eval_gettext "Submodule '\$name' (\$url) registered for path '\$sm_path'")"
done
}
cloned_modules=
module_list "$@" | {
err=
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
if test "$stage" = U
then
- echo >&2 "Skipping unmerged submodule $path"
+ echo >&2 "Skipping unmerged submodule $sm_path"
continue
fi
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
url=$(git config submodule."$name".url)
if ! test -z "$update"
then
if test "$update_module" = "none"
then
- echo "Skipping submodule '$path'"
+ echo "Skipping submodule '$sm_path'"
continue
fi
 			# Only mention uninitialized submodules when their
 			# paths have been specified
test "$#" != "0" &&
- say "$(eval_gettext "Submodule path '\$path' not initialized
+ say "$(eval_gettext "Submodule path '\$sm_path' not initialized
Maybe you want to use 'update --init'?")"
continue
fi
- if ! test -d "$path"/.git -o -f "$path"/.git
+ if ! test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
- module_clone "$path" "$url" "$reference"|| exit
+ module_clone "$sm_path" "$url" "$reference"|| exit
cloned_modules="$cloned_modules;$name"
subsha1=
else
- subsha1=$(clear_local_git_env; cd "$path" &&
+ subsha1=$(clear_local_git_env; cd "$sm_path" &&
git rev-parse --verify HEAD) ||
- die "$(eval_gettext "Unable to find current revision in submodule path '\$path'")"
+ die "$(eval_gettext "Unable to find current revision in submodule path '\$sm_path'")"
fi
if test "$subsha1" != "$sha1"
then
# Run fetch only if $sha1 isn't present or it
# is not reachable from a ref.
- (clear_local_git_env; cd "$path" &&
+ (clear_local_git_env; cd "$sm_path" &&
( (rev=$(git rev-list -n 1 $sha1 --not --all 2>/dev/null) &&
test -z "$rev") || git-fetch)) ||
- die "$(eval_gettext "Unable to fetch in submodule path '\$path'")"
+ die "$(eval_gettext "Unable to fetch in submodule path '\$sm_path'")"
fi
# Is this something we just cloned?
case "$update_module" in
rebase)
command="git rebase"
- die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': rebased into '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to rebase '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': rebased into '\$sha1'")"
must_die_on_failure=yes
;;
merge)
command="git merge"
- die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': merged in '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to merge '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': merged in '\$sha1'")"
must_die_on_failure=yes
;;
*)
command="git checkout $subforce -q"
- die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$path'")"
- say_msg="$(eval_gettext "Submodule path '\$path': checked out '\$sha1'")"
+ die_msg="$(eval_gettext "Unable to checkout '\$sha1' in submodule path '\$sm_path'")"
+ say_msg="$(eval_gettext "Submodule path '\$sm_path': checked out '\$sha1'")"
;;
esac
- if (clear_local_git_env; cd "$path" && $command "$sha1")
+ if (clear_local_git_env; cd "$sm_path" && $command "$sha1")
then
say "$say_msg"
elif test -n "$must_die_on_failure"
if test -n "$recursive"
then
- (clear_local_git_env; cd "$path" && eval cmd_update "$orig_flags")
+ (clear_local_git_env; cd "$sm_path" && eval cmd_update "$orig_flags")
res=$?
if test $res -gt 0
then
- die_msg="$(eval_gettext "Failed to recurse into submodule path '\$path'")"
+ die_msg="$(eval_gettext "Failed to recurse into submodule path '\$sm_path'")"
if test $res -eq 1
then
err="${err};$die_msg"
done
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- name=$(module_name "$path") || exit
+ name=$(module_name "$sm_path") || exit
url=$(git config submodule."$name".url)
- displaypath="$prefix$path"
+ displaypath="$prefix$sm_path"
if test "$stage" = U
then
say "U$sha1 $displaypath"
continue
fi
- if test -z "$url" || ! test -d "$path"/.git -o -f "$path"/.git
+ if test -z "$url" || ! test -d "$sm_path"/.git -o -f "$sm_path"/.git
then
say "-$sha1 $displaypath"
continue;
fi
- set_name_rev "$path" "$sha1"
- if git diff-files --ignore-submodules=dirty --quiet -- "$path"
+ set_name_rev "$sm_path" "$sha1"
+ if git diff-files --ignore-submodules=dirty --quiet -- "$sm_path"
then
say " $sha1 $displaypath$revname"
else
if test -z "$cached"
then
- sha1=$(clear_local_git_env; cd "$path" && git rev-parse --verify HEAD)
- set_name_rev "$path" "$sha1"
+ sha1=$(clear_local_git_env; cd "$sm_path" && git rev-parse --verify HEAD)
+ set_name_rev "$sm_path" "$sha1"
fi
say "+$sha1 $displaypath$revname"
fi
(
prefix="$displaypath/"
clear_local_git_env
- cd "$path" &&
+ cd "$sm_path" &&
eval cmd_status "$orig_args"
) ||
- die "$(eval_gettext "Failed to recurse into submodule path '\$path'")"
+ die "$(eval_gettext "Failed to recurse into submodule path '\$sm_path'")"
fi
done
}
done
cd_to_toplevel
module_list "$@" |
- while read mode sha1 stage path
+ while read mode sha1 stage sm_path
do
- name=$(module_name "$path")
+ name=$(module_name "$sm_path")
url=$(git config -f .gitmodules --get submodule."$name".url)
# Possibly a url relative to parent
say "$(eval_gettext "Synchronizing submodule url for '\$name'")"
git config submodule."$name".url "$url"
- if test -e "$path"/.git
+ if test -e "$sm_path"/.git
then
(
clear_local_git_env
- cd "$path"
+ cd "$sm_path"
remote=$(get_default_remote)
git config remote."$remote".url "$url"
)
-#!/usr/bin/env perl
+#!/usr/bin/perl
# Copyright (C) 2006, Eric Wong <normalperson@yhbt.net>
# License: GPL v2 or later
use 5.008;
$| = 1; # unbuffer STDOUT
sub fatal (@) { print STDERR "@_\n"; exit 1 }
+
+# All SVN commands do it. Otherwise we may die on SIGPIPE when the remote
+# repository decides to close the connection which we expect to be kept alive.
+$SIG{PIPE} = 'IGNORE';
+
+# Given a dot-separated version number, "subtract" it from
+# the SVN::Core::VERSION; a non-negative return means the SVN::Core
+# is at least at the version the caller asked for.
+sub compare_svn_version {
+ my (@ours) = split(/\./, $SVN::Core::VERSION);
+ my (@theirs) = split(/\./, $_[0]);
+ my ($i, $diff);
+
+ for ($i = 0; $i < @ours && $i < @theirs; $i++) {
+ $diff = $ours[$i] - $theirs[$i];
+ return $diff if ($diff);
+ }
+ return 1 if ($i < @ours);
+ return -1 if ($i < @theirs);
+ return 0;
+}
+
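compare_svn_version() above compares dot-separated version strings component by component, so callers can write e.g. compare_svn_version('1.2.0') >= 0 instead of the string comparisons (ge '1.2.0') they replace. A rough Python equivalent, assuming purely numeric components; the function name is illustrative:

def compare_versions(ours, theirs):
    a = [int(x) for x in ours.split(".")]
    b = [int(x) for x in theirs.split(".")]
    for x, y in zip(a, b):
        if x != y:
            return x - y          # sign of the first differing component decides
    if len(a) > len(b):
        return 1                  # extra components on "ours" count as newer
    if len(a) < len(b):
        return -1
    return 0

# compare_versions("1.6.12", "1.2.0") > 0; compare_versions("1.1", "1.1.0") < 0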
sub _req_svn {
require SVN::Core; # use()-ing this causes segfaults for me... *shrug*
require SVN::Ra;
require SVN::Delta;
- if ($SVN::Core::VERSION lt '1.1.0') {
+ if (::compare_svn_version('1.1.0') < 0) {
fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)";
}
}
}
::_req_svn();
$result .= "Repository UUID: $uuid\n" unless $diff_status eq "A" &&
- ($SVN::Core::VERSION le '1.5.4' || $file_type ne "dir");
+ (::compare_svn_version('1.5.4') <= 0 || $file_type ne "dir");
$result .= "Revision: " . ($diff_status eq "A" ? 0 : $rev) . "\n";
$result .= "Node Kind: " .
use Time::Local;
use Memoize; # core since 5.8.0, Jul 2002
use Memoize::Storable;
+use POSIX qw(:signal_h);
my ($_gc_nr, $_gc_period);
length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
my $db = $self->map_path($uuid);
my $db_lock = "$db.lock";
- my $sig;
+ my $sigmask;
$update_ref ||= 0;
if ($update_ref) {
- $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} =
- $SIG{USR1} = $SIG{USR2} = sub { $sig = $_[0] };
+ $sigmask = POSIX::SigSet->new();
+ my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
+ SIGALRM, SIGUSR1, SIGUSR2);
+ sigprocmask(SIG_BLOCK, $signew, $sigmask) or
+ croak "Can't block signals: $!";
}
mkfile($db);
"$db_lock => $db ($!)\n";
delete $LOCKFILES{$db_lock};
if ($update_ref) {
- $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} =
- $SIG{USR1} = $SIG{USR2} = 'DEFAULT';
- kill $sig, $$ if defined $sig;
+ sigprocmask(SIG_SETMASK, $sigmask) or
+ croak "Can't restore signal mask: $!";
}
}
}
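The change above swaps per-signal handlers for sigprocmask(): the interesting signals are blocked while the rev_map and its lock file are updated, and the previous mask is restored afterwards, so an interrupt can no longer fire in the middle of the critical section. The same pattern in Python 3 (Unix only; pthread_sigmask mirrors sigprocmask, and the function name here is illustrative):

import signal

BLOCKED = {signal.SIGINT, signal.SIGHUP, signal.SIGTERM,
           signal.SIGALRM, signal.SIGUSR1, signal.SIGUSR2}

def run_with_signals_blocked(critical_section):
    old_mask = signal.pthread_sigmask(signal.SIG_BLOCK, BLOCKED)
    try:
        critical_section()        # e.g. write the lock file and update the map
    finally:
        # Restore exactly the mask that was in effect before we started.
        signal.pthread_sigmask(signal.SIG_SETMASK, old_mask)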
sub _auth_providers () {
- [
+ my @rv = (
SVN::Client::get_simple_provider(),
SVN::Client::get_ssl_server_trust_file_provider(),
SVN::Client::get_simple_prompt_provider(
\&Git::SVN::Prompt::ssl_server_trust),
SVN::Client::get_username_prompt_provider(
\&Git::SVN::Prompt::username, 2)
- ]
+ );
+
+ # earlier 1.6.x versions would segfault, and <= 1.5.x didn't have
+ # this function
+ if (::compare_svn_version('1.6.12') > 0) {
+ my $config = SVN::Core::config_get_config($config_dir);
+ my ($p, @a);
+ # config_get_config returns all config files from
+ # ~/.subversion, auth_get_platform_specific_client_providers
+ # just wants the config "file".
+ @a = ($config->{'config'}, undef);
+ $p = SVN::Core::auth_get_platform_specific_client_providers(@a);
+ # Insert the return value from
+ # auth_get_platform_specific_providers
+ unshift @rv, @$p;
+ }
+ \@rv;
}
sub escape_uri_only {
# drop it. Therefore, the receiver callback passed to it
# is made aware of this limitation by being wrapped if
 	# a limit was requested.
- if ($SVN::Core::VERSION le '1.2.0') {
+ if (::compare_svn_version('1.2.0') <= 0) {
my $limit = splice(@args, 3, 1);
if ($limit > 0) {
my $receiver = pop @args;
sub get_commit_editor {
my ($self, $log, $cb, $pool) = @_;
- my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : ();
+
+ my @lock = (::compare_svn_version('1.2.0') >= 0) ? (undef, 0) : ();
$self->SUPER::get_commit_editor($log, $cb, @lock, $pool);
}
my (@pc) = split m#/#, $path;
my $reporter = $self->do_update($rev_b, (@pc ? shift @pc : ''),
1, $editor, $pool);
- my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : ();
+ my @lock = (::compare_svn_version('1.2.0') >= 0) ? (undef) : ();
# Since we can't rely on svn_ra_reparent being available, we'll
# just have to do some magic with set_path to make it so
$ra ||= $self;
$url_b = escape_url($url_b);
my $reporter = $ra->do_switch($rev_b, '', 1, $url_b, $editor, $pool);
- my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : ();
+ my @lock = (::compare_svn_version('1.2.0') >= 0) ? (undef) : ();
$reporter->set_path('', $rev_a, 0, @lock, $pool);
$reporter->finish_report($pool);
" <command> [<args>]";
const char git_more_info_string[] =
- "See 'git help <command>' for more information on a specific command.";
+ N_("See 'git help <command>' for more information on a specific command.");
static struct startup_info git_startup_info;
static int use_pager = -1;
{ "cherry-pick", cmd_cherry_pick, RUN_SETUP | NEED_WORK_TREE },
{ "clean", cmd_clean, RUN_SETUP | NEED_WORK_TREE },
{ "clone", cmd_clone },
+ { "column", cmd_column, RUN_SETUP_GENTLY },
{ "commit", cmd_commit, RUN_SETUP | NEED_WORK_TREE },
{ "commit-tree", cmd_commit_tree, RUN_SETUP },
{ "config", cmd_config, RUN_SETUP_GENTLY },
# (only effective if this variable evaluates to true)
our $export_ok = "++GITWEB_EXPORT_OK++";
+# don't generate age column on the projects list page
+our $omit_age_column = 0;
+
+# don't generate information about owners of repositories
+our $omit_owner = 0;
+
# show repository only if this subroutine returns true
# when given the path to the project, for example:
# sub { return -e "$_[0]/git-daemon-export-ok"; }
# '<span class="mark">foo</span>bar'
sub esc_html_hl_regions {
my ($str, $css_class, @sel) = @_;
- return esc_html($str) unless @sel;
+ my %opts = grep { ref($_) ne 'ARRAY' } @sel;
+ @sel = grep { ref($_) eq 'ARRAY' } @sel;
+ return esc_html($str, %opts) unless @sel;
my $out = '';
my $pos = 0;
for my $s (@sel) {
- $out .= esc_html(substr($str, $pos, $s->[0] - $pos))
- if ($s->[0] - $pos > 0);
- $out .= $cgi->span({-class => $css_class},
- esc_html(substr($str, $s->[0], $s->[1] - $s->[0])));
+ my ($begin, $end) = @$s;
+
+ # Don't create empty <span> elements.
+ next if $end <= $begin;
+
+ my $escaped = esc_html(substr($str, $begin, $end - $begin),
+ %opts);
- $pos = $s->[1];
+ $out .= esc_html(substr($str, $pos, $begin - $pos), %opts)
+ if ($begin - $pos > 0);
+ $out .= $cgi->span({-class => $css_class}, $escaped);
+
+ $pos = $end;
}
- $out .= esc_html(substr($str, $pos))
+ $out .= esc_html(substr($str, $pos), %opts)
if ($pos < length($str));
return $out;
}
# process patch (diff) line (not to be used for diff headers),
-# returning class and HTML-formatted (but not wrapped) line
-sub process_diff_line {
- my $line = shift;
- my ($from, $to) = @_;
-
- my $diff_class = diff_line_class($line, $from, $to);
-
- chomp $line;
- $line = untabify($line);
+# returning HTML-formatted (but not wrapped) line.
+# If the line is passed as a reference, it is treated as HTML and not
+# esc_html()'ed.
+sub format_diff_line {
+ my ($line, $diff_class, $from, $to) = @_;
+
+ if (ref($line)) {
+ $line = $$line;
+ } else {
+ chomp $line;
+ $line = untabify($line);
- if ($from && $to && $line =~ m/^\@{2} /) {
- $line = format_unidiff_chunk_header($line, $from, $to);
- return $diff_class, $line;
+ if ($from && $to && $line =~ m/^\@{2} /) {
+ $line = format_unidiff_chunk_header($line, $from, $to);
+ } elsif ($from && $to && $line =~ m/^\@{3}/) {
+ $line = format_cc_diff_chunk_header($line, $from, $to);
+ } else {
+ $line = esc_html($line, -nbsp=>1);
+ }
+ }
- } elsif ($from && $to && $line =~ m/^\@{3}/) {
- $line = format_cc_diff_chunk_header($line, $from, $to);
- return $diff_class, $line;
+ my $diff_classes = "diff";
+ $diff_classes .= " $diff_class" if ($diff_class);
+ $line = "<div class=\"$diff_classes\">$line</div>\n";
- }
- return $diff_class, esc_html($line, -nbsp=>1);
+ return $line;
}
# Generates undef or something like "_snapshot_" or "snapshot (_tbz2_ _zip_)",
}
if (check_export_ok("$projectroot/$path")) {
my $pr = {
- path => $path,
- owner => to_utf8($owner),
+ path => $path
};
+ if ($owner) {
+ $pr->{'owner'} = to_utf8($owner);
+ }
push @list, $pr;
}
}
'-type' => "application/$type+xml"
);
+ $href_params{'extra_options'} = undef;
$href_params{'action'} = $type;
$link_attr{'-href'} = href(%href_params);
print "<link ".
print "</table>\n";
}
-sub print_sidebyside_diff_chunk {
- my @chunk = @_;
+# Print context lines and then rem/add lines in a side-by-side manner.
+sub print_sidebyside_diff_lines {
+ my ($ctx, $rem, $add) = @_;
+
+ # print context block before add/rem block
+ if (@$ctx) {
+ print join '',
+ '<div class="chunk_block ctx">',
+ '<div class="old">',
+ @$ctx,
+ '</div>',
+ '<div class="new">',
+ @$ctx,
+ '</div>',
+ '</div>';
+ }
+
+ if (!@$add) {
+ # pure removal
+ print join '',
+ '<div class="chunk_block rem">',
+ '<div class="old">',
+ @$rem,
+ '</div>',
+ '</div>';
+ } elsif (!@$rem) {
+ # pure addition
+ print join '',
+ '<div class="chunk_block add">',
+ '<div class="new">',
+ @$add,
+ '</div>',
+ '</div>';
+ } else {
+ print join '',
+ '<div class="chunk_block chg">',
+ '<div class="old">',
+ @$rem,
+ '</div>',
+ '<div class="new">',
+ @$add,
+ '</div>',
+ '</div>';
+ }
+}
+
+# Print context lines and then rem/add lines in inline manner.
+sub print_inline_diff_lines {
+ my ($ctx, $rem, $add) = @_;
+
+ print @$ctx, @$rem, @$add;
+}
+
+# Format removed and added line, mark changed part and HTML-format them.
+# Implementation is based on contrib/diff-highlight
+sub format_rem_add_lines_pair {
+ my ($rem, $add, $num_parents) = @_;
+
+ # We need to untabify lines before split()'ing them;
+ # otherwise offsets would be invalid.
+ chomp $rem;
+ chomp $add;
+ $rem = untabify($rem);
+ $add = untabify($add);
+
+ my @rem = split(//, $rem);
+ my @add = split(//, $add);
+ my ($esc_rem, $esc_add);
+ # Ignore leading +/- characters for each parent.
+ my ($prefix_len, $suffix_len) = ($num_parents, 0);
+ my ($prefix_has_nonspace, $suffix_has_nonspace);
+
+ my $shorter = (@rem < @add) ? @rem : @add;
+ while ($prefix_len < $shorter) {
+ last if ($rem[$prefix_len] ne $add[$prefix_len]);
+
+ $prefix_has_nonspace = 1 if ($rem[$prefix_len] !~ /\s/);
+ $prefix_len++;
+ }
+
+ while ($prefix_len + $suffix_len < $shorter) {
+ last if ($rem[-1 - $suffix_len] ne $add[-1 - $suffix_len]);
+
+ $suffix_has_nonspace = 1 if ($rem[-1 - $suffix_len] !~ /\s/);
+ $suffix_len++;
+ }
+
+ # Mark lines that are different from each other, but have some common
+ # part that isn't whitespace. If lines are completely different, don't
+ # mark them because that would make output unreadable, especially if
+ # diff consists of multiple lines.
+ if ($prefix_has_nonspace || $suffix_has_nonspace) {
+ $esc_rem = esc_html_hl_regions($rem, 'marked',
+ [$prefix_len, @rem - $suffix_len], -nbsp=>1);
+ $esc_add = esc_html_hl_regions($add, 'marked',
+ [$prefix_len, @add - $suffix_len], -nbsp=>1);
+ } else {
+ $esc_rem = esc_html($rem, -nbsp=>1);
+ $esc_add = esc_html($add, -nbsp=>1);
+ }
+
+ return format_diff_line(\$esc_rem, 'rem'),
+ format_diff_line(\$esc_add, 'add');
+}
+
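format_rem_add_lines_pair() above borrows the contrib/diff-highlight idea: strip the common prefix and suffix of a removed/added line pair and mark only the differing middle. A minimal Python sketch of just the prefix/suffix computation (the HTML markup and the non-whitespace check are left out; the names are illustrative):

def common_affixes(rem, add, num_parents=1):
    # Skip the leading "-"/"+" column(s), one per parent in a combined diff.
    prefix, suffix = num_parents, 0
    shorter = min(len(rem), len(add))
    while prefix < shorter and rem[prefix] == add[prefix]:
        prefix += 1
    while prefix + suffix < shorter and rem[-1 - suffix] == add[-1 - suffix]:
        suffix += 1
    return prefix, suffix

# common_affixes("-foo(bar);", "+foo(baz);") -> (7, 2): mark only "r" and "z".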
+# HTML-format diff context, removed and added lines.
+sub format_ctx_rem_add_lines {
+ my ($ctx, $rem, $add, $num_parents) = @_;
+ my (@new_ctx, @new_rem, @new_add);
+ my $can_highlight = 0;
+ my $is_combined = ($num_parents > 1);
+
+ # Highlight if every removed line has a corresponding added line.
+ if (@$add > 0 && @$add == @$rem) {
+ $can_highlight = 1;
+
+ # Highlight lines in combined diff only if the chunk contains
+ # diff between the same version, e.g.
+ #
+ # - a
+ # - b
+ # + c
+ # + d
+ #
+ # Otherwise the highlighting would be confusing.
+ if ($is_combined) {
+ for (my $i = 0; $i < @$add; $i++) {
+ my $prefix_rem = substr($rem->[$i], 0, $num_parents);
+ my $prefix_add = substr($add->[$i], 0, $num_parents);
+
+ $prefix_rem =~ s/-/+/g;
+
+ if ($prefix_rem ne $prefix_add) {
+ $can_highlight = 0;
+ last;
+ }
+ }
+ }
+ }
+
+ if ($can_highlight) {
+ for (my $i = 0; $i < @$add; $i++) {
+ my ($line_rem, $line_add) = format_rem_add_lines_pair(
+ $rem->[$i], $add->[$i], $num_parents);
+ push @new_rem, $line_rem;
+ push @new_add, $line_add;
+ }
+ } else {
+ @new_rem = map { format_diff_line($_, 'rem') } @$rem;
+ @new_add = map { format_diff_line($_, 'add') } @$add;
+ }
+
+ @new_ctx = map { format_diff_line($_, 'ctx') } @$ctx;
+
+ return (\@new_ctx, \@new_rem, \@new_add);
+}
+
+# Print context lines and then rem/add lines.
+sub print_diff_lines {
+ my ($ctx, $rem, $add, $diff_style, $num_parents) = @_;
+ my $is_combined = $num_parents > 1;
+
+ ($ctx, $rem, $add) = format_ctx_rem_add_lines($ctx, $rem, $add,
+ $num_parents);
+
+ if ($diff_style eq 'sidebyside' && !$is_combined) {
+ print_sidebyside_diff_lines($ctx, $rem, $add);
+ } else {
+ # default 'inline' style and unknown styles
+ print_inline_diff_lines($ctx, $rem, $add);
+ }
+}
+
+sub print_diff_chunk {
+ my ($diff_style, $num_parents, $from, $to, @chunk) = @_;
my (@ctx, @rem, @add);
+ # The class of the previous line.
+ my $prev_class = '';
+
return unless @chunk;
# incomplete last line might be among removed or added lines,
# print chunk headers
if ($class && $class eq 'chunk_header') {
- print $line;
+ print format_diff_line($line, $class, $from, $to);
next;
}
- ## print from accumulator when type of class of lines change
- # empty contents block on start rem/add block, or end of chunk
- if (@ctx && (!$class || $class eq 'rem' || $class eq 'add')) {
- print join '',
- '<div class="chunk_block ctx">',
- '<div class="old">',
- @ctx,
- '</div>',
- '<div class="new">',
- @ctx,
- '</div>',
- '</div>';
- @ctx = ();
- }
- # empty add/rem block on start context block, or end of chunk
- if ((@rem || @add) && (!$class || $class eq 'ctx')) {
- if (!@add) {
- # pure removal
- print join '',
- '<div class="chunk_block rem">',
- '<div class="old">',
- @rem,
- '</div>',
- '</div>';
- } elsif (!@rem) {
- # pure addition
- print join '',
- '<div class="chunk_block add">',
- '<div class="new">',
- @add,
- '</div>',
- '</div>';
- } else {
- # assume that it is change
- print join '',
- '<div class="chunk_block chg">',
- '<div class="old">',
- @rem,
- '</div>',
- '<div class="new">',
- @add,
- '</div>',
- '</div>';
- }
- @rem = @add = ();
+ ## print from accumulator when we have some add/rem lines or reach the end
+ # of the chunk (flush context lines), or when we have both add and rem
+ # lines and a new block is reached (otherwise add/rem lines could
+ # be reordered)
+ if (!$class || ((@rem || @add) && $class eq 'ctx') ||
+ (@rem && @add && $class ne $prev_class)) {
+ print_diff_lines(\@ctx, \@rem, \@add,
+ $diff_style, $num_parents);
+ @ctx = @rem = @add = ();
}
## adding lines to accumulator
if ($class eq 'ctx') {
push @ctx, $line;
}
+
+ $prev_class = $class;
}
}
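print_diff_chunk() above accumulates classified lines into context/removed/added groups and flushes them whenever the chunk ends, context resumes, or flushing is needed to keep removed and added lines from being reordered. The grouping rule on its own, as a small Python sketch (input is (class, line) pairs; purely illustrative):

def group_diff_lines(classified):
    groups = []
    ctx, rem, add = [], [], []
    prev = ''
    for cls, line in classified:
        # Flush when context resumes after add/rem lines, or when both
        # add and rem lines are pending and the class changes again.
        if ((rem or add) and cls == 'ctx') or (rem and add and cls != prev):
            groups.append((ctx, rem, add))
            ctx, rem, add = [], [], []
        {'ctx': ctx, 'rem': rem, 'add': add}[cls].append(line)
        prev = cls
    if ctx or rem or add:
        groups.append((ctx, rem, add))   # end of chunk flush
    return groups

# group_diff_lines([('ctx', ' a'), ('rem', '-b'), ('add', '+c'), ('ctx', ' d')])
# -> [([' a'], ['-b'], ['+c']), ([' d'], [], [])]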
next PATCH if ($patch_line =~ m/^diff /);
- my ($class, $line) = process_diff_line($patch_line, \%from, \%to);
- my $diff_classes = "diff";
- $diff_classes .= " $class" if ($class);
- $line = "<div class=\"$diff_classes\">$line</div>\n";
+ my $class = diff_line_class($patch_line, \%from, \%to);
- if ($diff_style eq 'sidebyside' && !$is_combined) {
- if ($class eq 'chunk_header') {
- print_sidebyside_diff_chunk(@chunk);
- @chunk = ( [ $class, $line ] );
- } else {
- push @chunk, [ $class, $line ];
- }
- } else {
- # default 'inline' style and unknown styles
- print $line;
+ if ($class eq 'chunk_header') {
+ print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk);
+ @chunk = ();
}
+
+ push @chunk, [ $class, $patch_line ];
}
} continue {
if (@chunk) {
- print_sidebyside_diff_chunk(@chunk);
+ print_diff_chunk($diff_style, scalar @hash_parents, \%from, \%to, @chunk);
@chunk = ();
}
print "</div>\n"; # class="patch"
? esc_html_match_hl_chopped($pr->{'descr_long'},
$pr->{'descr'}, $search_regexp)
: esc_html($pr->{'descr'})) .
- "</td>\n" .
- "<td><i>" . chop_and_escape_str($pr->{'owner'}, 15) . "</i></td>\n";
- print "<td class=\"". age_class($pr->{'age'}) . "\">" .
- (defined $pr->{'age_string'} ? $pr->{'age_string'} : "No commits") . "</td>\n" .
- "<td class=\"link\">" .
+ "</td>\n";
+ unless ($omit_owner) {
+ print "<td><i>" . chop_and_escape_str($pr->{'owner'}, 15) . "</i></td>\n";
+ }
+ unless ($omit_age_column) {
+ print "<td class=\"". age_class($pr->{'age'}) . "\">" .
+ (defined $pr->{'age_string'} ? $pr->{'age_string'} : "No commits") . "</td>\n";
+ }
+ print"<td class=\"link\">" .
$cgi->a({-href => href(project=>$pr->{'path'}, action=>"summary")}, "summary") . " | " .
$cgi->a({-href => href(project=>$pr->{'path'}, action=>"shortlog")}, "shortlog") . " | " .
$cgi->a({-href => href(project=>$pr->{'path'}, action=>"log")}, "log") . " | " .
'tagfilter' => $tagfilter)
if ($tagfilter || $search_regexp);
# fill the rest
- @projects = fill_project_list_info(\@projects);
+ my @all_fields = ('descr', 'descr_long', 'ctags', 'category');
+ push @all_fields, ('age', 'age_string') unless($omit_age_column);
+ push @all_fields, 'owner' unless($omit_owner);
+ @projects = fill_project_list_info(\@projects, @all_fields);
$order ||= $default_projects_order;
$from = 0 unless defined $from;
}
print_sort_th('project', $order, 'Project');
print_sort_th('descr', $order, 'Description');
- print_sort_th('owner', $order, 'Owner');
- print_sort_th('age', $order, 'Last Change');
+ print_sort_th('owner', $order, 'Owner') unless $omit_owner;
+ print_sort_th('age', $order, 'Last Change') unless $omit_age_column;
print "<th></th>\n" . # for links
"</tr>\n";
}
print "<div class=\"title\"> </div>\n";
print "<table class=\"projects_list\">\n" .
- "<tr id=\"metadata_desc\"><td>description</td><td>" . esc_html($descr) . "</td></tr>\n" .
- "<tr id=\"metadata_owner\"><td>owner</td><td>" . esc_html($owner) . "</td></tr>\n";
+ "<tr id=\"metadata_desc\"><td>description</td><td>" . esc_html($descr) . "</td></tr>\n";
+ unless ($omit_owner) {
+ print "<tr id=\"metadata_owner\"><td>owner</td><td>" . esc_html($owner) . "</td></tr>\n";
+ }
if (defined $cd{'rfc2822'}) {
print "<tr id=\"metadata_lchange\"><td>last change</td>" .
"<td>".format_timestamp_html(\%cd)."</td></tr>\n";
return wantarray ? ($name, $name) : $name;
}
+sub exit_if_unmodified_since {
+ my ($latest_epoch) = @_;
+ our $cgi;
+
+ my $if_modified = $cgi->http('IF_MODIFIED_SINCE');
+ if (defined $if_modified) {
+ my $since;
+ if (eval { require HTTP::Date; 1; }) {
+ $since = HTTP::Date::str2time($if_modified);
+ } elsif (eval { require Time::ParseDate; 1; }) {
+ $since = Time::ParseDate::parsedate($if_modified, GMT => 1);
+ }
+ if (defined $since && $latest_epoch <= $since) {
+ my %latest_date = parse_date($latest_epoch);
+ print $cgi->header(
+ -last_modified => $latest_date{'rfc2822'},
+ -status => '304 Not Modified');
+ goto DONE_GITWEB;
+ }
+ }
+}
+
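exit_if_unmodified_since() above answers 304 Not Modified whenever the client's If-Modified-Since date is not older than the newest relevant commit, letting the new snapshot caching reuse the logic the feeds already had. A minimal WSGI-flavoured Python sketch of the same comparison (the function and header handling are illustrative, not gitweb's API):

from email.utils import parsedate_to_datetime, formatdate

def maybe_not_modified(environ, latest_epoch):
    # Return (status, headers) for a 304 response, or None to render normally.
    header = environ.get("HTTP_IF_MODIFIED_SINCE")
    if not header:
        return None
    try:
        since = parsedate_to_datetime(header).timestamp()
    except (TypeError, ValueError):
        return None               # unparsable date: just generate the page
    if latest_epoch <= since:
        return ("304 Not Modified",
                [("Last-Modified", formatdate(latest_epoch, usegmt=True))])
    return None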
sub git_snapshot {
my $format = $input_params{'snapshot_format'};
if (!@snapshot_fmts) {
my ($name, $prefix) = snapshot_name($project, $hash);
my $filename = "$name$known_snapshot_formats{$format}{'suffix'}";
+
+ my %co = parse_commit($hash);
+ exit_if_unmodified_since($co{'committer_epoch'}) if %co;
+
my $cmd = quote_command(
git_cmd(), 'archive',
"--format=$known_snapshot_formats{$format}{'format'}",
}
$filename =~ s/(["\\])/\\$1/g;
+ my %latest_date;
+ if (%co) {
+ %latest_date = parse_date($co{'committer_epoch'}, $co{'committer_tz'});
+ }
+
print $cgi->header(
-type => $known_snapshot_formats{$format}{'type'},
-content_disposition => 'inline; filename="' . $filename . '"',
+ %co ? (-last_modified => $latest_date{'rfc2822'}) : (),
-status => '200 OK');
open my $fd, "-|", $cmd
if (defined($commitlist[0])) {
%latest_commit = %{$commitlist[0]};
my $latest_epoch = $latest_commit{'committer_epoch'};
- %latest_date = parse_date($latest_epoch, $latest_commit{'comitter_tz'});
- my $if_modified = $cgi->http('IF_MODIFIED_SINCE');
- if (defined $if_modified) {
- my $since;
- if (eval { require HTTP::Date; 1; }) {
- $since = HTTP::Date::str2time($if_modified);
- } elsif (eval { require Time::ParseDate; 1; }) {
- $since = Time::ParseDate::parsedate($if_modified, GMT => 1);
- }
- if (defined $since && $latest_epoch <= $since) {
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8',
- -last_modified => $latest_date{'rfc2822'},
- -status => '304 Not Modified');
- return;
- }
- }
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8',
- -last_modified => $latest_date{'rfc2822'});
- } else {
- print $cgi->header(
- -type => $content_type,
- -charset => 'utf-8');
+ exit_if_unmodified_since($latest_epoch);
+ %latest_date = parse_date($latest_epoch, $latest_commit{'committer_tz'});
}
+ print $cgi->header(
+ -type => $content_type,
+ -charset => 'utf-8',
+ %latest_date ? (-last_modified => $latest_date{'rfc2822'}) : (),
+ -status => '200 OK');
# Optimization: skip generating the body if client asks only
# for Last-Modified date.
color: #008800;
}
+div.diff.add span.marked {
+ background-color: #aaffaa;
+}
+
div.diff.from_file a.path,
div.diff.from_file {
color: #aa0000;
color: #cc0000;
}
+div.diff.rem span.marked {
+ background-color: #ffaaaa;
+}
+
div.diff.chunk_header a,
div.diff.chunk_header {
color: #990099;
struct git_graph *graph = data;
static struct strbuf msgbuf = STRBUF_INIT;
+ assert(opt);
assert(graph);
+ opt->output_prefix_length = graph->width;
strbuf_reset(&msgbuf);
graph_padding_line(graph, &msgbuf);
return &msgbuf;
*/
opt->diffopt.output_prefix = diff_output_prefix_callback;
opt->diffopt.output_prefix_data = graph;
+ opt->diffopt.output_prefix_length = 0;
return graph;
}
#include "levenshtein.h"
#include "help.h"
#include "common-cmds.h"
+#include "string-list.h"
+#include "column.h"
void add_cmdname(struct cmdnames *cmds, const char *name, int len)
{
cmds->cnt = cj;
}
-static void pretty_print_string_list(struct cmdnames *cmds, int longest)
+static void pretty_print_string_list(struct cmdnames *cmds,
+ unsigned int colopts)
{
- int cols = 1, rows;
- int space = longest + 1; /* min 1 SP between words */
- int max_cols = term_columns() - 1; /* don't print *on* the edge */
- int i, j;
-
- if (space < max_cols)
- cols = max_cols / space;
- rows = DIV_ROUND_UP(cmds->cnt, cols);
-
- for (i = 0; i < rows; i++) {
- printf(" ");
+ struct string_list list = STRING_LIST_INIT_NODUP;
+ struct column_options copts;
+ int i;
- for (j = 0; j < cols; j++) {
- int n = j * rows + i;
- int size = space;
- if (n >= cmds->cnt)
- break;
- if (j == cols-1 || n + rows >= cmds->cnt)
- size = 1;
- printf("%-*s", size, cmds->names[n]->name);
- }
- putchar('\n');
- }
+ for (i = 0; i < cmds->cnt; i++)
+ string_list_append(&list, cmds->names[i]->name);
+ /*
+ * always enable column display, we only consult column.*
+ * about layout strategy and stuff
+ */
+ colopts = (colopts & ~COL_ENABLE_MASK) | COL_ENABLED;
+ memset(&copts, 0, sizeof(copts));
+ copts.indent = " ";
+ copts.padding = 2;
+ print_columns(&list, colopts, &copts);
+ string_list_clear(&list, 0);
}
static int is_executable(const char *name)
exclude_cmds(other_cmds, main_cmds);
}
-void list_commands(const char *title, struct cmdnames *main_cmds,
- struct cmdnames *other_cmds)
+void list_commands(unsigned int colopts,
+ struct cmdnames *main_cmds, struct cmdnames *other_cmds)
{
- int i, longest = 0;
-
- for (i = 0; i < main_cmds->cnt; i++)
- if (longest < main_cmds->names[i]->len)
- longest = main_cmds->names[i]->len;
- for (i = 0; i < other_cmds->cnt; i++)
- if (longest < other_cmds->names[i]->len)
- longest = other_cmds->names[i]->len;
-
if (main_cmds->cnt) {
const char *exec_path = git_exec_path();
- printf("available %s in '%s'\n", title, exec_path);
- printf("----------------");
- mput_char('-', strlen(title) + strlen(exec_path));
+ printf_ln(_("available git commands in '%s'"), exec_path);
putchar('\n');
- pretty_print_string_list(main_cmds, longest);
+ pretty_print_string_list(main_cmds, colopts);
putchar('\n');
}
if (other_cmds->cnt) {
- printf("%s available from elsewhere on your $PATH\n", title);
- printf("---------------------------------------");
- mput_char('-', strlen(title));
+ printf_ln(_("git commands available from elsewhere on your $PATH"));
putchar('\n');
- pretty_print_string_list(other_cmds, longest);
+ pretty_print_string_list(other_cmds, colopts);
putchar('\n');
}
}
sizeof(*main_cmds.names), levenshtein_compare);
if (!main_cmds.cnt)
- die ("Uh oh. Your system reports no Git commands at all.");
+ die(_("Uh oh. Your system reports no Git commands at all."));
/* skip and count prefix matches */
for (n = 0; n < main_cmds.cnt && !main_cmds.names[n]->len; n++)
const char *assumed = main_cmds.names[0]->name;
main_cmds.names[0] = NULL;
clean_cmdnames(&main_cmds);
- fprintf(stderr, "WARNING: You called a Git command named '%s', "
- "which does not exist.\n"
- "Continuing under the assumption that you meant '%s'\n",
+ fprintf_ln(stderr,
+ _("WARNING: You called a Git command named '%s', "
+ "which does not exist.\n"
+ "Continuing under the assumption that you meant '%s'"),
cmd, assumed);
if (autocorrect > 0) {
- fprintf(stderr, "in %0.1f seconds automatically...\n",
+ fprintf_ln(stderr, _("in %0.1f seconds automatically..."),
(float)autocorrect/10.0);
poll(NULL, 0, autocorrect * 100);
}
return assumed;
}
- fprintf(stderr, "git: '%s' is not a git command. See 'git --help'.\n", cmd);
+ fprintf_ln(stderr, _("git: '%s' is not a git command. See 'git --help'."), cmd);
if (SIMILAR_ENOUGH(best_similarity)) {
- fprintf(stderr, "\nDid you mean %s?\n",
- n < 2 ? "this": "one of these");
+ fprintf_ln(stderr,
+ Q_("\nDid you mean this?",
+ "\nDid you mean one of these?",
+ n));
for (i = 0; i < n; i++)
fprintf(stderr, "\t%s\n", main_cmds.names[i]->name);
/* Here we require that excludes is a sorted list. */
extern void exclude_cmds(struct cmdnames *cmds, struct cmdnames *excludes);
extern int is_in_cmdlist(struct cmdnames *cmds, const char *name);
-extern void list_commands(const char *title,
- struct cmdnames *main_cmds,
- struct cmdnames *other_cmds);
+extern void list_commands(unsigned int colopts, struct cmdnames *main_cmds, struct cmdnames *other_cmds);
#endif /* HELP_H */
#include "run-command.h"
#include "string-list.h"
#include "url.h"
+#include "argv-array.h"
static const char content_type[] = "Content-Type";
static const char content_length[] = "Content-Length";
const char *encoding = getenv("HTTP_CONTENT_ENCODING");
const char *user = getenv("REMOTE_USER");
const char *host = getenv("REMOTE_ADDR");
- char *env[3];
- struct strbuf buf = STRBUF_INIT;
+ struct argv_array env = ARGV_ARRAY_INIT;
int gzipped_request = 0;
struct child_process cld;
if (!host || !*host)
host = "(none)";
- memset(&env, 0, sizeof(env));
- strbuf_addf(&buf, "GIT_COMMITTER_NAME=%s", user);
- env[0] = strbuf_detach(&buf, NULL);
-
- strbuf_addf(&buf, "GIT_COMMITTER_EMAIL=%s@http.%s", user, host);
- env[1] = strbuf_detach(&buf, NULL);
- env[2] = NULL;
+ if (!getenv("GIT_COMMITTER_NAME"))
+ argv_array_pushf(&env, "GIT_COMMITTER_NAME=%s", user);
+ if (!getenv("GIT_COMMITTER_EMAIL"))
+ argv_array_pushf(&env, "GIT_COMMITTER_EMAIL=%s@http.%s",
+ user, host);
memset(&cld, 0, sizeof(cld));
cld.argv = argv;
- cld.env = (const char *const *)env;
+ cld.env = env.argv;
if (gzipped_request)
cld.in = -1;
cld.git_cmd = 1;
if (finish_command(&cld))
exit(1);
- free(env[0]);
- free(env[1]);
- strbuf_release(&buf);
+ argv_array_clear(&env);
}
static int show_text_ref(const char *name, const unsigned char *sha1,
if (repo->path)
url = repo->path;
if (strncmp(path, url, repo->path_len))
- error("Parsed path '%s' does not match url: '%s'\n",
+ error("Parsed path '%s' does not match url: '%s'",
path, url);
else {
path += repo->path_len;
run_active_slot(slot);
free(url);
if (results.curl_result != CURLE_OK)
- return error("DELETE request failed (%d/%ld)\n",
+ return error("DELETE request failed (%d/%ld)",
results.curl_result, results.http_code);
} else {
free(url);
tmp = strbuf_detach(&buf, NULL);
if (http_get_file(url, tmp, 0) != HTTP_OK) {
- error("Unable to get pack index %s\n", url);
+ error("Unable to get pack index %s", url);
free(tmp);
tmp = NULL;
}
ret = socket_write(&ctx->imap->buf.sock, response, strlen(response));
if (ret != strlen(response))
- return error("IMAP error: sending response failed\n");
+ return error("IMAP error: sending response failed");
free(response);
if (buffer_is_binary(orig->ptr, orig->size) ||
buffer_is_binary(src1->ptr, src1->size) ||
buffer_is_binary(src2->ptr, src2->size)) {
- warning("Cannot merge binary files: %s (%s vs. %s)\n",
+ warning("Cannot merge binary files: %s (%s vs. %s)",
path, name1, name2);
return ll_binary_merge(drv_unused, result,
path,
else if (!prefixcmp(s, "subtree="))
o->subtree_shift = s + strlen("subtree=");
else if (!strcmp(s, "patience"))
- o->xdl_opts |= XDF_PATIENCE_DIFF;
+ o->xdl_opts = DIFF_WITH_ALG(o, PATIENCE_DIFF);
else if (!strcmp(s, "histogram"))
- o->xdl_opts |= XDF_HISTOGRAM_DIFF;
+ o->xdl_opts = DIFF_WITH_ALG(o, HISTOGRAM_DIFF);
else if (!strcmp(s, "ignore-space-change"))
o->xdl_opts |= XDF_IGNORE_WHITESPACE_CHANGE;
else if (!strcmp(s, "ignore-all-space"))
--- /dev/null
+#include "cache.h"
+#include "mergesort.h"
+
+struct mergesort_sublist {
+ void *ptr;
+ unsigned long len;
+};
+
+static void *get_nth_next(void *list, unsigned long n,
+ void *(*get_next_fn)(const void *))
+{
+ while (n-- && list)
+ list = get_next_fn(list);
+ return list;
+}
+
+static void *pop_item(struct mergesort_sublist *l,
+ void *(*get_next_fn)(const void *))
+{
+ void *p = l->ptr;
+ l->ptr = get_next_fn(l->ptr);
+ l->len = l->ptr ? (l->len - 1) : 0;
+ return p;
+}
+
+void *llist_mergesort(void *list,
+ void *(*get_next_fn)(const void *),
+ void (*set_next_fn)(void *, void *),
+ int (*compare_fn)(const void *, const void *))
+{
+ unsigned long l;
+
+ if (!list)
+ return NULL;
+ for (l = 1; ; l *= 2) {
+ void *curr;
+ struct mergesort_sublist p, q;
+
+ p.ptr = list;
+ q.ptr = get_nth_next(p.ptr, l, get_next_fn);
+ if (!q.ptr)
+ break;
+ p.len = q.len = l;
+
+ if (compare_fn(p.ptr, q.ptr) > 0)
+ list = curr = pop_item(&q, get_next_fn);
+ else
+ list = curr = pop_item(&p, get_next_fn);
+
+ while (p.ptr) {
+ while (p.len || q.len) {
+ void *prev = curr;
+
+ if (!p.len)
+ curr = pop_item(&q, get_next_fn);
+ else if (!q.len)
+ curr = pop_item(&p, get_next_fn);
+ else if (compare_fn(p.ptr, q.ptr) > 0)
+ curr = pop_item(&q, get_next_fn);
+ else
+ curr = pop_item(&p, get_next_fn);
+ set_next_fn(prev, curr);
+ }
+ p.ptr = q.ptr;
+ p.len = l;
+ q.ptr = get_nth_next(p.ptr, l, get_next_fn);
+ q.len = q.ptr ? l : 0;
+
+ }
+ set_next_fn(curr, NULL);
+ }
+ return list;
+}
--- /dev/null
+#ifndef MERGESORT_H
+#define MERGESORT_H
+
+/*
+ * Sort linked list in place.
+ * - get_next_fn() returns the next element given an element of a linked list.
+ * - set_next_fn() takes two elements A and B, and makes B the "next" element
+ * of A on the list.
+ * - compare_fn() takes two elements A and B, and returns negative, 0, positive
+ * as the same sign as "subtracting" B from A.
+ */
+void *llist_mergesort(void *list,
+ void *(*get_next_fn)(const void *),
+ void (*set_next_fn)(void *, void *),
+ int (*compare_fn)(const void *, const void *));
+
+#endif
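
The header comment above documents the callback contract of llist_mergesort(). As a minimal editorial sketch (not part of the patch itself), the following shows one way a caller could sort a small singly-linked list with it; struct int_node and the three callback names are illustrative assumptions, not code from the tree:

#include <stdio.h>
#include "mergesort.h"

struct int_node {
	struct int_node *next;
	int value;
};

/* get_next_fn: return the element following 'node' */
static void *node_get_next(const void *node)
{
	return ((const struct int_node *)node)->next;
}

/* set_next_fn: make 'next' the element following 'node' */
static void node_set_next(void *node, void *next)
{
	((struct int_node *)node)->next = next;
}

/* compare_fn: negative/zero/positive, as documented above */
static int node_compare(const void *a, const void *b)
{
	const struct int_node *x = a, *y = b;
	return (x->value > y->value) - (x->value < y->value);
}

int main(void)
{
	/* build the list 2 -> 3 -> 1 */
	struct int_node c = { NULL, 1 }, b = { &c, 3 }, a = { &b, 2 };
	struct int_node *list = &a, *p;

	list = llist_mergesort(list, node_get_next, node_set_next, node_compare);
	for (p = list; p; p = p->next)
		printf("%d\n", p->value);	/* prints 1, 2, 3 */
	return 0;
}
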
obj = &tag->object;
}
} else {
- warning("object %s has unknown type id %d\n", sha1_to_hex(sha1), type);
+ warning("object %s has unknown type id %d", sha1_to_hex(sha1), type);
obj = NULL;
}
if (obj && obj->type == OBJ_NONE)
if (obj && obj->parsed)
return obj;
+ if ((obj && obj->type == OBJ_BLOB) ||
+ (!obj && has_sha1_file(sha1) &&
+ sha1_object_info(sha1, NULL) == OBJ_BLOB)) {
+ if (check_sha1_signature(repl, NULL, 0, NULL) < 0) {
+ error("sha1 mismatch %s", sha1_to_hex(repl));
+ return NULL;
+ }
+ parse_blob_buffer(lookup_blob(sha1), NULL, 0);
+ return lookup_object(sha1);
+ }
+
buffer = read_sha1_file(sha1, &type, &size);
if (buffer) {
if (check_sha1_signature(repl, buffer, size, typename(type)) < 0) {
free(buffer);
- error("sha1 mismatch %s\n", sha1_to_hex(repl));
+ error("sha1 mismatch %s", sha1_to_hex(repl));
return NULL;
}
array->nr = dst;
}
}
+
+void clear_object_flags(unsigned flags)
+{
+ int i;
+
+ for (i=0; i < obj_hash_size; i++) {
+ struct object *obj = obj_hash[i];
+ if (obj)
+ obj->flags &= ~flags;
+ }
+}
void add_object_array_with_mode(struct object *obj, const char *name, struct object_array *array, unsigned mode);
void object_array_remove_duplicates(struct object_array *);
+void clear_object_flags(unsigned flags);
+
#endif /* OBJECT_H */
{
const char *pager = git_pager(isatty(1));
- if (!pager)
+ if (!pager || pager_in_use())
return;
/*
PARSE_OPT_OPTARG, &parse_opt_abbrev_cb, 0 }
#define OPT__COLOR(var, h) \
OPT_COLOR_FLAG(0, "color", (var), (h))
+#define OPT_COLUMN(s, l, v, h) \
+ { OPTION_CALLBACK, (s), (l), (v), "style", (h), PARSE_OPT_OPTARG, parseopt_column_callback }
#endif
# German translations for Git.
-# Copyright (C) 2012 Ralf Thielow <ralf.thielow@googlemail.com>
+# Copyright (C) 2010-2012 Ralf Thielow <ralf.thielow@googlemail.com>
# This file is distributed under the same license as the Git package.
-# Ralf Thielow <ralf.thielow@googlemail.com>, 2012.
+# Ralf Thielow <ralf.thielow@googlemail.com>, 2010, 2011, 2012.
#
msgid ""
msgstr ""
"Project-Id-Version: git 1.7.10\n"
"Report-Msgid-Bugs-To: Git Mailing List <git@vger.kernel.org>\n"
-"POT-Creation-Date: 2012-04-28 20:33+0800\n"
+"POT-Creation-Date: 2012-04-28 20:17+0800\n"
"PO-Revision-Date: 2012-03-28 18:46+0200\n"
"Last-Translator: Ralf Thielow <ralf.thielow@googlemail.com>\n"
"Language-Team: German\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
-#: advice.c:34
+#: advice.c:40
#, c-format
msgid "hint: %.*s\n"
msgstr "Hinweis: %.*s\n"
#. * Message used both when 'git commit' fails and when
#. * other commands doing a merge do.
#.
-#: advice.c:64
+#: advice.c:70
msgid ""
"Fix them up in the work tree,\n"
"and then use 'git add/rm <file>' as\n"
"vorgesehen, um die Auflösung zu markieren und dann einzutragen,\n"
"oder benutze 'git commit -a'."
-#: commit.c:47
+#: commit.c:48
#, c-format
msgid "could not parse %s"
msgstr "konnte %s nicht parsen"
-#: commit.c:49
+#: commit.c:50
#, c-format
msgid "%s %s is not a commit!"
msgstr "%s %s ist keine Version!"
"Dein Zweig und '%s' sind divergiert,\n"
"und haben jeweils %d und %d unterschiedliche Versionen.\n"
-#: sequencer.c:120 builtin/merge.c:864 builtin/merge.c:985
-#: builtin/merge.c:1095 builtin/merge.c:1105
+#: sequencer.c:120 builtin/merge.c:865 builtin/merge.c:978
+#: builtin/merge.c:1088 builtin/merge.c:1098
#, c-format
msgid "Could not open '%s' for writing"
msgstr "Konnte '%s' nicht zum Schreiben öffnen."
-#: sequencer.c:122 builtin/merge.c:334 builtin/merge.c:867
-#: builtin/merge.c:1097 builtin/merge.c:1110
+#: sequencer.c:122 builtin/merge.c:333 builtin/merge.c:868
+#: builtin/merge.c:1090 builtin/merge.c:1103
#, c-format
msgid "Could not write to '%s'"
msgstr "Konnte nicht nach '%s' schreiben."
msgid "could not apply %s... %s"
msgstr "Konnte %s nicht anwenden... %s"
-#: sequencer.c:450 sequencer.c:909 builtin/log.c:288 builtin/log.c:713
-#: builtin/log.c:1329 builtin/log.c:1548 builtin/merge.c:348
+#: sequencer.c:450 sequencer.c:909 builtin/log.c:289 builtin/log.c:719
+#: builtin/log.c:1335 builtin/log.c:1554 builtin/merge.c:347
#: builtin/shortlog.c:181
msgid "revision walk setup failed"
msgstr "Einrichtung des Revisionsgangs fehlgeschlagen"
#: sequencer.c:742
#, c-format
msgid "stored pre-cherry-pick HEAD file '%s' is corrupt"
-msgstr "gespeicherte \"pre-cherry-pick\" Datei der Zweigspitze (HEAD) '%s' "
-"ist beschädigt"
+msgstr ""
+"gespeicherte \"pre-cherry-pick\" Datei der Zweigspitze (HEAD) '%s' ist "
+"beschädigt"
#: sequencer.c:765
#, c-format
#: sequencer.c:928
msgid "Can't cherry-pick into empty head"
-msgstr "Kann \"cherry-pick\" nicht in einen leeren Kopf ausführen."
+msgstr "Kann \"cherry-pick\" nicht in einem leerem Kopf ausführen."
+
+#: sha1_name.c:864
+msgid "HEAD does not point to a branch"
+msgstr "Zweigspitze (HEAD) zeigt auf keinen Zweig"
+
+#: sha1_name.c:867
+#, c-format
+msgid "No such branch: '%s'"
+msgstr "Kein solcher Zweig '%s'"
+
+#: sha1_name.c:869
+#, c-format
+msgid "No upstream configured for branch '%s'"
+msgstr "Kein entferntes Projektarchiv für Zweig '%s' konfiguriert."
+
+#: sha1_name.c:872
+#, c-format
+msgid "Upstream branch '%s' not stored as a remote-tracking branch"
+msgstr "Zweig '%s' des entfernten Projektarchivs ist nicht als entfernter "
+"Übernahmezweig gespeichert"
#: wt-status.c:134
msgid "Unmerged paths:"
#: builtin/add.c:209
#, c-format
msgid "'%s' is beyond a symbolic link"
-msgstr "'%s' ist über einer symbolischen Verknüpfung"
+msgstr "'%s' ist über einem symbolischen Link"
#: builtin/add.c:276
msgid "Could not read the index"
#: builtin/archive.c:71
msgid "git archive: expected a flush"
-msgstr "git archive: erwartete eine Leerung (flush)"
+msgstr "git archive: erwartete eine Spülung (flush)"
#: builtin/branch.c:137
#, c-format
" '%s', obwohl er mit der Zweigspitze (HEAD) zusammengeführt wurde."
#. TRANSLATORS: This is "remote " in "remote branch '%s' not found"
-#: builtin/branch.c:163
+#: builtin/branch.c:164
msgid "remote "
msgstr "entfernter "
-#: builtin/branch.c:171
+#: builtin/branch.c:172
msgid "cannot use -a with -d"
msgstr "kann -a nicht mit -d benutzen"
-#: builtin/branch.c:177
+#: builtin/branch.c:178
msgid "Couldn't look up commit object for HEAD"
msgstr "Konnte Versionsobjekt für Zweigspitze (HEAD) nicht nachschlagen."
-#: builtin/branch.c:182
+#: builtin/branch.c:183
#, c-format
msgid "Cannot delete the branch '%s' which you are currently on."
msgstr "Kann Zweig '%s' nicht entfernen auf dem du dich gerade befindest."
-#: builtin/branch.c:192
+#: builtin/branch.c:193
#, c-format
msgid "%sbranch '%s' not found."
msgstr "%sZweig '%s' nicht gefunden."
-#: builtin/branch.c:200
+#: builtin/branch.c:201
#, c-format
msgid "Couldn't look up commit object for '%s'"
msgstr "Konnte Versionsobjekt für '%s' nicht nachschlagen."
-#: builtin/branch.c:206
+#: builtin/branch.c:207
#, c-format
msgid ""
"The branch '%s' is not fully merged.\n"
"Der Zweig '%s' ist nicht vollständig zusammengeführt.\n"
"Wenn du sicher bist diesen Zweig zu entfernen, führe 'git branch -D %s' aus."
-#: builtin/branch.c:214
+#: builtin/branch.c:215
#, c-format
msgid "Error deleting %sbranch '%s'"
msgstr "Fehler beim Löschen von %sZweig '%s'"
-#: builtin/branch.c:219
+#: builtin/branch.c:221
#, c-format
msgid "Deleted %sbranch %s (was %s).\n"
msgstr "Entferne %sZweig %s (war %s).\n"
-#: builtin/branch.c:224
+#: builtin/branch.c:226
msgid "Update of config-file failed"
msgstr "Aktualisierung der Konfigurationsdatei fehlgeschlagen."
-#: builtin/branch.c:322
+#: builtin/branch.c:324
#, c-format
msgid "branch '%s' does not point at a commit"
msgstr "Zweig '%s' zeigt auf keine Version"
-#: builtin/branch.c:394
+#: builtin/branch.c:396
#, c-format
msgid "behind %d] "
msgstr "hinter %d] "
-#: builtin/branch.c:396
+#: builtin/branch.c:398
#, c-format
msgid "ahead %d] "
msgstr "vor %d] "
-#: builtin/branch.c:398
+#: builtin/branch.c:400
#, c-format
msgid "ahead %d, behind %d] "
msgstr "vor %d, hinter %d] "
-#: builtin/branch.c:501
+#: builtin/branch.c:503
msgid "(no branch)"
msgstr "(kein Zweig)"
-#: builtin/branch.c:566
+#: builtin/branch.c:568
msgid "some refs could not be read"
msgstr "einige Referenzen konnten nicht gelesen werden"
-#: builtin/branch.c:579
+#: builtin/branch.c:581
msgid "cannot rename the current branch while not on any."
msgstr ""
"Kann aktuellen Zweig nicht umbennen, solange du dich auf keinem befindest."
-#: builtin/branch.c:589
+#: builtin/branch.c:591
#, c-format
msgid "Invalid branch name: '%s'"
msgstr "Ungültiger Zweig-Name: '%s'"
-#: builtin/branch.c:604
+#: builtin/branch.c:606
msgid "Branch rename failed"
msgstr "Umbenennung des Zweiges fehlgeschlagen"
-#: builtin/branch.c:608
+#: builtin/branch.c:610
#, c-format
msgid "Renamed a misnamed branch '%s' away"
msgstr "falsch benannten Zweig '%s' umbenannt"
-#: builtin/branch.c:612
+#: builtin/branch.c:614
#, c-format
msgid "Branch renamed to %s, but HEAD is not updated!"
msgstr "Zweig umbenannt zu %s, aber Zweigspitze (HEAD) ist nicht aktualisiert!"
-#: builtin/branch.c:619
+#: builtin/branch.c:621
msgid "Branch is renamed, but update of config-file failed"
msgstr ""
"Zweig ist umbenannt, aber die Aktualisierung der Konfigurationsdatei ist "
"fehlgeschlagen."
-#: builtin/branch.c:634
+#: builtin/branch.c:636
#, c-format
msgid "malformed object name %s"
msgstr "Missgebildeter Objektname %s"
-#: builtin/branch.c:658
+#: builtin/branch.c:660
#, c-format
msgid "could not write branch description template: %s\n"
msgstr "Konnte Beschreibungsvorlage für Zweig nicht schreiben: %s\n"
-#: builtin/branch.c:746
+#: builtin/branch.c:750
msgid "Failed to resolve HEAD as a valid ref."
msgstr "Zweigspitze (HEAD) konnte nicht als gültige Referenz aufgelöst werden."
-#: builtin/branch.c:751 builtin/clone.c:558
+#: builtin/branch.c:755 builtin/clone.c:558
msgid "HEAD not found below refs/heads!"
msgstr "Zweigspitze (HEAD) wurde nicht unter \"refs/heads\" gefunden!"
-#: builtin/branch.c:809
+#: builtin/branch.c:813
msgid "-a and -r options to 'git branch' do not make sense with a branch name"
msgstr ""
"Die Optionen -a und -r bei 'git branch' machen mit einem Zweignamen keinen "
msgstr "Pfad '%s' ist nicht zusammengeführt."
#: builtin/checkout.c:302 builtin/checkout.c:498 builtin/clone.c:583
-#: builtin/merge.c:811
+#: builtin/merge.c:812
msgid "unable to write new index file"
msgstr "Konnte neue Bereitstellungsdatei nicht schreiben."
msgid "Can not do reflog for '%s'\n"
msgstr "Konnte \"reflog\" für '%s' nicht durchführen\n"
-#: builtin/checkout.c:565
+#: builtin/checkout.c:566
msgid "HEAD is now at"
msgstr "Zweigspitze (HEAD) ist jetzt bei"
-#: builtin/checkout.c:572
+#: builtin/checkout.c:573
#, c-format
msgid "Reset branch '%s'\n"
msgstr "Setze Zweig '%s' zurück\n"
-#: builtin/checkout.c:575
+#: builtin/checkout.c:576
#, c-format
msgid "Already on '%s'\n"
msgstr "Bereits auf '%s'\n"
-#: builtin/checkout.c:579
+#: builtin/checkout.c:580
#, c-format
msgid "Switched to and reset branch '%s'\n"
msgstr "Gewechselt zu zurückgesetztem Zweig '%s'\n"
-#: builtin/checkout.c:581
+#: builtin/checkout.c:582
#, c-format
msgid "Switched to a new branch '%s'\n"
msgstr "Gewechselt zu einem neuen Zweig '%s'\n"
-#: builtin/checkout.c:583
+#: builtin/checkout.c:584
#, c-format
msgid "Switched to branch '%s'\n"
msgstr "Gewechselt zu Zweig '%s'\n"
-#: builtin/checkout.c:639
+#: builtin/checkout.c:640
#, c-format
msgid " ... and %d more.\n"
msgstr " ... und %d weitere.\n"
#. The singular version
-#: builtin/checkout.c:645
+#: builtin/checkout.c:646
#, c-format
msgid ""
"Warning: you are leaving %d commit behind, not connected to\n"
"\n"
"%s\n"
-#: builtin/checkout.c:663
+#: builtin/checkout.c:664
#, c-format
msgid ""
"If you want to keep them by creating a new branch, this may be a good time\n"
" git branch neuer_zweig_name %s\n"
"\n"
-#: builtin/checkout.c:692
+#: builtin/checkout.c:693
msgid "internal error in revision walk"
msgstr "interner Fehler im Revisionsgang"
-#: builtin/checkout.c:696
+#: builtin/checkout.c:697
msgid "Previous HEAD position was"
msgstr "Vorherige Position der Zweigspitze (HEAD) war"
-#: builtin/checkout.c:722
+#: builtin/checkout.c:723
msgid "You are on a branch yet to be born"
msgstr "Du bist auf einem Zweig, der noch nicht geboren wurde."
#. case (1)
-#: builtin/checkout.c:853
+#: builtin/checkout.c:854
#, c-format
msgid "invalid reference: %s"
msgstr "Ungültige Referenz: %s"
#. case (1): want a tree
-#: builtin/checkout.c:892
+#: builtin/checkout.c:893
#, c-format
msgid "reference is not a tree: %s"
msgstr "Referenz ist kein Baum: %s"
-#: builtin/checkout.c:972
+#: builtin/checkout.c:973
msgid "-B cannot be used with -b"
msgstr "-B kann nicht mit -b benutzt werden"
-#: builtin/checkout.c:981
+#: builtin/checkout.c:982
msgid "--patch is incompatible with all other options"
msgstr "--patch ist inkompatibel mit allen anderen Optionen"
-#: builtin/checkout.c:984
+#: builtin/checkout.c:985
msgid "--detach cannot be used with -b/-B/--orphan"
msgstr "--detach kann nicht mit -b/-B/--orphan benutzt werden"
-#: builtin/checkout.c:986
+#: builtin/checkout.c:987
msgid "--detach cannot be used with -t"
msgstr "--detach kann nicht mit -t benutzt werden"
-#: builtin/checkout.c:992
+#: builtin/checkout.c:993
msgid "--track needs a branch name"
msgstr "--track benötigt einen Zweignamen"
-#: builtin/checkout.c:999
+#: builtin/checkout.c:1000
msgid "Missing branch name; try -b"
msgstr "Vermisse Zweignamen; versuche -b"
-#: builtin/checkout.c:1005
+#: builtin/checkout.c:1006
msgid "--orphan and -b|-B are mutually exclusive"
msgstr "--orphan und -b|-B sind gegenseitig exklusiv"
-#: builtin/checkout.c:1007
+#: builtin/checkout.c:1008
msgid "--orphan cannot be used with -t"
msgstr "--orphan kann nicht mit -t benutzt werden"
-#: builtin/checkout.c:1017
+#: builtin/checkout.c:1018
msgid "git checkout: -f and -m are incompatible"
msgstr "git checkout: -f und -m sind inkompatibel"
-#: builtin/checkout.c:1051
+#: builtin/checkout.c:1052
msgid "invalid path specification"
msgstr "ungültige Pfadspezifikation"
-#: builtin/checkout.c:1059
+#: builtin/checkout.c:1060
#, c-format
msgid ""
"git checkout: updating paths is incompatible with switching branches.\n"
"Hast du beabsichtigt '%s' auszuchecken, welcher nicht als Version aufgelöst "
"werden kann?"
-#: builtin/checkout.c:1061
+#: builtin/checkout.c:1062
msgid "git checkout: updating paths is incompatible with switching branches."
msgstr ""
"git checkout: Aktualisierung von Pfaden ist inkompatibel mit dem Wechsel von "
"Zweigen."
-#: builtin/checkout.c:1066
+#: builtin/checkout.c:1067
msgid "git checkout: --detach does not take a path argument"
msgstr "git checkout: --detach nimmt kein Pfad-Argument"
-#: builtin/checkout.c:1069
+#: builtin/checkout.c:1070
msgid ""
"git checkout: --ours/--theirs, --force and --merge are incompatible when\n"
"checking out of the index."
"git checkout: --ours/--theirs, --force and --merge sind inkompatibel wenn\n"
"du außerhalb der Bereitstellung auscheckst."
-#: builtin/checkout.c:1088
+#: builtin/checkout.c:1089
msgid "Cannot switch branch to a non-commit."
msgstr "Kann Zweig nur zu einer Version wechseln."
-#: builtin/checkout.c:1091
+#: builtin/checkout.c:1092
msgid "--ours/--theirs is incompatible with switching branches."
msgstr "--ours/--theirs ist inkompatibel mit den Wechseln von Zweigen."
#: builtin/clean.c:166
#, c-format
msgid "Would not remove %s\n"
-msgstr "Würde %s nicht entfernen\n"
+msgstr "Würde nicht entfernen %s\n"
#: builtin/clean.c:168
#, c-format
msgid "Not removing %s\n"
-msgstr "Entferne %s nicht\n"
+msgstr "Entferne nicht %s\n"
#: builtin/clone.c:243
#, c-format
#: builtin/clone.c:835
#, c-format
msgid "Remote branch %s not found in upstream %s"
-msgstr "entfernten Zweig %s nicht im anderen Projektarchiv %s gefunden"
+msgstr "entfernter Zweig %s nicht im anderen Projektarchiv gefunden %s"
#: builtin/clone.c:842
msgid "You appear to have cloned an empty repository."
#: builtin/commit.c:651
#, c-format
msgid "Malformed ident string: '%s'"
-msgstr "Fehlerhafter Identifikations-String: '%s'"
+msgstr "Fehlerhafte Identifikations-String: '%s'"
#: builtin/commit.c:689 builtin/commit.c:722 builtin/commit.c:1033
#, c-format
#: builtin/commit.c:1119
msgid "Clever... amending the last one with dirty index."
msgstr ""
-"Klug... nachbessern der letzten Version mit einer unsauberen Bereitstellung."
+"Klug... nachbessern der letzten Version mit einem unsauberen Bereitstellung."
#: builtin/commit.c:1121
msgid "Explicit paths specified without -i nor -o; assuming --only paths..."
#: builtin/commit.c:1317
msgid "could not parse newly created commit"
-msgstr "Konnte neu erstellte Version nicht analysieren."
+msgstr "Konnte neulich erstellte Version nicht analysieren."
#: builtin/commit.c:1358
msgid "detached HEAD"
msgid "Aborting commit due to empty commit message.\n"
msgstr "Eintragung aufgrund leerer Versionsbeschreibung abgebrochen.\n"
-#: builtin/commit.c:1554 builtin/merge.c:935 builtin/merge.c:968
+#: builtin/commit.c:1554 builtin/merge.c:936 builtin/merge.c:961
msgid "failed to write commit object"
msgstr "Fehler beim Schreiben des Versionsobjektes."
msgstr ""
"Das Projektarchiv wurde aktualisiert, aber die \"new_index\"-Datei\n"
"konnte nicht geschrieben werden. Prüfe, dass dein Speicher nicht\n"
-"voll und dein Kontingent nicht aufgebraucht ist und führe\n"
+"voll und Dein Kontingent nicht aufgebraucht ist und führe\n"
"anschließend \"git reset HEAD\" zu Wiederherstellung aus."
#: builtin/describe.c:234
#: builtin/diff.c:77
#, c-format
msgid "'%s': not a regular file or symlink"
-msgstr "'%s': keine reguläre Datei oder symbolische Verknüpfung"
+msgstr "'%s': keine reguläre Datei oder symbolischer Link"
#: builtin/diff.c:220
#, c-format
msgid "Couldn't find remote ref HEAD"
msgstr "Konnte entfernte Referenz der Zweigspitze (HEAD) nicht finden."
-#: builtin/fetch.c:252
+#: builtin/fetch.c:253
#, c-format
msgid "object %s not found"
msgstr "Objekt %s nicht gefunden"
-#: builtin/fetch.c:258
+#: builtin/fetch.c:259
msgid "[up to date]"
msgstr "[aktuell]"
-#: builtin/fetch.c:272
+#: builtin/fetch.c:273
#, c-format
msgid "! %-*s %-*s -> %s (can't fetch in current branch)"
msgstr "! %-*s %-*s -> %s (kann nicht im aktuellen Zweig anfordern)"
-#: builtin/fetch.c:273 builtin/fetch.c:351
+#: builtin/fetch.c:274 builtin/fetch.c:360
msgid "[rejected]"
msgstr "[zurückgewiesen]"
-#: builtin/fetch.c:284
+#: builtin/fetch.c:285
msgid "[tag update]"
msgstr "[Markierungsaktualisierung]"
-#: builtin/fetch.c:286 builtin/fetch.c:313 builtin/fetch.c:331
+#: builtin/fetch.c:287 builtin/fetch.c:322 builtin/fetch.c:340
msgid " (unable to update local ref)"
msgstr " (kann lokale Referenz nicht aktualisieren)"
-#: builtin/fetch.c:298
+#: builtin/fetch.c:305
msgid "[new tag]"
msgstr "[neue Markierung]"
-#: builtin/fetch.c:302
+#: builtin/fetch.c:308
msgid "[new branch]"
msgstr "[neuer Zweig]"
-#: builtin/fetch.c:347
+#: builtin/fetch.c:311
+msgid "[new ref]"
+msgstr "[neue Referenz]"
+
+#: builtin/fetch.c:356
msgid "unable to update local ref"
msgstr "kann lokale Referenz nicht aktualisieren"
-#: builtin/fetch.c:347
+#: builtin/fetch.c:356
msgid "forced update"
msgstr "Aktualisierung erzwungen"
-#: builtin/fetch.c:353
+#: builtin/fetch.c:362
msgid "(non-fast-forward)"
msgstr "(kein Vorspulen)"
-#: builtin/fetch.c:384 builtin/fetch.c:676
+#: builtin/fetch.c:393 builtin/fetch.c:685
#, c-format
msgid "cannot open %s: %s\n"
msgstr "kann %s nicht öffnen: %s\n"
-#: builtin/fetch.c:393
+#: builtin/fetch.c:402
#, c-format
msgid "%s did not send all necessary objects\n"
msgstr "%s hat nicht alle erforderlichen Objekte gesendet\n"
-#: builtin/fetch.c:479
+#: builtin/fetch.c:488
#, c-format
msgid "From %.*s\n"
msgstr "Von %.*s\n"
-#: builtin/fetch.c:490
+#: builtin/fetch.c:499
#, c-format
msgid ""
"some local refs could not be updated; try running\n"
"Einige lokale Referenzen konnten nicht aktualisiert werden; versuche\n"
"'git remote prune %s' um jeden älteren, widersprüchlichen Zweig zu entfernen."
-#: builtin/fetch.c:540
+#: builtin/fetch.c:549
#, c-format
msgid " (%s will become dangling)\n"
msgstr " (%s wird unreferenziert werden)\n"
-#: builtin/fetch.c:541
+#: builtin/fetch.c:550
#, c-format
msgid " (%s has become dangling)\n"
msgstr " (%s wurde unreferenziert)\n"
-#: builtin/fetch.c:548
+#: builtin/fetch.c:557
msgid "[deleted]"
msgstr "[gelöscht]"
-#: builtin/fetch.c:549
+#: builtin/fetch.c:558
msgid "(none)"
msgstr "(keine)"
-#: builtin/fetch.c:666
+#: builtin/fetch.c:675
#, c-format
msgid "Refusing to fetch into current branch %s of non-bare repository"
msgstr ""
"Ablehnung des Anforderns in aktuellen Zweig %s von einem nicht-leeren "
"Projektarchiv"
-#: builtin/fetch.c:700
+#: builtin/fetch.c:709
#, c-format
msgid "Don't know how to fetch from %s"
msgstr "Weiß nicht wie von %s angefordert wird."
-#: builtin/fetch.c:777
+#: builtin/fetch.c:786
#, c-format
msgid "Option \"%s\" value \"%s\" is not valid for %s"
msgstr "Option \"%s\" Wert \"%s\" ist nicht gültig für %s"
-#: builtin/fetch.c:780
+#: builtin/fetch.c:789
#, c-format
msgid "Option \"%s\" is ignored for %s\n"
msgstr "Option \"%s\" wird ignoriert für %s\n"
-#: builtin/fetch.c:879
+#: builtin/fetch.c:888
#, c-format
msgid "Fetching %s\n"
msgstr "Hole %s ab\n"
-#: builtin/fetch.c:881
+#: builtin/fetch.c:890
#, c-format
msgid "Could not fetch %s"
msgstr "Konnte %s nicht anfordern"
-#: builtin/fetch.c:898
+#: builtin/fetch.c:907
msgid ""
"No remote repository specified. Please, specify either a URL or a\n"
"remote name from which new revisions should be fetched."
"eine URL oder einen Entfernungsname, von welchem neue Revisionen angefordert "
"werden sollen."
-#: builtin/fetch.c:918
+#: builtin/fetch.c:927
msgid "You need to specify a tag name."
msgstr "Du musst den Namen der Markierung spezifizieren."
-#: builtin/fetch.c:970
+#: builtin/fetch.c:979
msgid "fetch --all does not take a repository argument"
msgstr "fetch -all nimmt kein Projektarchiv als Argument"
-#: builtin/fetch.c:972
+#: builtin/fetch.c:981
msgid "fetch --all does not make sense with refspecs"
msgstr "fetch --all macht keinen Sinn mit Referenzspezifikationen"
-#: builtin/fetch.c:983
+#: builtin/fetch.c:992
#, c-format
msgid "No such remote or remote group: %s"
msgstr "Keine solche Entfernung oder Entfernungsgruppe: %s"
-#: builtin/fetch.c:991
+#: builtin/fetch.c:1000
msgid "Fetching a group and specifying refspecs does not make sense"
msgstr ""
"Abholen einer Gruppe und Spezifizieren von Referenzspezifikationen macht "
msgid "Cannot access work tree '%s'"
msgstr "Kann nicht auf Arbeitsbaum '%s' zugreifen."
-#: builtin/log.c:187
+#: builtin/log.c:188
#, c-format
msgid "Final output: %d %s\n"
msgstr "letzte Ausgabe: %d %s\n"
-#: builtin/log.c:395 builtin/log.c:483
+#: builtin/log.c:401 builtin/log.c:489
#, c-format
msgid "Could not read object %s"
msgstr "Kann Objekt %s nicht lesen."
-#: builtin/log.c:507
+#: builtin/log.c:513
#, c-format
msgid "Unknown type: %d"
msgstr "Unbekannter Typ: %d"
-#: builtin/log.c:596
+#: builtin/log.c:602
msgid "format.headers without value"
msgstr "format.headers ohne Wert"
-#: builtin/log.c:669
+#: builtin/log.c:675
msgid "name of output directory is too long"
msgstr "Name des Ausgabeverzeichnisses ist zu lang."
-#: builtin/log.c:680
+#: builtin/log.c:686
#, c-format
msgid "Cannot open patch file %s"
msgstr "Kann Patch-Datei %s nicht öffnen"
-#: builtin/log.c:694
+#: builtin/log.c:700
msgid "Need exactly one range."
msgstr "Brauche genau einen Bereich."
-#: builtin/log.c:702
+#: builtin/log.c:708
msgid "Not a range."
msgstr "Kein Bereich."
-#: builtin/log.c:739
+#: builtin/log.c:745
msgid "Could not extract email from committer identity."
msgstr "Konnte E-Mail von der Intentität des Einreichers nicht extrahieren."
-#: builtin/log.c:785
+#: builtin/log.c:791
msgid "Cover letter needs email format"
-msgstr "Deckblatt benötigt E-Mail-Format"
+msgstr "Anschreiben benötigt E-Mail-Format"
-#: builtin/log.c:879
+#: builtin/log.c:885
#, c-format
msgid "insane in-reply-to: %s"
msgstr "verrücktes in-reply-to: %s"
-#: builtin/log.c:952
+#: builtin/log.c:958
msgid "Two output directories?"
msgstr "Zwei Ausgabeverzeichnisse?"
-#: builtin/log.c:1173
+#: builtin/log.c:1179
#, c-format
msgid "bogus committer info %s"
-msgstr "unechte Einreicher-Information %s"
+msgstr "unechte Einreicher-Informationen %s"
-#: builtin/log.c:1218
+#: builtin/log.c:1224
msgid "-n and -k are mutually exclusive."
msgstr "-n und -k sind zueinander exklusiv"
-#: builtin/log.c:1220
+#: builtin/log.c:1226
msgid "--subject-prefix and -k are mutually exclusive."
msgstr "--subject-prefix und -k sind zueinander exklusiv"
-#: builtin/log.c:1225 builtin/shortlog.c:284
+#: builtin/log.c:1231 builtin/shortlog.c:284
#, c-format
msgid "unrecognized argument: %s"
msgstr "nicht erkanntes Argument: %s"
-#: builtin/log.c:1228
+#: builtin/log.c:1234
msgid "--name-only does not make sense"
msgstr "--name-only macht keinen Sinn"
-#: builtin/log.c:1230
+#: builtin/log.c:1236
msgid "--name-status does not make sense"
msgstr "--name-status macht keinen Sinn"
-#: builtin/log.c:1232
+#: builtin/log.c:1238
msgid "--check does not make sense"
msgstr "--check macht keinen Sinn"
-#: builtin/log.c:1255
+#: builtin/log.c:1261
msgid "standard output, or directory, which one?"
msgstr "Standard-Ausgabe oder Verzeichnis, welches von beidem?"
-#: builtin/log.c:1257
+#: builtin/log.c:1263
#, c-format
msgid "Could not create directory '%s'"
msgstr "Konnte Verzeichnis '%s' nicht erstellen."
-#: builtin/log.c:1410
+#: builtin/log.c:1416
msgid "Failed to create output files"
msgstr "Fehler beim Erstellen der Ausgabedateien."
-#: builtin/log.c:1514
+#: builtin/log.c:1520
#, c-format
msgid ""
"Could not find a tracked remote branch, please specify <upstream> manually.\n"
"Konnte gefolgten, entfernten Zweig nicht finden, bitte spezifiziere "
"<upstream> manuell.\n"
-#: builtin/log.c:1530 builtin/log.c:1532 builtin/log.c:1544
+#: builtin/log.c:1536 builtin/log.c:1538 builtin/log.c:1550
#, c-format
msgid "Unknown commit %s"
msgstr "Unbekannte Version %s"
-#: builtin/merge.c:91
+#: builtin/merge.c:90
msgid "switch `m' requires a value"
msgstr "Schalter 'm' erfordert einen Wert."
-#: builtin/merge.c:128
+#: builtin/merge.c:127
#, c-format
msgid "Could not find merge strategy '%s'.\n"
msgstr "Konnte Zusammenführungsstrategie '%s' nicht finden.\n"
-#: builtin/merge.c:129
+#: builtin/merge.c:128
#, c-format
msgid "Available strategies are:"
msgstr "Verfügbare Strategien sind:"
-#: builtin/merge.c:134
+#: builtin/merge.c:133
#, c-format
msgid "Available custom strategies are:"
msgstr "Verfügbare benutzerdefinierte Strategien sind:"
-#: builtin/merge.c:241
+#: builtin/merge.c:240
msgid "could not run stash."
msgstr "Konnte \"stash\" nicht ausführen."
-#: builtin/merge.c:246
+#: builtin/merge.c:245
msgid "stash failed"
msgstr "\"stash\" fehlgeschlagen"
-#: builtin/merge.c:251
+#: builtin/merge.c:250
#, c-format
msgid "not a valid object: %s"
msgstr "kein gültiges Objekt: %s"
-#: builtin/merge.c:270 builtin/merge.c:287
+#: builtin/merge.c:269 builtin/merge.c:286
msgid "read-tree failed"
msgstr "read-tree fehlgeschlagen"
-#: builtin/merge.c:317
+#: builtin/merge.c:316
msgid " (nothing to squash)"
msgstr " (nichts zu quetschen)"
-#: builtin/merge.c:330
+#: builtin/merge.c:329
#, c-format
msgid "Squash commit -- not updating HEAD\n"
msgstr "Quetsche Version -- aktualisiere Zweigspitze (HEAD) nicht\n"
-#: builtin/merge.c:362
+#: builtin/merge.c:361
msgid "Writing SQUASH_MSG"
msgstr "Schreibe SQUASH_MSG"
-#: builtin/merge.c:364
+#: builtin/merge.c:363
msgid "Finishing SQUASH_MSG"
msgstr "Schließe SQUASH_MSG ab"
#: builtin/merge.c:386
#, c-format
msgid "No merge message -- not updating HEAD\n"
-msgstr "Keine Zusammenführungsbeschreibung -- aktualisiere Zweigspitze (HEAD) "
-"nicht\n"
+msgstr ""
+"Keine Zusammenführungsbeschreibung -- aktualisiere Zweigspitze (HEAD) nicht\n"
#: builtin/merge.c:437
#, c-format
msgid "failed to read the cache"
msgstr "Lesen des Zwischenspeichers fehlgeschlagen"
-#: builtin/merge.c:696
+#: builtin/merge.c:697
msgid "Unable to write index."
msgstr "Konnte Bereitstellung nicht schreiben."
-#: builtin/merge.c:709
+#: builtin/merge.c:710
msgid "Not handling anything other than two heads merge."
msgstr "Behandle nichts anderes als die Zusammenführung von zwei Köpfen."
-#: builtin/merge.c:723
+#: builtin/merge.c:724
#, c-format
msgid "Unknown option for merge-recursive: -X%s"
msgstr "Unbekannte Option für merge-recursive: -X%s"
-#: builtin/merge.c:737
+#: builtin/merge.c:738
#, c-format
msgid "unable to write %s"
msgstr "konnte %s nicht schreiben"
-#: builtin/merge.c:876
+#: builtin/merge.c:877
#, c-format
msgid "Could not read from '%s'"
msgstr "konnte nicht von '%s' lesen"
-#: builtin/merge.c:885
+#: builtin/merge.c:886
#, c-format
msgid "Not committing merge; use 'git commit' to complete the merge.\n"
msgstr ""
"Zusammenführung nicht eingetragen; benutze 'git commit' um die "
"Zusammenführung abzuschließen.\n"
-#: builtin/merge.c:891
+#: builtin/merge.c:892
msgid ""
"Please enter a commit message to explain why this merge is necessary,\n"
"especially if it merges an updated upstream into a topic branch.\n"
msgstr ""
"Bitte gebe eine Versionsbeschreibung ein um zu erklären, warum diese "
"Zusammenführung erforderlich ist,\n"
-"insbesondere wenn es einen aktualisierten entfernten Zweig mit einem "
-"Thema-Zweig zusammenführt.\n"
+"insbesondere wenn es einen aktualisierten entfernten Zweig mit einem Thema-"
+"Zweig zusammenführt.\n"
"\n"
"Zeilen beginnend mit '#' werden ignoriert, und eine leere Beschreibung "
"bricht die Eintragung ab.\n"
-#: builtin/merge.c:915
+#: builtin/merge.c:916
msgid "Empty commit message."
-msgstr "Leere Versionsbeschreibung."
+msgstr "Leere Versionsbeschreibung"
-#: builtin/merge.c:927
+#: builtin/merge.c:928
#, c-format
msgid "Wonderful.\n"
msgstr "Wunderbar.\n"
-#: builtin/merge.c:1000
+#: builtin/merge.c:993
#, c-format
msgid "Automatic merge failed; fix conflicts and then commit the result.\n"
msgstr ""
"Automatische Zusammenführung fehlgeschlagen; behebe die Konflikte und trage "
"dann das Ergebnis ein.\n"
-#: builtin/merge.c:1016
+#: builtin/merge.c:1009
#, c-format
msgid "'%s' is not a commit"
msgstr "'%s' ist keine Version"
-#: builtin/merge.c:1057
+#: builtin/merge.c:1050
msgid "No current branch."
msgstr "Kein aktueller Zweig."
-#: builtin/merge.c:1059
+#: builtin/merge.c:1052
msgid "No remote for the current branch."
msgstr "Kein anderes Archiv für den aktuellen Zweig."
-#: builtin/merge.c:1061
+#: builtin/merge.c:1054
msgid "No default upstream defined for the current branch."
-msgstr "Kein entferntes Standard-Projektarchiv für den aktuellen Zweig "
-"definiert."
+msgstr ""
+"Kein entferntes Standard-Projektarchiv für den aktuellen Zweig definiert."
-#: builtin/merge.c:1066
+#: builtin/merge.c:1059
#, c-format
msgid "No remote tracking branch for %s from %s"
msgstr "Kein entfernter Übernahmezweig für %s von %s"
-#: builtin/merge.c:1188
+#: builtin/merge.c:1146 builtin/merge.c:1303
+#, c-format
+msgid "%s - not something we can merge"
+msgstr "%s - nichts was wir zusammenführen können"
+
+#: builtin/merge.c:1214
msgid "There is no merge to abort (MERGE_HEAD missing)."
msgstr "Es gibt keine Zusammenführung zum Abbrechen (vermisse MERGE_HEAD)"
-#: builtin/merge.c:1204 git-pull.sh:31
+#: builtin/merge.c:1230 git-pull.sh:31
msgid ""
"You have not concluded your merge (MERGE_HEAD exists).\n"
"Please, commit your changes before you can merge."
"Du hast deine Zusammenführung nicht abgeschlossen (MERGE_HEAD existiert).\n"
"Bitte trage deine Änderungen ein, bevor du zusammenführen kannst."
-#: builtin/merge.c:1207 git-pull.sh:34
+#: builtin/merge.c:1233 git-pull.sh:34
msgid "You have not concluded your merge (MERGE_HEAD exists)."
msgstr ""
"Du hast deine Zusammenführung nicht abgeschlossen (MERGE_HEAD existiert)."
-#: builtin/merge.c:1211
+#: builtin/merge.c:1237
msgid ""
"You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists).\n"
"Please, commit your changes before you can merge."
"existiert).\n"
"Bitte trage deine Änderungen ein, bevor du zusammenführen kannst."
-#: builtin/merge.c:1214
+#: builtin/merge.c:1240
msgid "You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists)."
msgstr ""
"Du hast deinen \"cherry-pick\" nicht abgeschlossen (CHERRY_PICK_HEAD "
"existiert)."
-#: builtin/merge.c:1223
+#: builtin/merge.c:1249
msgid "You cannot combine --squash with --no-ff."
msgstr "Du kannst --squash nicht mit --no-ff kombinieren."
-#: builtin/merge.c:1228
+#: builtin/merge.c:1254
msgid "You cannot combine --no-ff with --ff-only."
msgstr "Du kannst --no-ff nicht mit --ff--only kombinieren."
-#: builtin/merge.c:1235
+#: builtin/merge.c:1261
msgid "No commit specified and merge.defaultToUpstream not set."
msgstr "Keine Version spezifiziert und merge.defaultToUpstream nicht gesetzt."
-#: builtin/merge.c:1266
+#: builtin/merge.c:1293
msgid "Can merge only exactly one commit into empty head"
msgstr "Kann nur exakt eine Version in einem leeren Kopf zusammenführen."
-#: builtin/merge.c:1269
+#: builtin/merge.c:1296
msgid "Squash commit into empty head not supported yet"
msgstr ""
"Quetschen einer Version in einen leeren Kopf wird momentan nicht unterstützt."
-#: builtin/merge.c:1271
+#: builtin/merge.c:1298
msgid "Non-fast-forward commit does not make sense into an empty head"
msgstr "nicht vorzuspulende Version macht in einem leeren Kopf keinen Sinn"
-#: builtin/merge.c:1275 builtin/merge.c:1319
-#, c-format
-msgid "%s - not something we can merge"
-msgstr "%s - nichts was wir zusammenführen können"
-
-#: builtin/merge.c:1382
+#: builtin/merge.c:1413
#, c-format
msgid "Updating %s..%s\n"
msgstr "Aktualisiere %s..%s\n"
-#: builtin/merge.c:1420
+#: builtin/merge.c:1451
#, c-format
msgid "Trying really trivial in-index merge...\n"
msgstr "Probiere wirklich triviale \"in-index\"-Zusammenführung...\n"
-#: builtin/merge.c:1427
+#: builtin/merge.c:1458
#, c-format
msgid "Nope.\n"
msgstr "Nein.\n"
-#: builtin/merge.c:1459
+#: builtin/merge.c:1490
msgid "Not possible to fast-forward, aborting."
msgstr "Vorspulen nicht möglich, breche ab."
-#: builtin/merge.c:1482 builtin/merge.c:1559
+#: builtin/merge.c:1513 builtin/merge.c:1592
#, c-format
msgid "Rewinding the tree to pristine...\n"
msgstr "Rücklauf des Baumes bis zum Ursprung...\n"
-#: builtin/merge.c:1486
+#: builtin/merge.c:1517
#, c-format
msgid "Trying merge strategy %s...\n"
msgstr "Probiere Zusammenführungsstrategie %s...\n"
-#: builtin/merge.c:1550
+#: builtin/merge.c:1583
#, c-format
msgid "No merge strategy handled the merge.\n"
msgstr "Keine Zusammenführungsstrategie behandelt die Zusammenführung.\n"
-#: builtin/merge.c:1552
+#: builtin/merge.c:1585
#, c-format
msgid "Merge with strategy %s failed.\n"
msgstr "Zusammenführung mit Strategie %s fehlgeschlagen.\n"
-#: builtin/merge.c:1561
+#: builtin/merge.c:1594
#, c-format
msgid "Using the %s to prepare resolving by hand.\n"
msgstr "Benutze \"%s\" um die Auflösung per Hand vorzubereiten.\n"
-#: builtin/merge.c:1572
+#: builtin/merge.c:1606
#, c-format
msgid "Automatic merge went well; stopped before committing as requested\n"
msgstr ""
#: builtin/mv.c:128
#, c-format
msgid "Huh? %.*s is in index?"
-msgstr "Huh? %.*s ist bereitgestellt?"
+msgstr "Huh? %.*s ist in der Bereitstellung?"
#: builtin/mv.c:140
msgid "source directory is empty"
msgid "unable to parse value '%s' for option %s"
msgstr "konnte Wert '%s' für Option %s nicht analysieren"
-#: builtin/push.c:44
+#: builtin/push.c:45
msgid "tag shorthand without <tag>"
msgstr "Kurzschrift für Markierung ohne <Markierung>"
-#: builtin/push.c:63
+#: builtin/push.c:64
msgid "--delete only accepts plain target ref names"
msgstr "--delete akzeptiert nur reine Referenz-Namen als Ziel"
-#: builtin/push.c:83
+#: builtin/push.c:84
#, c-format
msgid ""
"You are not currently on a branch.\n"
"\n"
" git push %s HEAD:<Name-des-entfernten-Zweiges>\n"
-#: builtin/push.c:90
+#: builtin/push.c:91
#, c-format
msgid ""
"The current branch %s has no upstream branch.\n"
"\n"
" git push --set-upstream %s %s\n"
-#: builtin/push.c:98
+#: builtin/push.c:99
#, c-format
msgid "The current branch %s has multiple upstream branches, refusing to push."
msgstr ""
"Der aktuelle Zweig %s hat mehrere entfernte Zweige, Versand verweigert."
-#: builtin/push.c:101
+#: builtin/push.c:102
#, c-format
msgid ""
"You are pushing to remote '%s', which is not the upstream of\n"
msgstr ""
"Du versendest nach '%s', welches kein entferntes Projektarchiv deines\n"
"aktuellen Zweiges '%s' ist, ohne mir mitzuteilen, was ich versenden\n"
-"soll, um welchen entfernten Zweig zu aktualisieren."
+"soll um welchen entfernten Zweig zu aktualisieren."
-#: builtin/push.c:127
+#: builtin/push.c:131
msgid ""
"You didn't specify any refspecs to push, and push.default is \"nothing\"."
msgstr ""
-"Du hast keine Referenzspezifikationen zum Versenden spezifiziert, und "
-"push.default ist \"nothing\"."
+"Du hast keine Referenzspezifikationen zum Versenden spezifiziert, und push."
+"default ist \"nothing\"."
-#: builtin/push.c:147
+#: builtin/push.c:138
+msgid ""
+"Updates were rejected because the tip of your current branch is behind\n"
+"its remote counterpart. Merge the remote changes (e.g. 'git pull')\n"
+"before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+"Aktualisierungen wurden zurückgewiesen, weil die Spitze deines aktuellen\n"
+"Zweiges hinter seinem entfernten Gegenstück ist. Führe die entfernten\n"
+"Änderungen zusammen (z.B. 'git pull') bevor du erneut versendest.\n"
+"Siehe auch die 'Note about fast-forwards' Sektion von 'git push --help'\n"
+"für weitere Details."
+
+#: builtin/push.c:144
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. If you did not intend to push that branch, you may want to\n"
+"specify branches to push or set the 'push.default' configuration\n"
+"variable to 'current' or 'upstream' to push only the current branch."
+msgstr ""
+"Aktualisierungen wurden zurückgewiesen, weil die Spitze eines versendeten\n"
+"Zweiges hinter seinem entfernten Gegenstück ist. Wenn du nicht beabsichtigt\n"
+"hast, diesen Zweig zu versenden, kannst du auch den zu versendenden Zweig\n"
+"spezifizieren oder die Konfigurationsvariable 'push.default' zu 'current'\n"
+"oder 'upstream' setze, um nur den aktuellen Zweig zu versenden."
+
+#: builtin/push.c:150
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. Check out this branch and merge the remote changes\n"
+"(e.g. 'git pull') before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+"Aktualisierungen wurden zurückgewiesen, weil die Spitze eines versendeten\n"
+"Zweiges hinter seinem entfernten Gegenstück ist. Checke diesen Zweig aus\n"
+"und führe die entfernten Änderungen zusammen (z.B. 'git pull') bevor du\n"
+"erneut versendest.\n"
+"Sie auch die 'Note about fast-forwards' Sektion von 'git push --help'\n"
+"für weitere Details."
+
+#: builtin/push.c:190
#, c-format
msgid "Pushing to %s\n"
-msgstr "Sende nach %s\n"
+msgstr "Schiebe zu %s\n"
-#: builtin/push.c:151
+#: builtin/push.c:194
#, c-format
msgid "failed to push some refs to '%s'"
msgstr "Fehler beim Versenden einiger Referenzen nach '%s'"
-#: builtin/push.c:159
-#, c-format
-msgid ""
-"To prevent you from losing history, non-fast-forward updates were rejected\n"
-"Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"
-"'Note about fast-forwards' section of 'git push --help' for details.\n"
-msgstr ""
-"Um dich vor Verlust von Historie zu bewahren, wurden nicht vorzuspulende "
-"Aktualisierungen zurückgewiesen.\n"
-"Führe die entfernten Änderungen zusammen (z.B. 'git pull') bevor du erneut "
-"versendest. Siehe auch die 'Note about fast-forwards' Sektion von \n"
-"'git push --help' für weitere Details.\n"
-
-#: builtin/push.c:176
+#: builtin/push.c:226
#, c-format
msgid "bad repository '%s'"
msgstr "schlechtes Projektarchiv '%s'"
-#: builtin/push.c:177
+#: builtin/push.c:227
msgid ""
"No configured push destination.\n"
"Either specify the URL from the command-line or configure a remote "
"\n"
" git push <Name>\n"
-#: builtin/push.c:192
+#: builtin/push.c:242
msgid "--all and --tags are incompatible"
msgstr "--all und --tags sind inkompatibel"
-#: builtin/push.c:193
+#: builtin/push.c:243
msgid "--all can't be combined with refspecs"
msgstr "--all kann nicht mit Referenzspezifikationen kombiniert werden"
-#: builtin/push.c:198
+#: builtin/push.c:248
msgid "--mirror and --tags are incompatible"
msgstr "--mirror und --tags sind inkompatibel"
-#: builtin/push.c:199
+#: builtin/push.c:249
msgid "--mirror can't be combined with refspecs"
msgstr "--mirror kann nicht mit Referenzspezifikationen kombiniert werden"
-#: builtin/push.c:204
+#: builtin/push.c:254
msgid "--all and --mirror are incompatible"
msgstr "--all und --mirror sind inkompatibel"
-#: builtin/push.c:284
+#: builtin/push.c:342
msgid "--delete is incompatible with --all, --mirror and --tags"
msgstr "--delete ist inkompatibel mit --all, --mirror und --tags"
-#: builtin/push.c:286
+#: builtin/push.c:344
msgid "--delete doesn't make sense without any refs"
msgstr "--delete macht ohne irgendeine Referenz ohne keinen Sinn"
msgid "Could not reset index file to revision '%s'."
msgstr "Konnte Bereitstellungsdatei nicht zu Revision '%s' zurücksetzen."
-#: builtin/revert.c:70 builtin/revert.c:91
+#: builtin/revert.c:70 builtin/revert.c:92
#, c-format
msgid "%s: %s cannot be used with %s"
msgstr "%s: %s kann nicht mit %s benutzt werden"
-#: builtin/revert.c:126
+#: builtin/revert.c:127
msgid "program error"
msgstr "Programmfehler"
-#: builtin/revert.c:209
+#: builtin/revert.c:213
msgid "revert failed"
msgstr "\"revert\" fehlgeschlagen"
-#: builtin/revert.c:224
+#: builtin/revert.c:228
msgid "cherry-pick failed"
msgstr "\"cherry-pick\" fehlgeschlagen"
#: builtin/tag.c:372
#, c-format
msgid "The tag message has been left in %s\n"
-msgstr "Die Markierungsbeschreibung wurde in %s gelassen\n"
+msgstr "Die Markierungsbeschreibung wurde gelassen in %s\n"
#: builtin/tag.c:421
msgid "switch 'points-at' requires an object"
msgid "Updated tag '%s' (was %s)\n"
msgstr "Aktualisierte Markierung '%s' (war %s)\n"
-#: git-am.sh:49
+#: git-am.sh:50
msgid "You need to set your committer info first"
msgstr "Du musst zuerst die Informationen des Eintragenden setzen."
-#: git-am.sh:136
+#: git-am.sh:137
msgid "Repository lacks necessary blobs to fall back on 3-way merge."
msgstr ""
"Dem Projektarchiv fehlen notwendige Blobs um auf eine 3-Wege-Zusammenführung "
"zurückzufallen."
-#: git-am.sh:147
+#: git-am.sh:154
msgid ""
"Did you hand edit your patch?\n"
"It does not apply to blobs recorded in its index."
"Er kann nicht auf aufgezeichnete Blobs in seiner Bereitstellung angewendet "
"werden."
-#: git-am.sh:156
+#: git-am.sh:163
msgid "Falling back to patching base and 3-way merge..."
msgstr "Falle zurück zum Patchen der Basis und der 3-Wege-Zusammenführung..."
-#: git-am.sh:268
+#: git-am.sh:275
msgid "Only one StGIT patch series can be applied at once"
msgstr "Es kann nur eine StGIT Patch-Serie auf einmal angewendet werden."
-#: git-am.sh:355
+#: git-am.sh:362
#, sh-format
msgid "Patch format $patch_format is not supported."
msgstr "Patch-Format $patch_format wird nicht unterstützt."
-#: git-am.sh:357
+#: git-am.sh:364
msgid "Patch format detection failed."
msgstr "Patch-Formaterkennung fehlgeschlagen."
-#: git-am.sh:411
+#: git-am.sh:418
msgid "-d option is no longer supported. Do not use."
msgstr "-d Option wird nicht länger unterstützt. Nicht benutzen."
-#: git-am.sh:474
+#: git-am.sh:481
#, sh-format
msgid "previous rebase directory $dotest still exists but mbox given."
msgstr ""
"Vorheriges Verzeichnis des Neuaufbaus $dotest existiert noch, aber mbox "
"gegeben."
-#: git-am.sh:479
+#: git-am.sh:486
msgid "Please make up your mind. --skip or --abort?"
msgstr "Bitte werde dir klar. --skip oder --abort?"
-#: git-am.sh:506
+#: git-am.sh:513
msgid "Resolve operation not in progress, we are not resuming."
msgstr "keine Auflösung in Durchführung, wir setzen nicht fort."
-#: git-am.sh:572
+#: git-am.sh:579
#, sh-format
msgid "Dirty index: cannot apply patches (dirty: $files)"
msgstr ""
"Unsaubere Bereitstellung: kann Patches nicht anwenden (unsauber: $files)"
-#: git-am.sh:748
+#: git-am.sh:755
msgid "cannot be interactive without stdin connected to a terminal."
msgstr ""
"Kann nicht interaktiv sein, ohne das die Standard-Eingabe mit einem Terminal "
#. TRANSLATORS: Make sure to include [y], [n], [e], [v] and [a]
#. in your translation. The program will only accept English
#. input at this point.
-#: git-am.sh:759
+#: git-am.sh:766
msgid "Apply? [y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all "
msgstr "Anwenden? [y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all "
-#: git-am.sh:795
+#: git-am.sh:802
#, sh-format
msgid "Applying: $FIRSTLINE"
msgstr "Wende an: $FIRSTLINE"
-#: git-am.sh:840
+#: git-am.sh:847
msgid "No changes -- Patch already applied."
msgstr "Keine Änderungen -- Patches bereits angewendet."
-#: git-am.sh:866
+#: git-am.sh:873
msgid "applying to an empty history"
msgstr "wende zu leerer Historie an"
#: git-bisect.sh:391
#, sh-format
msgid "cannot read $file for replaying"
-msgstr "kann $file nicht zum neu abspielen lesen"
+msgstr "kann $file nicht für das Abspielen lesen"
#: git-bisect.sh:408
msgid "?? what are you talking about?"
#: git-stash.sh:412
msgid "unable to refresh index"
-msgstr "Konnte die Bereitstellung nicht aktualisieren"
+msgstr "unfähig die Bereitstellung zu aktualisieren"
#: git-stash.sh:416
msgid "Cannot apply a stash in the middle of a merge"
#: git-stash.sh:570
msgid "(To restore them type \"git stash apply\")"
-msgstr "(Um es wiederherzustellen, schreibe \"git stash apply\")"
+msgstr "(Um es wiederherzustellen schreibe \"git stash apply\")"
#: git-submodule.sh:56
#, sh-format
msgid "cannot strip one component off url '$remoteurl'"
-msgstr "Kann eine Komponente von URL '$remoteurl' nicht rausziehen"
+msgstr "Kann eine Komponente von URL '$remoteurl' nicht abstreifen"
-#: git-submodule.sh:108
+#: git-submodule.sh:109
#, sh-format
-msgid "No submodule mapping found in .gitmodules for path '$path'"
-msgstr "Keine Unterprojekt-Zuordnung in .gitmodules für Pfad '$path' gefunden"
+msgid "No submodule mapping found in .gitmodules for path '$sm_path'"
+msgstr "Keine Unterprojekt-Zuordnung in .gitmodules für Pfad '$sm_path' gefunden"
-#: git-submodule.sh:149
+#: git-submodule.sh:150
#, sh-format
-msgid "Clone of '$url' into submodule path '$path' failed"
-msgstr "Klonen von '$url' in Unterprojekt-Pfad '$path' fehlgeschlagen"
+msgid "Clone of '$url' into submodule path '$sm_path' failed"
+msgstr "Klonen von '$url' in Unterprojekt-Pfad '$sm_path' fehlgeschlagen"
-#: git-submodule.sh:159
+#: git-submodule.sh:160
#, sh-format
msgid "Gitdir '$a' is part of the submodule path '$b' or vice versa"
-msgstr "Git-Verzeichnis '$a' ist Teil des Unterprojekt-Pfades '$b' oder umgekehrt"
+msgstr ""
+"Git-Verzeichnis '$a' ist Teil des Unterprojekt-Pfades '$b' oder umgekehrt"
-#: git-submodule.sh:247
+#: git-submodule.sh:249
#, sh-format
msgid "repo URL: '$repo' must be absolute or begin with ./|../"
msgstr "repo URL: '$repo' muss absolut sein oder mit ./|../ beginnen"
-#: git-submodule.sh:264
-#, sh-format
-msgid "'$path' already exists in the index"
-msgstr "'$path' existiert bereits in der Bereitstellung"
+#: git-submodule.sh:266
+#, sh-format
+msgid "'$sm_path' already exists in the index"
+msgstr "'$sm_path' existiert bereits in der Bereitstellung"
-#: git-submodule.sh:281
+#: git-submodule.sh:283
#, sh-format
-msgid "'$path' already exists and is not a valid git repo"
-msgstr "'$path' existiert bereits und ist kein gültiges Git-Projektarchiv"
+msgid "'$sm_path' already exists and is not a valid git repo"
+msgstr "'$sm_path' existiert bereits und ist kein gültiges Git-Projektarchiv"
-#: git-submodule.sh:295
+#: git-submodule.sh:297
#, sh-format
-msgid "Unable to checkout submodule '$path'"
-msgstr "Unfähig Unterprojekt '$path' auszuchecken"
+msgid "Unable to checkout submodule '$sm_path'"
+msgstr "Unfähig Unterprojekt '$sm_path' auszuchecken"
-#: git-submodule.sh:300
+#: git-submodule.sh:302
#, sh-format
-msgid "Failed to add submodule '$path'"
-msgstr "Hinzufügen von Unterprojekt '$path' fehlgeschlagen"
+msgid "Failed to add submodule '$sm_path'"
+msgstr "Hinzufügen von Unterprojekt '$sm_path' fehlgeschlagen"
-#: git-submodule.sh:305
+#: git-submodule.sh:307
#, sh-format
-msgid "Failed to register submodule '$path'"
-msgstr "Registrierung von Unterprojekt '$path' fehlgeschlagen"
+msgid "Failed to register submodule '$sm_path'"
+msgstr "Registierung von Unterprojekt '$sm_path' fehlgeschlagen"
-#: git-submodule.sh:347
+#: git-submodule.sh:349
#, sh-format
-msgid "Entering '$prefix$path'"
-msgstr "Betrete '$prefix$path'"
+msgid "Entering '$prefix$sm_path'"
+msgstr "Betrete '$prefix$sm_path'"
-#: git-submodule.sh:359
+#: git-submodule.sh:363
#, sh-format
-msgid "Stopping at '$path'; script returned non-zero status."
-msgstr "Stoppe bei '$path'; Skript gab nicht-Null Status zurück."
+msgid "Stopping at '$sm_path'; script returned non-zero status."
+msgstr "Stoppe bei '$sm_path'; Skript gab nicht-Null Status zurück."
-#: git-submodule.sh:401
+#: git-submodule.sh:405
#, sh-format
-msgid "No url found for submodule path '$path' in .gitmodules"
-msgstr "Keine URL für Unterprojekt-Pfad '$path' in .gitmodules gefunden"
+msgid "No url found for submodule path '$sm_path' in .gitmodules"
+msgstr "Keine URL für Unterprojekt-Pfad '$sm_path' in .gitmodules gefunden"
-#: git-submodule.sh:410
+#: git-submodule.sh:414
#, sh-format
-msgid "Failed to register url for submodule path '$path'"
-msgstr "Fehler beim Registrieren der URL für Unterprojekt-Pfad '$path'"
+msgid "Failed to register url for submodule path '$sm_path'"
+msgstr "Fehler beim Registrieren der URL für Unterprojekt-Pfad '$sm_path'"
-#: git-submodule.sh:418
+#: git-submodule.sh:422
#, sh-format
-msgid "Failed to register update mode for submodule path '$path'"
-msgstr "Fehler beim Registrieren des Aktualisierungsmodus für Unterprojekt-Pfad"
-" '$path'"
+msgid "Failed to register update mode for submodule path '$sm_path'"
+msgstr ""
+"Fehler beim Registrieren des Aktualisierungsmodus für Unterprojekt-Pfad "
+"'$sm_path'"
-#: git-submodule.sh:420
+#: git-submodule.sh:424
#, sh-format
-msgid "Submodule '$name' ($url) registered for path '$path'"
-msgstr "Unterprojekt '$name' ($url) registriert für Pfad '$path'"
+msgid "Submodule '$name' ($url) registered for path '$sm_path'"
+msgstr "Unterprojekt '$name' ($url) registriert für Pfad '$sm_path'"
-#: git-submodule.sh:519
+#: git-submodule.sh:523
#, sh-format
msgid ""
-"Submodule path '$path' not initialized\n"
+"Submodule path '$sm_path' not initialized\n"
"Maybe you want to use 'update --init'?"
msgstr ""
-"Unterprojekt-Pfad '$path' nicht initialisiert\n"
+"Unterprojekt-Pfad '$sm_path' nicht initialisiert\n"
"Vielleicht möchtest du 'update --init' benutzen?"
-#: git-submodule.sh:532
+#: git-submodule.sh:536
#, sh-format
-msgid "Unable to find current revision in submodule path '$path'"
-msgstr "Konnte aktuelle Revision in Unterprojekt-Pfad '$path' nicht finden"
+msgid "Unable to find current revision in submodule path '$sm_path'"
+msgstr "Konnte aktuelle Revision in Unterprojekt-Pfad '$sm_path' nicht finden"
-#: git-submodule.sh:551
+#: git-submodule.sh:555
#, sh-format
-msgid "Unable to fetch in submodule path '$path'"
-msgstr "Konnte Unterprojekt-Pfad '$path' nicht anfordern"
+msgid "Unable to fetch in submodule path '$sm_path'"
+msgstr "Konnte in Unterprojekt-Pfad '$sm_path' nicht anfordern"
-#: git-submodule.sh:565
+#: git-submodule.sh:569
#, sh-format
-msgid "Unable to rebase '$sha1' in submodule path '$path'"
-msgstr "Neuaufbau von '$sha1' in Unterprojekt-Pfad '$path' nicht möglich"
+msgid "Unable to rebase '$sha1' in submodule path '$sm_path'"
+msgstr "Neuaufbau von '$sha1' in Unterprojekt-Pfad '$sm_path' nicht möglich"
-#: git-submodule.sh:566
+#: git-submodule.sh:570
#, sh-format
-msgid "Submodule path '$path': rebased into '$sha1'"
-msgstr "Unterprojekt-Pfad '$path': neu aufgebaut in '$sha1'"
+msgid "Submodule path '$sm_path': rebased into '$sha1'"
+msgstr "Unterprojekt-Pfad '$sm_path': neu aufgebaut in '$sha1'"
-#: git-submodule.sh:571
+#: git-submodule.sh:575
#, sh-format
-msgid "Unable to merge '$sha1' in submodule path '$path'"
-msgstr "Konnte '$sha1' nicht in Unterprojekt-Pfad '$path' zusammenführen"
+msgid "Unable to merge '$sha1' in submodule path '$sm_path'"
+msgstr "Konnte '$sha1' nicht in Unterprojekt-Pfad '$sm_path' zusammenführen"
-#: git-submodule.sh:572
+#: git-submodule.sh:576
#, sh-format
-msgid "Submodule path '$path': merged in '$sha1'"
-msgstr "Unterprojekt-Pfad '$path': zusammengeführt in '$sha1'"
+msgid "Submodule path '$sm_path': merged in '$sha1'"
+msgstr "Unterprojekt-Pfad '$sm_path': zusammengeführt in '$sha1'"
-#: git-submodule.sh:577
+#: git-submodule.sh:581
#, sh-format
-msgid "Unable to checkout '$sha1' in submodule path '$path'"
-msgstr "Konnte '$sha1' in Unterprojekt-Pfad '$path' nicht auschecken."
+msgid "Unable to checkout '$sha1' in submodule path '$sm_path'"
+msgstr "Konnte '$sha1' in Unterprojekt-Pfad '$sm_path' nicht auschecken."
-#: git-submodule.sh:578
+#: git-submodule.sh:582
#, sh-format
-msgid "Submodule path '$path': checked out '$sha1'"
-msgstr "Unterprojekt-Pfad: '$path': '$sha1' ausgecheckt"
+msgid "Submodule path '$sm_path': checked out '$sha1'"
+msgstr "Unterprojekt-Pfad: '$sm_path': '$sha1' ausgecheckt"
-#: git-submodule.sh:600 git-submodule.sh:923
+#: git-submodule.sh:604 git-submodule.sh:927
#, sh-format
-msgid "Failed to recurse into submodule path '$path'"
-msgstr "Fehler bei Rekursion in Unterprojekt-Pfad '$path'"
+msgid "Failed to recurse into submodule path '$sm_path'"
+msgstr "Fehler bei Rekursion in Unterprojekt-Pfad '$sm_path'"
-#: git-submodule.sh:708
+#: git-submodule.sh:712
msgid "--"
msgstr "--"
-#: git-submodule.sh:766
+#: git-submodule.sh:770
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_src"
msgstr " Warnung: $name beinhaltet nicht Version $sha1_src"
-#: git-submodule.sh:769
+#: git-submodule.sh:773
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_dst"
msgstr " Warnung: $name beinhaltet nicht Version $sha1_dst"
-#: git-submodule.sh:772
+#: git-submodule.sh:776
#, sh-format
msgid " Warn: $name doesn't contain commits $sha1_src and $sha1_dst"
msgstr " Warnung: $name beinhaltet nich die Versionen $sha1_src und $sha1_dst"
-#: git-submodule.sh:797
+#: git-submodule.sh:801
msgid "blob"
msgstr "Blob"
-#: git-submodule.sh:798
+#: git-submodule.sh:802
msgid "submodule"
msgstr "Unterprojekt"
-#: git-submodule.sh:969
+#: git-submodule.sh:973
#, sh-format
msgid "Synchronizing submodule url for '$name'"
msgstr "Synchronisiere Unterprojekt-URL für '$name'"
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: Git Mailing List <git@vger.kernel.org>\n"
-"POT-Creation-Date: 2012-04-28 20:33+0800\n"
+"POT-Creation-Date: 2012-04-28 20:17+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
-#: advice.c:34
+#: advice.c:40
#, c-format
msgid "hint: %.*s\n"
msgstr ""
#. * Message used both when 'git commit' fails and when
#. * other commands doing a merge do.
#.
-#: advice.c:64
+#: advice.c:70
msgid ""
"Fix them up in the work tree,\n"
"and then use 'git add/rm <file>' as\n"
"or use 'git commit -a'."
msgstr ""
-#: commit.c:47
+#: commit.c:48
#, c-format
msgid "could not parse %s"
msgstr ""
-#: commit.c:49
+#: commit.c:50
#, c-format
msgid "%s %s is not a commit!"
msgstr ""
msgstr[0] ""
msgstr[1] ""
-#: sequencer.c:120 builtin/merge.c:864 builtin/merge.c:985
-#: builtin/merge.c:1095 builtin/merge.c:1105
+#: sequencer.c:120 builtin/merge.c:865 builtin/merge.c:978
+#: builtin/merge.c:1088 builtin/merge.c:1098
#, c-format
msgid "Could not open '%s' for writing"
msgstr ""
-#: sequencer.c:122 builtin/merge.c:334 builtin/merge.c:867
-#: builtin/merge.c:1097 builtin/merge.c:1110
+#: sequencer.c:122 builtin/merge.c:333 builtin/merge.c:868
+#: builtin/merge.c:1090 builtin/merge.c:1103
#, c-format
msgid "Could not write to '%s'"
msgstr ""
msgid "could not apply %s... %s"
msgstr ""
-#: sequencer.c:450 sequencer.c:909 builtin/log.c:288 builtin/log.c:713
-#: builtin/log.c:1329 builtin/log.c:1548 builtin/merge.c:348
+#: sequencer.c:450 sequencer.c:909 builtin/log.c:289 builtin/log.c:719
+#: builtin/log.c:1335 builtin/log.c:1554 builtin/merge.c:347
#: builtin/shortlog.c:181
msgid "revision walk setup failed"
msgstr ""
msgid "Can't cherry-pick into empty head"
msgstr ""
+#: sha1_name.c:864
+msgid "HEAD does not point to a branch"
+msgstr ""
+
+#: sha1_name.c:867
+#, c-format
+msgid "No such branch: '%s'"
+msgstr ""
+
+#: sha1_name.c:869
+#, c-format
+msgid "No upstream configured for branch '%s'"
+msgstr ""
+
+#: sha1_name.c:872
+#, c-format
+msgid "Upstream branch '%s' not stored as a remote-tracking branch"
+msgstr ""
+
#: wt-status.c:134
msgid "Unmerged paths:"
msgstr ""
msgstr ""
#. TRANSLATORS: This is "remote " in "remote branch '%s' not found"
-#: builtin/branch.c:163
+#: builtin/branch.c:164
msgid "remote "
msgstr ""
-#: builtin/branch.c:171
+#: builtin/branch.c:172
msgid "cannot use -a with -d"
msgstr ""
-#: builtin/branch.c:177
+#: builtin/branch.c:178
msgid "Couldn't look up commit object for HEAD"
msgstr ""
-#: builtin/branch.c:182
+#: builtin/branch.c:183
#, c-format
msgid "Cannot delete the branch '%s' which you are currently on."
msgstr ""
-#: builtin/branch.c:192
+#: builtin/branch.c:193
#, c-format
msgid "%sbranch '%s' not found."
msgstr ""
-#: builtin/branch.c:200
+#: builtin/branch.c:201
#, c-format
msgid "Couldn't look up commit object for '%s'"
msgstr ""
-#: builtin/branch.c:206
+#: builtin/branch.c:207
#, c-format
msgid ""
"The branch '%s' is not fully merged.\n"
"If you are sure you want to delete it, run 'git branch -D %s'."
msgstr ""
-#: builtin/branch.c:214
+#: builtin/branch.c:215
#, c-format
msgid "Error deleting %sbranch '%s'"
msgstr ""
-#: builtin/branch.c:219
+#: builtin/branch.c:221
#, c-format
msgid "Deleted %sbranch %s (was %s).\n"
msgstr ""
-#: builtin/branch.c:224
+#: builtin/branch.c:226
msgid "Update of config-file failed"
msgstr ""
-#: builtin/branch.c:322
+#: builtin/branch.c:324
#, c-format
msgid "branch '%s' does not point at a commit"
msgstr ""
-#: builtin/branch.c:394
+#: builtin/branch.c:396
#, c-format
msgid "behind %d] "
msgstr ""
-#: builtin/branch.c:396
+#: builtin/branch.c:398
#, c-format
msgid "ahead %d] "
msgstr ""
-#: builtin/branch.c:398
+#: builtin/branch.c:400
#, c-format
msgid "ahead %d, behind %d] "
msgstr ""
-#: builtin/branch.c:501
+#: builtin/branch.c:503
msgid "(no branch)"
msgstr ""
-#: builtin/branch.c:566
+#: builtin/branch.c:568
msgid "some refs could not be read"
msgstr ""
-#: builtin/branch.c:579
+#: builtin/branch.c:581
msgid "cannot rename the current branch while not on any."
msgstr ""
-#: builtin/branch.c:589
+#: builtin/branch.c:591
#, c-format
msgid "Invalid branch name: '%s'"
msgstr ""
-#: builtin/branch.c:604
+#: builtin/branch.c:606
msgid "Branch rename failed"
msgstr ""
-#: builtin/branch.c:608
+#: builtin/branch.c:610
#, c-format
msgid "Renamed a misnamed branch '%s' away"
msgstr ""
-#: builtin/branch.c:612
+#: builtin/branch.c:614
#, c-format
msgid "Branch renamed to %s, but HEAD is not updated!"
msgstr ""
-#: builtin/branch.c:619
+#: builtin/branch.c:621
msgid "Branch is renamed, but update of config-file failed"
msgstr ""
-#: builtin/branch.c:634
+#: builtin/branch.c:636
#, c-format
msgid "malformed object name %s"
msgstr ""
-#: builtin/branch.c:658
+#: builtin/branch.c:660
#, c-format
msgid "could not write branch description template: %s\n"
msgstr ""
-#: builtin/branch.c:746
+#: builtin/branch.c:750
msgid "Failed to resolve HEAD as a valid ref."
msgstr ""
-#: builtin/branch.c:751 builtin/clone.c:558
+#: builtin/branch.c:755 builtin/clone.c:558
msgid "HEAD not found below refs/heads!"
msgstr ""
-#: builtin/branch.c:809
+#: builtin/branch.c:813
msgid "-a and -r options to 'git branch' do not make sense with a branch name"
msgstr ""
msgstr ""
#: builtin/checkout.c:302 builtin/checkout.c:498 builtin/clone.c:583
-#: builtin/merge.c:811
+#: builtin/merge.c:812
msgid "unable to write new index file"
msgstr ""
msgid "Can not do reflog for '%s'\n"
msgstr ""
-#: builtin/checkout.c:565
+#: builtin/checkout.c:566
msgid "HEAD is now at"
msgstr ""
-#: builtin/checkout.c:572
+#: builtin/checkout.c:573
#, c-format
msgid "Reset branch '%s'\n"
msgstr ""
-#: builtin/checkout.c:575
+#: builtin/checkout.c:576
#, c-format
msgid "Already on '%s'\n"
msgstr ""
-#: builtin/checkout.c:579
+#: builtin/checkout.c:580
#, c-format
msgid "Switched to and reset branch '%s'\n"
msgstr ""
-#: builtin/checkout.c:581
+#: builtin/checkout.c:582
#, c-format
msgid "Switched to a new branch '%s'\n"
msgstr ""
-#: builtin/checkout.c:583
+#: builtin/checkout.c:584
#, c-format
msgid "Switched to branch '%s'\n"
msgstr ""
-#: builtin/checkout.c:639
+#: builtin/checkout.c:640
#, c-format
msgid " ... and %d more.\n"
msgstr ""
#. The singular version
-#: builtin/checkout.c:645
+#: builtin/checkout.c:646
#, c-format
msgid ""
"Warning: you are leaving %d commit behind, not connected to\n"
msgstr[0] ""
msgstr[1] ""
-#: builtin/checkout.c:663
+#: builtin/checkout.c:664
#, c-format
msgid ""
"If you want to keep them by creating a new branch, this may be a good time\n"
"\n"
msgstr ""
-#: builtin/checkout.c:692
+#: builtin/checkout.c:693
msgid "internal error in revision walk"
msgstr ""
-#: builtin/checkout.c:696
+#: builtin/checkout.c:697
msgid "Previous HEAD position was"
msgstr ""
-#: builtin/checkout.c:722
+#: builtin/checkout.c:723
msgid "You are on a branch yet to be born"
msgstr ""
#. case (1)
-#: builtin/checkout.c:853
+#: builtin/checkout.c:854
#, c-format
msgid "invalid reference: %s"
msgstr ""
#. case (1): want a tree
-#: builtin/checkout.c:892
+#: builtin/checkout.c:893
#, c-format
msgid "reference is not a tree: %s"
msgstr ""
-#: builtin/checkout.c:972
+#: builtin/checkout.c:973
msgid "-B cannot be used with -b"
msgstr ""
-#: builtin/checkout.c:981
+#: builtin/checkout.c:982
msgid "--patch is incompatible with all other options"
msgstr ""
-#: builtin/checkout.c:984
+#: builtin/checkout.c:985
msgid "--detach cannot be used with -b/-B/--orphan"
msgstr ""
-#: builtin/checkout.c:986
+#: builtin/checkout.c:987
msgid "--detach cannot be used with -t"
msgstr ""
-#: builtin/checkout.c:992
+#: builtin/checkout.c:993
msgid "--track needs a branch name"
msgstr ""
-#: builtin/checkout.c:999
+#: builtin/checkout.c:1000
msgid "Missing branch name; try -b"
msgstr ""
-#: builtin/checkout.c:1005
+#: builtin/checkout.c:1006
msgid "--orphan and -b|-B are mutually exclusive"
msgstr ""
-#: builtin/checkout.c:1007
+#: builtin/checkout.c:1008
msgid "--orphan cannot be used with -t"
msgstr ""
-#: builtin/checkout.c:1017
+#: builtin/checkout.c:1018
msgid "git checkout: -f and -m are incompatible"
msgstr ""
-#: builtin/checkout.c:1051
+#: builtin/checkout.c:1052
msgid "invalid path specification"
msgstr ""
-#: builtin/checkout.c:1059
+#: builtin/checkout.c:1060
#, c-format
msgid ""
"git checkout: updating paths is incompatible with switching branches.\n"
"Did you intend to checkout '%s' which can not be resolved as commit?"
msgstr ""
-#: builtin/checkout.c:1061
+#: builtin/checkout.c:1062
msgid "git checkout: updating paths is incompatible with switching branches."
msgstr ""
-#: builtin/checkout.c:1066
+#: builtin/checkout.c:1067
msgid "git checkout: --detach does not take a path argument"
msgstr ""
-#: builtin/checkout.c:1069
+#: builtin/checkout.c:1070
msgid ""
"git checkout: --ours/--theirs, --force and --merge are incompatible when\n"
"checking out of the index."
msgstr ""
-#: builtin/checkout.c:1088
+#: builtin/checkout.c:1089
msgid "Cannot switch branch to a non-commit."
msgstr ""
-#: builtin/checkout.c:1091
+#: builtin/checkout.c:1092
msgid "--ours/--theirs is incompatible with switching branches."
msgstr ""
msgid "Aborting commit due to empty commit message.\n"
msgstr ""
-#: builtin/commit.c:1554 builtin/merge.c:935 builtin/merge.c:968
+#: builtin/commit.c:1554 builtin/merge.c:936 builtin/merge.c:961
msgid "failed to write commit object"
msgstr ""
msgid "Couldn't find remote ref HEAD"
msgstr ""
-#: builtin/fetch.c:252
+#: builtin/fetch.c:253
#, c-format
msgid "object %s not found"
msgstr ""
-#: builtin/fetch.c:258
+#: builtin/fetch.c:259
msgid "[up to date]"
msgstr ""
-#: builtin/fetch.c:272
+#: builtin/fetch.c:273
#, c-format
msgid "! %-*s %-*s -> %s (can't fetch in current branch)"
msgstr ""
-#: builtin/fetch.c:273 builtin/fetch.c:351
+#: builtin/fetch.c:274 builtin/fetch.c:360
msgid "[rejected]"
msgstr ""
-#: builtin/fetch.c:284
+#: builtin/fetch.c:285
msgid "[tag update]"
msgstr ""
-#: builtin/fetch.c:286 builtin/fetch.c:313 builtin/fetch.c:331
+#: builtin/fetch.c:287 builtin/fetch.c:322 builtin/fetch.c:340
msgid " (unable to update local ref)"
msgstr ""
-#: builtin/fetch.c:298
+#: builtin/fetch.c:305
msgid "[new tag]"
msgstr ""
-#: builtin/fetch.c:302
+#: builtin/fetch.c:308
msgid "[new branch]"
msgstr ""
-#: builtin/fetch.c:347
+#: builtin/fetch.c:311
+msgid "[new ref]"
+msgstr ""
+
+#: builtin/fetch.c:356
msgid "unable to update local ref"
msgstr ""
-#: builtin/fetch.c:347
+#: builtin/fetch.c:356
msgid "forced update"
msgstr ""
-#: builtin/fetch.c:353
+#: builtin/fetch.c:362
msgid "(non-fast-forward)"
msgstr ""
-#: builtin/fetch.c:384 builtin/fetch.c:676
+#: builtin/fetch.c:393 builtin/fetch.c:685
#, c-format
msgid "cannot open %s: %s\n"
msgstr ""
-#: builtin/fetch.c:393
+#: builtin/fetch.c:402
#, c-format
msgid "%s did not send all necessary objects\n"
msgstr ""
-#: builtin/fetch.c:479
+#: builtin/fetch.c:488
#, c-format
msgid "From %.*s\n"
msgstr ""
-#: builtin/fetch.c:490
+#: builtin/fetch.c:499
#, c-format
msgid ""
"some local refs could not be updated; try running\n"
" 'git remote prune %s' to remove any old, conflicting branches"
msgstr ""
-#: builtin/fetch.c:540
+#: builtin/fetch.c:549
#, c-format
msgid " (%s will become dangling)\n"
msgstr ""
-#: builtin/fetch.c:541
+#: builtin/fetch.c:550
#, c-format
msgid " (%s has become dangling)\n"
msgstr ""
-#: builtin/fetch.c:548
+#: builtin/fetch.c:557
msgid "[deleted]"
msgstr ""
-#: builtin/fetch.c:549
+#: builtin/fetch.c:558
msgid "(none)"
msgstr ""
-#: builtin/fetch.c:666
+#: builtin/fetch.c:675
#, c-format
msgid "Refusing to fetch into current branch %s of non-bare repository"
msgstr ""
-#: builtin/fetch.c:700
+#: builtin/fetch.c:709
#, c-format
msgid "Don't know how to fetch from %s"
msgstr ""
-#: builtin/fetch.c:777
+#: builtin/fetch.c:786
#, c-format
msgid "Option \"%s\" value \"%s\" is not valid for %s"
msgstr ""
-#: builtin/fetch.c:780
+#: builtin/fetch.c:789
#, c-format
msgid "Option \"%s\" is ignored for %s\n"
msgstr ""
-#: builtin/fetch.c:879
+#: builtin/fetch.c:888
#, c-format
msgid "Fetching %s\n"
msgstr ""
-#: builtin/fetch.c:881
+#: builtin/fetch.c:890
#, c-format
msgid "Could not fetch %s"
msgstr ""
-#: builtin/fetch.c:898
+#: builtin/fetch.c:907
msgid ""
"No remote repository specified. Please, specify either a URL or a\n"
"remote name from which new revisions should be fetched."
msgstr ""
-#: builtin/fetch.c:918
+#: builtin/fetch.c:927
msgid "You need to specify a tag name."
msgstr ""
-#: builtin/fetch.c:970
+#: builtin/fetch.c:979
msgid "fetch --all does not take a repository argument"
msgstr ""
-#: builtin/fetch.c:972
+#: builtin/fetch.c:981
msgid "fetch --all does not make sense with refspecs"
msgstr ""
-#: builtin/fetch.c:983
+#: builtin/fetch.c:992
#, c-format
msgid "No such remote or remote group: %s"
msgstr ""
-#: builtin/fetch.c:991
+#: builtin/fetch.c:1000
msgid "Fetching a group and specifying refspecs does not make sense"
msgstr ""
msgid "Cannot access work tree '%s'"
msgstr ""
-#: builtin/log.c:187
+#: builtin/log.c:188
#, c-format
msgid "Final output: %d %s\n"
msgstr ""
-#: builtin/log.c:395 builtin/log.c:483
+#: builtin/log.c:401 builtin/log.c:489
#, c-format
msgid "Could not read object %s"
msgstr ""
-#: builtin/log.c:507
+#: builtin/log.c:513
#, c-format
msgid "Unknown type: %d"
msgstr ""
-#: builtin/log.c:596
+#: builtin/log.c:602
msgid "format.headers without value"
msgstr ""
-#: builtin/log.c:669
+#: builtin/log.c:675
msgid "name of output directory is too long"
msgstr ""
-#: builtin/log.c:680
+#: builtin/log.c:686
#, c-format
msgid "Cannot open patch file %s"
msgstr ""
-#: builtin/log.c:694
+#: builtin/log.c:700
msgid "Need exactly one range."
msgstr ""
-#: builtin/log.c:702
+#: builtin/log.c:708
msgid "Not a range."
msgstr ""
-#: builtin/log.c:739
+#: builtin/log.c:745
msgid "Could not extract email from committer identity."
msgstr ""
-#: builtin/log.c:785
+#: builtin/log.c:791
msgid "Cover letter needs email format"
msgstr ""
-#: builtin/log.c:879
+#: builtin/log.c:885
#, c-format
msgid "insane in-reply-to: %s"
msgstr ""
-#: builtin/log.c:952
+#: builtin/log.c:958
msgid "Two output directories?"
msgstr ""
-#: builtin/log.c:1173
+#: builtin/log.c:1179
#, c-format
msgid "bogus committer info %s"
msgstr ""
-#: builtin/log.c:1218
+#: builtin/log.c:1224
msgid "-n and -k are mutually exclusive."
msgstr ""
-#: builtin/log.c:1220
+#: builtin/log.c:1226
msgid "--subject-prefix and -k are mutually exclusive."
msgstr ""
-#: builtin/log.c:1225 builtin/shortlog.c:284
+#: builtin/log.c:1231 builtin/shortlog.c:284
#, c-format
msgid "unrecognized argument: %s"
msgstr ""
-#: builtin/log.c:1228
+#: builtin/log.c:1234
msgid "--name-only does not make sense"
msgstr ""
-#: builtin/log.c:1230
+#: builtin/log.c:1236
msgid "--name-status does not make sense"
msgstr ""
-#: builtin/log.c:1232
+#: builtin/log.c:1238
msgid "--check does not make sense"
msgstr ""
-#: builtin/log.c:1255
+#: builtin/log.c:1261
msgid "standard output, or directory, which one?"
msgstr ""
-#: builtin/log.c:1257
+#: builtin/log.c:1263
#, c-format
msgid "Could not create directory '%s'"
msgstr ""
-#: builtin/log.c:1410
+#: builtin/log.c:1416
msgid "Failed to create output files"
msgstr ""
-#: builtin/log.c:1514
+#: builtin/log.c:1520
#, c-format
msgid ""
"Could not find a tracked remote branch, please specify <upstream> manually.\n"
msgstr ""
-#: builtin/log.c:1530 builtin/log.c:1532 builtin/log.c:1544
+#: builtin/log.c:1536 builtin/log.c:1538 builtin/log.c:1550
#, c-format
msgid "Unknown commit %s"
msgstr ""
-#: builtin/merge.c:91
+#: builtin/merge.c:90
msgid "switch `m' requires a value"
msgstr ""
-#: builtin/merge.c:128
+#: builtin/merge.c:127
#, c-format
msgid "Could not find merge strategy '%s'.\n"
msgstr ""
-#: builtin/merge.c:129
+#: builtin/merge.c:128
#, c-format
msgid "Available strategies are:"
msgstr ""
-#: builtin/merge.c:134
+#: builtin/merge.c:133
#, c-format
msgid "Available custom strategies are:"
msgstr ""
-#: builtin/merge.c:241
+#: builtin/merge.c:240
msgid "could not run stash."
msgstr ""
-#: builtin/merge.c:246
+#: builtin/merge.c:245
msgid "stash failed"
msgstr ""
-#: builtin/merge.c:251
+#: builtin/merge.c:250
#, c-format
msgid "not a valid object: %s"
msgstr ""
-#: builtin/merge.c:270 builtin/merge.c:287
+#: builtin/merge.c:269 builtin/merge.c:286
msgid "read-tree failed"
msgstr ""
-#: builtin/merge.c:317
+#: builtin/merge.c:316
msgid " (nothing to squash)"
msgstr ""
-#: builtin/merge.c:330
+#: builtin/merge.c:329
#, c-format
msgid "Squash commit -- not updating HEAD\n"
msgstr ""
-#: builtin/merge.c:362
+#: builtin/merge.c:361
msgid "Writing SQUASH_MSG"
msgstr ""
-#: builtin/merge.c:364
+#: builtin/merge.c:363
msgid "Finishing SQUASH_MSG"
msgstr ""
msgid "failed to read the cache"
msgstr ""
-#: builtin/merge.c:696
+#: builtin/merge.c:697
msgid "Unable to write index."
msgstr ""
-#: builtin/merge.c:709
+#: builtin/merge.c:710
msgid "Not handling anything other than two heads merge."
msgstr ""
-#: builtin/merge.c:723
+#: builtin/merge.c:724
#, c-format
msgid "Unknown option for merge-recursive: -X%s"
msgstr ""
-#: builtin/merge.c:737
+#: builtin/merge.c:738
#, c-format
msgid "unable to write %s"
msgstr ""
-#: builtin/merge.c:876
+#: builtin/merge.c:877
#, c-format
msgid "Could not read from '%s'"
msgstr ""
-#: builtin/merge.c:885
+#: builtin/merge.c:886
#, c-format
msgid "Not committing merge; use 'git commit' to complete the merge.\n"
msgstr ""
-#: builtin/merge.c:891
+#: builtin/merge.c:892
msgid ""
"Please enter a commit message to explain why this merge is necessary,\n"
"especially if it merges an updated upstream into a topic branch.\n"
"the commit.\n"
msgstr ""
-#: builtin/merge.c:915
+#: builtin/merge.c:916
msgid "Empty commit message."
msgstr ""
-#: builtin/merge.c:927
+#: builtin/merge.c:928
#, c-format
msgid "Wonderful.\n"
msgstr ""
-#: builtin/merge.c:1000
+#: builtin/merge.c:993
#, c-format
msgid "Automatic merge failed; fix conflicts and then commit the result.\n"
msgstr ""
-#: builtin/merge.c:1016
+#: builtin/merge.c:1009
#, c-format
msgid "'%s' is not a commit"
msgstr ""
-#: builtin/merge.c:1057
+#: builtin/merge.c:1050
msgid "No current branch."
msgstr ""
-#: builtin/merge.c:1059
+#: builtin/merge.c:1052
msgid "No remote for the current branch."
msgstr ""
-#: builtin/merge.c:1061
+#: builtin/merge.c:1054
msgid "No default upstream defined for the current branch."
msgstr ""
-#: builtin/merge.c:1066
+#: builtin/merge.c:1059
#, c-format
msgid "No remote tracking branch for %s from %s"
msgstr ""
-#: builtin/merge.c:1188
+#: builtin/merge.c:1146 builtin/merge.c:1303
+#, c-format
+msgid "%s - not something we can merge"
+msgstr ""
+
+#: builtin/merge.c:1214
msgid "There is no merge to abort (MERGE_HEAD missing)."
msgstr ""
-#: builtin/merge.c:1204 git-pull.sh:31
+#: builtin/merge.c:1230 git-pull.sh:31
msgid ""
"You have not concluded your merge (MERGE_HEAD exists).\n"
"Please, commit your changes before you can merge."
msgstr ""
-#: builtin/merge.c:1207 git-pull.sh:34
+#: builtin/merge.c:1233 git-pull.sh:34
msgid "You have not concluded your merge (MERGE_HEAD exists)."
msgstr ""
-#: builtin/merge.c:1211
+#: builtin/merge.c:1237
msgid ""
"You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists).\n"
"Please, commit your changes before you can merge."
msgstr ""
-#: builtin/merge.c:1214
+#: builtin/merge.c:1240
msgid "You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists)."
msgstr ""
-#: builtin/merge.c:1223
+#: builtin/merge.c:1249
msgid "You cannot combine --squash with --no-ff."
msgstr ""
-#: builtin/merge.c:1228
+#: builtin/merge.c:1254
msgid "You cannot combine --no-ff with --ff-only."
msgstr ""
-#: builtin/merge.c:1235
+#: builtin/merge.c:1261
msgid "No commit specified and merge.defaultToUpstream not set."
msgstr ""
-#: builtin/merge.c:1266
+#: builtin/merge.c:1293
msgid "Can merge only exactly one commit into empty head"
msgstr ""
-#: builtin/merge.c:1269
+#: builtin/merge.c:1296
msgid "Squash commit into empty head not supported yet"
msgstr ""
-#: builtin/merge.c:1271
+#: builtin/merge.c:1298
msgid "Non-fast-forward commit does not make sense into an empty head"
msgstr ""
-#: builtin/merge.c:1275 builtin/merge.c:1319
-#, c-format
-msgid "%s - not something we can merge"
-msgstr ""
-
-#: builtin/merge.c:1382
+#: builtin/merge.c:1413
#, c-format
msgid "Updating %s..%s\n"
msgstr ""
-#: builtin/merge.c:1420
+#: builtin/merge.c:1451
#, c-format
msgid "Trying really trivial in-index merge...\n"
msgstr ""
-#: builtin/merge.c:1427
+#: builtin/merge.c:1458
#, c-format
msgid "Nope.\n"
msgstr ""
-#: builtin/merge.c:1459
+#: builtin/merge.c:1490
msgid "Not possible to fast-forward, aborting."
msgstr ""
-#: builtin/merge.c:1482 builtin/merge.c:1559
+#: builtin/merge.c:1513 builtin/merge.c:1592
#, c-format
msgid "Rewinding the tree to pristine...\n"
msgstr ""
-#: builtin/merge.c:1486
+#: builtin/merge.c:1517
#, c-format
msgid "Trying merge strategy %s...\n"
msgstr ""
-#: builtin/merge.c:1550
+#: builtin/merge.c:1583
#, c-format
msgid "No merge strategy handled the merge.\n"
msgstr ""
-#: builtin/merge.c:1552
+#: builtin/merge.c:1585
#, c-format
msgid "Merge with strategy %s failed.\n"
msgstr ""
-#: builtin/merge.c:1561
+#: builtin/merge.c:1594
#, c-format
msgid "Using the %s to prepare resolving by hand.\n"
msgstr ""
-#: builtin/merge.c:1572
+#: builtin/merge.c:1606
#, c-format
msgid "Automatic merge went well; stopped before committing as requested\n"
msgstr ""
msgid "unable to parse value '%s' for option %s"
msgstr ""
-#: builtin/push.c:44
+#: builtin/push.c:45
msgid "tag shorthand without <tag>"
msgstr ""
-#: builtin/push.c:63
+#: builtin/push.c:64
msgid "--delete only accepts plain target ref names"
msgstr ""
-#: builtin/push.c:83
+#: builtin/push.c:84
#, c-format
msgid ""
"You are not currently on a branch.\n"
" git push %s HEAD:<name-of-remote-branch>\n"
msgstr ""
-#: builtin/push.c:90
+#: builtin/push.c:91
#, c-format
msgid ""
"The current branch %s has no upstream branch.\n"
" git push --set-upstream %s %s\n"
msgstr ""
-#: builtin/push.c:98
+#: builtin/push.c:99
#, c-format
msgid "The current branch %s has multiple upstream branches, refusing to push."
msgstr ""
-#: builtin/push.c:101
+#: builtin/push.c:102
#, c-format
msgid ""
"You are pushing to remote '%s', which is not the upstream of\n"
"to update which remote branch."
msgstr ""
-#: builtin/push.c:127
+#: builtin/push.c:131
msgid ""
"You didn't specify any refspecs to push, and push.default is \"nothing\"."
msgstr ""
-#: builtin/push.c:147
-#, c-format
-msgid "Pushing to %s\n"
+#: builtin/push.c:138
+msgid ""
+"Updates were rejected because the tip of your current branch is behind\n"
+"its remote counterpart. Merge the remote changes (e.g. 'git pull')\n"
+"before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+
+#: builtin/push.c:144
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. If you did not intend to push that branch, you may want to\n"
+"specify branches to push or set the 'push.default' configuration\n"
+"variable to 'current' or 'upstream' to push only the current branch."
msgstr ""
-#: builtin/push.c:151
+#: builtin/push.c:150
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. Check out this branch and merge the remote changes\n"
+"(e.g. 'git pull') before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+
+#: builtin/push.c:190
#, c-format
-msgid "failed to push some refs to '%s'"
+msgid "Pushing to %s\n"
msgstr ""
-#: builtin/push.c:159
+#: builtin/push.c:194
#, c-format
-msgid ""
-"To prevent you from losing history, non-fast-forward updates were rejected\n"
-"Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"
-"'Note about fast-forwards' section of 'git push --help' for details.\n"
+msgid "failed to push some refs to '%s'"
msgstr ""
-#: builtin/push.c:176
+#: builtin/push.c:226
#, c-format
msgid "bad repository '%s'"
msgstr ""
-#: builtin/push.c:177
+#: builtin/push.c:227
msgid ""
"No configured push destination.\n"
"Either specify the URL from the command-line or configure a remote "
" git push <name>\n"
msgstr ""
-#: builtin/push.c:192
+#: builtin/push.c:242
msgid "--all and --tags are incompatible"
msgstr ""
-#: builtin/push.c:193
+#: builtin/push.c:243
msgid "--all can't be combined with refspecs"
msgstr ""
-#: builtin/push.c:198
+#: builtin/push.c:248
msgid "--mirror and --tags are incompatible"
msgstr ""
-#: builtin/push.c:199
+#: builtin/push.c:249
msgid "--mirror can't be combined with refspecs"
msgstr ""
-#: builtin/push.c:204
+#: builtin/push.c:254
msgid "--all and --mirror are incompatible"
msgstr ""
-#: builtin/push.c:284
+#: builtin/push.c:342
msgid "--delete is incompatible with --all, --mirror and --tags"
msgstr ""
-#: builtin/push.c:286
+#: builtin/push.c:344
msgid "--delete doesn't make sense without any refs"
msgstr ""
msgid "Could not reset index file to revision '%s'."
msgstr ""
-#: builtin/revert.c:70 builtin/revert.c:91
+#: builtin/revert.c:70 builtin/revert.c:92
#, c-format
msgid "%s: %s cannot be used with %s"
msgstr ""
-#: builtin/revert.c:126
+#: builtin/revert.c:127
msgid "program error"
msgstr ""
-#: builtin/revert.c:209
+#: builtin/revert.c:213
msgid "revert failed"
msgstr ""
-#: builtin/revert.c:224
+#: builtin/revert.c:228
msgid "cherry-pick failed"
msgstr ""
msgid "Updated tag '%s' (was %s)\n"
msgstr ""
-#: git-am.sh:49
+#: git-am.sh:50
msgid "You need to set your committer info first"
msgstr ""
-#: git-am.sh:136
+#: git-am.sh:137
msgid "Repository lacks necessary blobs to fall back on 3-way merge."
msgstr ""
-#: git-am.sh:147
+#: git-am.sh:154
msgid ""
"Did you hand edit your patch?\n"
"It does not apply to blobs recorded in its index."
msgstr ""
-#: git-am.sh:156
+#: git-am.sh:163
msgid "Falling back to patching base and 3-way merge..."
msgstr ""
-#: git-am.sh:268
+#: git-am.sh:275
msgid "Only one StGIT patch series can be applied at once"
msgstr ""
-#: git-am.sh:355
+#: git-am.sh:362
#, sh-format
msgid "Patch format $patch_format is not supported."
msgstr ""
-#: git-am.sh:357
+#: git-am.sh:364
msgid "Patch format detection failed."
msgstr ""
-#: git-am.sh:411
+#: git-am.sh:418
msgid "-d option is no longer supported. Do not use."
msgstr ""
-#: git-am.sh:474
+#: git-am.sh:481
#, sh-format
msgid "previous rebase directory $dotest still exists but mbox given."
msgstr ""
-#: git-am.sh:479
+#: git-am.sh:486
msgid "Please make up your mind. --skip or --abort?"
msgstr ""
-#: git-am.sh:506
+#: git-am.sh:513
msgid "Resolve operation not in progress, we are not resuming."
msgstr ""
-#: git-am.sh:572
+#: git-am.sh:579
#, sh-format
msgid "Dirty index: cannot apply patches (dirty: $files)"
msgstr ""
-#: git-am.sh:748
+#: git-am.sh:755
msgid "cannot be interactive without stdin connected to a terminal."
msgstr ""
#. TRANSLATORS: Make sure to include [y], [n], [e], [v] and [a]
#. in your translation. The program will only accept English
#. input at this point.
-#: git-am.sh:759
+#: git-am.sh:766
msgid "Apply? [y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all "
msgstr ""
-#: git-am.sh:795
+#: git-am.sh:802
#, sh-format
msgid "Applying: $FIRSTLINE"
msgstr ""
-#: git-am.sh:840
+#: git-am.sh:847
msgid "No changes -- Patch already applied."
msgstr ""
-#: git-am.sh:866
+#: git-am.sh:873
msgid "applying to an empty history"
msgstr ""
msgid "cannot strip one component off url '$remoteurl'"
msgstr ""
-#: git-submodule.sh:108
+#: git-submodule.sh:109
#, sh-format
-msgid "No submodule mapping found in .gitmodules for path '$path'"
+msgid "No submodule mapping found in .gitmodules for path '$sm_path'"
msgstr ""
-#: git-submodule.sh:149
+#: git-submodule.sh:150
#, sh-format
-msgid "Clone of '$url' into submodule path '$path' failed"
+msgid "Clone of '$url' into submodule path '$sm_path' failed"
msgstr ""
-#: git-submodule.sh:159
+#: git-submodule.sh:160
#, sh-format
msgid "Gitdir '$a' is part of the submodule path '$b' or vice versa"
msgstr ""
-#: git-submodule.sh:247
+#: git-submodule.sh:249
#, sh-format
msgid "repo URL: '$repo' must be absolute or begin with ./|../"
msgstr ""
-#: git-submodule.sh:264
+#: git-submodule.sh:266
#, sh-format
-msgid "'$path' already exists in the index"
+msgid "'$sm_path' already exists in the index"
msgstr ""
-#: git-submodule.sh:281
+#: git-submodule.sh:283
#, sh-format
-msgid "'$path' already exists and is not a valid git repo"
+msgid "'$sm_path' already exists and is not a valid git repo"
msgstr ""
-#: git-submodule.sh:295
+#: git-submodule.sh:297
#, sh-format
-msgid "Unable to checkout submodule '$path'"
+msgid "Unable to checkout submodule '$sm_path'"
msgstr ""
-#: git-submodule.sh:300
+#: git-submodule.sh:302
#, sh-format
-msgid "Failed to add submodule '$path'"
+msgid "Failed to add submodule '$sm_path'"
msgstr ""
-#: git-submodule.sh:305
+#: git-submodule.sh:307
#, sh-format
-msgid "Failed to register submodule '$path'"
+msgid "Failed to register submodule '$sm_path'"
msgstr ""
-#: git-submodule.sh:347
+#: git-submodule.sh:349
#, sh-format
-msgid "Entering '$prefix$path'"
+msgid "Entering '$prefix$sm_path'"
msgstr ""
-#: git-submodule.sh:359
+#: git-submodule.sh:363
#, sh-format
-msgid "Stopping at '$path'; script returned non-zero status."
+msgid "Stopping at '$sm_path'; script returned non-zero status."
msgstr ""
-#: git-submodule.sh:401
+#: git-submodule.sh:405
#, sh-format
-msgid "No url found for submodule path '$path' in .gitmodules"
+msgid "No url found for submodule path '$sm_path' in .gitmodules"
msgstr ""
-#: git-submodule.sh:410
+#: git-submodule.sh:414
#, sh-format
-msgid "Failed to register url for submodule path '$path'"
+msgid "Failed to register url for submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:418
+#: git-submodule.sh:422
#, sh-format
-msgid "Failed to register update mode for submodule path '$path'"
+msgid "Failed to register update mode for submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:420
+#: git-submodule.sh:424
#, sh-format
-msgid "Submodule '$name' ($url) registered for path '$path'"
+msgid "Submodule '$name' ($url) registered for path '$sm_path'"
msgstr ""
-#: git-submodule.sh:519
+#: git-submodule.sh:523
#, sh-format
msgid ""
-"Submodule path '$path' not initialized\n"
+"Submodule path '$sm_path' not initialized\n"
"Maybe you want to use 'update --init'?"
msgstr ""
-#: git-submodule.sh:532
+#: git-submodule.sh:536
#, sh-format
-msgid "Unable to find current revision in submodule path '$path'"
+msgid "Unable to find current revision in submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:551
+#: git-submodule.sh:555
#, sh-format
-msgid "Unable to fetch in submodule path '$path'"
+msgid "Unable to fetch in submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:565
+#: git-submodule.sh:569
#, sh-format
-msgid "Unable to rebase '$sha1' in submodule path '$path'"
+msgid "Unable to rebase '$sha1' in submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:566
+#: git-submodule.sh:570
#, sh-format
-msgid "Submodule path '$path': rebased into '$sha1'"
+msgid "Submodule path '$sm_path': rebased into '$sha1'"
msgstr ""
-#: git-submodule.sh:571
+#: git-submodule.sh:575
#, sh-format
-msgid "Unable to merge '$sha1' in submodule path '$path'"
+msgid "Unable to merge '$sha1' in submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:572
+#: git-submodule.sh:576
#, sh-format
-msgid "Submodule path '$path': merged in '$sha1'"
+msgid "Submodule path '$sm_path': merged in '$sha1'"
msgstr ""
-#: git-submodule.sh:577
+#: git-submodule.sh:581
#, sh-format
-msgid "Unable to checkout '$sha1' in submodule path '$path'"
+msgid "Unable to checkout '$sha1' in submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:578
+#: git-submodule.sh:582
#, sh-format
-msgid "Submodule path '$path': checked out '$sha1'"
+msgid "Submodule path '$sm_path': checked out '$sha1'"
msgstr ""
-#: git-submodule.sh:600 git-submodule.sh:923
+#: git-submodule.sh:604 git-submodule.sh:927
#, sh-format
-msgid "Failed to recurse into submodule path '$path'"
+msgid "Failed to recurse into submodule path '$sm_path'"
msgstr ""
-#: git-submodule.sh:708
+#: git-submodule.sh:712
msgid "--"
msgstr ""
-#: git-submodule.sh:766
+#: git-submodule.sh:770
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_src"
msgstr ""
-#: git-submodule.sh:769
+#: git-submodule.sh:773
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_dst"
msgstr ""
-#: git-submodule.sh:772
+#: git-submodule.sh:776
#, sh-format
msgid " Warn: $name doesn't contain commits $sha1_src and $sha1_dst"
msgstr ""
-#: git-submodule.sh:797
+#: git-submodule.sh:801
msgid "blob"
msgstr ""
-#: git-submodule.sh:798
+#: git-submodule.sh:802
msgid "submodule"
msgstr ""
-#: git-submodule.sh:969
+#: git-submodule.sh:973
#, sh-format
msgid "Synchronizing submodule url for '$name'"
msgstr ""
msgstr ""
"Project-Id-Version: Git\n"
"Report-Msgid-Bugs-To: Git Mailing List <git@vger.kernel.org>\n"
-"POT-Creation-Date: 2012-04-28 20:33+0800\n"
+"POT-Creation-Date: 2012-04-28 20:17+0800\n"
"PO-Revision-Date: 2012-01-30 00:00+0800\n"
"Last-Translator: Jiang Xin <worldhello.net@gmail.com>\n"
"Language-Team: GitHub <https://github.com/gotgit/git/>\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=1; plural=0;\n"
-#: advice.c:34
+#: advice.c:40
#, c-format
msgid "hint: %.*s\n"
msgstr "提示:%.*s\n"
#. * Message used both when 'git commit' fails and when
#. * other commands doing a merge do.
#.
-#: advice.c:64
+#: advice.c:70
msgid ""
"Fix them up in the work tree,\n"
"and then use 'git add/rm <file>' as\n"
"'git add/rm <file>' 标记解决方案,\n"
"或使用 'git commit -a'。"
-#: commit.c:47
+#: commit.c:48
#, c-format
msgid "could not parse %s"
msgstr "不能解析 %s"
-#: commit.c:49
+#: commit.c:50
#, c-format
msgid "%s %s is not a commit!"
msgstr "%s %s 不是一个提交!"
"您的分支和 '%s' 出现了偏离,\n"
"并且各自分别有 %d 和 %d 处不同的提交。\n"
-#: sequencer.c:120 builtin/merge.c:864 builtin/merge.c:985
-#: builtin/merge.c:1095 builtin/merge.c:1105
+#: sequencer.c:120 builtin/merge.c:865 builtin/merge.c:978
+#: builtin/merge.c:1088 builtin/merge.c:1098
#, c-format
msgid "Could not open '%s' for writing"
msgstr "不能为写入打开 '%s'"
-#: sequencer.c:122 builtin/merge.c:334 builtin/merge.c:867
-#: builtin/merge.c:1097 builtin/merge.c:1110
+#: sequencer.c:122 builtin/merge.c:333 builtin/merge.c:868
+#: builtin/merge.c:1090 builtin/merge.c:1103
#, c-format
msgid "Could not write to '%s'"
msgstr "不能写入 '%s'"
msgid "could not apply %s... %s"
msgstr "不能应用 %s... %s"
-#: sequencer.c:450 sequencer.c:909 builtin/log.c:288 builtin/log.c:713
-#: builtin/log.c:1329 builtin/log.c:1548 builtin/merge.c:348
+#: sequencer.c:450 sequencer.c:909 builtin/log.c:289 builtin/log.c:719
+#: builtin/log.c:1335 builtin/log.c:1554 builtin/merge.c:347
#: builtin/shortlog.c:181
msgid "revision walk setup failed"
msgstr "版本遍历设置失败"
msgid "Can't cherry-pick into empty head"
msgstr "不能拣选到空分支"
+#: sha1_name.c:864
+msgid "HEAD does not point to a branch"
+msgstr "HEAD 没有指向一个分支"
+
+#: sha1_name.c:867
+#, c-format
+msgid "No such branch: '%s'"
+msgstr "没有此分支:'%s'"
+
+#: sha1_name.c:869
+#, c-format
+msgid "No upstream configured for branch '%s'"
+msgstr "尚未给分支 '%s' 设置上游"
+
+#: sha1_name.c:872
+#, c-format
+msgid "Upstream branch '%s' not stored as a remote-tracking branch"
+msgstr "上游分支 '%s' 没有存储为一个远程跟踪分支"
+
#: wt-status.c:134
msgid "Unmerged paths:"
msgstr "未合并的路径:"
# 译者:汉字之间无空格,故删除尾部空格
#. TRANSLATORS: This is "remote " in "remote branch '%s' not found"
-#: builtin/branch.c:163
+#: builtin/branch.c:164
msgid "remote "
msgstr "远程"
-#: builtin/branch.c:171
+#: builtin/branch.c:172
msgid "cannot use -a with -d"
msgstr "不能将 -a 和 -d 共用"
-#: builtin/branch.c:177
+#: builtin/branch.c:178
msgid "Couldn't look up commit object for HEAD"
msgstr "无法查询 HEAD 指向的提交对象"
-#: builtin/branch.c:182
+#: builtin/branch.c:183
#, c-format
msgid "Cannot delete the branch '%s' which you are currently on."
msgstr "无法删除您当前所在的分支 '%s'。"
-#: builtin/branch.c:192
+#: builtin/branch.c:193
#, c-format
msgid "%sbranch '%s' not found."
msgstr "%s分支 '%s' 未发现。"
-#: builtin/branch.c:200
+#: builtin/branch.c:201
#, c-format
msgid "Couldn't look up commit object for '%s'"
msgstr "无法查询 '%s' 指向的提交对象"
-#: builtin/branch.c:206
+#: builtin/branch.c:207
#, c-format
msgid ""
"The branch '%s' is not fully merged.\n"
"分支 '%s' 没有完全合并。\n"
"如果您确认要删除它,执行 'git branch -D %s'。"
-#: builtin/branch.c:214
+#: builtin/branch.c:215
#, c-format
msgid "Error deleting %sbranch '%s'"
msgstr "删除 %s分支 '%s' 时出错"
-#: builtin/branch.c:219
+#: builtin/branch.c:221
#, c-format
msgid "Deleted %sbranch %s (was %s).\n"
msgstr "已删除 %s分支 %s(曾为 %s)。\n"
-#: builtin/branch.c:224
+#: builtin/branch.c:226
msgid "Update of config-file failed"
msgstr "无法更新 config 文件"
-#: builtin/branch.c:322
+#: builtin/branch.c:324
#, c-format
msgid "branch '%s' does not point at a commit"
msgstr "分支 '%s' 未指向一个提交"
# 译者:注意保持句尾空格
-#: builtin/branch.c:394
+#: builtin/branch.c:396
#, c-format
msgid "behind %d] "
msgstr "落后 %d] "
# 译者:注意保持句尾空格
-#: builtin/branch.c:396
+#: builtin/branch.c:398
#, c-format
msgid "ahead %d] "
msgstr "领先 %d] "
# 译者:注意保持句尾空格
-#: builtin/branch.c:398
+#: builtin/branch.c:400
#, c-format
msgid "ahead %d, behind %d] "
msgstr "领先 %d,落后 %d] "
-#: builtin/branch.c:501
+#: builtin/branch.c:503
msgid "(no branch)"
msgstr "(非分支)"
-#: builtin/branch.c:566
+#: builtin/branch.c:568
msgid "some refs could not be read"
msgstr "一些引用不能读取"
-#: builtin/branch.c:579
+#: builtin/branch.c:581
msgid "cannot rename the current branch while not on any."
msgstr "无法重命名当前分支因为不处于任何分支上。"
-#: builtin/branch.c:589
+#: builtin/branch.c:591
#, c-format
msgid "Invalid branch name: '%s'"
msgstr "无效的分支名:'%s'"
-#: builtin/branch.c:604
+#: builtin/branch.c:606
msgid "Branch rename failed"
msgstr "分支重命名失败"
-#: builtin/branch.c:608
+#: builtin/branch.c:610
#, c-format
msgid "Renamed a misnamed branch '%s' away"
msgstr "重命名掉一个错误命名的旧分支 '%s'"
-#: builtin/branch.c:612
+#: builtin/branch.c:614
#, c-format
msgid "Branch renamed to %s, but HEAD is not updated!"
msgstr "分支重命名为 %s,但 HEAD 没有更新!"
-#: builtin/branch.c:619
+#: builtin/branch.c:621
msgid "Branch is renamed, but update of config-file failed"
msgstr "分支被重命名,但更新 config 文件失败"
-#: builtin/branch.c:634
+#: builtin/branch.c:636
#, c-format
msgid "malformed object name %s"
msgstr "非法的对象名 %s"
-#: builtin/branch.c:658
+#: builtin/branch.c:660
#, c-format
msgid "could not write branch description template: %s\n"
msgstr "不能写分支描述模版:%s\n"
-#: builtin/branch.c:746
+#: builtin/branch.c:750
msgid "Failed to resolve HEAD as a valid ref."
msgstr "无法将 HEAD 解析为有效引用。"
-#: builtin/branch.c:751 builtin/clone.c:558
+#: builtin/branch.c:755 builtin/clone.c:558
msgid "HEAD not found below refs/heads!"
msgstr "HEAD 没有位于 /refs/heads 之下!"
-#: builtin/branch.c:809
+#: builtin/branch.c:813
msgid "-a and -r options to 'git branch' do not make sense with a branch name"
msgstr "'git branch' 的 -a 和 -r 选项带一个分支名参数没有意义"
msgstr "路径 '%s' 未合并"
#: builtin/checkout.c:302 builtin/checkout.c:498 builtin/clone.c:583
-#: builtin/merge.c:811
+#: builtin/merge.c:812
msgid "unable to write new index file"
msgstr "无法写新的索引文件"
msgid "Can not do reflog for '%s'\n"
msgstr "不能对 '%s' 执行 reflog 操作\n"
-#: builtin/checkout.c:565
+#: builtin/checkout.c:566
msgid "HEAD is now at"
msgstr "HEAD 目前位于"
-#: builtin/checkout.c:572
+#: builtin/checkout.c:573
#, c-format
msgid "Reset branch '%s'\n"
msgstr "重置分支 '%s'\n"
-#: builtin/checkout.c:575
+#: builtin/checkout.c:576
#, c-format
msgid "Already on '%s'\n"
msgstr "已经位于 '%s'\n"
-#: builtin/checkout.c:579
+#: builtin/checkout.c:580
#, c-format
msgid "Switched to and reset branch '%s'\n"
msgstr "切换并重置分支 '%s'\n"
-#: builtin/checkout.c:581
+#: builtin/checkout.c:582
#, c-format
msgid "Switched to a new branch '%s'\n"
msgstr "切换到一个新分支 '%s'\n"
-#: builtin/checkout.c:583
+#: builtin/checkout.c:584
#, c-format
msgid "Switched to branch '%s'\n"
msgstr "切换到分支 '%s'\n"
# 译者:注意保持前导空格
-#: builtin/checkout.c:639
+#: builtin/checkout.c:640
#, c-format
msgid " ... and %d more.\n"
msgstr " ... 及其它 %d 个。\n"
#. The singular version
-#: builtin/checkout.c:645
+#: builtin/checkout.c:646
#, c-format
msgid ""
"Warning: you are leaving %d commit behind, not connected to\n"
"\n"
"%s\n"
-#: builtin/checkout.c:663
+#: builtin/checkout.c:664
#, c-format
msgid ""
"If you want to keep them by creating a new branch, this may be a good time\n"
" git branch new_branch_name %s\n"
"\n"
-#: builtin/checkout.c:692
+#: builtin/checkout.c:693
msgid "internal error in revision walk"
msgstr "在版本遍历时遇到内部错误"
-#: builtin/checkout.c:696
+#: builtin/checkout.c:697
msgid "Previous HEAD position was"
msgstr "之前的 HEAD 位置是"
-#: builtin/checkout.c:722
+#: builtin/checkout.c:723
msgid "You are on a branch yet to be born"
msgstr "您位于一个尚未初始化的分支"
#. case (1)
-#: builtin/checkout.c:853
+#: builtin/checkout.c:854
#, c-format
msgid "invalid reference: %s"
msgstr "无效引用:%s"
#. case (1): want a tree
-#: builtin/checkout.c:892
+#: builtin/checkout.c:893
#, c-format
msgid "reference is not a tree: %s"
msgstr "引用不是一个树:%s"
-#: builtin/checkout.c:972
+#: builtin/checkout.c:973
msgid "-B cannot be used with -b"
msgstr "-B 不能和 -b 共用"
-#: builtin/checkout.c:981
+#: builtin/checkout.c:982
msgid "--patch is incompatible with all other options"
msgstr "--patch 选项和其他选项不兼容"
-#: builtin/checkout.c:984
+#: builtin/checkout.c:985
msgid "--detach cannot be used with -b/-B/--orphan"
msgstr "--detach 不能和 -b/-B/--orphan 共用"
-#: builtin/checkout.c:986
+#: builtin/checkout.c:987
msgid "--detach cannot be used with -t"
msgstr "--detach 不能和 -t 共用"
-#: builtin/checkout.c:992
+#: builtin/checkout.c:993
msgid "--track needs a branch name"
msgstr "--track 需要一个分支名"
-#: builtin/checkout.c:999
+#: builtin/checkout.c:1000
msgid "Missing branch name; try -b"
msgstr "缺少分支名;尝试 -b"
-#: builtin/checkout.c:1005
+#: builtin/checkout.c:1006
msgid "--orphan and -b|-B are mutually exclusive"
msgstr "--orphan 和 -b|-B 互斥"
-#: builtin/checkout.c:1007
+#: builtin/checkout.c:1008
msgid "--orphan cannot be used with -t"
msgstr "--orphan 不能和 -t 共用"
-#: builtin/checkout.c:1017
+#: builtin/checkout.c:1018
msgid "git checkout: -f and -m are incompatible"
msgstr "git checkout:-f 和 -m 不兼容"
-#: builtin/checkout.c:1051
+#: builtin/checkout.c:1052
msgid "invalid path specification"
msgstr "无效的路径规格"
-#: builtin/checkout.c:1059
+#: builtin/checkout.c:1060
#, c-format
msgid ""
"git checkout: updating paths is incompatible with switching branches.\n"
"git checkout:更新路径和切换分支不兼容。\n"
"您是想要检出 '%s' 但未能将其解析为提交么?"
-#: builtin/checkout.c:1061
+#: builtin/checkout.c:1062
msgid "git checkout: updating paths is incompatible with switching branches."
msgstr "git checkout:更新路径和切换分支不兼容。"
-#: builtin/checkout.c:1066
+#: builtin/checkout.c:1067
msgid "git checkout: --detach does not take a path argument"
msgstr "git checkout:--detach 不跟路径参数"
-#: builtin/checkout.c:1069
+#: builtin/checkout.c:1070
msgid ""
"git checkout: --ours/--theirs, --force and --merge are incompatible when\n"
"checking out of the index."
msgstr ""
"git checkout:在从索引检出时,--ours/--theirs、--force 和 --merge 不兼容。"
-#: builtin/checkout.c:1088
+#: builtin/checkout.c:1089
msgid "Cannot switch branch to a non-commit."
msgstr "无法切换分支到一个非提交。"
-#: builtin/checkout.c:1091
+#: builtin/checkout.c:1092
msgid "--ours/--theirs is incompatible with switching branches."
msgstr "--ours/--theirs 和切换分支不兼容。"
msgid "Aborting commit due to empty commit message.\n"
msgstr "终止提交因为提交说明为空。\n"
-#: builtin/commit.c:1554 builtin/merge.c:935 builtin/merge.c:968
+#: builtin/commit.c:1554 builtin/merge.c:936 builtin/merge.c:961
msgid "failed to write commit object"
msgstr "无法写提交对象"
msgid "Couldn't find remote ref HEAD"
msgstr "无法发现远程 HEAD 引用"
-#: builtin/fetch.c:252
+#: builtin/fetch.c:253
#, c-format
msgid "object %s not found"
msgstr "对象 %s 未发现"
-#: builtin/fetch.c:258
+#: builtin/fetch.c:259
msgid "[up to date]"
msgstr "[最新]"
-#: builtin/fetch.c:272
+#: builtin/fetch.c:273
#, c-format
msgid "! %-*s %-*s -> %s (can't fetch in current branch)"
msgstr "! %-*s %-*s -> %s (在当前分支下不能获取)"
-#: builtin/fetch.c:273 builtin/fetch.c:351
+#: builtin/fetch.c:274 builtin/fetch.c:360
msgid "[rejected]"
msgstr "[已拒绝]"
-#: builtin/fetch.c:284
+#: builtin/fetch.c:285
msgid "[tag update]"
msgstr "[tag更新]"
# 译者:注意保持前导空格
-#: builtin/fetch.c:286 builtin/fetch.c:313 builtin/fetch.c:331
+#: builtin/fetch.c:287 builtin/fetch.c:322 builtin/fetch.c:340
msgid " (unable to update local ref)"
msgstr " (不能更新本地引用)"
-#: builtin/fetch.c:298
+#: builtin/fetch.c:305
msgid "[new tag]"
msgstr "[新tag]"
-#: builtin/fetch.c:302
+#: builtin/fetch.c:308
msgid "[new branch]"
msgstr "[新分支]"
-#: builtin/fetch.c:347
+#: builtin/fetch.c:311
+msgid "[new ref]"
+msgstr "[新引用]"
+
+#: builtin/fetch.c:356
msgid "unable to update local ref"
msgstr "不能更新本地引用"
-#: builtin/fetch.c:347
+#: builtin/fetch.c:356
msgid "forced update"
msgstr "强制更新"
-#: builtin/fetch.c:353
+#: builtin/fetch.c:362
msgid "(non-fast-forward)"
msgstr "(非快进式)"
-#: builtin/fetch.c:384 builtin/fetch.c:676
+#: builtin/fetch.c:393 builtin/fetch.c:685
#, c-format
msgid "cannot open %s: %s\n"
msgstr "无法打开 %s:%s\n"
-#: builtin/fetch.c:393
+#: builtin/fetch.c:402
#, c-format
msgid "%s did not send all necessary objects\n"
msgstr "%s 未发送所有必须的对象\n"
-#: builtin/fetch.c:479
+#: builtin/fetch.c:488
#, c-format
msgid "From %.*s\n"
msgstr "来自 %.*s\n"
-#: builtin/fetch.c:490
+#: builtin/fetch.c:499
#, c-format
msgid ""
"some local refs could not be updated; try running\n"
" 'git remote prune %s' 来删除旧的、有冲突的分支"
# 译者:注意保持前导空格
-#: builtin/fetch.c:540
+#: builtin/fetch.c:549
#, c-format
msgid " (%s will become dangling)\n"
msgstr " (%s 将成为悬空状态)\n"
# 译者:注意保持前导空格
-#: builtin/fetch.c:541
+#: builtin/fetch.c:550
#, c-format
msgid " (%s has become dangling)\n"
msgstr " (%s 已成为悬空状态)\n"
-#: builtin/fetch.c:548
+#: builtin/fetch.c:557
msgid "[deleted]"
msgstr "[已删除]"
-#: builtin/fetch.c:549
+#: builtin/fetch.c:558
msgid "(none)"
msgstr "(无)"
-#: builtin/fetch.c:666
+#: builtin/fetch.c:675
#, c-format
msgid "Refusing to fetch into current branch %s of non-bare repository"
msgstr "拒绝获取到非裸版本库的当前分支 %s"
-#: builtin/fetch.c:700
+#: builtin/fetch.c:709
#, c-format
msgid "Don't know how to fetch from %s"
msgstr "不知道如何从 %s 获取"
-#: builtin/fetch.c:777
+#: builtin/fetch.c:786
#, c-format
msgid "Option \"%s\" value \"%s\" is not valid for %s"
msgstr "选项 \"%s\" 的值 \"%s\" 对于 %s 是无效的"
-#: builtin/fetch.c:780
+#: builtin/fetch.c:789
#, c-format
msgid "Option \"%s\" is ignored for %s\n"
msgstr "选项 \"%s\" 对于 %s 被忽略\n"
-#: builtin/fetch.c:879
+#: builtin/fetch.c:888
#, c-format
msgid "Fetching %s\n"
msgstr "正在获取 %s\n"
-#: builtin/fetch.c:881
+#: builtin/fetch.c:890
#, c-format
msgid "Could not fetch %s"
msgstr "不能获取 %s"
-#: builtin/fetch.c:898
+#: builtin/fetch.c:907
msgid ""
"No remote repository specified. Please, specify either a URL or a\n"
"remote name from which new revisions should be fetched."
msgstr "未指定远程版本库。请通过一个URL或远程版本库名指定,用以获取新提交。"
-#: builtin/fetch.c:918
+#: builtin/fetch.c:927
msgid "You need to specify a tag name."
msgstr "您需要指定一个 tag 名称。"
-#: builtin/fetch.c:970
+#: builtin/fetch.c:979
msgid "fetch --all does not take a repository argument"
msgstr "fetch --all 不能带一个版本库参数"
-#: builtin/fetch.c:972
+#: builtin/fetch.c:981
msgid "fetch --all does not make sense with refspecs"
msgstr "fetch --all 带引用表达式没有任何意义"
-#: builtin/fetch.c:983
+#: builtin/fetch.c:992
#, c-format
msgid "No such remote or remote group: %s"
msgstr "没有这样的远程或远程组:%s"
-#: builtin/fetch.c:991
+#: builtin/fetch.c:1000
msgid "Fetching a group and specifying refspecs does not make sense"
msgstr "获取组并指定引用表达式没有意义"
msgid "Cannot access work tree '%s'"
msgstr "不能访问工作区 '%s'"
-#: builtin/log.c:187
+#: builtin/log.c:188
#, c-format
msgid "Final output: %d %s\n"
msgstr "最终输出:%d %s\n"
-#: builtin/log.c:395 builtin/log.c:483
+#: builtin/log.c:401 builtin/log.c:489
#, c-format
msgid "Could not read object %s"
msgstr "不能读取对象 %s"
-#: builtin/log.c:507
+#: builtin/log.c:513
#, c-format
msgid "Unknown type: %d"
msgstr "未知类型:%d"
-#: builtin/log.c:596
+#: builtin/log.c:602
msgid "format.headers without value"
msgstr "format.headers 没有值"
-#: builtin/log.c:669
+#: builtin/log.c:675
msgid "name of output directory is too long"
msgstr "输出目录名太长"
-#: builtin/log.c:680
+#: builtin/log.c:686
#, c-format
msgid "Cannot open patch file %s"
msgstr "无法打开补丁文件 %s"
-#: builtin/log.c:694
+#: builtin/log.c:700
msgid "Need exactly one range."
msgstr "只需要一个范围。"
-#: builtin/log.c:702
+#: builtin/log.c:708
msgid "Not a range."
msgstr "不是一个范围。"
-#: builtin/log.c:739
+#: builtin/log.c:745
msgid "Could not extract email from committer identity."
msgstr "不能从提交者身份中提取邮件地址。"
-#: builtin/log.c:785
+#: builtin/log.c:791
msgid "Cover letter needs email format"
msgstr "信封需要邮件地址格式"
-#: builtin/log.c:879
+#: builtin/log.c:885
#, c-format
msgid "insane in-reply-to: %s"
msgstr "不正常的 in-reply-to:%s"
-#: builtin/log.c:952
+#: builtin/log.c:958
msgid "Two output directories?"
msgstr "两个输出目录?"
-#: builtin/log.c:1173
+#: builtin/log.c:1179
#, c-format
msgid "bogus committer info %s"
msgstr "虚假的提交者信息 %s"
-#: builtin/log.c:1218
+#: builtin/log.c:1224
msgid "-n and -k are mutually exclusive."
msgstr "-n 和 -k 互斥。"
-#: builtin/log.c:1220
+#: builtin/log.c:1226
msgid "--subject-prefix and -k are mutually exclusive."
msgstr "--subject-prefix 和 -k 互斥。"
-#: builtin/log.c:1225 builtin/shortlog.c:284
+#: builtin/log.c:1231 builtin/shortlog.c:284
#, c-format
msgid "unrecognized argument: %s"
msgstr "未识别的参数:%s"
-#: builtin/log.c:1228
+#: builtin/log.c:1234
msgid "--name-only does not make sense"
msgstr "--name-only 无意义"
-#: builtin/log.c:1230
+#: builtin/log.c:1236
msgid "--name-status does not make sense"
msgstr "--name-status 无意义"
-#: builtin/log.c:1232
+#: builtin/log.c:1238
msgid "--check does not make sense"
msgstr "--check 无意义"
-#: builtin/log.c:1255
+#: builtin/log.c:1261
msgid "standard output, or directory, which one?"
msgstr "标准输出或目录,哪一个?"
-#: builtin/log.c:1257
+#: builtin/log.c:1263
#, c-format
msgid "Could not create directory '%s'"
msgstr "不能创建目录 '%s'"
-#: builtin/log.c:1410
+#: builtin/log.c:1416
msgid "Failed to create output files"
msgstr "无法创建输出文件"
-#: builtin/log.c:1514
+#: builtin/log.c:1520
#, c-format
msgid ""
"Could not find a tracked remote branch, please specify <upstream> manually.\n"
msgstr "不能找到跟踪的远程分支,请手工指定 <upstream>。\n"
-#: builtin/log.c:1530 builtin/log.c:1532 builtin/log.c:1544
+#: builtin/log.c:1536 builtin/log.c:1538 builtin/log.c:1550
#, c-format
msgid "Unknown commit %s"
msgstr "未知提交 %s"
-#: builtin/merge.c:91
+#: builtin/merge.c:90
msgid "switch `m' requires a value"
msgstr "开关 `m' 需要一个值"
-#: builtin/merge.c:128
+#: builtin/merge.c:127
#, c-format
msgid "Could not find merge strategy '%s'.\n"
msgstr "不能找到合并策略 '%s'。\n"
-#: builtin/merge.c:129
+#: builtin/merge.c:128
#, c-format
msgid "Available strategies are:"
msgstr "可用的策略有:"
-#: builtin/merge.c:134
+#: builtin/merge.c:133
#, c-format
msgid "Available custom strategies are:"
msgstr "可用的自定义策略有:"
-#: builtin/merge.c:241
+#: builtin/merge.c:240
msgid "could not run stash."
msgstr "不能进行进度保存。"
-#: builtin/merge.c:246
+#: builtin/merge.c:245
msgid "stash failed"
msgstr "进度保存失败"
-#: builtin/merge.c:251
+#: builtin/merge.c:250
#, c-format
msgid "not a valid object: %s"
msgstr "不是一个有效对象:%s"
-#: builtin/merge.c:270 builtin/merge.c:287
+#: builtin/merge.c:269 builtin/merge.c:286
msgid "read-tree failed"
msgstr "读取树失败"
# 译者:注意保持前导空格
-#: builtin/merge.c:317
+#: builtin/merge.c:316
msgid " (nothing to squash)"
msgstr " (无可压缩)"
-#: builtin/merge.c:330
+#: builtin/merge.c:329
#, c-format
msgid "Squash commit -- not updating HEAD\n"
msgstr "压缩提交 -- 未更新 HEAD\n"
-#: builtin/merge.c:362
+#: builtin/merge.c:361
msgid "Writing SQUASH_MSG"
msgstr "写入 SQUASH_MSG"
-#: builtin/merge.c:364
+#: builtin/merge.c:363
msgid "Finishing SQUASH_MSG"
msgstr "完成 SQUASH_MSG"
msgid "failed to read the cache"
msgstr "无法读取缓存"
-#: builtin/merge.c:696
+#: builtin/merge.c:697
msgid "Unable to write index."
msgstr "不能写索引。"
-#: builtin/merge.c:709
+#: builtin/merge.c:710
msgid "Not handling anything other than two heads merge."
msgstr "不能处理两个头合并之外的任何操作。"
-#: builtin/merge.c:723
+#: builtin/merge.c:724
#, c-format
msgid "Unknown option for merge-recursive: -X%s"
msgstr "merge-recursive 的未知选项:-X%s"
-#: builtin/merge.c:737
+#: builtin/merge.c:738
#, c-format
msgid "unable to write %s"
msgstr "不能写 %s"
-#: builtin/merge.c:876
+#: builtin/merge.c:877
#, c-format
msgid "Could not read from '%s'"
msgstr "不能从 '%s' 读取"
-#: builtin/merge.c:885
+#: builtin/merge.c:886
#, c-format
msgid "Not committing merge; use 'git commit' to complete the merge.\n"
msgstr "未提交合并,使用 'git commit' 完成此次合并。\n"
-#: builtin/merge.c:891
+#: builtin/merge.c:892
msgid ""
"Please enter a commit message to explain why this merge is necessary,\n"
"especially if it merges an updated upstream into a topic branch.\n"
"\n"
"以 '#' 开头的行将被忽略,而且空提交说明将会终止提交。\n"
-#: builtin/merge.c:915
+#: builtin/merge.c:916
msgid "Empty commit message."
msgstr "空提交信息。"
-#: builtin/merge.c:927
+#: builtin/merge.c:928
#, c-format
msgid "Wonderful.\n"
msgstr "太棒了。\n"
-#: builtin/merge.c:1000
+#: builtin/merge.c:993
#, c-format
msgid "Automatic merge failed; fix conflicts and then commit the result.\n"
msgstr "自动合并失败,修正冲突然后提交修正的结果。\n"
-#: builtin/merge.c:1016
+#: builtin/merge.c:1009
#, c-format
msgid "'%s' is not a commit"
msgstr "'%s' 不是一个提交"
-#: builtin/merge.c:1057
+#: builtin/merge.c:1050
msgid "No current branch."
msgstr "没有当前分支。"
-#: builtin/merge.c:1059
+#: builtin/merge.c:1052
msgid "No remote for the current branch."
msgstr "当前分支没有对应的远程版本库。"
-#: builtin/merge.c:1061
+#: builtin/merge.c:1054
msgid "No default upstream defined for the current branch."
msgstr "当前分支没有定义默认的上游分支。"
-#: builtin/merge.c:1066
+#: builtin/merge.c:1059
#, c-format
msgid "No remote tracking branch for %s from %s"
msgstr "%s 没有来自 %s 的远程跟踪分支"
-#: builtin/merge.c:1188
+#: builtin/merge.c:1146 builtin/merge.c:1303
+#, c-format
+msgid "%s - not something we can merge"
+msgstr "%s - 不能被合并"
+
+#: builtin/merge.c:1214
msgid "There is no merge to abort (MERGE_HEAD missing)."
msgstr "没有要终止的合并(MERGE_HEAD 丢失)。"
-#: builtin/merge.c:1204 git-pull.sh:31
+#: builtin/merge.c:1230 git-pull.sh:31
msgid ""
"You have not concluded your merge (MERGE_HEAD exists).\n"
"Please, commit your changes before you can merge."
"您尚未结束您的合并(存在 MERGE_HEAD)。\n"
"请在合并前先提交您的修改。"
-#: builtin/merge.c:1207 git-pull.sh:34
+#: builtin/merge.c:1233 git-pull.sh:34
msgid "You have not concluded your merge (MERGE_HEAD exists)."
msgstr "您尚未结束您的合并(存在 MERGE_HEAD)。"
-#: builtin/merge.c:1211
+#: builtin/merge.c:1237
msgid ""
"You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists).\n"
"Please, commit your changes before you can merge."
"您尚未结束您的拣选(存在 CHERRY_PICK_HEAD)。\n"
"请在合并前先提交您的修改。"
-#: builtin/merge.c:1214
+#: builtin/merge.c:1240
msgid "You have not concluded your cherry-pick (CHERRY_PICK_HEAD exists)."
msgstr "您尚未结束您的拣选(存在 CHERRY_PICK_HEAD)。"
-#: builtin/merge.c:1223
+#: builtin/merge.c:1249
msgid "You cannot combine --squash with --no-ff."
msgstr "您不能将 --squash 与 --no-ff 共用。"
-#: builtin/merge.c:1228
+#: builtin/merge.c:1254
msgid "You cannot combine --no-ff with --ff-only."
msgstr "您不能将 --no-ff 与 --ff-only 共用。"
-#: builtin/merge.c:1235
+#: builtin/merge.c:1261
msgid "No commit specified and merge.defaultToUpstream not set."
msgstr "未指定提交并且 merge.defaultToUpstream 未设置。"
-#: builtin/merge.c:1266
+#: builtin/merge.c:1293
msgid "Can merge only exactly one commit into empty head"
msgstr "只能将一个提交合并到空分支上"
-#: builtin/merge.c:1269
+#: builtin/merge.c:1296
msgid "Squash commit into empty head not supported yet"
msgstr "尚不支持到空分支的压缩提交"
-#: builtin/merge.c:1271
+#: builtin/merge.c:1298
msgid "Non-fast-forward commit does not make sense into an empty head"
msgstr "到空分支的非快进式提交没有意义"
-#: builtin/merge.c:1275 builtin/merge.c:1319
-#, c-format
-msgid "%s - not something we can merge"
-msgstr "%s - 不能被合并"
-
-#: builtin/merge.c:1382
+#: builtin/merge.c:1413
#, c-format
msgid "Updating %s..%s\n"
msgstr "更新 %s..%s\n"
-#: builtin/merge.c:1420
+#: builtin/merge.c:1451
#, c-format
msgid "Trying really trivial in-index merge...\n"
msgstr "尝试非常小的索引内合并...\n"
-#: builtin/merge.c:1427
+#: builtin/merge.c:1458
#, c-format
msgid "Nope.\n"
msgstr "无。\n"
-#: builtin/merge.c:1459
+#: builtin/merge.c:1490
msgid "Not possible to fast-forward, aborting."
msgstr "无法快进,终止。"
-#: builtin/merge.c:1482 builtin/merge.c:1559
+#: builtin/merge.c:1513 builtin/merge.c:1592
#, c-format
msgid "Rewinding the tree to pristine...\n"
msgstr "将树回滚至原始状态...\n"
-#: builtin/merge.c:1486
+#: builtin/merge.c:1517
#, c-format
msgid "Trying merge strategy %s...\n"
msgstr "尝试合并策略 %s...\n"
-#: builtin/merge.c:1550
+#: builtin/merge.c:1583
#, c-format
msgid "No merge strategy handled the merge.\n"
msgstr "没有合并策略处理此合并。\n"
-#: builtin/merge.c:1552
+#: builtin/merge.c:1585
#, c-format
msgid "Merge with strategy %s failed.\n"
msgstr "使用策略 %s 合并失败。\n"
-#: builtin/merge.c:1561
+#: builtin/merge.c:1594
#, c-format
msgid "Using the %s to prepare resolving by hand.\n"
msgstr "使用 %s 以准备手工解决。\n"
-#: builtin/merge.c:1572
+#: builtin/merge.c:1606
#, c-format
msgid "Automatic merge went well; stopped before committing as requested\n"
msgstr "自动合并进展顺利,按要求在提交前停止\n"
msgid "unable to parse value '%s' for option %s"
msgstr "不能解析值 '%s' 针对于选项 %s"
-#: builtin/push.c:44
+#: builtin/push.c:45
msgid "tag shorthand without <tag>"
msgstr "tag 简写没有跟 <tag> 参数"
-#: builtin/push.c:63
+#: builtin/push.c:64
msgid "--delete only accepts plain target ref names"
msgstr "--delete 只接受简单的目标引用名"
-#: builtin/push.c:83
+#: builtin/push.c:84
#, c-format
msgid ""
"You are not currently on a branch.\n"
"\n"
" git push %s HEAD:<name-of-remote-branch>\n"
-#: builtin/push.c:90
+#: builtin/push.c:91
#, c-format
msgid ""
"The current branch %s has no upstream branch.\n"
"\n"
" git push --set-upstream %s %s\n"
-#: builtin/push.c:98
+#: builtin/push.c:99
#, c-format
msgid "The current branch %s has multiple upstream branches, refusing to push."
msgstr "当前分支 %s 有多个上游分支,拒绝推送。"
-#: builtin/push.c:101
+#: builtin/push.c:102
#, c-format
msgid ""
"You are pushing to remote '%s', which is not the upstream of\n"
"您正推送至远程 '%s'(其并非当前分支 '%s' 的上游),\n"
"而没有告诉我要推送什么、更新哪个远程分支。"
-#: builtin/push.c:127
+#: builtin/push.c:131
msgid ""
"You didn't specify any refspecs to push, and push.default is \"nothing\"."
msgstr "您没有为推送指定任何引用表达式,并且 push.default 为 \"nothing\"。"
-#: builtin/push.c:147
+#: builtin/push.c:138
+msgid ""
+"Updates were rejected because the tip of your current branch is behind\n"
+"its remote counterpart. Merge the remote changes (e.g. 'git pull')\n"
+"before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+"更新被拒绝,因为您当前分支落后于对应的远程分支。再次推送前先与远程变更\n"
+"合并(如 'git pull')。\n"
+"详见 'git push --help' 中的 'Note about fast-forwards' 的内容。"
+
+#: builtin/push.c:144
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. If you did not intend to push that branch, you may want to\n"
+"specify branches to push or set the 'push.default' configuration\n"
+"variable to 'current' or 'upstream' to push only the current branch."
+msgstr ""
+"更新被拒绝,因为推送的一个分支落后于对应的远程分支。如果您并非有意推送\n"
+"该分支,您可以指定要推送的分支或者设置 'push.default' 配置变量为\n"
+"'current' 或 'upstream' 以便只推送当前分支。"
+
+#: builtin/push.c:150
+msgid ""
+"Updates were rejected because a pushed branch tip is behind its remote\n"
+"counterpart. Check out this branch and merge the remote changes\n"
+"(e.g. 'git pull') before pushing again.\n"
+"See the 'Note about fast-forwards' in 'git push --help' for details."
+msgstr ""
+"更新被拒绝,因为推送的一个分支落后于对应的远程分支。检出该分支并在再次\n"
+"推送之前与远程变更合并(如 'git pull')。\n"
+"详见 'git push --help' 中的 'Note about fast-forwards' 的内容。"
+
+#: builtin/push.c:190
#, c-format
msgid "Pushing to %s\n"
msgstr "推送到 %s\n"
-#: builtin/push.c:151
+#: builtin/push.c:194
#, c-format
msgid "failed to push some refs to '%s'"
msgstr "无法推送一些引用到 '%s'"
-#: builtin/push.c:159
-#, c-format
-msgid ""
-"To prevent you from losing history, non-fast-forward updates were rejected\n"
-"Merge the remote changes (e.g. 'git pull') before pushing again. See the\n"
-"'Note about fast-forwards' section of 'git push --help' for details.\n"
-msgstr ""
-"为了防止您丢失提交历史,非快进式更新被拒绝。\n"
-"再次推送前先与远程变更合并(如 'git pull')。详见\n"
-"'git push --help' 中的 'Note about fast-forwards' 小节。\n"
-
-#: builtin/push.c:176
+#: builtin/push.c:226
#, c-format
msgid "bad repository '%s'"
msgstr "坏的版本库 '%s'"
-#: builtin/push.c:177
+#: builtin/push.c:227
msgid ""
"No configured push destination.\n"
"Either specify the URL from the command-line or configure a remote "
"\n"
" git push <name>\n"
-#: builtin/push.c:192
+#: builtin/push.c:242
msgid "--all and --tags are incompatible"
msgstr "--all 和 --tags 不兼容"
-#: builtin/push.c:193
+#: builtin/push.c:243
msgid "--all can't be combined with refspecs"
msgstr "--all 不能和引用表达式共用"
-#: builtin/push.c:198
+#: builtin/push.c:248
msgid "--mirror and --tags are incompatible"
msgstr "--mirror 和 --tags 不兼容"
-#: builtin/push.c:199
+#: builtin/push.c:249
msgid "--mirror can't be combined with refspecs"
msgstr "--mirror 不能和引用表达式共用"
-#: builtin/push.c:204
+#: builtin/push.c:254
msgid "--all and --mirror are incompatible"
msgstr "--all 和 --mirror 不兼容"
-#: builtin/push.c:284
+#: builtin/push.c:342
msgid "--delete is incompatible with --all, --mirror and --tags"
msgstr "--delete 与 --all、--mirror 及 --tags 不兼容"
-#: builtin/push.c:286
+#: builtin/push.c:344
msgid "--delete doesn't make sense without any refs"
msgstr "--delete 未接任何引用没有意义"
msgid "Could not reset index file to revision '%s'."
msgstr "不能重置索引文件至版本 '%s'。"
-#: builtin/revert.c:70 builtin/revert.c:91
+#: builtin/revert.c:70 builtin/revert.c:92
#, c-format
msgid "%s: %s cannot be used with %s"
msgstr "%s:%s 不能和 %s 共用"
-#: builtin/revert.c:126
+#: builtin/revert.c:127
msgid "program error"
msgstr "程序错误"
-#: builtin/revert.c:209
+#: builtin/revert.c:213
msgid "revert failed"
msgstr "还原失败"
-#: builtin/revert.c:224
+#: builtin/revert.c:228
msgid "cherry-pick failed"
msgstr "拣选失败"
msgid "Updated tag '%s' (was %s)\n"
msgstr "已更新tag '%s'(曾为 %s)\n"
-#: git-am.sh:49
+#: git-am.sh:50
msgid "You need to set your committer info first"
msgstr "您需要先设置你的提交者信息"
-#: git-am.sh:136
+#: git-am.sh:137
msgid "Repository lacks necessary blobs to fall back on 3-way merge."
msgstr "版本库缺乏必要的 blob 数据以进行三路合并。"
-#: git-am.sh:147
+#: git-am.sh:154
msgid ""
"Did you hand edit your patch?\n"
"It does not apply to blobs recorded in its index."
"您是否曾手动编辑过您的补丁?\n"
"无法应用补丁到索引中的数据上。"
-#: git-am.sh:156
+#: git-am.sh:163
msgid "Falling back to patching base and 3-way merge..."
msgstr "回退到补丁基础版本并使用三路合并..."
-#: git-am.sh:268
+#: git-am.sh:275
msgid "Only one StGIT patch series can be applied at once"
msgstr "一次只能有一个 StGIT 补丁队列被应用"
-#: git-am.sh:355
+#: git-am.sh:362
#, sh-format
msgid "Patch format $patch_format is not supported."
msgstr "不支持 $patch_format 补丁格式。"
-#: git-am.sh:357
+#: git-am.sh:364
msgid "Patch format detection failed."
msgstr "补丁格式检测失败。"
-#: git-am.sh:411
+#: git-am.sh:418
msgid "-d option is no longer supported. Do not use."
msgstr "不再支持 -d 选项。不要使用。"
-#: git-am.sh:474
+#: git-am.sh:481
#, sh-format
msgid "previous rebase directory $dotest still exists but mbox given."
msgstr "之前的变基目录 $dotest 仍然存在但给出了mbox。"
-#: git-am.sh:479
+#: git-am.sh:486
msgid "Please make up your mind. --skip or --abort?"
msgstr "请下决心。--skip 或是 --abort ?"
-#: git-am.sh:506
+#: git-am.sh:513
msgid "Resolve operation not in progress, we are not resuming."
msgstr "解决操作未进行,我们不会继续。"
-#: git-am.sh:572
+#: git-am.sh:579
#, sh-format
msgid "Dirty index: cannot apply patches (dirty: $files)"
msgstr "脏的索引:不能应用补丁(脏文件:$files)"
-#: git-am.sh:748
+#: git-am.sh:755
msgid "cannot be interactive without stdin connected to a terminal."
msgstr "标准输入没有和终端关联,不能进行交互式操作。"
#. TRANSLATORS: Make sure to include [y], [n], [e], [v] and [a]
#. in your translation. The program will only accept English
#. input at this point.
-#: git-am.sh:759
+#: git-am.sh:766
msgid "Apply? [y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all "
msgstr "应用?[y]es/[n]o/[e]dit/[v]iew patch/[a]ccept all "
-#: git-am.sh:795
+#: git-am.sh:802
#, sh-format
msgid "Applying: $FIRSTLINE"
msgstr "正应用:$FIRSTLINE"
-#: git-am.sh:840
+#: git-am.sh:847
msgid "No changes -- Patch already applied."
msgstr "没有变更 -- 补丁已经应用过。"
-#: git-am.sh:866
+#: git-am.sh:873
msgid "applying to an empty history"
msgstr "正应用到一个空历史上"
msgid "cannot strip one component off url '$remoteurl'"
msgstr "无法从 url '$remoteurl' 剥离一个组件"
-#: git-submodule.sh:108
+#: git-submodule.sh:109
#, sh-format
-msgid "No submodule mapping found in .gitmodules for path '$path'"
-msgstr "未在 .gitmodules 中发现路径 '$path' 的子模组映射"
+msgid "No submodule mapping found in .gitmodules for path '$sm_path'"
+msgstr "未在 .gitmodules 中发现路径 '$sm_path' 的子模组映射"
-#: git-submodule.sh:149
+#: git-submodule.sh:150
#, sh-format
-msgid "Clone of '$url' into submodule path '$path' failed"
-msgstr "无法克隆 '$url' 到子模组路径 '$path'"
+msgid "Clone of '$url' into submodule path '$sm_path' failed"
+msgstr "无法克隆 '$url' 到子模组路径 '$sm_path'"
-#: git-submodule.sh:159
+#: git-submodule.sh:160
#, sh-format
msgid "Gitdir '$a' is part of the submodule path '$b' or vice versa"
msgstr "Gitdir '$a' 在子模组路径 '$b' 之下或者相反"
-#: git-submodule.sh:247
+#: git-submodule.sh:249
#, sh-format
msgid "repo URL: '$repo' must be absolute or begin with ./|../"
msgstr "版本库URL:'$repo' 必须是绝对路径或以 ./|../ 起始"
-#: git-submodule.sh:264
+#: git-submodule.sh:266
#, sh-format
-msgid "'$path' already exists in the index"
-msgstr "'$path' 已经存在于索引中"
+msgid "'$sm_path' already exists in the index"
+msgstr "'$sm_path' 已经存在于索引中"
-#: git-submodule.sh:281
+#: git-submodule.sh:283
#, sh-format
-msgid "'$path' already exists and is not a valid git repo"
-msgstr "'$path' 已存在且不是一个有效的 git 版本库"
+msgid "'$sm_path' already exists and is not a valid git repo"
+msgstr "'$sm_path' 已存在且不是一个有效的 git 版本库"
-#: git-submodule.sh:295
+#: git-submodule.sh:297
#, sh-format
-msgid "Unable to checkout submodule '$path'"
-msgstr "不能检出子模组 '$path'"
+msgid "Unable to checkout submodule '$sm_path'"
+msgstr "不能检出子模组 '$sm_path'"
-#: git-submodule.sh:300
+#: git-submodule.sh:302
#, sh-format
-msgid "Failed to add submodule '$path'"
-msgstr "无法添加子模组 '$path'"
+msgid "Failed to add submodule '$sm_path'"
+msgstr "无法添加子模组 '$sm_path'"
-#: git-submodule.sh:305
+#: git-submodule.sh:307
#, sh-format
-msgid "Failed to register submodule '$path'"
-msgstr "无法注册子模组 '$path'"
+msgid "Failed to register submodule '$sm_path'"
+msgstr "无法注册子模组 '$sm_path'"
-#: git-submodule.sh:347
+#: git-submodule.sh:349
#, sh-format
-msgid "Entering '$prefix$path'"
-msgstr "正在进入 '$prefix$path'"
+msgid "Entering '$prefix$sm_path'"
+msgstr "正在进入 '$prefix$sm_path'"
-#: git-submodule.sh:359
+#: git-submodule.sh:363
#, sh-format
-msgid "Stopping at '$path'; script returned non-zero status."
-msgstr "停止于 '$path',脚本返回非零值。"
+msgid "Stopping at '$sm_path'; script returned non-zero status."
+msgstr "停止于 '$sm_path',脚本返回非零值。"
-#: git-submodule.sh:401
+#: git-submodule.sh:405
#, sh-format
-msgid "No url found for submodule path '$path' in .gitmodules"
-msgstr "在 .gitmodules 中未找到子模组路径 '$path' 的 url"
+msgid "No url found for submodule path '$sm_path' in .gitmodules"
+msgstr "在 .gitmodules 中未找到子模组路径 '$sm_path' 的 url"
-#: git-submodule.sh:410
+#: git-submodule.sh:414
#, sh-format
-msgid "Failed to register url for submodule path '$path'"
-msgstr "无法为子模组路径 '$path' 注册 url"
+msgid "Failed to register url for submodule path '$sm_path'"
+msgstr "无法为子模组路径 '$sm_path' 注册 url"
-#: git-submodule.sh:418
+#: git-submodule.sh:422
#, sh-format
-msgid "Failed to register update mode for submodule path '$path'"
-msgstr "无法为子模组路径 '$path' 注册更新模式"
+msgid "Failed to register update mode for submodule path '$sm_path'"
+msgstr "无法为子模组路径 '$sm_path' 注册更新模式"
-#: git-submodule.sh:420
+#: git-submodule.sh:424
#, sh-format
-msgid "Submodule '$name' ($url) registered for path '$path'"
-msgstr "子模组 '$name' ($url) 已为路径 '$path' 注册"
+msgid "Submodule '$name' ($url) registered for path '$sm_path'"
+msgstr "子模组 '$name' ($url) 已为路径 '$sm_path' 注册"
-#: git-submodule.sh:519
+#: git-submodule.sh:523
#, sh-format
msgid ""
-"Submodule path '$path' not initialized\n"
+"Submodule path '$sm_path' not initialized\n"
"Maybe you want to use 'update --init'?"
msgstr ""
-"子模组路径 '$path' 没有初始化\n"
+"子模组路径 '$sm_path' 没有初始化\n"
"也许您想用 'update --init'?"
-#: git-submodule.sh:532
+#: git-submodule.sh:536
#, sh-format
-msgid "Unable to find current revision in submodule path '$path'"
-msgstr "无法在子模组路径 '$path' 中找到当前版本"
+msgid "Unable to find current revision in submodule path '$sm_path'"
+msgstr "无法在子模组路径 '$sm_path' 中找到当前版本"
-#: git-submodule.sh:551
+#: git-submodule.sh:555
#, sh-format
-msgid "Unable to fetch in submodule path '$path'"
-msgstr "无法在子模组路径 '$path' 中获取"
+msgid "Unable to fetch in submodule path '$sm_path'"
+msgstr "无法在子模组路径 '$sm_path' 中获取"
-#: git-submodule.sh:565
+#: git-submodule.sh:569
#, sh-format
-msgid "Unable to rebase '$sha1' in submodule path '$path'"
-msgstr "无法在子模组路径 '$path' 中变基 '$sha1'"
+msgid "Unable to rebase '$sha1' in submodule path '$sm_path'"
+msgstr "无法在子模组路径 '$sm_path' 中变基 '$sha1'"
-#: git-submodule.sh:566
+#: git-submodule.sh:570
#, sh-format
-msgid "Submodule path '$path': rebased into '$sha1'"
-msgstr "子模组路径 '$path':变基至 '$sha1'"
+msgid "Submodule path '$sm_path': rebased into '$sha1'"
+msgstr "子模组路径 '$sm_path':变基至 '$sha1'"
-#: git-submodule.sh:571
+#: git-submodule.sh:575
#, sh-format
-msgid "Unable to merge '$sha1' in submodule path '$path'"
-msgstr "无法合并 '$sha1' 到子模组路径 '$path' 中"
+msgid "Unable to merge '$sha1' in submodule path '$sm_path'"
+msgstr "无法合并 '$sha1' 到子模组路径 '$sm_path' 中"
-#: git-submodule.sh:572
+#: git-submodule.sh:576
#, sh-format
-msgid "Submodule path '$path': merged in '$sha1'"
-msgstr "子模组路径 '$path':已合并入 '$sha1'"
+msgid "Submodule path '$sm_path': merged in '$sha1'"
+msgstr "子模组路径 '$sm_path':已合并入 '$sha1'"
-#: git-submodule.sh:577
+#: git-submodule.sh:581
#, sh-format
-msgid "Unable to checkout '$sha1' in submodule path '$path'"
-msgstr "无法在子模组路径 '$path' 中检出 '$sha1'"
+msgid "Unable to checkout '$sha1' in submodule path '$sm_path'"
+msgstr "无法在子模组路径 '$sm_path' 中检出 '$sha1'"
-#: git-submodule.sh:578
+#: git-submodule.sh:582
#, sh-format
-msgid "Submodule path '$path': checked out '$sha1'"
-msgstr "子模组路径 '$path':检出 '$sha1'"
+msgid "Submodule path '$sm_path': checked out '$sha1'"
+msgstr "子模组路径 '$sm_path':检出 '$sha1'"
-#: git-submodule.sh:600 git-submodule.sh:923
+#: git-submodule.sh:604 git-submodule.sh:927
#, sh-format
-msgid "Failed to recurse into submodule path '$path'"
-msgstr "无法递归进子模组路径 '$path'"
+msgid "Failed to recurse into submodule path '$sm_path'"
+msgstr "无法递归进子模组路径 '$sm_path'"
-#: git-submodule.sh:708
+#: git-submodule.sh:712
msgid "--"
msgstr "--"
# 译者:注意保持前导空格
-#: git-submodule.sh:766
+#: git-submodule.sh:770
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_src"
msgstr " 警告:$name 未包含提交 $sha1_src"
# 译者:注意保持前导空格
-#: git-submodule.sh:769
+#: git-submodule.sh:773
#, sh-format
msgid " Warn: $name doesn't contain commit $sha1_dst"
msgstr " 警告:$name 未包含提交 $sha1_dst"
# 译者:注意保持前导空格
-#: git-submodule.sh:772
+#: git-submodule.sh:776
#, sh-format
msgid " Warn: $name doesn't contain commits $sha1_src and $sha1_dst"
msgstr " 警告:$name 未包含提交 $sha1_src 和 $sha1_dst"
-#: git-submodule.sh:797
+#: git-submodule.sh:801
msgid "blob"
msgstr "blob"
-#: git-submodule.sh:798
+#: git-submodule.sh:802
msgid "submodule"
msgstr "子模组"
-#: git-submodule.sh:969
+#: git-submodule.sh:973
#, sh-format
msgid "Synchronizing submodule url for '$name'"
msgstr "为 '$name' 同步子模组 url"
#include "commit.h"
#include "blob.h"
#include "resolve-undo.h"
+#include "strbuf.h"
+#include "varint.h"
static struct cache_entry *refresh_cache_entry(struct cache_entry *ce, int really);
return refresh_cache_ent(&the_index, ce, really, NULL, NULL);
}
+
+/*****************************************************************
+ * Index File I/O
+ *****************************************************************/
+
+#define INDEX_FORMAT_DEFAULT 3
+
+/*
+ * dev/ino/uid/gid/size are also just tracked to the low 32 bits
+ * Again - this is just a (very strong in practice) heuristic that
+ * the inode hasn't changed.
+ *
+ * We save the fields in big-endian order to allow using the
+ * index file over NFS transparently.
+ */
+struct ondisk_cache_entry {
+ struct cache_time ctime;
+ struct cache_time mtime;
+ unsigned int dev;
+ unsigned int ino;
+ unsigned int mode;
+ unsigned int uid;
+ unsigned int gid;
+ unsigned int size;
+ unsigned char sha1[20];
+ unsigned short flags;
+ char name[FLEX_ARRAY]; /* more */
+};
+
+/*
+ * This struct is used when CE_EXTENDED bit is 1
+ * The struct must match ondisk_cache_entry exactly from
+ * ctime till flags
+ */
+struct ondisk_cache_entry_extended {
+ struct cache_time ctime;
+ struct cache_time mtime;
+ unsigned int dev;
+ unsigned int ino;
+ unsigned int mode;
+ unsigned int uid;
+ unsigned int gid;
+ unsigned int size;
+ unsigned char sha1[20];
+ unsigned short flags;
+ unsigned short flags2;
+ char name[FLEX_ARRAY]; /* more */
+};
+
+/* These are only used for v3 or lower */
+#define align_flex_name(STRUCT,len) ((offsetof(struct STRUCT,name) + (len) + 8) & ~7)
+#define ondisk_cache_entry_size(len) align_flex_name(ondisk_cache_entry,len)
+#define ondisk_cache_entry_extended_size(len) align_flex_name(ondisk_cache_entry_extended,len)
+#define ondisk_ce_size(ce) (((ce)->ce_flags & CE_EXTENDED) ? \
+ ondisk_cache_entry_extended_size(ce_namelen(ce)) : \
+ ondisk_cache_entry_size(ce_namelen(ce)))
+
static int verify_hdr(struct cache_header *hdr, unsigned long size)
{
git_SHA_CTX c;
unsigned char sha1[20];
+ int hdr_version;
if (hdr->hdr_signature != htonl(CACHE_SIGNATURE))
return error("bad signature");
- if (hdr->hdr_version != htonl(2) && hdr->hdr_version != htonl(3))
- return error("bad index version");
+ hdr_version = ntohl(hdr->hdr_version);
+ if (hdr_version < 2 || 4 < hdr_version)
+ return error("bad index version %d", hdr_version);
git_SHA1_Init(&c);
git_SHA1_Update(&c, hdr, size - 20);
git_SHA1_Final(sha1, &c);
return read_index_from(istate, get_index_file());
}
-static struct cache_entry *create_from_disk(struct ondisk_cache_entry *ondisk)
+#ifndef NEEDS_ALIGNED_ACCESS
+#define ntoh_s(var) ntohs(var)
+#define ntoh_l(var) ntohl(var)
+#else
+static inline uint16_t ntoh_s_force_align(void *p)
+{
+ uint16_t x;
+ memcpy(&x, p, sizeof(x));
+ return ntohs(x);
+}
+static inline uint32_t ntoh_l_force_align(void *p)
+{
+ uint32_t x;
+ memcpy(&x, p, sizeof(x));
+ return ntohl(x);
+}
+#define ntoh_s(var) ntoh_s_force_align(&(var))
+#define ntoh_l(var) ntoh_l_force_align(&(var))
+#endif
+
+static struct cache_entry *cache_entry_from_ondisk(struct ondisk_cache_entry *ondisk,
+ unsigned int flags,
+ const char *name,
+ size_t len)
+{
+ struct cache_entry *ce = xmalloc(cache_entry_size(len));
+
+ ce->ce_ctime.sec = ntoh_l(ondisk->ctime.sec);
+ ce->ce_mtime.sec = ntoh_l(ondisk->mtime.sec);
+ ce->ce_ctime.nsec = ntoh_l(ondisk->ctime.nsec);
+ ce->ce_mtime.nsec = ntoh_l(ondisk->mtime.nsec);
+ ce->ce_dev = ntoh_l(ondisk->dev);
+ ce->ce_ino = ntoh_l(ondisk->ino);
+ ce->ce_mode = ntoh_l(ondisk->mode);
+ ce->ce_uid = ntoh_l(ondisk->uid);
+ ce->ce_gid = ntoh_l(ondisk->gid);
+ ce->ce_size = ntoh_l(ondisk->size);
+ ce->ce_flags = flags;
+ hashcpy(ce->sha1, ondisk->sha1);
+ memcpy(ce->name, name, len);
+ ce->name[len] = '\0';
+ return ce;
+}
+
+/*
+ * Adjacent cache entries tend to share the leading paths, so it makes
+ * sense to only store the differences in later entries. In the v4
+ * on-disk format of the index, each on-disk cache entry stores the
+ * number of bytes to be stripped from the end of the previous name,
+ * and the bytes to append to the result, to come up with its name.
+ */
+static unsigned long expand_name_field(struct strbuf *name, const char *cp_)
+{
+ const unsigned char *ep, *cp = (const unsigned char *)cp_;
+ size_t len = decode_varint(&cp);
+
+ if (name->len < len)
+ die("malformed name field in the index");
+ strbuf_remove(name, name->len - len, len);
+ for (ep = cp; *ep; ep++)
+ ; /* find the end */
+ strbuf_add(name, cp, ep - cp);
+ return (const char *)ep + 1 - cp_;
+}
+
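/*
 * Illustrative aside -- not part of the patch above.  A minimal,
 * self-contained sketch of the prefix compression that the index v4
 * name field uses: keep the previous entry's name, strip a few bytes
 * from its end, and append the new suffix.  The file names and the
 * hardcoded strip count here are made up; the real code reads the
 * strip count as a varint (see expand_name_field() above).
 */
#include <stdio.h>
#include <string.h>

static void expand_name(char *name, size_t strip, const char *suffix)
{
	size_t len = strlen(name);

	name[len - strip] = '\0';	/* keep only the shared prefix */
	strcat(name, suffix);		/* append the differing tail */
}

int main(void)
{
	char name[64] = "lib/alpha.c";

	/*
	 * "lib/beta.c" shares the prefix "lib/" with the previous name,
	 * so the on-disk entry only has to say: strip 7 bytes
	 * ("alpha.c") and append "beta.c".
	 */
	expand_name(name, 7, "beta.c");
	printf("%s\n", name);		/* prints "lib/beta.c" */
	return 0;
}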
+static struct cache_entry *create_from_disk(struct ondisk_cache_entry *ondisk,
+ unsigned long *ent_size,
+ struct strbuf *previous_name)
{
struct cache_entry *ce;
size_t len;
unsigned int flags;
/* On-disk flags are just 16 bits */
- flags = ntohs(ondisk->flags);
+ flags = ntoh_s(ondisk->flags);
len = flags & CE_NAMEMASK;
if (flags & CE_EXTENDED) {
struct ondisk_cache_entry_extended *ondisk2;
int extended_flags;
ondisk2 = (struct ondisk_cache_entry_extended *)ondisk;
- extended_flags = ntohs(ondisk2->flags2) << 16;
+ extended_flags = ntoh_s(ondisk2->flags2) << 16;
/* We do not yet understand any bit out of CE_EXTENDED_FLAGS */
if (extended_flags & ~CE_EXTENDED_FLAGS)
die("Unknown index entry format %08x", extended_flags);
else
name = ondisk->name;
- if (len == CE_NAMEMASK)
- len = strlen(name);
-
- ce = xmalloc(cache_entry_size(len));
-
- ce->ce_ctime.sec = ntohl(ondisk->ctime.sec);
- ce->ce_mtime.sec = ntohl(ondisk->mtime.sec);
- ce->ce_ctime.nsec = ntohl(ondisk->ctime.nsec);
- ce->ce_mtime.nsec = ntohl(ondisk->mtime.nsec);
- ce->ce_dev = ntohl(ondisk->dev);
- ce->ce_ino = ntohl(ondisk->ino);
- ce->ce_mode = ntohl(ondisk->mode);
- ce->ce_uid = ntohl(ondisk->uid);
- ce->ce_gid = ntohl(ondisk->gid);
- ce->ce_size = ntohl(ondisk->size);
- ce->ce_flags = flags;
-
- hashcpy(ce->sha1, ondisk->sha1);
-
- memcpy(ce->name, name, len);
- ce->name[len] = '\0';
+ if (!previous_name) {
+ /* v3 and earlier */
+ if (len == CE_NAMEMASK)
+ len = strlen(name);
+ ce = cache_entry_from_ondisk(ondisk, flags, name, len);
+
+ *ent_size = ondisk_ce_size(ce);
+ } else {
+ unsigned long consumed;
+ consumed = expand_name_field(previous_name, name);
+ ce = cache_entry_from_ondisk(ondisk, flags,
+ previous_name->buf,
+ previous_name->len);
+
+ *ent_size = (name - ((char *)ondisk)) + consumed;
+ }
return ce;
}
struct cache_header *hdr;
void *mmap;
size_t mmap_size;
+ struct strbuf previous_name_buf = STRBUF_INIT, *previous_name;
errno = EBUSY;
if (istate->initialized)
if (verify_hdr(hdr, mmap_size) < 0)
goto unmap;
+ istate->version = ntohl(hdr->hdr_version);
istate->cache_nr = ntohl(hdr->hdr_entries);
istate->cache_alloc = alloc_nr(istate->cache_nr);
istate->cache = xcalloc(istate->cache_alloc, sizeof(struct cache_entry *));
istate->initialized = 1;
+ if (istate->version == 4)
+ previous_name = &previous_name_buf;
+ else
+ previous_name = NULL;
+
src_offset = sizeof(*hdr);
for (i = 0; i < istate->cache_nr; i++) {
struct ondisk_cache_entry *disk_ce;
struct cache_entry *ce;
+ unsigned long consumed;
disk_ce = (struct ondisk_cache_entry *)((char *)mmap + src_offset);
- ce = create_from_disk(disk_ce);
+ ce = create_from_disk(disk_ce, &consumed, previous_name);
set_index_entry(istate, i, ce);
- src_offset += ondisk_ce_size(ce);
+ src_offset += consumed;
}
+ strbuf_release(&previous_name_buf);
istate->timestamp.sec = st.st_mtime;
istate->timestamp.nsec = ST_MTIME_NSEC(st);
}
}
-static int ce_write_entry(git_SHA_CTX *c, int fd, struct cache_entry *ce)
+/* Copy miscellaneous fields but not the name */
+static char *copy_cache_entry_to_ondisk(struct ondisk_cache_entry *ondisk,
+ struct cache_entry *ce)
{
- int size = ondisk_ce_size(ce);
- struct ondisk_cache_entry *ondisk = xcalloc(1, size);
- char *name;
- int result;
-
ondisk->ctime.sec = htonl(ce->ce_ctime.sec);
ondisk->mtime.sec = htonl(ce->ce_mtime.sec);
ondisk->ctime.nsec = htonl(ce->ce_ctime.nsec);
struct ondisk_cache_entry_extended *ondisk2;
ondisk2 = (struct ondisk_cache_entry_extended *)ondisk;
ondisk2->flags2 = htons((ce->ce_flags & CE_EXTENDED_FLAGS) >> 16);
- name = ondisk2->name;
+ return ondisk2->name;
+ }
+ else {
+ return ondisk->name;
+ }
+}
+
+static int ce_write_entry(git_SHA_CTX *c, int fd, struct cache_entry *ce,
+ struct strbuf *previous_name)
+{
+ int size;
+ struct ondisk_cache_entry *ondisk;
+ char *name;
+ int result;
+
+ if (!previous_name) {
+ size = ondisk_ce_size(ce);
+ ondisk = xcalloc(1, size);
+ name = copy_cache_entry_to_ondisk(ondisk, ce);
+ memcpy(name, ce->name, ce_namelen(ce));
+ } else {
+ int common, to_remove, prefix_size;
+ unsigned char to_remove_vi[16];
+ for (common = 0;
+ (ce->name[common] &&
+ common < previous_name->len &&
+ ce->name[common] == previous_name->buf[common]);
+ common++)
+ ; /* still matching */
+ to_remove = previous_name->len - common;
+ prefix_size = encode_varint(to_remove, to_remove_vi);
+
+ if (ce->ce_flags & CE_EXTENDED)
+ size = offsetof(struct ondisk_cache_entry_extended, name);
+ else
+ size = offsetof(struct ondisk_cache_entry, name);
+ size += prefix_size + (ce_namelen(ce) - common + 1);
+
+ ondisk = xcalloc(1, size);
+ name = copy_cache_entry_to_ondisk(ondisk, ce);
+ memcpy(name, to_remove_vi, prefix_size);
+ memcpy(name + prefix_size, ce->name + common, ce_namelen(ce) - common);
+
+ strbuf_splice(previous_name, common, to_remove,
+ ce->name + common, ce_namelen(ce) - common);
}
- else
- name = ondisk->name;
- memcpy(name, ce->name, ce_namelen(ce));
result = ce_write(c, fd, ondisk, size);
free(ondisk);
{
git_SHA_CTX c;
struct cache_header hdr;
- int i, err, removed, extended;
+ int i, err, removed, extended, hdr_version;
struct cache_entry **cache = istate->cache;
int entries = istate->cache_nr;
struct stat st;
+ struct strbuf previous_name_buf = STRBUF_INIT, *previous_name;
for (i = removed = extended = 0; i < entries; i++) {
if (cache[i]->ce_flags & CE_REMOVE)
}
}
+ if (!istate->version)
+ istate->version = INDEX_FORMAT_DEFAULT;
+
+ /* demote version 3 to version 2 when the latter suffices */
+ if (istate->version == 3 || istate->version == 2)
+ istate->version = extended ? 3 : 2;
+
+ hdr_version = istate->version;
+
hdr.hdr_signature = htonl(CACHE_SIGNATURE);
- /* for extended format, increase version so older git won't try to read it */
- hdr.hdr_version = htonl(extended ? 3 : 2);
+ hdr.hdr_version = htonl(hdr_version);
hdr.hdr_entries = htonl(entries - removed);
git_SHA1_Init(&c);
if (ce_write(&c, newfd, &hdr, sizeof(hdr)) < 0)
return -1;
+ previous_name = (hdr_version == 4) ? &previous_name_buf : NULL;
for (i = 0; i < entries; i++) {
struct cache_entry *ce = cache[i];
if (ce->ce_flags & CE_REMOVE)
continue;
if (!ce_uptodate(ce) && is_racy_timestamp(istate, ce))
ce_smudge_racily_clean_entry(ce);
- if (ce_write_entry(&c, newfd, ce) < 0)
+ if (ce_write_entry(&c, newfd, ce, previous_name) < 0)
return -1;
}
+ strbuf_release(&previous_name_buf);
/* Write extension data here */
if (istate->cache_tree) {
#include "tag.h"
#include "dir.h"
-/* ISSYMREF=0x01, ISPACKED=0x02 and ISBROKEN=0x04 are public interfaces */
-#define REF_KNOWS_PEELED 0x10
+/*
+ * Make sure "ref" is something reasonable to have under ".git/refs/";
+ * we do not like it if:
+ *
+ * - any path component of it begins with ".", or
+ * - it has double dots "..", or
+ * - it has an ASCII control character, "~", "^", ":" or SP, anywhere, or
+ * - it ends with a "/", or
+ * - it ends with ".lock", or
+ * - it contains a "\" (backslash).
+ */
-struct ref_entry {
- unsigned char flag; /* ISSYMREF? ISPACKED? */
+/* Return true iff ch is not allowed in reference names. */
+static inline int bad_ref_char(int ch)
+{
+ if (((unsigned) ch) <= ' ' || ch == 0x7f ||
+ ch == '~' || ch == '^' || ch == ':' || ch == '\\')
+ return 1;
+ /* 2.13 Pattern Matching Notation */
+ if (ch == '*' || ch == '?' || ch == '[') /* Unsupported */
+ return 1;
+ return 0;
+}
+
+/*
+ * Try to read one refname component from the front of refname. Return
+ * the length of the component found, or -1 if the component is not
+ * legal.
+ */
+static int check_refname_component(const char *refname, int flags)
+{
+ const char *cp;
+ char last = '\0';
+
+ for (cp = refname; ; cp++) {
+ char ch = *cp;
+ if (ch == '\0' || ch == '/')
+ break;
+ if (bad_ref_char(ch))
+ return -1; /* Illegal character in refname. */
+ if (last == '.' && ch == '.')
+ return -1; /* Refname contains "..". */
+ if (last == '@' && ch == '{')
+ return -1; /* Refname contains "@{". */
+ last = ch;
+ }
+ if (cp == refname)
+ return 0; /* Component has zero length. */
+ if (refname[0] == '.') {
+ if (!(flags & REFNAME_DOT_COMPONENT))
+ return -1; /* Component starts with '.'. */
+ /*
+ * Even if leading dots are allowed, don't allow "."
+ * as a component (".." is prevented by a rule above).
+ */
+ if (refname[1] == '\0')
+ return -1; /* Component equals ".". */
+ }
+ if (cp - refname >= 5 && !memcmp(cp - 5, ".lock", 5))
+ return -1; /* Refname ends with ".lock". */
+ return cp - refname;
+}
+
+int check_refname_format(const char *refname, int flags)
+{
+ int component_len, component_count = 0;
+
+ while (1) {
+ /* We are at the start of a path component. */
+ component_len = check_refname_component(refname, flags);
+ if (component_len <= 0) {
+ if ((flags & REFNAME_REFSPEC_PATTERN) &&
+ refname[0] == '*' &&
+ (refname[1] == '\0' || refname[1] == '/')) {
+ /* Accept one wildcard as a full refname component. */
+ flags &= ~REFNAME_REFSPEC_PATTERN;
+ component_len = 1;
+ } else {
+ return -1;
+ }
+ }
+ component_count++;
+ if (refname[component_len] == '\0')
+ break;
+ /* Skip to next component. */
+ refname += component_len + 1;
+ }
+
+ if (refname[component_len - 1] == '.')
+ return -1; /* Refname ends with '.'. */
+ if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
+ return -1; /* Refname has only one component. */
+ return 0;
+}
+
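/*
 * Illustrative aside -- not part of the patch above.  A small usage
 * sketch of what the new check_refname_format() accepts and rejects.
 * It assumes the function and the REFNAME_* flags it takes are
 * declared in the refs API header (refs.h); adjust the include if the
 * declarations live elsewhere in this version of the tree.
 */
#include <assert.h>

#include "refs.h"

static void refname_format_demo(void)
{
	/* A well-formed, multi-component refname is accepted. */
	assert(!check_refname_format("refs/heads/topic/one", 0));

	/* A single-level name is rejected unless explicitly allowed. */
	assert(check_refname_format("HEAD", 0) < 0);
	assert(!check_refname_format("HEAD", REFNAME_ALLOW_ONELEVEL));

	/* "..", a trailing ".lock", "@{", glob characters, etc. are rejected. */
	assert(check_refname_format("refs/heads/a..b", 0) < 0);
	assert(check_refname_format("refs/heads/topic.lock", 0) < 0);

	/* A lone "*" component is allowed in refspec patterns. */
	assert(!check_refname_format("refs/heads/*", REFNAME_REFSPEC_PATTERN));
}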
+struct ref_entry;
+
+struct ref_value {
unsigned char sha1[20];
unsigned char peeled[20];
- /* The full name of the reference (e.g., "refs/heads/master"): */
- char name[FLEX_ARRAY];
};
-struct ref_array {
+struct ref_dir {
int nr, alloc;
/*
*/
int sorted;
- struct ref_entry **refs;
+ struct ref_entry **entries;
};
+/* ISSYMREF=0x01, ISPACKED=0x02, and ISBROKEN=0x04 are public interfaces */
+#define REF_KNOWS_PEELED 0x08
+#define REF_DIR 0x10
+
/*
- * Parse one line from a packed-refs file. Write the SHA1 to sha1.
- * Return a pointer to the refname within the line (null-terminated),
- * or NULL if there was a problem.
+ * A ref_entry represents either a reference or a "subdirectory" of
+ * references. Each directory in the reference namespace is
+ * represented by a ref_entry with (flags & REF_DIR) set and
+ * containing a subdir member that holds the entries in that
+ * directory. References are represented by a ref_entry with (flags &
+ * REF_DIR) unset and a value member that describes the reference's
+ * value. The flag member is at the ref_entry level, but it is also
+ * needed to interpret the contents of the value field (in other
+ * words, a ref_value object is not very much use without the
+ * enclosing ref_entry).
+ *
+ * Reference names cannot end with slash and directories' names are
+ * always stored with a trailing slash (except for the top-level
+ * directory, which is always denoted by ""). This has two nice
+ * consequences: (1) when the entries in each subdir are sorted
+ * lexicographically by name (as they usually are), the references in
+ * a whole tree can be generated in lexicographic order by traversing
+ * the tree in left-to-right, depth-first order; (2) the names of
+ * references and subdirectories cannot conflict, and therefore the
+ * presence of an empty subdirectory does not block the creation of a
+ * similarly-named reference. (The fact that reference names with the
+ * same leading components can conflict *with each other* is a
+ * separate issue that is regulated by is_refname_available().)
+ *
+ * Please note that the name field contains the fully-qualified
+ * reference (or subdirectory) name. Space could be saved by only
+ * storing the relative names. But that would require the full names
+ * to be generated on the fly when iterating in do_for_each_ref(), and
+ * would break callback functions, which have always been able to assume
+ * that the name strings that they are passed will not be freed during
+ * the iteration.
*/
-static const char *parse_ref_line(char *line, unsigned char *sha1)
-{
+struct ref_entry {
+ unsigned char flag; /* ISSYMREF? ISPACKED? */
+ union {
+ struct ref_value value; /* if not (flags&REF_DIR) */
+ struct ref_dir subdir; /* if (flags&REF_DIR) */
+ } u;
/*
- * 42: the answer to everything.
- *
- * In this case, it happens to be the answer to
- * 40 (length of sha1 hex representation)
- * +1 (space in between hex and name)
- * +1 (newline at the end of the line)
+ * The full name of the reference (e.g., "refs/heads/master")
+ * or the full name of the directory with a trailing slash
+ * (e.g., "refs/heads/"):
*/
- int len = strlen(line) - 42;
-
- if (len <= 0)
- return NULL;
- if (get_sha1_hex(line, sha1) < 0)
- return NULL;
- if (!isspace(line[40]))
- return NULL;
- line += 41;
- if (isspace(*line))
- return NULL;
- if (line[len] != '\n')
- return NULL;
- line[len] = 0;
-
- return line;
-}
+ char name[FLEX_ARRAY];
+};
static struct ref_entry *create_ref_entry(const char *refname,
const unsigned char *sha1, int flag,
die("Reference has invalid format: '%s'", refname);
len = strlen(refname) + 1;
ref = xmalloc(sizeof(struct ref_entry) + len);
- hashcpy(ref->sha1, sha1);
- hashclr(ref->peeled);
+ hashcpy(ref->u.value.sha1, sha1);
+ hashclr(ref->u.value.peeled);
memcpy(ref->name, refname, len);
ref->flag = flag;
return ref;
}
-/* Add a ref_entry to the end of the ref_array (unsorted). */
-static void add_ref(struct ref_array *refs, struct ref_entry *ref)
+static void clear_ref_dir(struct ref_dir *dir);
+
+static void free_ref_entry(struct ref_entry *entry)
+{
+ if (entry->flag & REF_DIR)
+ clear_ref_dir(&entry->u.subdir);
+ free(entry);
+}
+
+/*
+ * Add a ref_entry to the end of dir (unsorted). Entry is always
+ * stored directly in dir; no recursion into subdirectories is
+ * done.
+ */
+static void add_entry_to_dir(struct ref_dir *dir, struct ref_entry *entry)
{
- ALLOC_GROW(refs->refs, refs->nr + 1, refs->alloc);
- refs->refs[refs->nr++] = ref;
+ ALLOC_GROW(dir->entries, dir->nr + 1, dir->alloc);
+ dir->entries[dir->nr++] = entry;
+}
+
+/*
+ * Clear and free all entries in dir, recursively.
+ */
+static void clear_ref_dir(struct ref_dir *dir)
+{
+ int i;
+ for (i = 0; i < dir->nr; i++)
+ free_ref_entry(dir->entries[i]);
+ free(dir->entries);
+ dir->sorted = dir->nr = dir->alloc = 0;
+ dir->entries = NULL;
+}
+
+/*
+ * Create a struct ref_entry object for the specified dirname.
+ * dirname is the name of the directory with a trailing slash (e.g.,
+ * "refs/heads/") or "" for the top-level directory.
+ */
+static struct ref_entry *create_dir_entry(const char *dirname)
+{
+ struct ref_entry *direntry;
+ int len = strlen(dirname);
+ direntry = xcalloc(1, sizeof(struct ref_entry) + len + 1);
+ memcpy(direntry->name, dirname, len + 1);
+ direntry->flag = REF_DIR;
+ return direntry;
}
static int ref_entry_cmp(const void *a, const void *b)
return strcmp(one->name, two->name);
}
+static void sort_ref_dir(struct ref_dir *dir);
+
+/*
+ * Return the entry with the given refname from the ref_dir
+ * (non-recursively), sorting dir if necessary. Return NULL if no
+ * such entry is found.
+ */
+static struct ref_entry *search_ref_dir(struct ref_dir *dir, const char *refname)
+{
+ struct ref_entry *e, **r;
+ int len;
+
+ if (refname == NULL || !dir->nr)
+ return NULL;
+
+ sort_ref_dir(dir);
+
+ len = strlen(refname) + 1;
+ e = xmalloc(sizeof(struct ref_entry) + len);
+ memcpy(e->name, refname, len);
+
+ r = bsearch(&e, dir->entries, dir->nr, sizeof(*dir->entries), ref_entry_cmp);
+
+ free(e);
+
+ if (r == NULL)
+ return NULL;
+
+ return *r;
+}
+
+/*
+ * If refname is a reference name, find the ref_dir within the dir
+ * tree that should hold refname. If refname is a directory name
+ * (i.e., ends in '/'), then return that ref_dir itself. dir must
+ * represent the top-level directory. Sort ref_dirs and recurse into
+ * subdirectories as necessary. If mkdir is set, then create any
+ * missing directories; otherwise, return NULL if the desired
+ * directory cannot be found.
+ */
+static struct ref_dir *find_containing_dir(struct ref_dir *dir,
+ const char *refname, int mkdir)
+{
+ char *refname_copy = xstrdup(refname);
+ char *slash;
+ struct ref_entry *entry;
+ for (slash = strchr(refname_copy, '/'); slash; slash = strchr(slash + 1, '/')) {
+ char tmp = slash[1];
+ slash[1] = '\0';
+ entry = search_ref_dir(dir, refname_copy);
+ if (!entry) {
+ if (!mkdir) {
+ dir = NULL;
+ break;
+ }
+ entry = create_dir_entry(refname_copy);
+ add_entry_to_dir(dir, entry);
+ }
+ slash[1] = tmp;
+ assert(entry->flag & REF_DIR);
+ dir = &entry->u.subdir;
+ }
+
+ free(refname_copy);
+ return dir;
+}
+
+/*
+ * Find the value entry with the given name in dir, sorting ref_dirs
+ * and recursing into subdirectories as necessary. If the name is not
+ * found or it corresponds to a directory entry, return NULL.
+ */
+static struct ref_entry *find_ref(struct ref_dir *dir, const char *refname)
+{
+ struct ref_entry *entry;
+ dir = find_containing_dir(dir, refname, 0);
+ if (!dir)
+ return NULL;
+ entry = search_ref_dir(dir, refname);
+ return (entry && !(entry->flag & REF_DIR)) ? entry : NULL;
+}
+
+/*
+ * Add a ref_entry to the ref_dir (unsorted), recursing into
+ * subdirectories as necessary. dir must represent the top-level
+ * directory. Return 0 on success.
+ */
+static int add_ref(struct ref_dir *dir, struct ref_entry *ref)
+{
+ dir = find_containing_dir(dir, ref->name, 1);
+ if (!dir)
+ return -1;
+ add_entry_to_dir(dir, ref);
+ return 0;
+}
+
/*
* Emit a warning and return true iff ref1 and ref2 have the same name
* and the same sha1. Die if they have the same name but different
*/
static int is_dup_ref(const struct ref_entry *ref1, const struct ref_entry *ref2)
{
- if (!strcmp(ref1->name, ref2->name)) {
- /* Duplicate name; make sure that the SHA1s match: */
- if (hashcmp(ref1->sha1, ref2->sha1))
- die("Duplicated ref, and SHA1s don't match: %s",
- ref1->name);
- warning("Duplicated ref: %s", ref1->name);
- return 1;
- } else {
+ if (strcmp(ref1->name, ref2->name))
return 0;
- }
+
+ /* Duplicate name; make sure that they don't conflict: */
+
+ if ((ref1->flag & REF_DIR) || (ref2->flag & REF_DIR))
+ /* This is impossible by construction */
+ die("Reference directory conflict: %s", ref1->name);
+
+ if (hashcmp(ref1->u.value.sha1, ref2->u.value.sha1))
+ die("Duplicated ref, and SHA1s don't match: %s", ref1->name);
+
+ warning("Duplicated ref: %s", ref1->name);
+ return 1;
}
/*
- * Sort the entries in array (if they are not already sorted).
+ * Sort the entries in dir non-recursively (if they are not already
+ * sorted) and remove any duplicate entries.
*/
-static void sort_ref_array(struct ref_array *array)
+static void sort_ref_dir(struct ref_dir *dir)
{
int i, j;
+ struct ref_entry *last = NULL;
/*
* This check also prevents passing a zero-length array to qsort(),
* which is a problem on some platforms.
*/
- if (array->sorted == array->nr)
+ if (dir->sorted == dir->nr)
return;
- qsort(array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
+ qsort(dir->entries, dir->nr, sizeof(*dir->entries), ref_entry_cmp);
- /* Remove any duplicates from the ref_array */
- i = 0;
- for (j = 1; j < array->nr; j++) {
- if (is_dup_ref(array->refs[i], array->refs[j])) {
- free(array->refs[j]);
- continue;
+ /* Remove any duplicates: */
+ for (i = 0, j = 0; j < dir->nr; j++) {
+ struct ref_entry *entry = dir->entries[j];
+ if (last && is_dup_ref(last, entry))
+ free_ref_entry(entry);
+ else
+ last = dir->entries[i++] = entry;
+ }
+ dir->sorted = dir->nr = i;
+}
+
+#define DO_FOR_EACH_INCLUDE_BROKEN 01
+
+static struct ref_entry *current_ref;
+
+static int do_one_ref(const char *base, each_ref_fn fn, int trim,
+ int flags, void *cb_data, struct ref_entry *entry)
+{
+ int retval;
+ if (prefixcmp(entry->name, base))
+ return 0;
+
+ if (!(flags & DO_FOR_EACH_INCLUDE_BROKEN)) {
+ if (entry->flag & REF_ISBROKEN)
+ return 0; /* ignore broken refs e.g. dangling symref */
+ if (!has_sha1_file(entry->u.value.sha1)) {
+ error("%s does not point to a valid object!", entry->name);
+ return 0;
}
- array->refs[++i] = array->refs[j];
}
- array->sorted = array->nr = i + 1;
+ current_ref = entry;
+ retval = fn(entry->name + trim, entry->u.value.sha1, entry->flag, cb_data);
+ current_ref = NULL;
+ return retval;
}
-static struct ref_entry *search_ref_array(struct ref_array *array, const char *refname)
+/*
+ * Call fn for each reference in dir that has index in the range
+ * offset <= index < dir->nr. Recurse into subdirectories that are in
+ * that index range, sorting them before iterating. This function
+ * does not sort dir itself; it should be sorted beforehand.
+ */
+static int do_for_each_ref_in_dir(struct ref_dir *dir, int offset,
+ const char *base,
+ each_ref_fn fn, int trim, int flags, void *cb_data)
{
- struct ref_entry *e, **r;
- int len;
+ int i;
+ assert(dir->sorted == dir->nr);
+ for (i = offset; i < dir->nr; i++) {
+ struct ref_entry *entry = dir->entries[i];
+ int retval;
+ if (entry->flag & REF_DIR) {
+ sort_ref_dir(&entry->u.subdir);
+ retval = do_for_each_ref_in_dir(&entry->u.subdir, 0,
+ base, fn, trim, flags, cb_data);
+ } else {
+ retval = do_one_ref(base, fn, trim, flags, cb_data, entry);
+ }
+ if (retval)
+ return retval;
+ }
+ return 0;
+}
- if (refname == NULL)
- return NULL;
+/*
+ * Call fn for each reference in the union of dir1 and dir2, in order
+ * by refname. Recurse into subdirectories. If a value entry appears
+ * in both dir1 and dir2, then only process the version that is in
+ * dir2. The input dirs must already be sorted, but subdirs will be
+ * sorted as needed.
+ */
+static int do_for_each_ref_in_dirs(struct ref_dir *dir1,
+ struct ref_dir *dir2,
+ const char *base, each_ref_fn fn, int trim,
+ int flags, void *cb_data)
+{
+ int retval;
+ int i1 = 0, i2 = 0;
- if (!array->nr)
- return NULL;
- sort_ref_array(array);
- len = strlen(refname) + 1;
- e = xmalloc(sizeof(struct ref_entry) + len);
- memcpy(e->name, refname, len);
+ assert(dir1->sorted == dir1->nr);
+ assert(dir2->sorted == dir2->nr);
+ while (1) {
+ struct ref_entry *e1, *e2;
+ int cmp;
+ if (i1 == dir1->nr) {
+ return do_for_each_ref_in_dir(dir2, i2,
+ base, fn, trim, flags, cb_data);
+ }
+ if (i2 == dir2->nr) {
+ return do_for_each_ref_in_dir(dir1, i1,
+ base, fn, trim, flags, cb_data);
+ }
+ e1 = dir1->entries[i1];
+ e2 = dir2->entries[i2];
+ cmp = strcmp(e1->name, e2->name);
+ if (cmp == 0) {
+ if ((e1->flag & REF_DIR) && (e2->flag & REF_DIR)) {
+ /* Both are directories; descend them in parallel. */
+ sort_ref_dir(&e1->u.subdir);
+ sort_ref_dir(&e2->u.subdir);
+ retval = do_for_each_ref_in_dirs(
+ &e1->u.subdir, &e2->u.subdir,
+ base, fn, trim, flags, cb_data);
+ i1++;
+ i2++;
+ } else if (!(e1->flag & REF_DIR) && !(e2->flag & REF_DIR)) {
+ /* Both are references; ignore the one from dir1. */
+ retval = do_one_ref(base, fn, trim, flags, cb_data, e2);
+ i1++;
+ i2++;
+ } else {
+ die("conflict between reference and directory: %s",
+ e1->name);
+ }
+ } else {
+ struct ref_entry *e;
+ if (cmp < 0) {
+ e = e1;
+ i1++;
+ } else {
+ e = e2;
+ i2++;
+ }
+ if (e->flag & REF_DIR) {
+ sort_ref_dir(&e->u.subdir);
+ retval = do_for_each_ref_in_dir(
+ &e->u.subdir, 0,
+ base, fn, trim, flags, cb_data);
+ } else {
+ retval = do_one_ref(base, fn, trim, flags, cb_data, e);
+ }
+ }
+ if (retval)
+ return retval;
+ }
+ if (i1 < dir1->nr)
+ return do_for_each_ref_in_dir(dir1, i1,
+ base, fn, trim, flags, cb_data);
+ if (i2 < dir2->nr)
+ return do_for_each_ref_in_dir(dir2, i2,
+ base, fn, trim, flags, cb_data);
+ return 0;
+}
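/*
 * Illustrative aside -- not part of the patch above.  The loop in
 * do_for_each_ref_in_dirs() is the classic merge of two sorted lists,
 * with the entry from dir2 winning whenever both sides carry the same
 * name.  A standalone sketch with made-up ref names, minus the
 * recursion into subdirectories:
 */
#include <stdio.h>
#include <string.h>

static void for_each_merged(const char **a, int na, const char **b, int nb)
{
	int i = 0, j = 0;

	while (i < na || j < nb) {
		int cmp;

		if (i == na)
			cmp = 1;		/* only b entries are left */
		else if (j == nb)
			cmp = -1;		/* only a entries are left */
		else
			cmp = strcmp(a[i], b[j]);

		if (cmp < 0)
			printf("%s (from a)\n", a[i++]);
		else if (cmp > 0)
			printf("%s (from b)\n", b[j++]);
		else {
			printf("%s (from b, shadowing a)\n", b[j]);
			i++;
			j++;
		}
	}
}

int main(void)
{
	const char *a[] = { "refs/heads/maint", "refs/heads/master", "refs/tags/v1.0" };
	const char *b[] = { "refs/heads/master", "refs/heads/next" };

	/* Visits maint, master (from b), next, v1.0 -- in refname order. */
	for_each_merged(a, 3, b, 2);
	return 0;
}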
- r = bsearch(&e, array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
+/*
+ * Return true iff refname1 and refname2 conflict with each other.
+ * Two reference names conflict if one of them exactly matches the
+ * leading components of the other; e.g., "foo/bar" conflicts with
+ * both "foo" and with "foo/bar/baz" but not with "foo/bar" or
+ * "foo/barbados".
+ */
+static int names_conflict(const char *refname1, const char *refname2)
+{
+ for (; *refname1 && *refname1 == *refname2; refname1++, refname2++)
+ ;
+ return (*refname1 == '\0' && *refname2 == '/')
+ || (*refname1 == '/' && *refname2 == '\0');
+}
- free(e);
+struct name_conflict_cb {
+ const char *refname;
+ const char *oldrefname;
+ const char *conflicting_refname;
+};
- if (r == NULL)
- return NULL;
+static int name_conflict_fn(const char *existingrefname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct name_conflict_cb *data = (struct name_conflict_cb *)cb_data;
+ if (data->oldrefname && !strcmp(data->oldrefname, existingrefname))
+ return 0;
+ if (names_conflict(data->refname, existingrefname)) {
+ data->conflicting_refname = existingrefname;
+ return 1;
+ }
+ return 0;
+}
- return *r;
+/*
+ * Return true iff a reference named refname could be created without
+ * conflicting with the name of an existing reference in array. If
+ * oldrefname is non-NULL, ignore potential conflicts with oldrefname
+ * (e.g., because oldrefname is scheduled for deletion in the same
+ * operation).
+ */
+static int is_refname_available(const char *refname, const char *oldrefname,
+ struct ref_dir *dir)
+{
+ struct name_conflict_cb data;
+ data.refname = refname;
+ data.oldrefname = oldrefname;
+ data.conflicting_refname = NULL;
+
+ sort_ref_dir(dir);
+ if (do_for_each_ref_in_dir(dir, 0, "", name_conflict_fn,
+ 0, DO_FOR_EACH_INCLUDE_BROKEN,
+ &data)) {
+ error("'%s' exists; cannot create '%s'",
+ data.conflicting_refname, refname);
+ return 0;
+ }
+ return 1;
}
/*
struct ref_cache *next;
char did_loose;
char did_packed;
- struct ref_array loose;
- struct ref_array packed;
+ struct ref_dir loose;
+ struct ref_dir packed;
/* The submodule name, or "" for the main repo. */
char name[FLEX_ARRAY];
} *ref_cache;
-static struct ref_entry *current_ref;
-
-static void clear_ref_array(struct ref_array *array)
-{
- int i;
- for (i = 0; i < array->nr; i++)
- free(array->refs[i]);
- free(array->refs);
- array->sorted = array->nr = array->alloc = 0;
- array->refs = NULL;
-}
-
static void clear_packed_ref_cache(struct ref_cache *refs)
{
if (refs->did_packed)
- clear_ref_array(&refs->packed);
+ clear_ref_dir(&refs->packed);
refs->did_packed = 0;
}
static void clear_loose_ref_cache(struct ref_cache *refs)
{
if (refs->did_loose)
- clear_ref_array(&refs->loose);
+ clear_ref_dir(&refs->loose);
refs->did_loose = 0;
}
refs = refs->next;
}
- refs = create_ref_cache(submodule);
- refs->next = ref_cache;
- ref_cache = refs;
- return refs;
-}
+ refs = create_ref_cache(submodule);
+ refs->next = ref_cache;
+ ref_cache = refs;
+ return refs;
+}
+
+void invalidate_ref_cache(const char *submodule)
+{
+ struct ref_cache *refs = get_ref_cache(submodule);
+ clear_packed_ref_cache(refs);
+ clear_loose_ref_cache(refs);
+}
+
+/*
+ * Parse one line from a packed-refs file. Write the SHA1 to sha1.
+ * Return a pointer to the refname within the line (null-terminated),
+ * or NULL if there was a problem.
+ */
+static const char *parse_ref_line(char *line, unsigned char *sha1)
+{
+ /*
+ * 42: the answer to everything.
+ *
+ * In this case, it happens to be the answer to
+ * 40 (length of sha1 hex representation)
+ * +1 (space in between hex and name)
+ * +1 (newline at the end of the line)
+ */
+ int len = strlen(line) - 42;
+
+ if (len <= 0)
+ return NULL;
+ if (get_sha1_hex(line, sha1) < 0)
+ return NULL;
+ if (!isspace(line[40]))
+ return NULL;
+ line += 41;
+ if (isspace(*line))
+ return NULL;
+ if (line[len] != '\n')
+ return NULL;
+ line[len] = 0;
-void invalidate_ref_cache(const char *submodule)
-{
- struct ref_cache *refs = get_ref_cache(submodule);
- clear_packed_ref_cache(refs);
- clear_loose_ref_cache(refs);
+ return line;
}
-static void read_packed_refs(FILE *f, struct ref_array *array)
+static void read_packed_refs(FILE *f, struct ref_dir *dir)
{
struct ref_entry *last = NULL;
char refline[PATH_MAX];
refname = parse_ref_line(refline, sha1);
if (refname) {
last = create_ref_entry(refname, sha1, flag, 1);
- add_ref(array, last);
+ add_ref(dir, last);
continue;
}
if (last &&
strlen(refline) == 42 &&
refline[41] == '\n' &&
!get_sha1_hex(refline + 1, sha1))
- hashcpy(last->peeled, sha1);
+ hashcpy(last->u.value.peeled, sha1);
}
}
-static struct ref_array *get_packed_refs(struct ref_cache *refs)
+static struct ref_dir *get_packed_refs(struct ref_cache *refs)
{
if (!refs->did_packed) {
const char *packed_refs_file;
}
static void get_ref_dir(struct ref_cache *refs, const char *base,
- struct ref_array *array)
+ struct ref_dir *dir)
{
- DIR *dir;
+ DIR *d;
const char *path;
if (*refs->name)
else
path = git_path("%s", base);
-
- dir = opendir(path);
-
- if (dir) {
+ d = opendir(path);
+ if (d) {
struct dirent *de;
int baselen = strlen(base);
char *refname = xmalloc(baselen + 257);
if (baselen && base[baselen-1] != '/')
refname[baselen++] = '/';
- while ((de = readdir(dir)) != NULL) {
+ while ((de = readdir(d)) != NULL) {
unsigned char sha1[20];
struct stat st;
int flag;
if (stat(refdir, &st) < 0)
continue;
if (S_ISDIR(st.st_mode)) {
- get_ref_dir(refs, refname, array);
+ get_ref_dir(refs, refname, dir);
continue;
}
if (*refs->name) {
hashclr(sha1);
flag |= REF_ISBROKEN;
}
- add_ref(array, create_ref_entry(refname, sha1, flag, 1));
+ add_ref(dir, create_ref_entry(refname, sha1, flag, 1));
}
free(refname);
- closedir(dir);
+ closedir(d);
}
}
-struct warn_if_dangling_data {
- FILE *fp;
- const char *refname;
- const char *msg_fmt;
-};
-
-static int warn_if_dangling_symref(const char *refname, const unsigned char *sha1,
- int flags, void *cb_data)
-{
- struct warn_if_dangling_data *d = cb_data;
- const char *resolves_to;
- unsigned char junk[20];
-
- if (!(flags & REF_ISSYMREF))
- return 0;
-
- resolves_to = resolve_ref_unsafe(refname, junk, 0, NULL);
- if (!resolves_to || strcmp(resolves_to, d->refname))
- return 0;
-
- fprintf(d->fp, d->msg_fmt, refname);
- return 0;
-}
-
-void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname)
-{
- struct warn_if_dangling_data data;
-
- data.fp = fp;
- data.refname = refname;
- data.msg_fmt = msg_fmt;
- for_each_rawref(warn_if_dangling_symref, &data);
-}
-
-static struct ref_array *get_loose_refs(struct ref_cache *refs)
+static struct ref_dir *get_loose_refs(struct ref_cache *refs)
{
if (!refs->did_loose) {
get_ref_dir(refs, "refs", &refs->loose);
const char *refname, unsigned char *sha1)
{
struct ref_entry *ref;
- struct ref_array *array = get_packed_refs(refs);
+ struct ref_dir *dir = get_packed_refs(refs);
- ref = search_ref_array(array, refname);
+ ref = find_ref(dir, refname);
if (ref == NULL)
return -1;
- memcpy(sha1, ref->sha1, 20);
+ memcpy(sha1, ref->u.value.sha1, 20);
return 0;
}
*/
static int get_packed_ref(const char *refname, unsigned char *sha1)
{
- struct ref_array *packed = get_packed_refs(get_ref_cache(NULL));
- struct ref_entry *entry = search_ref_array(packed, refname);
+ struct ref_dir *packed = get_packed_refs(get_ref_cache(NULL));
+ struct ref_entry *entry = find_ref(packed, refname);
if (entry) {
- hashcpy(sha1, entry->sha1);
+ hashcpy(sha1, entry->u.value.sha1);
return 0;
}
return -1;
return read_ref_full(refname, sha1, 1, NULL);
}
-#define DO_FOR_EACH_INCLUDE_BROKEN 01
-static int do_one_ref(const char *base, each_ref_fn fn, int trim,
- int flags, void *cb_data, struct ref_entry *entry)
+int ref_exists(const char *refname)
{
- if (prefixcmp(entry->name, base))
- return 0;
-
- if (!(flags & DO_FOR_EACH_INCLUDE_BROKEN)) {
- if (entry->flag & REF_ISBROKEN)
- return 0; /* ignore broken refs e.g. dangling symref */
- if (!has_sha1_file(entry->sha1)) {
- error("%s does not point to a valid object!", entry->name);
- return 0;
- }
- }
- current_ref = entry;
- return fn(entry->name + trim, entry->sha1, entry->flag, cb_data);
+ unsigned char sha1[20];
+ return !!resolve_ref_unsafe(refname, sha1, 1, NULL);
}
static int filter_refs(const char *refname, const unsigned char *sha1, int flags,
if (current_ref && (current_ref->name == refname
|| !strcmp(current_ref->name, refname))) {
if (current_ref->flag & REF_KNOWS_PEELED) {
- hashcpy(sha1, current_ref->peeled);
+ hashcpy(sha1, current_ref->u.value.peeled);
return 0;
}
- hashcpy(base, current_ref->sha1);
+ hashcpy(base, current_ref->u.value.sha1);
goto fallback;
}
return -1;
if ((flag & REF_ISPACKED)) {
- struct ref_array *array = get_packed_refs(get_ref_cache(NULL));
- struct ref_entry *r = search_ref_array(array, refname);
+ struct ref_dir *dir = get_packed_refs(get_ref_cache(NULL));
+ struct ref_entry *r = find_ref(dir, refname);
if (r != NULL && r->flag & REF_KNOWS_PEELED) {
- hashcpy(sha1, r->peeled);
+ hashcpy(sha1, r->u.value.peeled);
return 0;
}
}
return -1;
}
+struct warn_if_dangling_data {
+ FILE *fp;
+ const char *refname;
+ const char *msg_fmt;
+};
+
+static int warn_if_dangling_symref(const char *refname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct warn_if_dangling_data *d = cb_data;
+ const char *resolves_to;
+ unsigned char junk[20];
+
+ if (!(flags & REF_ISSYMREF))
+ return 0;
+
+ resolves_to = resolve_ref_unsafe(refname, junk, 0, NULL);
+ if (!resolves_to || strcmp(resolves_to, d->refname))
+ return 0;
+
+ fprintf(d->fp, d->msg_fmt, refname);
+ fputc('\n', d->fp);
+ return 0;
+}
+
+void warn_dangling_symref(FILE *fp, const char *msg_fmt, const char *refname)
+{
+ struct warn_if_dangling_data data;
+
+ data.fp = fp;
+ data.refname = refname;
+ data.msg_fmt = msg_fmt;
+ for_each_rawref(warn_if_dangling_symref, &data);
+}
+
static int do_for_each_ref(const char *submodule, const char *base, each_ref_fn fn,
int trim, int flags, void *cb_data)
{
- int retval = 0, p = 0, l = 0;
struct ref_cache *refs = get_ref_cache(submodule);
- struct ref_array *packed = get_packed_refs(refs);
- struct ref_array *loose = get_loose_refs(refs);
-
- sort_ref_array(packed);
- sort_ref_array(loose);
- while (p < packed->nr && l < loose->nr) {
- struct ref_entry *entry;
- int cmp = strcmp(packed->refs[p]->name, loose->refs[l]->name);
- if (!cmp) {
- p++;
- continue;
- }
- if (cmp > 0) {
- entry = loose->refs[l++];
- } else {
- entry = packed->refs[p++];
- }
- retval = do_one_ref(base, fn, trim, flags, cb_data, entry);
- if (retval)
- goto end_each;
- }
-
- if (l < loose->nr) {
- p = l;
- packed = loose;
- }
+ struct ref_dir *packed_dir = get_packed_refs(refs);
+ struct ref_dir *loose_dir = get_loose_refs(refs);
+ int retval = 0;
- for (; p < packed->nr; p++) {
- retval = do_one_ref(base, fn, trim, flags, cb_data, packed->refs[p]);
- if (retval)
- goto end_each;
+ if (base && *base) {
+ packed_dir = find_containing_dir(packed_dir, base, 0);
+ loose_dir = find_containing_dir(loose_dir, base, 0);
+ }
+
+ if (packed_dir && loose_dir) {
+ sort_ref_dir(packed_dir);
+ sort_ref_dir(loose_dir);
+ retval = do_for_each_ref_in_dirs(
+ packed_dir, loose_dir,
+ base, fn, trim, flags, cb_data);
+ } else if (packed_dir) {
+ sort_ref_dir(packed_dir);
+ retval = do_for_each_ref_in_dir(
+ packed_dir, 0,
+ base, fn, trim, flags, cb_data);
+ } else if (loose_dir) {
+ sort_ref_dir(loose_dir);
+ retval = do_for_each_ref_in_dir(
+ loose_dir, 0,
+ base, fn, trim, flags, cb_data);
}
-end_each:
- current_ref = NULL;
return retval;
}
-
static int do_head_ref(const char *submodule, each_ref_fn fn, void *cb_data)
{
unsigned char sha1[20];
DO_FOR_EACH_INCLUDE_BROKEN, cb_data);
}
-/*
- * Make sure "ref" is something reasonable to have under ".git/refs/";
- * We do not like it if:
- *
- * - any path component of it begins with ".", or
- * - it has double dots "..", or
- * - it has ASCII control character, "~", "^", ":" or SP, anywhere, or
- * - it ends with a "/".
- * - it ends with ".lock"
- * - it contains a "\" (backslash)
- */
-
-/* Return true iff ch is not allowed in reference names. */
-static inline int bad_ref_char(int ch)
-{
- if (((unsigned) ch) <= ' ' || ch == 0x7f ||
- ch == '~' || ch == '^' || ch == ':' || ch == '\\')
- return 1;
- /* 2.13 Pattern Matching Notation */
- if (ch == '*' || ch == '?' || ch == '[') /* Unsupported */
- return 1;
- return 0;
-}
-
-/*
- * Try to read one refname component from the front of refname. Return
- * the length of the component found, or -1 if the component is not
- * legal.
- */
-static int check_refname_component(const char *refname, int flags)
-{
- const char *cp;
- char last = '\0';
-
- for (cp = refname; ; cp++) {
- char ch = *cp;
- if (ch == '\0' || ch == '/')
- break;
- if (bad_ref_char(ch))
- return -1; /* Illegal character in refname. */
- if (last == '.' && ch == '.')
- return -1; /* Refname contains "..". */
- if (last == '@' && ch == '{')
- return -1; /* Refname contains "@{". */
- last = ch;
- }
- if (cp == refname)
- return -1; /* Component has zero length. */
- if (refname[0] == '.') {
- if (!(flags & REFNAME_DOT_COMPONENT))
- return -1; /* Component starts with '.'. */
- /*
- * Even if leading dots are allowed, don't allow "."
- * as a component (".." is prevented by a rule above).
- */
- if (refname[1] == '\0')
- return -1; /* Component equals ".". */
- }
- if (cp - refname >= 5 && !memcmp(cp - 5, ".lock", 5))
- return -1; /* Refname ends with ".lock". */
- return cp - refname;
-}
-
-int check_refname_format(const char *refname, int flags)
-{
- int component_len, component_count = 0;
-
- while (1) {
- /* We are at the start of a path component. */
- component_len = check_refname_component(refname, flags);
- if (component_len < 0) {
- if ((flags & REFNAME_REFSPEC_PATTERN) &&
- refname[0] == '*' &&
- (refname[1] == '\0' || refname[1] == '/')) {
- /* Accept one wildcard as a full refname component. */
- flags &= ~REFNAME_REFSPEC_PATTERN;
- component_len = 1;
- } else {
- return -1;
- }
- }
- component_count++;
- if (refname[component_len] == '\0')
- break;
- /* Skip to next component. */
- refname += component_len + 1;
- }
-
- if (refname[component_len - 1] == '.')
- return -1; /* Refname ends with '.'. */
- if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
- return -1; /* Refname has only one component. */
- return 0;
-}
-
const char *prettify_refname(const char *name)
{
return name + (
return result;
}
-/*
- * Return true iff a reference named refname could be created without
- * conflicting with the name of an existing reference. If oldrefname
- * is non-NULL, ignore potential conflicts with oldrefname (e.g.,
- * because oldrefname is scheduled for deletion in the same
- * operation).
- */
-static int is_refname_available(const char *refname, const char *oldrefname,
- struct ref_array *array)
-{
- int i, namlen = strlen(refname); /* e.g. 'foo/bar' */
- for (i = 0; i < array->nr; i++ ) {
- struct ref_entry *entry = array->refs[i];
- /* entry->name could be 'foo' or 'foo/bar/baz' */
- if (!oldrefname || strcmp(oldrefname, entry->name)) {
- int len = strlen(entry->name);
- int cmplen = (namlen < len) ? namlen : len;
- const char *lead = (namlen < len) ? entry->name : refname;
- if (!strncmp(refname, entry->name, cmplen) &&
- lead[cmplen] == '/') {
- error("'%s' exists; cannot create '%s'",
- entry->name, refname);
- return 0;
- }
- }
- }
- return 1;
-}
-
/*
* *string and *len will only be substituted, and *string returned (for
* later free()ing) if the string passed in is a magic short-hand form
return lock_ref_sha1_basic(refname, old_sha1, flags, NULL);
}
+struct repack_without_ref_sb {
+ const char *refname;
+ int fd;
+};
+
+static int repack_without_ref_fn(const char *refname, const unsigned char *sha1,
+ int flags, void *cb_data)
+{
+ struct repack_without_ref_sb *data = cb_data;
+ char line[PATH_MAX + 100];
+ int len;
+
+ if (!strcmp(data->refname, refname))
+ return 0;
+ len = snprintf(line, sizeof(line), "%s %s\n",
+ sha1_to_hex(sha1), refname);
+ /* this should not happen but just being defensive */
+ if (len > sizeof(line))
+ die("too long a refname '%s'", refname);
+ write_or_die(data->fd, line, len);
+ return 0;
+}
+
static struct lock_file packlock;
static int repack_without_ref(const char *refname)
{
- struct ref_array *packed;
- int fd, i;
-
- packed = get_packed_refs(get_ref_cache(NULL));
- if (search_ref_array(packed, refname) == NULL)
+ struct repack_without_ref_sb data;
+ struct ref_dir *packed = get_packed_refs(get_ref_cache(NULL));
+ if (find_ref(packed, refname) == NULL)
return 0;
- fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
- if (fd < 0) {
+ data.refname = refname;
+ data.fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
+ if (data.fd < 0) {
unable_to_lock_error(git_path("packed-refs"), errno);
return error("cannot delete '%s' from packed refs", refname);
}
-
- for (i = 0; i < packed->nr; i++) {
- char line[PATH_MAX + 100];
- int len;
- struct ref_entry *ref = packed->refs[i];
-
- if (!strcmp(refname, ref->name))
- continue;
- len = snprintf(line, sizeof(line), "%s %s\n",
- sha1_to_hex(ref->sha1), ref->name);
- /* this should not happen but just being defensive */
- if (len > sizeof(line))
- die("too long a refname '%s'", ref->name);
- write_or_die(fd, line, len);
- }
+ do_for_each_ref_in_dir(packed, 0, "", repack_without_ref_fn, 0, 0, &data);
return commit_lock_file(&packlock);
}
static int do_for_each_reflog(const char *base, each_ref_fn fn, void *cb_data)
{
- DIR *dir = opendir(git_path("logs/%s", base));
+ DIR *d = opendir(git_path("logs/%s", base));
int retval = 0;
- if (dir) {
+ if (d) {
struct dirent *de;
int baselen = strlen(base);
char *log = xmalloc(baselen + 257);
if (baselen && base[baselen-1] != '/')
log[baselen++] = '/';
- while ((de = readdir(dir)) != NULL) {
+ while ((de = readdir(d)) != NULL) {
struct stat st;
int namelen;
break;
}
free(log);
- closedir(dir);
+ closedir(d);
}
else if (*base)
return errno;
return 0;
}
-int ref_exists(const char *refname)
-{
- unsigned char sha1[20];
- return !!resolve_ref_unsafe(refname, sha1, 1, NULL);
-}
-
struct ref *find_ref_by_name(const struct ref *list, const char *name)
{
for ( ; list; list = list->next)
#define REF_ISBROKEN 0x04
/*
- * Calls the specified function for each ref file until it returns nonzero,
- * and returns the value
+ * Calls the specified function for each ref file until it returns
+ * nonzero, and returns the value. Please note that it is not safe to
+ * modify references while an iteration is in progress, unless the
+ * same callback function invocation that modifies the reference also
+ * returns a nonzero value to immediately stop the iteration.
*/
typedef int each_ref_fn(const char *refname, const unsigned char *sha1, int flags, void *cb_data);
extern int head_ref(each_ref_fn, void *);
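
To make the iteration contract above concrete, here is a hedged sketch of a typical caller; the callback name is illustrative, only the each_ref_fn signature and for_each_ref() come from the header:

	static int show_one_ref(const char *refname, const unsigned char *sha1,
				int flags, void *cb_data)
	{
		/* Returning nonzero stops the iteration at once, which is
		 * the only point at which modifying refs would be safe. */
		printf("%s %s\n", sha1_to_hex(sha1), refname);
		return 0;
	}

	/* ... somewhere in a command ... */
	for_each_ref(show_one_ref, NULL);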
argv[argc++] = "--quiet";
else if (options.verbosity > 1)
argv[argc++] = "--verbose";
+ argv[argc++] = options.progress ? "--progress" : "--no-progress";
argv[argc++] = url;
for (i = 0; i < nr_spec; i++)
argv[argc++] = specs[i];
}
}
+void reset_revision_walk(void)
+{
+ clear_object_flags(SEEN | ADDED | SHOWN);
+}
+
int prepare_revision_walk(struct rev_info *revs)
{
int nr = revs->pending.nr;
struct object_array_entry *e, *list;
+ struct commit_list **next = &revs->commits;
e = list = revs->pending.objects;
revs->pending.nr = 0;
if (commit) {
if (!(commit->object.flags & SEEN)) {
commit->object.flags |= SEEN;
- commit_list_insert_by_date(commit, &revs->commits);
+ next = commit_list_append(commit, next);
}
}
e++;
}
+ commit_list_sort_by_date(&revs->commits);
if (!revs->leak_pending)
free(list);
const char * const usagestr[]);
extern int handle_revision_arg(const char *arg, struct rev_info *revs,int flags,int cant_be_filename);
+extern void reset_revision_walk(void);
extern int prepare_revision_walk(struct rev_info *revs);
extern struct commit *get_revision(struct rev_info *revs);
extern char *get_revision_mark(const struct rev_info *revs, const struct commit *commit);
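
reset_revision_walk() clears the SEEN, ADDED and SHOWN object flags so the same objects can be traversed again; a hedged sketch of the intended call pattern, with the usual revision setup elided:

	struct rev_info revs;
	struct commit *commit;

	/* ... init_revisions(&revs, NULL) and setup_revisions() as usual ... */
	if (prepare_revision_walk(&revs))
		die("revision walk setup failed");
	while ((commit = get_revision(&revs)) != NULL)
		;	/* first traversal */
	reset_revision_walk();	/* drop SEEN|ADDED|SHOWN so a later walk starts clean */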
#include "sigchain.h"
#include "argv-array.h"
+#ifndef SHELL_PATH
+# define SHELL_PATH "/bin/sh"
+#endif
+
struct child_to_clean {
pid_t pid;
struct child_to_clean *next;
die("BUG: shell command is empty");
if (strcspn(argv[0], "|&;<>()$`\\\"' \t\n*?[#~=%") != strlen(argv[0])) {
+#ifndef WIN32
+ nargv[nargc++] = SHELL_PATH;
+#else
nargv[nargc++] = "sh";
+#endif
nargv[nargc++] = "-c";
if (argc < 2)
#include "rerere.h"
#include "merge-recursive.h"
#include "refs.h"
+#include "argv-array.h"
#define GIT_REFLOG_ACTION "GIT_REFLOG_ACTION"
if (!clean) {
int i;
- strbuf_addstr(msgbuf, "\nConflicts:\n\n");
+ strbuf_addstr(msgbuf, "\nConflicts:\n");
for (i = 0; i < active_nr;) {
struct cache_entry *ce = active_cache[i++];
if (ce_stage(ce)) {
return !clean;
}
+static int is_index_unchanged(void)
+{
+ unsigned char head_sha1[20];
+ struct commit *head_commit;
+
+ if (!resolve_ref_unsafe("HEAD", head_sha1, 1, NULL))
+ return error(_("Could not resolve HEAD commit\n"));
+
+ head_commit = lookup_commit(head_sha1);
+ if (!head_commit || parse_commit(head_commit))
+ return error(_("could not parse commit %s\n"),
+ sha1_to_hex(head_commit->object.sha1));
+
+ if (!active_cache_tree)
+ active_cache_tree = cache_tree();
+
+ if (!cache_tree_fully_valid(active_cache_tree))
+ if (cache_tree_update(active_cache_tree, active_cache,
+ active_nr, 0))
+ return error(_("Unable to update cache tree\n"));
+
+ return !hashcmp(active_cache_tree->sha1, head_commit->tree->object.sha1);
+}
+
/*
* If we are cherry-pick, and if the merge did not result in
* hand-editing, we will hit this commit and inherit the original
*/
static int run_git_commit(const char *defmsg, struct replay_opts *opts)
{
- /* 6 is max possible length of our args array including NULL */
- const char *args[6];
- int i = 0;
+ struct argv_array array;
+ int rc;
+
+ argv_array_init(&array);
+ argv_array_push(&array, "commit");
+ argv_array_push(&array, "-n");
- args[i++] = "commit";
- args[i++] = "-n";
if (opts->signoff)
- args[i++] = "-s";
+ argv_array_push(&array, "-s");
if (!opts->edit) {
- args[i++] = "-F";
- args[i++] = defmsg;
+ argv_array_push(&array, "-F");
+ argv_array_push(&array, defmsg);
+ }
+
+ if (opts->allow_empty)
+ argv_array_push(&array, "--allow-empty");
+
+ rc = run_command_v_opt(array.argv, RUN_GIT_CMD);
+ argv_array_clear(&array);
+ return rc;
+}
+
+static int is_original_commit_empty(struct commit *commit)
+{
+ const unsigned char *ptree_sha1;
+
+ if (parse_commit(commit))
+ return error(_("Could not parse commit %s\n"),
+ sha1_to_hex(commit->object.sha1));
+ if (commit->parents) {
+ struct commit *parent = commit->parents->item;
+ if (parse_commit(parent))
+ return error(_("Could not parse parent commit %s\n"),
+ sha1_to_hex(parent->object.sha1));
+ ptree_sha1 = parent->tree->object.sha1;
+ } else {
+ ptree_sha1 = EMPTY_TREE_SHA1_BIN; /* commit is root */
}
- args[i] = NULL;
- return run_command_v_opt(args, RUN_GIT_CMD);
+ return !hashcmp(ptree_sha1, commit->tree->object.sha1);
}
static int do_pick_commit(struct commit *commit, struct replay_opts *opts)
char *defmsg = NULL;
struct strbuf msgbuf = STRBUF_INIT;
int res;
+ int empty_commit;
+ int index_unchanged;
if (opts->no_commit) {
/*
free_commit_list(remotes);
}
+ empty_commit = is_original_commit_empty(commit);
+ if (empty_commit < 0)
+ return empty_commit;
+
/*
* If the merge was clean or if it failed due to conflict, we write
* CHERRY_PICK_HEAD for the subsequent invocation of commit to use.
print_advice(res == 1, opts);
rerere(opts->allow_rerere_auto);
} else {
+ index_unchanged = is_index_unchanged();
+ /*
+ * If index_unchanged is less than 0, that indicates we either
+ * couldn't parse HEAD or the index, so error out here.
+ */
+ if (index_unchanged < 0)
+ return index_unchanged;
+
+ if (!empty_commit && !opts->keep_redundant_commits && index_unchanged)
+			/*
+			 * The head tree and the index match, which means
+			 * the commit is empty.  Since it was not originally
+			 * empty (per the check above), it must have become
+			 * redundant.  We do not want to keep redundant
+			 * commits, so just return here and skip this commit.
+			 */
+ return 0;
+
if (!opts->no_commit)
res = run_git_commit(defmsg, opts);
}
rollback_lock_file(&index_lock);
}
-/*
- * Append a commit to the end of the commit_list.
- *
- * next starts by pointing to the variable that holds the head of an
- * empty commit_list, and is updated to point to the "next" field of
- * the last item on the list as new commits are appended.
- *
- * Usage example:
- *
- * struct commit_list *list;
- * struct commit_list **next = &list;
- *
- * next = commit_list_append(c1, next);
- * next = commit_list_append(c2, next);
- * assert(commit_list_count(list) == 2);
- * return list;
- */
-static struct commit_list **commit_list_append(struct commit *commit,
- struct commit_list **next)
-{
- struct commit_list *new = xmalloc(sizeof(struct commit_list));
- new->item = commit;
- *next = new;
- new->next = NULL;
- return &new->next;
-}
-
static int format_todo(struct strbuf *buf, struct commit_list *todo_list,
struct replay_opts *opts)
{
int signoff;
int allow_ff;
int allow_rerere_auto;
+ int allow_empty;
+ int keep_redundant_commits;
int mainline;
return NULL;
}
-static dev_t get_device_or_die(const char *path, const char *prefix)
+static dev_t get_device_or_die(const char *path, const char *prefix, int prefix_len)
{
struct stat buf;
- if (stat(path, &buf))
- die_errno("failed to stat '%s%s%s'",
+ if (stat(path, &buf)) {
+ die_errno("failed to stat '%*s%s%s'",
+ prefix_len,
prefix ? prefix : "",
prefix ? "/" : "", path);
+ }
return buf.st_dev;
}
static char cwd[PATH_MAX+1];
const char *gitdirenv, *ret;
char *gitfile;
- int len, offset, ceil_offset;
+ int len, offset, offset_parent, ceil_offset;
dev_t current_device = 0;
int one_filesystem = 1;
*/
one_filesystem = !git_env_bool("GIT_DISCOVERY_ACROSS_FILESYSTEM", 0);
if (one_filesystem)
- current_device = get_device_or_die(".", NULL);
+ current_device = get_device_or_die(".", NULL, 0);
for (;;) {
gitfile = (char*)read_gitfile(DEFAULT_GIT_DIR_ENVIRONMENT);
if (gitfile)
if (is_git_directory("."))
return setup_bare_git_dir(cwd, offset, len, nongit_ok);
- while (--offset > ceil_offset && cwd[offset] != '/');
- if (offset <= ceil_offset)
+ offset_parent = offset;
+ while (--offset_parent > ceil_offset && cwd[offset_parent] != '/');
+ if (offset_parent <= ceil_offset)
return setup_nongit(cwd, nongit_ok);
if (one_filesystem) {
- dev_t parent_device = get_device_or_die("..", cwd);
+ dev_t parent_device = get_device_or_die("..", cwd, offset);
if (parent_device != current_device) {
if (nongit_ok) {
if (chdir(cwd))
return NULL;
}
cwd[offset] = '\0';
- die("Not a git repository (or any parent up to mount parent %s)\n"
+ die("Not a git repository (or any parent up to mount point %s)\n"
"Stopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).", cwd);
}
}
cwd[offset] = '\0';
die_errno("Cannot change to '%s/..'", cwd);
}
+ offset = offset_parent;
}
}
#include "pack-revindex.h"
#include "sha1-lookup.h"
#include "bulk-checkin.h"
+#include "streaming.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
return NULL;
}
-int check_sha1_signature(const unsigned char *sha1, void *map, unsigned long size, const char *type)
+/*
+ * With in-core object data in "map", rehash it to make sure the
+ * object name actually matches "sha1" to detect object corruption.
+ * With "map" == NULL, try reading the object named with "sha1" using
+ * the streaming interface and rehash it to do the same.
+ */
+int check_sha1_signature(const unsigned char *sha1, void *map,
+ unsigned long size, const char *type)
{
unsigned char real_sha1[20];
- hash_sha1_file(map, size, type, real_sha1);
+ enum object_type obj_type;
+ struct git_istream *st;
+ git_SHA_CTX c;
+ char hdr[32];
+ int hdrlen;
+
+ if (map) {
+ hash_sha1_file(map, size, type, real_sha1);
+ return hashcmp(sha1, real_sha1) ? -1 : 0;
+ }
+
+ st = open_istream(sha1, &obj_type, &size, NULL);
+ if (!st)
+ return -1;
+
+ /* Generate the header */
+ hdrlen = sprintf(hdr, "%s %lu", typename(obj_type), size) + 1;
+
+ /* Sha1.. */
+ git_SHA1_Init(&c);
+ git_SHA1_Update(&c, hdr, hdrlen);
+ for (;;) {
+ char buf[1024 * 16];
+ ssize_t readlen = read_istream(st, buf, sizeof(buf));
+
+ if (!readlen)
+ break;
+ git_SHA1_Update(&c, buf, readlen);
+ }
+ git_SHA1_Final(real_sha1, &c);
+ close_istream(st);
return hashcmp(sha1, real_sha1) ? -1 : 0;
}
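
A hedged sketch of the two calling conventions described in the comment above; the buffer and object name are assumed to come from the caller:

	/* in-core: rehash "buf" of length "size" as a blob */
	if (check_sha1_signature(sha1, buf, size, "blob"))
		die("object %s is corrupt", sha1_to_hex(sha1));

	/* streaming: map == NULL re-reads the object through the
	 * streaming interface, useful for blobs too large to map */
	if (check_sha1_signature(sha1, NULL, 0, NULL))
		die("object %s is corrupt", sha1_to_hex(sha1));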
unlink_or_warn(tmpfile);
if (ret) {
if (ret != EEXIST) {
- return error("unable to write sha1 filename %s: %s\n", filename, strerror(ret));
+ return error("unable to write sha1 filename %s: %s", filename, strerror(ret));
}
/* FIXME!!! Collision check here ? */
}
fd = create_tmpfile(tmp_file, sizeof(tmp_file), filename);
if (fd < 0) {
if (errno == EACCES)
- return error("insufficient permission for adding an object to repository database %s\n", get_object_directory());
+ return error("insufficient permission for adding an object to repository database %s", get_object_directory());
else
- return error("unable to create temporary sha1 filename %s: %s\n", tmp_file, strerror(errno));
+ return error("unable to create temporary file: %s", strerror(errno));
}
/* Set it up */
len = cp + tmp_len - name;
cp = xstrndup(name, cp - name);
upstream = branch_get(*cp ? cp : NULL);
- if (!upstream
- || !upstream->merge
- || !upstream->merge[0]->dst)
- return error("No upstream branch found for '%s'", cp);
+ /*
+ * Upstream can be NULL only if cp refers to HEAD and HEAD
+	 * points to something other than a branch.
+ */
+ if (!upstream)
+ return error(_("HEAD does not point to a branch"));
+ if (!upstream->merge || !upstream->merge[0]->dst) {
+ if (!ref_exists(upstream->refname))
+ return error(_("No such branch: '%s'"), cp);
+ if (!upstream->merge)
+ return error(_("No upstream configured for branch '%s'"),
+ upstream->name);
+ return error(
+ _("Upstream branch '%s' not stored as a remote-tracking branch"),
+ upstream->merge[0]->src);
+ }
free(cp);
cp = shorten_unambiguous_ref(upstream->merge[0]->dst, 0);
strbuf_reset(buf);
{
strbuf_add_urlencode(sb, s, strlen(s), reserved);
}
+
+void strbuf_addf_ln(struct strbuf *sb, const char *fmt, ...)
+{
+ va_list ap;
+ va_start(ap, fmt);
+ strbuf_vaddf(sb, fmt, ap);
+ va_end(ap);
+ strbuf_addch(sb, '\n');
+}
+
+int printf_ln(const char *fmt, ...)
+{
+ int ret;
+ va_list ap;
+ va_start(ap, fmt);
+ ret = vprintf(fmt, ap);
+ va_end(ap);
+ if (ret < 0 || putchar('\n') == EOF)
+ return -1;
+ return ret + 1;
+}
+
+int fprintf_ln(FILE *fp, const char *fmt, ...)
+{
+ int ret;
+ va_list ap;
+ va_start(ap, fmt);
+ ret = vfprintf(fp, fmt, ap);
+ va_end(ap);
+ if (ret < 0 || putc('\n', fp) == EOF)
+ return -1;
+ return ret + 1;
+}
extern void strbuf_addf(struct strbuf *sb, const char *fmt, ...);
__attribute__((format (printf,2,0)))
extern void strbuf_vaddf(struct strbuf *sb, const char *fmt, va_list ap);
+__attribute__((format (printf,2,3)))
+extern void strbuf_addf_ln(struct strbuf *sb, const char *fmt, ...);
extern void strbuf_add_lines(struct strbuf *sb, const char *prefix, const char *buf, size_t size);
extern void strbuf_addstr_urlencode(struct strbuf *, const char *,
int reserved);
+__attribute__((format (printf,1,2)))
+extern int printf_ln(const char *fmt, ...);
+__attribute__((format (printf,2,3)))
+extern int fprintf_ln(FILE *fp, const char *fmt, ...);
+
#endif /* STRBUF_H */
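
A hedged sketch of the newline-appending helpers declared above; "path" and "nr" are illustrative variables, not part of the patch:

	struct strbuf sb = STRBUF_INIT;

	strbuf_addf_ln(&sb, "Pushing submodule '%s'", path);	/* formats, then adds '\n' */
	printf_ln("%d commits walked", nr);			/* printf() plus '\n' */
	fprintf_ln(stderr, "error in '%s'", path);		/* fprintf() plus '\n' */
	strbuf_release(&sb);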
return st->u.incore.buf ? 0 : -1;
}
+
+
+/****************************************************************
+ * Users of streaming interface
+ ****************************************************************/
+
+int stream_blob_to_fd(int fd, unsigned const char *sha1, struct stream_filter *filter,
+ int can_seek)
+{
+ struct git_istream *st;
+ enum object_type type;
+ unsigned long sz;
+ ssize_t kept = 0;
+ int result = -1;
+
+ st = open_istream(sha1, &type, &sz, filter);
+ if (!st)
+ return result;
+ if (type != OBJ_BLOB)
+ goto close_and_exit;
+ for (;;) {
+ char buf[1024 * 16];
+ ssize_t wrote, holeto;
+ ssize_t readlen = read_istream(st, buf, sizeof(buf));
+
+ if (!readlen)
+ break;
+ if (can_seek && sizeof(buf) == readlen) {
+ for (holeto = 0; holeto < readlen; holeto++)
+ if (buf[holeto])
+ break;
+ if (readlen == holeto) {
+ kept += holeto;
+ continue;
+ }
+ }
+
+ if (kept && lseek(fd, kept, SEEK_CUR) == (off_t) -1)
+ goto close_and_exit;
+ else
+ kept = 0;
+ wrote = write_in_full(fd, buf, readlen);
+
+ if (wrote != readlen)
+ goto close_and_exit;
+ }
+ if (kept && (lseek(fd, kept - 1, SEEK_CUR) == (off_t) -1 ||
+ write(fd, "", 1) != 1))
+ goto close_and_exit;
+ result = 0;
+
+ close_and_exit:
+ close_istream(st);
+ return result;
+}
extern int close_istream(struct git_istream *);
extern ssize_t read_istream(struct git_istream *, char *, size_t);
+extern int stream_blob_to_fd(int fd, const unsigned char *, struct stream_filter *, int can_seek);
+
#endif /* STREAMING_H */
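
A hedged sketch of the new helper: stream a blob straight to an already-open descriptor, optionally turning long NUL runs into holes; fd and sha1 are assumed to come from the caller:

	/* can_seek = 1 lets runs of NUL bytes become holes via lseek() */
	if (stream_blob_to_fd(fd, sha1, NULL, 1) < 0)
		die("unable to stream blob %s", sha1_to_hex(sha1));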
void *data)
{
int i;
- int *needs_pushing = data;
+ struct string_list *needs_pushing = data;
for (i = 0; i < q->nr; i++) {
struct diff_filepair *p = q->queue[i];
if (!S_ISGITLINK(p->two->mode))
continue;
- if (submodule_needs_pushing(p->two->path, p->two->sha1)) {
- *needs_pushing = 1;
- break;
- }
+ if (submodule_needs_pushing(p->two->path, p->two->sha1))
+ string_list_insert(needs_pushing, p->two->path);
}
}
-
-static void commit_need_pushing(struct commit *commit, int *needs_pushing)
+static void find_unpushed_submodule_commits(struct commit *commit,
+ struct string_list *needs_pushing)
{
struct rev_info rev;
diff_tree_combined_merge(commit, 1, &rev);
}
-int check_submodule_needs_pushing(unsigned char new_sha1[20], const char *remotes_name)
+int find_unpushed_submodules(unsigned char new_sha1[20],
+ const char *remotes_name, struct string_list *needs_pushing)
{
struct rev_info rev;
struct commit *commit;
const char *argv[] = {NULL, NULL, "--not", "NULL", NULL};
int argc = ARRAY_SIZE(argv) - 1;
char *sha1_copy;
- int needs_pushing = 0;
+
struct strbuf remotes_arg = STRBUF_INIT;
strbuf_addf(&remotes_arg, "--remotes=%s", remotes_name);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
- while ((commit = get_revision(&rev)) && !needs_pushing)
- commit_need_pushing(commit, &needs_pushing);
+ while ((commit = get_revision(&rev)) != NULL)
+ find_unpushed_submodule_commits(commit, needs_pushing);
+ reset_revision_walk();
free(sha1_copy);
strbuf_release(&remotes_arg);
- return needs_pushing;
+ return needs_pushing->nr;
+}
+
+static int push_submodule(const char *path)
+{
+ if (add_submodule_odb(path))
+ return 1;
+
+ if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
+ struct child_process cp;
+ const char *argv[] = {"push", NULL};
+
+ memset(&cp, 0, sizeof(cp));
+ cp.argv = argv;
+ cp.env = local_repo_env;
+ cp.git_cmd = 1;
+ cp.no_stdin = 1;
+ cp.dir = path;
+ if (run_command(&cp))
+ return 0;
+ close(cp.out);
+ }
+
+ return 1;
+}
+
+int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name)
+{
+ int i, ret = 1;
+ struct string_list needs_pushing;
+
+ memset(&needs_pushing, 0, sizeof(struct string_list));
+ needs_pushing.strdup_strings = 1;
+
+ if (!find_unpushed_submodules(new_sha1, remotes_name, &needs_pushing))
+ return 1;
+
+ for (i = 0; i < needs_pushing.nr; i++) {
+ const char *path = needs_pushing.items[i].string;
+ fprintf(stderr, "Pushing submodule '%s'\n", path);
+ if (!push_submodule(path)) {
+ fprintf(stderr, "Unable to push submodule '%s'\n", path);
+ ret = 0;
+ }
+ }
+
+ string_list_clear(&needs_pushing, 0);
+
+ return ret;
}
static int is_submodule_commit_present(const char *path, unsigned char sha1[20])
if (in_merge_bases(b, &commit, 1))
add_object_array(o, NULL, &merges);
}
+ reset_revision_walk();
/* Now we've got all merges that contain a and b. Prune all
* merges that contain another found merge and save them in
unsigned is_submodule_modified(const char *path, int ignore_untracked);
int merge_submodule(unsigned char result[20], const char *path, const unsigned char base[20],
const unsigned char a[20], const unsigned char b[20], int search);
-int check_submodule_needs_pushing(unsigned char new_sha1[20], const char *remotes_name);
+int find_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name,
+ struct string_list *needs_pushing);
+int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name);
#endif
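
A hedged sketch of how a push path might consult the entry points declared above before updating remote refs; the variable names and error message are illustrative:

	if (!push_unpushed_submodules(new_sha1, remote_name))
		die("Failed to push all needed submodules");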
# written to web server logs, so we are not interested in that:
# we are interested only in properly formatted errors/warnings
rm -f gitweb.log &&
- perl -- "$SCRIPT_NAME" \
+ "$PERL_PATH" -- "$SCRIPT_NAME" \
>gitweb.output 2>gitweb.log &&
perl -w -e '
open O, ">gitweb.headers";
>&3 2>git_daemon_output &
GIT_DAEMON_PID=$!
{
- read line
+ read line <&7
echo >&4 "$line"
- cat >&4 &
+ cat <&7 >&4 &
+ } 7<git_daemon_output &&
- # Check expected output
- if test x"$(expr "$line" : "\[[0-9]*\] \(.*\)")" != x"Ready to rumble"
- then
- kill "$GIT_DAEMON_PID"
- wait "$GIT_DAEMON_PID"
- trap 'die' EXIT
- error "git daemon failed to start"
- fi
- } <git_daemon_output
+ # Check expected output
+ if test x"$(expr "$line" : "\[[0-9]*\] \(.*\)")" != x"Ready to rumble"
+ then
+ kill "$GIT_DAEMON_PID"
+ wait "$GIT_DAEMON_PID"
+ trap 'die' EXIT
+ error "git daemon failed to start"
+ fi
}
stop_git_daemon() {
#
-# Library code for git-p4 tests
+# Library code for git p4 tests
#
. ./test-lib.sh
if ! test_have_prereq PYTHON; then
- skip_all='skipping git-p4 tests; python not available'
+ skip_all='skipping git p4 tests; python not available'
test_done
fi
( p4 -h && p4d -h ) >/dev/null 2>&1 || {
- skip_all='skipping git-p4 tests; no p4 or p4d'
+ skip_all='skipping git p4 tests; no p4 or p4d'
test_done
}
-GITP4="$GIT_BUILD_DIR/contrib/fast-import/git-p4"
-
# Try to pick a unique port: guess a large number, then hope
# no more than one of each test is running.
#
export P4PORT=localhost:$P4DPORT
export P4CLIENT=client
+export P4EDITOR=:
db="$TRASH_DIRECTORY/db"
cli="$TRASH_DIRECTORY/cli"
'
test_expect_success 'non-fast-forward push shows help message' '
- test_i18ngrep "To prevent you from losing history, non-fast-forward updates were rejected" output
+ test_i18ngrep "Updates were rejected because" output
'
}
<Location /smart_noexport/>
SetEnv GIT_EXEC_PATH ${GIT_EXEC_PATH}
</Location>
+<Location /smart_custom_env/>
+ SetEnv GIT_EXEC_PATH ${GIT_EXEC_PATH}
+ SetEnv GIT_HTTP_EXPORT_ALL
+ SetEnv GIT_COMMITTER_NAME "Custom User"
+ SetEnv GIT_COMMITTER_EMAIL custom@example.com
+</Location>
ScriptAlias /smart/ ${GIT_EXEC_PATH}/git-http-backend/
ScriptAlias /smart_noexport/ ${GIT_EXEC_PATH}/git-http-backend/
+ScriptAlias /smart_custom_env/ ${GIT_EXEC_PATH}/git-http-backend/
<Directory ${GIT_EXEC_PATH}>
Options None
</Directory>
Subject: test1
---
- foo | 1 +
+ foo | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
create mode 100644 foo
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2012 Heiko Voigt
+#
+
+test_description='Test revision walking api'
+
+. ./test-lib.sh
+
+cat >run_twice_expected <<-EOF
+1st
+ > add b
+ > add a
+2nd
+ > add b
+ > add a
+EOF
+
+test_expect_success 'setup' '
+ echo a > a &&
+ git add a &&
+ git commit -m "add a" &&
+ echo b > b &&
+ git add b &&
+ git commit -m "add b"
+'
+
+test_expect_success 'revision walking can be done twice' '
+	test-revision-walking run-twice > run_twice_actual &&
+ test_cmp run_twice_expected run_twice_actual
+'
+
+test_done
#!/bin/sh
-test_description='external credential helper tests'
-. ./test-lib.sh
-. "$TEST_DIRECTORY"/lib-credential.sh
+test_description='external credential helper tests
-pre_test() {
- test -z "$GIT_TEST_CREDENTIAL_HELPER_SETUP" ||
- eval "$GIT_TEST_CREDENTIAL_HELPER_SETUP"
+This is a tool for authors of external helper tools to sanity-check
+their helpers. If you have written the "git-credential-foo" helper,
+you can check it with:
+
+ make GIT_TEST_CREDENTIAL_HELPER=foo t0303-credential-external.sh
+
+This assumes that your helper is capable of both storing and
+retrieving credentials (some helpers may be read-only, and they will
+fail these tests).
+
+Please note that the individual tests do not verify all of the
+preconditions themselves, but rather build on each other. A failing
+test means that tests later in the sequence can return false "OK"
+results.
+
+If your helper supports time-based expiration with a configurable
+timeout, you can test that feature with:
+
+ make GIT_TEST_CREDENTIAL_HELPER=foo \
+ GIT_TEST_CREDENTIAL_HELPER_TIMEOUT="foo --timeout=1" \
+ t0303-credential-external.sh
- # clean before the test in case there is cruft left
- # over from a previous run that would impact results
- helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
-}
+If your helper requires additional setup before the tests are started,
+you can set GIT_TEST_CREDENTIAL_HELPER_SETUP to a sequence of shell
+commands.
+'
-post_test() {
- # clean afterwards so that we are good citizens
- # and don't leave cruft in the helper's storage, which
- # might be long-term system storage
- helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
-}
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
if test -z "$GIT_TEST_CREDENTIAL_HELPER"; then
- say "# skipping external helper tests (set GIT_TEST_CREDENTIAL_HELPER)"
-else
- pre_test
- helper_test "$GIT_TEST_CREDENTIAL_HELPER"
- post_test
+ skip_all="used to test external credential helpers"
+ test_done
fi
+test -z "$GIT_TEST_CREDENTIAL_HELPER_SETUP" ||
+ eval "$GIT_TEST_CREDENTIAL_HELPER_SETUP"
+
+# clean before the test in case there is cruft left
+# over from a previous run that would impact results
+helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+
+helper_test "$GIT_TEST_CREDENTIAL_HELPER"
+
if test -z "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"; then
- say "# skipping external helper timeout tests"
+ say "# skipping timeout tests (GIT_TEST_CREDENTIAL_HELPER_TIMEOUT not set)"
else
- pre_test
helper_test_timeout "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"
- post_test
fi
+# clean afterwards so that we are good citizens
+# and don't leave cruft in the helper's storage, which
+# might be long-term system storage
+helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+
test_done
. ./test-lib.sh
test_expect_success setup '
- git config core.bigfilethreshold 200k &&
+ # clone does not allow us to pass core.bigfilethreshold to
+ # new repos, so set core.bigfilethreshold globally
+ git config --global core.bigfilethreshold 200k &&
echo X | dd of=large1 bs=1k seek=2000 &&
echo X | dd of=large2 bs=1k seek=2000 &&
echo X | dd of=large3 bs=1k seek=2000 &&
- echo Y | dd of=huge bs=1k seek=2500
+ echo Y | dd of=huge bs=1k seek=2500 &&
+ GIT_ALLOC_LIMIT=1500 &&
+ export GIT_ALLOC_LIMIT
'
test_expect_success 'add a large file or two' '
)
'
+test_expect_success 'diff --raw' '
+ git commit -q -m initial &&
+ echo modified >>large1 &&
+ git add large1 &&
+ git commit -q -m modified &&
+ git diff --raw HEAD^
+'
+
+test_expect_success 'hash-object' '
+ git hash-object large1
+'
+
+test_expect_success 'cat-file a large file' '
+ git cat-file blob :large1 >/dev/null
+'
+
+test_expect_success 'cat-file a large file from a tag' '
+ git tag -m largefile largefiletag :large1 &&
+ git cat-file blob largefiletag >/dev/null
+'
+
+test_expect_success 'git-show a large file' '
+	git show :large1 >/dev/null
+'
+
+test_expect_success 'repack' '
+ git repack -ad
+'
+
test_done
cat > resolve.expect << EOF
Updating VARIABLE..VARIABLE
FASTFORWARD (no commit created; -m option ignored)
- example | 1 +
- hello | 1 +
+ example | 1 +
+ hello | 1 +
2 files changed, 2 insertions(+)
EOF
test_expect_success "rename succeeded" "test_cmp expect .git/config"
+test_expect_success 'renaming empty section name is rejected' '
+ test_must_fail git config --rename-section branch.zwei ""
+'
+
+test_expect_success 'renaming to bogus section is rejected' '
+ test_must_fail git config --rename-section branch.zwei "bogus name"
+'
+
cat >> .git/config << EOF
[branch "zwei"] a = 1 [branch "vier"]
EOF
test_cmp expect actual
'
+test_expect_success 'include paths get tilde-expansion' '
+ echo "[test]one = 1" >one &&
+ echo "[include]path = ~/one" >.gitconfig &&
+ echo 1 >expect &&
+ git config test.one >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'include options can still be examined' '
echo "[test]one = 1" >one &&
echo "[include]path = one" >.gitconfig &&
check_fsck &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success rewind '
check_have A B C D E F G H I J K L &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 5
+ test_line_count = 5 .git/logs/refs/heads/master
'
test_expect_success 'corrupt and check' '
--stale-fix \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 5 &&
+ test_line_count = 5 .git/logs/refs/heads/master &&
check_fsck "missing blob $F"
'
--stale-fix \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 2 &&
+ test_line_count = 2 .git/logs/refs/heads/master &&
check_fsck "dangling commit $K"
'
test_expect_success 'rewind2' '
test_tick && git reset --hard HEAD~2 &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
-
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success '--expire=never' '
--expire=never \
--expire-unreachable=never \
--all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
-
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success 'gc.reflogexpire=never' '
git config gc.reflogexpire never &&
git config gc.reflogexpireunreachable never &&
git reflog expire --verbose --all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4
+ test_line_count = 4 .git/logs/refs/heads/master
'
test_expect_success 'gc.reflogexpire=false' '
git config gc.reflogexpire false &&
git config gc.reflogexpireunreachable false &&
git reflog expire --verbose --all &&
- loglen=$(wc -l <.git/logs/refs/heads/master) &&
- test $loglen = 4 &&
+ test_line_count = 4 .git/logs/refs/heads/master &&
git config --unset gc.reflogexpire &&
git config --unset gc.reflogexpireunreachable
)
'
-test_expect_failure 'empty prefix is actually written out' '
+test_expect_success 'empty prefix is actually written out' '
echo >expected &&
(
cd work &&
test_commit 3 &&
(cd clone &&
test_commit 4 &&
- git branch --track my-side origin/side)
-
+ git branch --track my-side origin/side &&
+ git branch --track local-master master &&
+ git remote add -t master master-only .. &&
+ git fetch master-only &&
+ git branch bad-upstream &&
+ git config branch.bad-upstream.remote master-only &&
+ git config branch.bad-upstream.merge refs/heads/side
+ )
'
+sq="'"
+
full_name () {
(cd clone &&
git rev-parse --symbolic-full-name "$@")
git show -s --pretty=format:%s "$@")
}
+error_message () {
+ (cd clone &&
+ test_must_fail git rev-parse --verify "$@")
+}
+
test_expect_success '@{upstream} resolves to correct full name' '
test refs/remotes/origin/master = "$(full_name @{upstream})"
'
test_expect_success 'merge my-side@{u} records the correct name' '
(
- sq="'\''" &&
cd clone || exit
git checkout master || exit
git branch -D new ;# can fail but is ok
test_cmp expect actual
'
+test_expect_success 'branch@{u} works when tracking a local branch' '
+ test refs/heads/master = "$(full_name local-master@{u})"
+'
+
+test_expect_success 'branch@{u} error message when no upstream' '
+ cat >expect <<-EOF &&
+ error: No upstream configured for branch ${sq}non-tracking${sq}
+ fatal: Needed a single revision
+ EOF
+ error_message non-tracking@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success '@{u} error message when no upstream' '
+ cat >expect <<-EOF &&
+ error: No upstream configured for branch ${sq}master${sq}
+ fatal: Needed a single revision
+ EOF
+ test_must_fail git rev-parse --verify @{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'branch@{u} error message with misspelt branch' '
+ cat >expect <<-EOF &&
+ error: No such branch: ${sq}no-such-branch${sq}
+ fatal: Needed a single revision
+ EOF
+ error_message no-such-branch@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success '@{u} error message when not on a branch' '
+ cat >expect <<-EOF &&
+ error: HEAD does not point to a branch
+ fatal: Needed a single revision
+ EOF
+ git checkout HEAD^0 &&
+ test_must_fail git rev-parse --verify @{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'branch@{u} error message if upstream branch not fetched' '
+ cat >expect <<-EOF &&
+ error: Upstream branch ${sq}refs/heads/side${sq} not stored as a remote-tracking branch
+ fatal: Needed a single revision
+ EOF
+ error_message bad-upstream@{u} 2>actual &&
+ test_i18ncmp expect actual
+'
+
+test_expect_success 'pull works when tracking a local branch' '
+(
+ cd clone &&
+ git checkout local-master &&
+ git pull
+)
+'
+
+# makes sense if the previous one succeeded
+test_expect_success '@{u} works when tracking a local branch' '
+ test refs/heads/master = "$(full_name @{u})"
+'
+
cat >expect <<EOF
commit 8f489d01d0cc65c3b0f09504ec50b5ed02a70bd5
Reflog: master@{0} (C O Mitter <committer@example.com>)
rm -f path* .merge_* out .git/index &&
git read-tree $t1 &&
git checkout-index --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
rm -f path* .merge_* out .git/index &&
git read-tree $t1 &&
git checkout-index -a --temp >out &&
-test $(wc -l <out) = 5 &&
+test_line_count = 5 out &&
for f in path0 path1 path3 path4 asubdir/path5
do
test $(grep $f out | cut "-d " -f2) = $f &&
'checkout one stage 2 to temporary file' '
rm -f path* .merge_* out &&
git checkout-index --stage=2 --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
'checkout all stage 2 to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --all --stage=2 --temp >out &&
-test $(wc -l <out) = 3 &&
+test_line_count = 3 out &&
for f in path1 path2 path4
do
test $(grep $f out | cut "-d " -f2) = $f &&
'checkout all stages/one file to nothing' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path0 >out &&
-test $(wc -l <out) = 0'
+test_line_count = 0 out'
test_expect_success \
'checkout all stages/one file to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path1 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path1 &&
cut "-d " -f1 out | (read s1 s2 s3 &&
test -f $s1 &&
'checkout some stages/one file to temporary files' '
rm -f path* .merge_* out &&
git checkout-index --stage=all --temp -- path2 >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = path2 &&
cut "-d " -f1 out | (read s1 s2 s3 &&
test $s1 = . &&
'checkout all stages/all files to temporary files' '
rm -f path* .merge_* out &&
git checkout-index -a --stage=all --temp >out &&
-test $(wc -l <out) = 5'
+test_line_count = 5 out'
test_expect_success \
'-- path0: no entry' '
'checkout --temp within subdir' '
(cd asubdir &&
git checkout-index -a --stage=all >out &&
- test $(wc -l <out) = 1 &&
+ test_line_count = 1 out &&
test $(grep path5 out | cut "-d " -f2) = path5 &&
grep path5 out | cut "-d " -f1 | (read s1 s2 s3 &&
test -f ../$s1 &&
rm -f .git/index &&
git read-tree $t4 &&
git checkout-index --temp -a >out &&
-test $(wc -l <out) = 1 &&
+test_line_count = 1 out &&
test $(cut "-d " -f2 out) = a &&
p=$(cut "-d " -f1 out) &&
test -f $p &&
git config branch.child.merge refs/heads/master &&
git checkout child^ &&
git checkout child >stdout &&
- test_cmp expect stdout
+ test_i18ncmp expect stdout
'
test_done
prime_resolve_undo &&
git update-index --unresolve fi/le &&
git ls-files -u >actual &&
- test $(wc -l <actual) = 3
+ test_line_count = 3 actual
'
test_expect_success 'rerere and rerere forget' '
test_path_is_missing .git/refs/heads/t
'
+test_expect_success 'git branch --column' '
+ COLUMNS=81 git branch --column=column >actual &&
+ cat >expected <<\EOF &&
+ a/b/c bam foo l * master n o/p r
+ abc bar j/k m/m master2 o/o q
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'git branch --column with an extremely long branch name' '
+ long=this/is/a/part/of/long/branch/name &&
+ long=z$long/$long/$long/$long &&
+ test_when_finished "git branch -d $long" &&
+ git branch $long &&
+ COLUMNS=80 git branch --column=column >actual &&
+ cat >expected <<EOF &&
+ a/b/c
+ abc
+ bam
+ bar
+ foo
+ j/k
+ l
+ m/m
+* master
+ master2
+ n
+ o/o
+ o/p
+ q
+ r
+ $long
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'git branch with column.*' '
+ git config column.ui column &&
+ git config column.branch "dense" &&
+ COLUMNS=80 git branch >actual &&
+ git config --unset column.branch &&
+ git config --unset column.ui &&
+ cat >expected <<\EOF &&
+ a/b/c bam foo l * master n o/p r
+ abc bar j/k m/m master2 o/o q
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'git branch --column -v should fail' '
+ test_must_fail git branch --column -v
+'
+
+test_expect_success 'git branch -v with column.ui ignored' '
+ git config column.ui column &&
+ COLUMNS=80 git branch -v | cut -c -10 | sed "s/ *$//" >actual &&
+ git config --unset column.ui &&
+ cat >expected <<\EOF &&
+ a/b/c
+ abc
+ bam
+ bar
+ foo
+ j/k
+ l
+ m/m
+* master
+ master2
+ n
+ o/o
+ o/p
+ q
+ r
+EOF
+ test_cmp expected actual
+'
+
mv .git/config .git/config-saved
test_expect_success 'git branch -m q q2 without config should succeed' '
p1='tabs ," (dq) and spaces'
p2='just space'
-cat >"$p0" <<\EOF
-1. A quick brown fox jumps over the lazy cat, oops dog.
-2. A quick brown fox jumps over the lazy cat, oops dog.
-3. A quick brown fox jumps over the lazy cat, oops dog.
-EOF
-
-cat 2>/dev/null >"$p1" "$p0"
-echo 'Foo Bar Baz' >"$p2"
+test_expect_success 'setup' '
+ cat >"$p0" <<-\EOF &&
+ 1. A quick brown fox jumps over the lazy cat, oops dog.
+ 2. A quick brown fox jumps over the lazy cat, oops dog.
+ 3. A quick brown fox jumps over the lazy cat, oops dog.
+ EOF
+
+ { cat "$p0" >"$p1" || :; } &&
+ { echo "Foo Bar Baz" >"$p2" || :; } &&
+
+ if test -f "$p1" && cmp "$p0" "$p1"
+ then
+ test_set_prereq TABS_IN_FILENAMES
+ fi
+'
-if test -f "$p1" && cmp "$p0" "$p1"
+if ! test_have_prereq TABS_IN_FILENAMES
then
- test_set_prereq TABS_IN_FILENAMES
-else
# since FAT/NTFS does not allow tabs in filenames, skip this test
- say 'Your filesystem does not allow tabs in filenames'
+ skip_all='Your filesystem does not allow tabs in filenames'
+ test_done
fi
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'just space
-no-funny' >expected
-"
+test_expect_success 'setup: populate index and tree' '
+ git update-index --add "$p0" "$p2" &&
+ t0=$(git write-tree)
+'
-test_expect_success TABS_IN_FILENAMES 'git ls-files no-funny' \
- 'git update-index --add "$p0" "$p2" &&
+test_expect_success 'ls-files prints space in filename verbatim' '
+ printf "%s\n" "just space" no-funny >expected &&
git ls-files >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-t0=`git write-tree` &&
-echo "$t0" >t0 &&
+ test_cmp expected current
+'
-cat > expected <<\EOF
-just space
-no-funny
-"tabs\t,\" (dq) and spaces"
-EOF
+test_expect_success 'setup: add funny filename' '
+ git update-index --add "$p1" &&
+ t1=$(git write-tree)
'
-test_expect_success TABS_IN_FILENAMES 'git ls-files with-funny' \
- 'git update-index --add "$p1" &&
+test_expect_success 'ls-files quotes funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ "tabs\t,\" (dq) and spaces"
+ EOF
git ls-files >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'just space
-no-funny
-tabs ,\" (dq) and spaces' >expected
-"
-
-test_expect_success TABS_IN_FILENAMES 'git ls-files -z with-funny' \
- 'git ls-files -z | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-t1=`git write-tree` &&
-echo "$t1" >t1 &&
-
-cat > expected <<\EOF
-just space
-no-funny
-"tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git ls-tree with funny' \
- 'git ls-tree -r $t1 | sed -e "s/^[^ ]* //" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-A "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-index with-funny' \
- 'git diff-index --name-status $t0 >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree with-funny' \
- 'git diff-tree --name-status $t0 $t1 >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' "
-echo 'A
-tabs ,\" (dq) and spaces' >expected
-"
-
-test_expect_success TABS_IN_FILENAMES 'git diff-index -z with-funny' \
- 'git diff-index -z --name-status $t0 | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree -z with-funny' \
- 'git diff-tree -z --name-status $t0 $t1 | perl -pe y/\\000/\\012/ >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-CNUM no-funny "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree -C with-funny' \
- 'git diff-tree -C --find-copies-harder --name-status \
- $t0 $t1 | sed -e 's/^C[0-9]*/CNUM/' >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-RNUM no-funny "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git update-index --force-remove "$p0" &&
- git diff-index -M --name-status \
- $t0 | sed -e 's/^R[0-9]*/RNUM/' >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
-diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
-similarity index NUM%
-rename from no-funny
-rename to "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git diff-index -M -p $t0 |
- sed -e "s/index [0-9]*%/index NUM%/" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-chmod +x "$p1" &&
-cat > expected <<\EOF
-diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
-old mode 100644
-new mode 100755
-similarity index NUM%
-rename from no-funny
-rename to "tabs\t,\" (dq) and spaces"
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny' \
- 'git diff-index -M -p $t0 |
- sed -e "s/index [0-9]*%/index NUM%/" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat >expected <<\EOF
- "tabs\t,\" (dq) and spaces"
- 1 file changed, 0 insertions(+), 0 deletions(-)
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree rename with-funny applied' \
- 'git diff-index -M -p $t0 |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'setup expect' '
-cat > expected <<\EOF
- no-funny
- "tabs\t,\" (dq) and spaces"
- 2 files changed, 3 insertions(+), 3 deletions(-)
-EOF
-'
-
-test_expect_success TABS_IN_FILENAMES 'git diff-tree delete with-funny applied' \
- 'git diff-index -p $t0 |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
-
-test_expect_success TABS_IN_FILENAMES 'git apply non-git diff' \
- 'git diff-index -p $t0 |
- sed -ne "/^[-+@]/p" |
- git apply --stat | sed -e "s/|.*//" -e "s/ *\$//" >current &&
- test_cmp expected current'
+ test_cmp expected current
+'
+
+test_expect_success 'ls-files -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ tabs ," (dq) and spaces
+ EOF
+ git ls-files -z >ls-files.z &&
+ perl -pe "y/\000/\012/" <ls-files.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'ls-tree quotes funny filename' '
+ cat >expected <<-\EOF &&
+ just space
+ no-funny
+ "tabs\t,\" (dq) and spaces"
+ EOF
+ git ls-tree -r $t1 >ls-tree &&
+ sed -e "s/^[^ ]* //" <ls-tree >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index --name-status quotes funny filename' '
+ cat >expected <<-\EOF &&
+ A "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index --name-status $t0 >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree --name-status quotes funny filename' '
+ cat >expected <<-\EOF &&
+ A "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-tree --name-status $t0 $t1 >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ A
+ tabs ," (dq) and spaces
+ EOF
+ git diff-index -z --name-status $t0 >diff-index.z &&
+ perl -pe "y/\000/\012/" <diff-index.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree -z does not quote funny filename' '
+ cat >expected <<-\EOF &&
+ A
+ tabs ," (dq) and spaces
+ EOF
+ git diff-tree -z --name-status $t0 $t1 >diff-tree.z &&
+ perl -pe y/\\000/\\012/ <diff-tree.z >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-tree --find-copies-harder quotes funny filename' '
+ cat >expected <<-\EOF &&
+ CNUM no-funny "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-tree -C --find-copies-harder --name-status $t0 $t1 >out &&
+ sed -e "s/^C[0-9]*/CNUM/" <out >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'setup: remove unfunny index entry' '
+ git update-index --force-remove "$p0"
+'
+
+test_expect_success 'diff-index -M quotes funny filename' '
+ cat >expected <<-\EOF &&
+ RNUM no-funny "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M --name-status $t0 >out &&
+ sed -e "s/^R[0-9]*/RNUM/" <out >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diff-index -M -p quotes funny filename' '
+ cat >expected <<-\EOF &&
+ diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
+ similarity index NUM%
+ rename from no-funny
+ rename to "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ sed -e "s/index [0-9]*%/index NUM%/" <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'setup: mode change' '
+ chmod +x "$p1"
+'
+
+test_expect_success 'diff-index -M -p with mode change quotes funny filename' '
+ cat >expected <<-\EOF &&
+ diff --git a/no-funny "b/tabs\t,\" (dq) and spaces"
+ old mode 100644
+ new mode 100755
+ similarity index NUM%
+ rename from no-funny
+ rename to "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ sed -e "s/index [0-9]*%/index NUM%/" <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'diffstat for rename quotes funny filename' '
+ cat >expected <<-\EOF &&
+ "tabs\t,\" (dq) and spaces"
+ 1 file changed, 0 insertions(+), 0 deletions(-)
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ git apply --stat <diff >diffstat &&
+ sed -e "s/|.*//" -e "s/ *\$//" <diffstat >current &&
+ test_i18ncmp expected current
+'
+
+test_expect_success 'numstat for rename quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -M -p $t0 >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'numstat without -M quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 3 no-funny
+ 3 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -p $t0 >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
+
+test_expect_success 'numstat for non-git rename diff quotes funny filename' '
+ cat >expected <<-\EOF &&
+ 0 3 no-funny
+ 3 0 "tabs\t,\" (dq) and spaces"
+ EOF
+ git diff-index -p $t0 >git-diff &&
+ sed -ne "/^[-+@]/p" <git-diff >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expected current
+'
test_done
echo resolved > file1 &&
git add file1 &&
git rebase --continue > output &&
- grep "^ file1 | 2 +-$" output
+ grep "^ file1 | 2 +-$" output
'
test_expect_success 'multi-squash only fires up editor once' '
test_tick &&
git rebase $2 -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code $1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep first | wc -l)
test_tick &&
git rebase $2 -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code $1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 2 = $(git cat-file commit HEAD^ | grep first | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 4 = $(wc -l <actual) &&
+ test_line_count = 4 actual &&
git diff --exit-code final-missquash &&
test 0 = $(git rev-list final-missquash...HEAD | wc -l)
'
test_tick &&
git rebase --autosquash -i HEAD~4 &&
git log --oneline >actual &&
- test 4 = $(wc -l <actual) &&
+ test_line_count = 4 actual &&
git diff --exit-code final-multisquash &&
test 1 = "$(git cat-file blob HEAD^^:file1)" &&
test 2 = $(git cat-file commit HEAD^^ | grep first | wc -l) &&
test_tick &&
git rebase --autosquash -i HEAD~4 &&
git log --oneline >actual &&
- test 5 = $(wc -l <actual) &&
+ test_line_count = 5 actual &&
git diff --exit-code final-presquash &&
test 0 = "$(git cat-file blob HEAD^^:file1)" &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-shasquash &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep squash | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-longshasquash &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test 1 = $(git cat-file commit HEAD^ | grep squash | wc -l)
test_tick &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual) &&
+ test_line_count = 3 actual &&
git diff --exit-code final-commit-$1 &&
test 1 = "$(git cat-file blob HEAD^:file1)" &&
test $2 = $(git cat-file commit HEAD^ | grep first | wc -l)
echo third >> file1 &&
git add file1 &&
test_tick &&
- git commit --allow-empty-message -m ""
+ git commit --allow-empty-message -m "" &&
+
+ git checkout master &&
+ git checkout -b empty-branch2 &&
+ test_tick &&
+ git commit --allow-empty -m "empty"
'
'
+test_expect_success 'cherry pick an empty non-ff commit without --allow-empty' '
+ git checkout master &&
+ echo fourth >>file2 &&
+ git add file2 &&
+ git commit -m "fourth" &&
+ test_must_fail git cherry-pick empty-branch2
+'
+
+test_expect_success 'cherry pick an empty non-ff commit with --allow-empty' '
+ git checkout master &&
+ git cherry-pick --allow-empty empty-branch2
+'
+
+test_expect_success 'cherry pick with --keep-redundant-commits' '
+ git checkout master &&
+ git cherry-pick --keep-redundant-commits HEAD^
+'
+
test_done
'
test_expect_success 'cherry-pick first..fourth works' '
+ git checkout -f master &&
+ git reset --hard first &&
+ test_tick &&
+ git cherry-pick first..fourth &&
+ git diff --quiet other &&
+ git diff --quiet HEAD other &&
+ check_head_differs_from fourth
+'
+
+test_expect_success 'output to keep user entertained during multi-pick' '
cat <<-\EOF >expected &&
[master OBJID] second
Author: A U Thor <author@example.com>
git reset --hard first &&
test_tick &&
git cherry-pick first..fourth >actual &&
+ sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
+ test_line_count -ge 3 actual.fuzzy &&
+ test_i18ncmp expected actual.fuzzy
+'
+
+test_expect_success 'cherry-pick --strategy resolve first..fourth works' '
+ git checkout -f master &&
+ git reset --hard first &&
+ test_tick &&
+ git cherry-pick --strategy resolve first..fourth &&
git diff --quiet other &&
git diff --quiet HEAD other &&
-
- sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
- test_cmp expected actual.fuzzy &&
check_head_differs_from fourth
'
-test_expect_success 'cherry-pick --strategy resolve first..fourth works' '
+test_expect_success 'output during multi-pick indicates merge strategy' '
cat <<-\EOF >expected &&
Trying simple merge.
[master OBJID] second
git reset --hard first &&
test_tick &&
git cherry-pick --strategy resolve first..fourth >actual &&
- git diff --quiet other &&
- git diff --quiet HEAD other &&
sed -e "s/$_x05[0-9a-f][0-9a-f]/OBJID/" <actual >actual.fuzzy &&
- test_cmp expected actual.fuzzy &&
- check_head_differs_from fourth
+ test_i18ncmp expected actual.fuzzy
'
test_expect_success 'cherry-pick --ff first..fourth works' '
git config --unset-all i18n.commitencoding &&
git rebase --autosquash -i HEAD^^^ &&
git log --oneline >actual &&
- test 3 = $(wc -l <actual)
+ test_line_count = 3 actual
'
}
test $(git ls-files --modified | wc -l) -eq 1
'
-test_expect_success 'stash show - stashes on stack, stash-like argument' '
+test_expect_success 'stash show format defaults to --stat' '
git stash clear &&
test_when_finished "git reset --hard HEAD" &&
git reset --hard &&
STASH_ID=$(git stash create) &&
git reset --hard &&
cat >expected <<-EOF &&
- file | 1 +
+ file | 1 +
1 file changed, 1 insertion(+)
EOF
git stash show ${STASH_ID} >actual &&
+ test_i18ncmp expected actual
+'
+
+test_expect_success 'stash show - stashes on stack, stash-like argument' '
+ git stash clear &&
+ test_when_finished "git reset --hard HEAD" &&
+ git reset --hard &&
+ echo foo >> file &&
+ git stash &&
+ test_when_finished "git stash drop" &&
+ echo bar >> file &&
+ STASH_ID=$(git stash create) &&
+ git reset --hard &&
+ echo "1 0 file" >expected &&
+ git stash show --numstat ${STASH_ID} >actual &&
test_cmp expected actual
'
echo foo >> file &&
STASH_ID=$(git stash create) &&
git reset --hard &&
- cat >expected <<-EOF &&
- file | 1 +
- 1 file changed, 1 insertion(+)
- EOF
- git stash show ${STASH_ID} >actual &&
+ echo "1 0 file" >expected &&
+ git stash show --numstat ${STASH_ID} >actual &&
test_cmp expected actual
'
. ./test-lib.sh
+cat >expect.binary-numstat <<\EOF
+1 1 a
+- - b
+1 1 c
+- - d
+EOF
+
test_expect_success 'prepare repository' \
'echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
git update-index --add a b c d &&
d | Bin
4 files changed, 2 insertions(+), 2 deletions(-)
EOF
-test_expect_success 'diff without --binary' \
- 'git diff | git apply --stat --summary >current &&
- test_cmp expected current'
+test_expect_success '"apply --stat" output for binary file change' '
+ git diff >diff &&
+ git apply --stat --summary <diff >current &&
+ test_i18ncmp expected current
+'
+
+test_expect_success 'apply --numstat notices binary file change' '
+ git diff >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expect.binary-numstat current
+'
-test_expect_success 'diff with --binary' \
- 'git diff --binary | git apply --stat --summary >current &&
- test_cmp expected current'
+test_expect_success 'apply --numstat understands diff --binary format' '
+ git diff --binary >diff &&
+ git apply --numstat <diff >current &&
+ test_cmp expect.binary-numstat current
+'
# apply needs to be able to skip the binary material correctly
# in order to report the line number of a corrupt patch.
test_cmp expected actual
'
+cat >expect <<EOF
+ binfile | Bin 0 -> 1026 bytes
+ textfile | 10000 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+EOF
+
+test_expect_success 'diff --stat with binary files and big change count' '
+ echo X | dd of=binfile bs=1k seek=1 &&
+ git add binfile &&
+ i=0 &&
+ while test $i -lt 10000; do
+ echo $i &&
+ i=$(($i + 1))
+ done >textfile &&
+ git add textfile &&
+ git diff --cached --stat binfile textfile >output &&
+ grep " | " output >actual &&
+ test_cmp expect actual
+'
+
test_done
} >"$actual" &&
if test -f "$expect"
then
- test_cmp "$expect" "$actual" &&
+ case $cmd in
+ *format-patch* | *-stat*)
+ test_i18ncmp "$expect" "$actual";;
+ *)
+ test_cmp "$expect" "$actual";;
+ esac &&
rm -f "$actual"
else
# this is to help developing new tests.
$ git diff-tree --cc --patch-with-stat --summary master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --cc dir/sub
$ git diff-tree --cc --patch-with-stat --summary side
c7a2ab9e8eac7b117442a607d5a9b3950ae34d5a
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
$ git diff-tree --cc --patch-with-stat master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --cc dir/sub
$ git diff-tree --cc --stat --summary master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
$
$ git diff-tree --cc --stat --summary side
c7a2ab9e8eac7b117442a607d5a9b3950ae34d5a
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
$
$ git diff-tree --cc --stat master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
$
$ git diff-tree --pretty=oneline --root --patch-with-stat initial
444ac553ac7612cc88969031b02b3767fb8a353a Initial
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
diff --git a/dir/sub b/dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
diff --git a/dir/sub b/dir/sub
Initial
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Initial
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
$
$ git diff-tree --root --patch-with-stat initial
444ac553ac7612cc88969031b02b3767fb8a353a
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
diff --git a/dir/sub b/dir/sub
$ git diff-tree -c --stat --summary master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
$
$ git diff-tree -c --stat --summary side
c7a2ab9e8eac7b117442a607d5a9b3950ae34d5a
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
$
$ git diff-tree -c --stat master
59d314ad6f356dd08601a4cd5e530381da3e3c64
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
$
$ git diff --patch-with-stat -r initial..side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
$ git diff --patch-with-stat initial..side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
$ git diff --stat initial..side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
$
$ git diff -r --stat initial..side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
$
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Content-Transfer-Encoding: 8bit
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Second
Third
- dir/sub | 4 ++++
- file0 | 3 +++
- file1 | 3 +++
- file2 | 3 ---
+ dir/sub | 4 ++++
+ file0 | 3 +++
+ file1 | 3 +++
+ file2 | 3 ---
4 files changed, 10 insertions(+), 3 deletions(-)
create mode 100644 file1
delete mode 100644 file2
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Subject: [DIFFERENT_PREFIX 2/2] Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Subject: [PATCH] Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Subject: [PATCH] Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Subject: [PATCH 2/3] Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Subject: [PATCH 3/3] Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Subject: [PATCH 2/3] Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Subject: [PATCH 3/3] Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Subject: [PATCH 2/2] Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
Subject: [PATCH] Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Side
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/dir/sub b/dir/sub
Side
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Merge branch 'side'
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --cc dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/dir/sub b/dir/sub
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
diff --git a/dir/sub b/dir/sub
Merge branch 'side'
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --combined dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
$
Side
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
$
Side
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/dir/sub b/dir/sub
Side
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
+ dir/sub | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dir/sub b/dir/sub
Merge branch 'side'
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --cc dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
diff --git a/dir/sub b/dir/sub
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
diff --git a/dir/sub b/dir/sub
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/dir/sub b/dir/sub
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
diff --git a/dir/sub b/dir/sub
Merge branch 'side'
- dir/sub | 2 ++
- file0 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
2 files changed, 5 insertions(+)
diff --combined dir/sub
Side
---
- dir/sub | 2 ++
- file0 | 3 +++
- file3 | 4 ++++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
3 files changed, 9 insertions(+)
create mode 100644 file3
Third
---
- dir/sub | 2 ++
- file1 | 3 +++
+ dir/sub | 2 ++
+ file1 | 3 +++
2 files changed, 5 insertions(+)
create mode 100644 file1
This is the second commit.
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 ---
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 ---
3 files changed, 5 insertions(+), 3 deletions(-)
delete mode 100644 file2
Initial
---
- dir/sub | 2 ++
- file0 | 3 +++
- file2 | 3 +++
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file2 | 3 +++
3 files changed, 8 insertions(+)
create mode 100644 dir/sub
create mode 100644 file0
'
cat > expect << EOF
----
- file | 16 ++++++++++++++++
- 1 file changed, 16 insertions(+)
-
-diff --git a/file b/file
index 40f36c6..2dc5c23 100644
--- a/file
+++ b/file
test_expect_success 'format-patch respects -U' '
git format-patch -U4 -2 &&
- sed -e "1,/^\$/d" -e "/^+5/q" < 0001-This-is-an-excessively-long-subject-line-for-a-messa.patch > output &&
+ sed -e "1,/^diff/d" -e "/^+5/q" \
+ <0001-This-is-an-excessively-long-subject-line-for-a-messa.patch \
+ >output &&
test_cmp expect output
'
test_cmp expect actual
'
-test_expect_success TABS_IN_FILENAMES 'setup expected files' '
-cat >expect <<\EOF
- pathname.1 => "Rpathname\twith HT.0" | 0
- pathname.3 => "Rpathname\nwith LF.0" | 0
- "pathname\twith HT.3" => "Rpathname\nwith LF.1" | 0
- pathname.2 => Rpathname with SP.0 | 0
- "pathname\twith HT.2" => Rpathname with SP.1 | 0
- pathname.0 => Rpathname.0 | 0
- "pathname\twith HT.0" => Rpathname.1 | 0
- 7 files changed, 0 insertions(+), 0 deletions(-)
-EOF
+test_expect_success TABS_IN_FILENAMES 'git diff --numstat -M HEAD' '
+ cat >expect <<-\EOF &&
+ 0 0 pathname.1 => "Rpathname\twith HT.0"
+ 0 0 pathname.3 => "Rpathname\nwith LF.0"
+ 0 0 "pathname\twith HT.3" => "Rpathname\nwith LF.1"
+ 0 0 pathname.2 => Rpathname with SP.0
+ 0 0 "pathname\twith HT.2" => Rpathname with SP.1
+ 0 0 pathname.0 => Rpathname.0
+ 0 0 "pathname\twith HT.0" => Rpathname.1
+ EOF
+ git diff --numstat -M HEAD >actual &&
+ test_cmp expect actual
'
test_expect_success TABS_IN_FILENAMES 'git diff --stat -M HEAD' '
+ cat >expect <<-\EOF &&
+ pathname.1 => "Rpathname\twith HT.0" | 0
+ pathname.3 => "Rpathname\nwith LF.0" | 0
+ "pathname\twith HT.3" => "Rpathname\nwith LF.1" | 0
+ pathname.2 => Rpathname with SP.0 | 0
+ "pathname\twith HT.2" => Rpathname with SP.1 | 0
+ pathname.0 => Rpathname.0 | 0
+ "pathname\twith HT.0" => Rpathname.1 | 0
+ 7 files changed, 0 insertions(+), 0 deletions(-)
+ EOF
git diff --stat -M HEAD >actual &&
- test_cmp expect actual
+ test_i18ncmp expect actual
'
test_done
'
cat >expect.stat <<'EOF'
- file | Bin 2 -> 4 bytes
+ file | Bin 2 -> 4 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
EOF
test_expect_success 'diffstat does not run textconv' '
echo file diff=fail >.gitattributes &&
git diff --stat HEAD^ HEAD >actual &&
- test_cmp expect.stat actual
+ test_i18ncmp expect.stat actual &&
+
+ head -n1 <expect.stat >expect.line1 &&
+ head -n1 <actual >actual.line1 &&
+ test_cmp expect.line1 actual.line1
'
# restore working setup
echo file diff=foo >.gitattributes
grep "GIT binary patch" diff
'
-test_expect_success 'rewrite diff --stat shows binary changes' '
+test_expect_success 'rewrite diff --numstat shows binary changes' '
+ git diff -B --numstat --summary >diff &&
+ grep -e "- - " diff &&
+ grep " rewrite file" diff
+'
+
+test_expect_success 'diff --stat counts binary rewrite as 0 lines' '
git diff -B --stat --summary >diff &&
grep "Bin" diff &&
- grep "0 insertions.*0 deletions" diff &&
+ test_i18ngrep "0 insertions.*0 deletions" diff &&
grep " rewrite file" diff
'
test_expect_success 'git diff-tree HEAD^ HEAD' '
git diff-tree --quiet HEAD^ HEAD >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
test_expect_success 'git diff-tree HEAD^ HEAD -- a' '
git diff-tree --quiet HEAD^ HEAD -- a >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-tree HEAD^ HEAD -- b' '
git diff-tree --quiet HEAD^ HEAD -- b >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
# this diff outputs one line: sha1 of the given head
test_expect_success 'echo HEAD | git diff-tree --stdin' '
echo $(git rev-parse HEAD) | git diff-tree --quiet --stdin >cnt
- test $? = 1 && test $(wc -l <cnt) = 1
+ test $? = 1 && test_line_count = 1 cnt
'
test_expect_success 'git diff-tree HEAD HEAD' '
git diff-tree --quiet HEAD HEAD >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-files' '
git diff-files --quiet >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD' '
git diff-index --quiet --cached HEAD >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD^' '
git diff-index --quiet --cached HEAD^ >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
'
test_expect_success 'git diff-index --cached HEAD^' '
echo text >>b &&
echo 3 >c &&
git add . && {
git diff-index --quiet --cached HEAD^ >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-tree -Stext HEAD^ HEAD -- b' '
git commit -m "text in b" && {
git diff-tree --quiet -Stext HEAD^ HEAD -- b >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-tree -Snot-found HEAD^ HEAD -- b' '
git diff-tree --quiet -Snot-found HEAD^ HEAD -- b >cnt
- test $? = 0 && test $(wc -l <cnt) = 0
+ test $? = 0 && test_line_count = 0 cnt
'
test_expect_success 'git diff-files' '
echo 3 >>c && {
git diff-files --quiet >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
test_expect_success 'git diff-index --cached HEAD' '
git update-index c && {
git diff-index --quiet --cached HEAD >cnt
- test $? = 1 && test $(wc -l <cnt) = 0
+ test $? = 1 && test_line_count = 0 cnt
}
'
'
cat > expected <<\EOF
- bar => sub/bar | Bin 5 -> 5 bytes
- foo => sub/foo | 0
- 2 files changed, 0 insertions(+), 0 deletions(-)
+- - bar => sub/bar
+0 0 foo => sub/foo
diff --git a/bar b/sub/bar
similarity index 100%
EOF
test_expect_success 'git show -C -C report renames' '
- git show -C -C --raw --binary --stat | tail -n 12 > current &&
+ git show -C -C --raw --binary --numstat >patch-with-stat &&
+ tail -n 11 patch-with-stat >current &&
test_cmp expected current
'
"
}
+check_numstat() {
+expect=$1; shift
+cat >expected <<EOF
+1 0 $expect
+EOF
+test_expect_success "--numstat $*" "
+ echo '1 0 $expect' >expected &&
+ git diff --numstat $* HEAD^ >actual &&
+ test_cmp expected actual
+"
+}
+
check_stat() {
expect=$1; shift
cat >expected <<EOF
- $expect | 1 +
+ $expect | 1 +
1 file changed, 1 insertion(+)
EOF
test_expect_success "--stat $*" "
git diff --stat $* HEAD^ >actual &&
- test_cmp expected actual
+ test_i18ncmp expected actual
"
}
"
}
-for type in diff stat raw; do
+for type in diff numstat stat raw; do
check_$type file2 --relative=subdir/
check_$type file2 --relative=subdir
check_$type dir/file2 --relative=sub
'
cat <<EOF >expect_diff_stat
- changed/text | 2 +-
- dst/copy/changed/text | 10 ++++++++++
- dst/copy/rearranged/text | 10 ++++++++++
- dst/copy/unchanged/text | 10 ++++++++++
- dst/move/changed/text | 10 ++++++++++
- dst/move/rearranged/text | 10 ++++++++++
- dst/move/unchanged/text | 10 ++++++++++
- rearranged/text | 2 +-
- src/move/changed/text | 10 ----------
- src/move/rearranged/text | 10 ----------
- src/move/unchanged/text | 10 ----------
- 11 files changed, 62 insertions(+), 32 deletions(-)
+1 1 changed/text
+10 0 dst/copy/changed/text
+10 0 dst/copy/rearranged/text
+10 0 dst/copy/unchanged/text
+10 0 dst/move/changed/text
+10 0 dst/move/rearranged/text
+10 0 dst/move/unchanged/text
+1 1 rearranged/text
+0 10 src/move/changed/text
+0 10 src/move/rearranged/text
+0 10 src/move/unchanged/text
EOF
cat <<EOF >expect_diff_stat_M
- changed/text | 2 +-
- dst/copy/changed/text | 10 ++++++++++
- dst/copy/rearranged/text | 10 ++++++++++
- dst/copy/unchanged/text | 10 ++++++++++
- {src => dst}/move/changed/text | 2 +-
- {src => dst}/move/rearranged/text | 2 +-
- {src => dst}/move/unchanged/text | 0
- rearranged/text | 2 +-
- 8 files changed, 34 insertions(+), 4 deletions(-)
+1 1 changed/text
+10 0 dst/copy/changed/text
+10 0 dst/copy/rearranged/text
+10 0 dst/copy/unchanged/text
+1 1 {src => dst}/move/changed/text
+1 1 {src => dst}/move/rearranged/text
+0 0 {src => dst}/move/unchanged/text
+1 1 rearranged/text
EOF
cat <<EOF >expect_diff_stat_CC
- changed/text | 2 +-
- {src => dst}/copy/changed/text | 2 +-
- {src => dst}/copy/rearranged/text | 2 +-
- {src => dst}/copy/unchanged/text | 0
- {src => dst}/move/changed/text | 2 +-
- {src => dst}/move/rearranged/text | 2 +-
- {src => dst}/move/unchanged/text | 0
- rearranged/text | 2 +-
- 8 files changed, 6 insertions(+), 6 deletions(-)
-EOF
-
-test_expect_success 'sanity check setup (--stat)' '
- git diff --stat HEAD^..HEAD >actual_diff_stat &&
+1 1 changed/text
+1 1 {src => dst}/copy/changed/text
+1 1 {src => dst}/copy/rearranged/text
+0 0 {src => dst}/copy/unchanged/text
+1 1 {src => dst}/move/changed/text
+1 1 {src => dst}/move/rearranged/text
+0 0 {src => dst}/move/unchanged/text
+1 1 rearranged/text
+EOF
+
+test_expect_success 'sanity check setup (--numstat)' '
+ git diff --numstat HEAD^..HEAD >actual_diff_stat &&
test_cmp expect_diff_stat actual_diff_stat &&
- git diff --stat -M HEAD^..HEAD >actual_diff_stat_M &&
+ git diff --numstat -M HEAD^..HEAD >actual_diff_stat_M &&
test_cmp expect_diff_stat_M actual_diff_stat_M &&
- git diff --stat -C -C HEAD^..HEAD >actual_diff_stat_CC &&
+ git diff --numstat -C -C HEAD^..HEAD >actual_diff_stat_CC &&
test_cmp expect_diff_stat_CC actual_diff_stat_CC
'
echo a >a &&
echo b >b &&
cat >expect <<-\EOF
- a | 1 +
- b | 1 +
+ a | 1 +
+ b | 1 +
2 files changed, 2 insertions(+)
EOF
git diff --stat --stat-count=2 >actual &&
- test_cmp expect actual
+ test_i18ncmp expect actual
'
test_done
while read cmd args
do
cat >expect <<-'EOF'
- ...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
+ ...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
EOF
test_expect_success "$cmd: small change with long name gives more space to the name" '
git $cmd $args >output &&
'
cat >expect <<-'EOF'
- ...aaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
+ ...aaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
EOF
test_expect_success "$cmd --stat=width: a long name is given more room when the bar is short" '
git $cmd $args --stat=40 >output &&
'
cat >expect <<-'EOF'
- ...aaaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
+ ...aaaaaaaaaaaaaaaaaaaaaaaaaaa | 1 +
EOF
test_expect_success "$cmd --stat=...,name-width with long name" '
git $cmd $args --stat=60,30 >output &&
cat >expect80 <<'EOF'
abcd | 1000 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EOF
-
+cat >expect80-graph <<'EOF'
+| abcd | 1000 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+EOF
cat >expect200 <<'EOF'
abcd | 1000 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EOF
-
+cat >expect200-graph <<'EOF'
+| abcd | 1000 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+EOF
while read verb expect cmd args
do
test_expect_success "$cmd $verb COLUMNS (big change)" '
grep " | " output >actual &&
test_cmp "$expect" actual
'
+
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --graph $verb COLUMNS (big change)" '
+		COLUMNS=200 git $cmd $args --graph >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect-graph" actual
+ '
done <<\EOF
ignores expect80 format-patch -1 --stdout
respects expect200 diff HEAD^ HEAD --stat
cat >expect40 <<'EOF'
abcd | 1000 ++++++++++++++++++++++++++
EOF
-
+cat >expect40-graph <<'EOF'
+| abcd | 1000 ++++++++++++++++++++++++
+EOF
while read verb expect cmd args
do
test_expect_success "$cmd $verb not enough COLUMNS (big change)" '
test_cmp "$expect" actual
'
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --graph $verb not enough COLUMNS (big change)" '
+		COLUMNS=40 git $cmd $args --graph >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect-graph" actual
+ '
+done <<\EOF
+ignores expect80 format-patch -1 --stdout
+respects expect40 diff HEAD^ HEAD --stat
+respects expect40 show --stat
+respects expect40 log -1 --stat
+EOF
+
+cat >expect40 <<'EOF'
+ abcd | 1000 ++++++++++++++++++++++++++
+EOF
+cat >expect40-graph <<'EOF'
+| abcd | 1000 ++++++++++++++++++++++++++
+EOF
+while read verb expect cmd args
+do
test_expect_success "$cmd $verb statGraphWidth config" '
git -c diff.statGraphWidth=26 $cmd $args >output
grep " | " output >actual &&
test_cmp "$expect" actual
'
+
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --graph $verb statGraphWidth config" '
+		git -c diff.statGraphWidth=26 $cmd $args --graph >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect-graph" actual
+ '
done <<\EOF
ignores expect80 format-patch -1 --stdout
respects expect40 diff HEAD^ HEAD --stat
cat >expect <<'EOF'
abcd | 1000 ++++++++++++++++++++++++++
EOF
+cat >expect-graph <<'EOF'
+| abcd | 1000 ++++++++++++++++++++++++++
+EOF
while read cmd args
do
test_expect_success "$cmd --stat=width with big change" '
test_cmp expect actual
'
- test_expect_success "$cmd --stat-graph--width with big change" '
+ test_expect_success "$cmd --stat-graph-width with big change" '
git $cmd $args --stat-graph-width=26 >output
grep " | " output >actual &&
test_cmp expect actual
'
+
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --stat-width=width --graph with big change" '
+		git $cmd $args --stat-width=40 --graph >output &&
+ grep " | " output >actual &&
+ test_cmp expect-graph actual
+ '
+
+ test_expect_success "$cmd --stat-graph-width --graph with big change" '
+		git $cmd $args --stat-graph-width=26 --graph >output &&
+ grep " | " output >actual &&
+ test_cmp expect-graph actual
+ '
done <<\EOF
format-patch -1 --stdout
diff HEAD^ HEAD --stat
cat >expect <<'EOF'
...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 ++++++++++++
EOF
+cat >expect-graph <<'EOF'
+| ...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 ++++++++++++
+EOF
while read cmd args
do
test_expect_success "$cmd --stat=width with big change is more balanced" '
grep " | " output >actual &&
test_cmp expect actual
'
+
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --stat=width --graph with big change is balanced" '
+ git $cmd $args --stat-width=60 --graph >output &&
+ grep " | " output >actual &&
+ test_cmp expect-graph actual
+ '
done <<\EOF
format-patch -1 --stdout
diff HEAD^ HEAD --stat
cat >expect80 <<'EOF'
...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 ++++++++++++++++++++
EOF
+cat >expect80-graph <<'EOF'
+| ...aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 ++++++++++++++++++++
+EOF
cat >expect200 <<'EOF'
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EOF
+cat >expect200-graph <<'EOF'
+| aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | 1000 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+EOF
while read verb expect cmd args
do
test_expect_success "$cmd $verb COLUMNS (long filename)" '
grep " | " output >actual &&
test_cmp "$expect" actual
'
+
+ test "$cmd" != diff || continue
+
+ test_expect_success "$cmd --graph $verb COLUMNS (long filename)" '
+		COLUMNS=200 git $cmd $args --graph >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect-graph" actual
+ '
done <<\EOF
ignores expect80 format-patch -1 --stdout
respects expect200 diff HEAD^ HEAD --stat
respects expect200 log -1 --stat
EOF
+cat >expect1 <<'EOF'
+ ...aaaaaaa | 1000 ++++++
+EOF
+cat >expect1-graph <<'EOF'
+| ...aaaaaaa | 1000 ++++++
+EOF
+while read verb expect cmd args
+do
+ test_expect_success COLUMNS_CAN_BE_1 \
+ "$cmd $verb prefix greater than COLUMNS (big change)" '
+		COLUMNS=1 git $cmd $args >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect" actual
+ '
+
+ test "$cmd" != diff || continue
+
+ test_expect_success COLUMNS_CAN_BE_1 \
+ "$cmd --graph $verb prefix greater than COLUMNS (big change)" '
+		COLUMNS=1 git $cmd $args --graph >output &&
+ grep " | " output >actual &&
+ test_cmp "$expect-graph" actual
+ '
+done <<\EOF
+ignores expect80 format-patch -1 --stdout
+respects expect1 diff HEAD^ HEAD --stat
+respects expect1 show --stat
+respects expect1 log -1 --stat
+EOF
+
cat >expect <<'EOF'
abcd | 1000 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
EOF
test_expect_success "$title" '
git apply --stat --summary \
<"$TEST_DIRECTORY/t4100/t-apply-$num.patch" >current &&
- test_cmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
+ test_i18ncmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
'
test_expect_success "$title with recount" '
sed -e "$UNC" <"$TEST_DIRECTORY/t4100/t-apply-$num.patch" |
git apply --recount --stat --summary >current &&
- test_cmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
+ test_i18ncmp "$TEST_DIRECTORY"/t4100/t-apply-$num.expect current
'
done <<\EOF
rename
git reset --hard &&
touch empty-file &&
test_tick &&
- { git am empty-file > actual 2>&1 && false || :; } &&
+ test_must_fail git am empty-file 2>actual &&
echo Patch format detection failed. >expected &&
- test_cmp expected actual
+ test_i18ncmp expected actual
'
test_done
| |
| | reach
| | ---
-| | reach.t | 1 +
+| | reach.t | 1 +
| | 1 file changed, 1 insertion(+)
| |
| | diff --git a/reach.t b/reach.t
| | |
| | | octopus-b
| | | ---
-| | | octopus-b.t | 1 +
+| | | octopus-b.t | 1 +
| | | 1 file changed, 1 insertion(+)
| | |
| | | diff --git a/octopus-b.t b/octopus-b.t
| |
| | octopus-a
| | ---
-| | octopus-a.t | 1 +
+| | octopus-a.t | 1 +
| | 1 file changed, 1 insertion(+)
| |
| | diff --git a/octopus-a.t b/octopus-a.t
|
| seventh
| ---
-| seventh.t | 1 +
+| seventh.t | 1 +
| 1 file changed, 1 insertion(+)
|
| diff --git a/seventh.t b/seventh.t
| | | |
| | | | tangle-a
| | | | ---
-| | | | tangle-a | 1 +
+| | | | tangle-a | 1 +
| | | | 1 file changed, 1 insertion(+)
| | | |
| | | | diff --git a/tangle-a b/tangle-a
| |/| |
| | | | side-2
| | | | ---
-| | | | 2 | 1 +
+| | | | 2 | 1 +
| | | | 1 file changed, 1 insertion(+)
| | | |
| | | | diff --git a/2 b/2
| | | |
| | | | side-1
| | | | ---
-| | | | 1 | 1 +
+| | | | 1 | 1 +
| | | | 1 file changed, 1 insertion(+)
| | | |
| | | | diff --git a/1 b/1
| | | |
| | | | Second
| | | | ---
-| | | | one | 1 +
+| | | | one | 1 +
| | | | 1 file changed, 1 insertion(+)
| | | |
| | | | diff --git a/one b/one
|/| |
| | | sixth
| | | ---
-| | | a/two | 1 -
+| | | a/two | 1 -
| | | 1 file changed, 1 deletion(-)
| | |
| | | diff --git a/a/two b/a/two
| | |
| | | fifth
| | | ---
-| | | a/two | 1 +
+| | | a/two | 1 +
| | | 1 file changed, 1 insertion(+)
| | |
| | | diff --git a/a/two b/a/two
| |
| | fourth
| | ---
-| | ein | 1 +
+| | ein | 1 +
| | 1 file changed, 1 insertion(+)
| |
| | diff --git a/ein b/ein
|
| third
| ---
-| ichi | 1 +
-| one | 1 -
+| ichi | 1 +
+| one | 1 -
| 2 files changed, 1 insertion(+), 1 deletion(-)
|
| diff --git a/ichi b/ichi
|
| second
| ---
-| one | 2 +-
+| one | 2 +-
| 1 file changed, 1 insertion(+), 1 deletion(-)
|
| diff --git a/one b/one
initial
---
- one | 1 +
+ one | 1 +
1 file changed, 1 insertion(+)
diff --git a/one b/one
git mailsplit -d3 -o. "$TEST_DIRECTORY"/t5100/nul-plain &&
test_cmp "$TEST_DIRECTORY"/t5100/nul-plain 001 &&
(cat 001 | git mailinfo msg patch) &&
- test 4 = $(wc -l < patch)
+ test_line_count = 4 patch
'
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
---
- Documentation/git-cvsimport-script.txt | 9 ++++++++-
- git-cvsimport-script | 4 ++--
+ Documentation/git-cvsimport-script.txt | 9 ++++++++-
+ git-cvsimport-script | 4 ++--
2 files changed, 10 insertions(+), 3 deletions(-)
50452f9c0c2df1f04d83a26266ba704b13861632
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
---
- builtin-mailinfo.c | 2 +-
+ builtin-mailinfo.c | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
---
- builtin-mailinfo.c | 4 ++--
+ builtin-mailinfo.c | 4 ++--
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
index 3e5fe51..aabfe5c 100644
---
- builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
+ builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
1 files changed, 36 insertions(+), 1 deletions(-)
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
---
- builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
+ builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
1 files changed, 36 insertions(+), 1 deletions(-)
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
Here is a patch from A U Thor.
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
Hope this helps.
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
Hopefully this would fix the problem stated there.
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
Signed-off-by: David K=E5gedal <davidk@lysator.liu.se>
---
- Documentation/git-cvsimport-script.txt | 9 ++++++++-
- git-cvsimport-script | 4 ++--
+ Documentation/git-cvsimport-script.txt | 9 ++++++++-
+ git-cvsimport-script | 4 ++--
2 files changed, 10 insertions(+), 3 deletions(-)
50452f9c0c2df1f04d83a26266ba704b13861632
Here is a patch from A U Thor.
---
- foo | 2 +-
+ foo | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/foo b/foo
Signed-off-by: Lukas Sandström <lukass@etek.chalmers.se>
Signed-off-by: Junio C Hamano <gitster@pobox.com>
---
- builtin-mailinfo.c | 2 +-
+ builtin-mailinfo.c | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
Here comes a commit log message, and
its second line is here.
---
- builtin-mailinfo.c | 4 ++--
+ builtin-mailinfo.c | 4 ++--
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
index 3e5fe51..aabfe5c 100644
Signed-off-by: Junio C Hamano <gitster@pobox.com>
---
- builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
+ builtin-mailinfo.c | 37 ++++++++++++++++++++++++++++++++++++-
1 files changed, 36 insertions(+), 1 deletions(-)
diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c
git request-pull initial "$downstream_url" >../request
) &&
<request sed -nf fuzz.sed >request.fuzzy &&
- test_cmp expect request.fuzzy
+ test_i18ncmp expect request.fuzzy
'
git push parent2 HEAD
'
+# $1 = local revision
+# $2 = remote revision (tested to be equal to the local one)
+check_pushed_commit () {
+ git log -1 --format='%h %s' "$1" >expect &&
+ git --git-dir=repo1 log -1 --format='%h %s' "$2" >actual &&
+ test_cmp expect actual
+}
+
+# $1 = push.default value
+# $2 = expected target branch for the push
+test_push_success () {
+ git -c push.default="$1" push &&
+ check_pushed_commit HEAD "$2"
+}
+
+# $1 = push.default value
+# check that push fails and does not modify any remote branch
+test_push_failure () {
+ git --git-dir=repo1 log --no-walk --format='%h %s' --all >expect &&
+ test_must_fail git -c push.default="$1" push &&
+ git --git-dir=repo1 log --no-walk --format='%h %s' --all >actual &&
+ test_cmp expect actual
+}
+
test_expect_success '"upstream" pushes to configured upstream' '
git checkout master &&
test_config branch.master.remote parent1 &&
test_config branch.master.merge refs/heads/foo &&
- test_config push.default upstream &&
test_commit two &&
- git push &&
- echo two >expect &&
- git --git-dir=repo1 log -1 --format=%s foo >actual &&
- test_cmp expect actual
+ test_push_success upstream foo
'
test_expect_success '"upstream" does not push on unconfigured remote' '
test_unconfig branch.master.remote &&
test_config push.default upstream &&
test_commit three &&
- test_must_fail git push
+ test_push_failure upstream
'
test_expect_success '"upstream" does not push on unconfigured branch' '
test_unconfig branch.master.merge &&
test_config push.default upstream
test_commit four &&
- test_must_fail git push
+ test_push_failure upstream
'
test_expect_success '"upstream" does not push when remotes do not match' '
test_must_fail git push parent2
'
+test_expect_success 'push from/to new branch with upstream, matching and simple' '
+ git checkout -b new-branch &&
+ test_push_failure simple &&
+ test_push_failure matching &&
+ test_push_failure upstream
+'
+
+test_expect_success 'push from/to new branch with current creates remote branch' '
+ test_config branch.new-branch.remote repo1 &&
+ git checkout new-branch &&
+ test_push_success current new-branch
+'
+
+test_expect_success 'push to existing branch, with no upstream configured' '
+ test_config branch.master.remote repo1 &&
+ git checkout master &&
+ test_push_failure simple &&
+ test_push_failure upstream
+'
+
+test_expect_success 'push to existing branch, upstream configured with same name' '
+ test_config branch.master.remote repo1 &&
+ test_config branch.master.merge refs/heads/master &&
+ git checkout master &&
+ test_commit six &&
+ test_push_success upstream master &&
+ test_commit seven &&
+ test_push_success simple master
+'
+
+test_expect_success 'push to existing branch, upstream configured with different name' '
+ test_config branch.master.remote repo1 &&
+ test_config branch.master.merge refs/heads/other-name &&
+ git checkout master &&
+ test_commit eight &&
+ test_push_success upstream other-name &&
+ test_commit nine &&
+ test_push_failure simple &&
+ git --git-dir=repo1 log -1 --format="%h %s" "other-name" >expect-other-name &&
+ test_push_success current master &&
+ git --git-dir=repo1 log -1 --format="%h %s" "other-name" >actual-other-name &&
+ test_cmp expect-other-name actual-other-name
+'
+
test_done
)
'
+test_expect_success 'push unpushed submodules when not needed' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git checkout master &&
+ >junk5 &&
+ git add junk5 &&
+ git commit -m "Fifth junk" &&
+ git push &&
+ git rev-parse origin/master >../../../expected
+ ) &&
+ git checkout master &&
+ git add gar/bage &&
+ git commit -m "Fifth commit for gar/bage" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushed submodules when not needed 2' '
+ (
+ cd submodule.git &&
+ git rev-parse master >../expected
+ ) &&
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ >junk6 &&
+ git add junk6 &&
+ git commit -m "Sixth junk"
+ ) &&
+ >junk2 &&
+ git add junk2 &&
+ git commit -m "Second junk for work" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushed submodules recursively' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git checkout master &&
+ > junk7 &&
+ git add junk7 &&
+ git commit -m "Seventh junk" &&
+ git rev-parse master >../../../expected
+ ) &&
+ git checkout master &&
+ git add gar/bage &&
+ git commit -m "Seventh commit for gar/bage" &&
+ git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push unpushable submodule recursively fails' '
+ (
+ cd work &&
+ (
+ cd gar/bage &&
+ git rev-parse origin/master >../../../expected &&
+ git checkout master~0 &&
+ > junk8 &&
+ git add junk8 &&
+ git commit -m "Eighth junk"
+ ) &&
+ git add gar/bage &&
+ git commit -m "Eighth commit for gar/bage" &&
+ test_must_fail git push --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ (
+ cd submodule.git &&
+ git rev-parse master >../actual
+ ) &&
+ test_cmp expected actual
+'
+
test_done
git clone --bare test_repo test_repo.git &&
cd test_repo.git &&
git config http.receivepack true &&
+ git config core.logallrefupdates true &&
ORIG_HEAD=$(git rev-parse --verify HEAD) &&
cd - &&
mv test_repo.git "$HTTPD_DOCUMENT_ROOT_PATH"
'
test_expect_success 'push fails for non-fast-forward refs unmatched by remote helper: our output' '
- test_i18ngrep "To prevent you from losing history, non-fast-forward updates were rejected" \
+ test_i18ngrep "Updates were rejected because" \
output
'
git push --mirror "$HTTPD_URL"/smart/alternates-mirror.git
'
-test_expect_success TTY 'quiet push' '
+test_expect_success TTY 'push shows progress when stderr is a tty' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit noisy &&
+ test_terminal git push >output 2>&1 &&
+ grep "^Writing objects" output
+'
+
+test_expect_success TTY 'push --quiet silences status and progress' '
cd "$ROOT_PATH"/test_repo_clone &&
test_commit quiet &&
- test_terminal git push --quiet --no-progress 2>&1 | tee output &&
+ test_terminal git push --quiet >output 2>&1 &&
test_cmp /dev/null output
'
+test_expect_success TTY 'push --no-progress silences progress but not status' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit no-progress &&
+ test_terminal git push --no-progress >output 2>&1 &&
+ grep "^To http" output &&
+ ! grep "^Writing objects" output
+'
+
+test_expect_success 'push --progress shows progress to non-tty' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit progress &&
+ git push --progress >output 2>&1 &&
+ grep "^To http" output &&
+ grep "^Writing objects" output
+'
+
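+# The two reflog tests below check the committer identity recorded on the
+# server side: a plain push is expected to land with a synthesized anonymous
+# identity, while the smart_custom_env URL (assumed to be set up by the httpd
+# test harness to provide GIT_COMMITTER_* values) should record that custom
+# identity instead.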
+test_expect_success 'http push gives sane defaults to reflog' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit reflog-test &&
+ git push "$HTTPD_URL"/smart/test_repo.git &&
+ git --git-dir="$HTTPD_DOCUMENT_ROOT_PATH/test_repo.git" \
+ log -g -1 --format="%gn <%ge>" >actual &&
+ echo "anonymous <anonymous@http.127.0.0.1>" >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'http push respects GIT_COMMITTER_* in reflog' '
+ cd "$ROOT_PATH"/test_repo_clone &&
+ test_commit custom-reflog-test &&
+ git push "$HTTPD_URL"/smart_custom_env/test_repo.git &&
+ git --git-dir="$HTTPD_DOCUMENT_ROOT_PATH/test_repo.git" \
+ log -g -1 --format="%gn <%ge>" >actual &&
+ echo "Custom User <custom@example.com>" >expect &&
+ test_cmp expect actual
+'
+
stop_httpd
test_done
esac
done
- if test $# -ne 3
- then
- error "invalid number of arguments"
- fi
-
+ msg=$1
+ shift
cmd=$1
- repo=$2
- msg=$3
+ shift
+ repo=$1
+ shift || error "invalid number of arguments"
if test -x "$GIT_DAEMON_DOCUMENT_ROOT_PATH/$repo"
then
fi
fi
- test_must_fail git "$cmd" "$GIT_DAEMON_URL/$repo" 2>output &&
+ test_must_fail git "$cmd" "$GIT_DAEMON_URL/$repo" "$@" 2>output &&
echo "fatal: remote error: $msg: /$repo" >expect &&
test_cmp expect output
ret=$?
}
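# test_remote_error (above) is now invoked as:
#   test_remote_error [-x|-n] <msg> <cmd> <repo> [<args>...]
# with the expected message first, so that extra arguments (such as the
# refspec for push) can be forwarded to the git command.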
msg="access denied or repository not exported"
-test_expect_success 'clone non-existent' "test_remote_error clone nowhere.git '$msg'"
-test_expect_success 'push disabled' "test_remote_error push repo.git '$msg'"
-test_expect_success 'read access denied' "test_remote_error -x fetch repo.git '$msg'"
-test_expect_success 'not exported' "test_remote_error -n fetch repo.git '$msg'"
+test_expect_success 'clone non-existent' "test_remote_error '$msg' clone nowhere.git "
+test_expect_success 'push disabled' "test_remote_error '$msg' push repo.git master"
+test_expect_success 'read access denied' "test_remote_error -x '$msg' fetch repo.git "
+test_expect_success 'not exported' "test_remote_error -n '$msg' fetch repo.git "
stop_git_daemon
start_git_daemon --informative-errors
-test_expect_success 'clone non-existent' "test_remote_error clone nowhere.git 'no such repository'"
-test_expect_success 'push disabled' "test_remote_error push repo.git 'service not enabled'"
-test_expect_success 'read access denied' "test_remote_error -x fetch repo.git 'no such repository'"
-test_expect_success 'not exported' "test_remote_error -n fetch repo.git 'repository not exported'"
+test_expect_success 'clone non-existent' "test_remote_error 'no such repository' clone nowhere.git "
+test_expect_success 'push disabled' "test_remote_error 'service not enabled' push repo.git master"
+test_expect_success 'read access denied' "test_remote_error -x 'no such repository' fetch repo.git "
+test_expect_success 'not exported' "test_remote_error -n 'repository not exported' fetch repo.git "
stop_git_daemon
test_done
cd "$base_dir"
test_expect_success 'existence of info/alternates' \
-'test `wc -l <C/.git/objects/info/alternates` = 2'
+'test_line_count = 2 C/.git/objects/info/alternates'
cd "$base_dir"
cd "$base_dir"
test_expect_success 'existence of info/alternates' \
-'test `wc -l <D/.git/objects/info/alternates` = 1'
+'test_line_count = 1 D/.git/objects/info/alternates'
cd "$base_dir"
test_valid_repo() {
git fsck --full > fsck.log &&
- test `wc -l < fsck.log` = 0
+ test_line_count = 0 fsck.log
}
base_dir=`pwd`
compare_refs localclone HEAD server HEAD
'
+# Generally, skip this test. It demonstrates a now-fixed race in
+# git-remote-testgit, but is too slow to leave in for general use.
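+# (The leading ":" below makes the whole test_expect_success call a no-op;
+# remove it to run the test, which uses GIT_REMOTE_TESTGIT_SLEEPY to widen
+# the race window.)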
+: test_expect_success 'racily pushing to local repo' '
+ test_when_finished "rm -rf server2 localclone2" &&
+ cp -a server server2 &&
+ git clone "testgit::${PWD}/server2" localclone2 &&
+ (cd localclone2 &&
+ echo content >>file &&
+ git commit -a -m three &&
+ GIT_REMOTE_TESTGIT_SLEEPY=2 git push) &&
+ compare_refs localclone2 HEAD server2 HEAD
+'
+
test_expect_success 'synch with changes from localclone' '
(cd clone &&
git pull)
test_expect_success 'del LF before empty (1)' '
git show -s --pretty=format:"%s%n%-b%nThanks%n" HEAD^^ >actual &&
- test $(wc -l <actual) = 2
+ test_line_count = 2 actual
'
test_expect_success 'del LF before empty (2)' '
git show -s --pretty=format:"%s%n%-b%nThanks%n" HEAD >actual &&
- test $(wc -l <actual) = 6 &&
+ test_line_count = 6 actual &&
grep "^$" actual
'
test_expect_success 'add LF before non-empty (1)' '
git show -s --pretty=format:"%s%+b%nThanks%n" HEAD^^ >actual &&
- test $(wc -l <actual) = 2
+ test_line_count = 2 actual
'
test_expect_success 'add LF before non-empty (2)' '
git show -s --pretty=format:"%s%+b%nThanks%n" HEAD >actual &&
- test $(wc -l <actual) = 6 &&
+ test_line_count = 6 actual &&
grep "^$" actual
'
git commit -m "dummy" --allow-empty &&
git filter-branch --msg-filter "sed -e s/dummy//" HEAD^^.. &&
git rev-list --oneline HEAD >test.txt &&
- test $(git rev-list --oneline HEAD | wc -l) -eq 5 &&
- test $(git rev-list --oneline --graph HEAD | wc -l) -eq 5
+ test_line_count = 5 test.txt &&
+ git rev-list --oneline --graph HEAD >testg.txt &&
+ test_line_count = 5 testg.txt
'
test_done
git merge -m "merge HASH7 and SIDE_HASH7" "$HASH7" &&
B_HASH=$(git rev-parse --verify HEAD) &&
git merge-base --all "$A_HASH" "$B_HASH" > merge_bases.txt &&
- test $(wc -l < merge_bases.txt) = "2" &&
+ test_line_count = 2 merge_bases.txt &&
grep "$HASH5" merge_bases.txt &&
grep "$SIDE_HASH5" merge_bases.txt
'
test_expect_success 'massive simple rename does not spam added files' '
sane_unset GIT_MERGE_VERBOSITY &&
git merge --no-stat simple-rename | grep -v Removing >output &&
- test 5 -gt "$(wc -l < output)"
+ test_line_count -lt 5 output
'
test_done
(
cd test && git checkout b1
) >actual &&
- grep "have 1 and 1 different" actual
+ test_i18ngrep "have 1 and 1 different" actual
'
test_expect_success 'checkout with local tracked branch' '
git checkout master &&
git checkout follower >actual &&
- grep "is ahead of" actual
+ test_i18ngrep "is ahead of" actual
'
test_expect_success 'status' '
# reports nothing to commit
test_must_fail git commit --dry-run
) >actual &&
- grep "have 1 and 1 different" actual
+ test_i18ngrep "have 1 and 1 different" actual
'
test_expect_success 'fail to track lightweight tags' '
git checkout master &&
git tag light &&
test_must_fail git branch --track lighttrack light >actual &&
- test_must_fail grep "set up to track" actual &&
+ test_i18ngrep ! "set up to track" actual &&
test_must_fail git checkout lighttrack
'
git checkout master &&
git tag -m heavy heavy &&
test_must_fail git branch --track heavytrack heavy >actual &&
- test_must_fail grep "set up to track" actual &&
+ test_i18ngrep ! "set up to track" actual &&
test_must_fail git checkout heavytrack
'
test 0 -eq $(git ls-files -u | wc -l) &&
test 0 -eq $(git ls-files -o | wc -l) &&
- test 6 -eq $(wc -l < c) &&
+ test_line_count = 6 c &&
test $(git rev-parse HEAD:a) = $(git rev-parse B:a) &&
test $(git rev-parse HEAD:b) = $(git rev-parse A:b)
'
echo "l3" >two &&
test_tick &&
- git commit -a -m "Left #3" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #3" &&
echo "l4" >two &&
test_tick &&
- git commit -a -m "Left #4" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #4" &&
echo "l5" >two &&
test_tick &&
- git commit -a -m "Left #5" &&
+ GIT_COMMITTER_NAME="Another Committer" \
+ GIT_AUTHOR_NAME="Another Author" git commit -a -m "Left #5" &&
git tag tag-l5 &&
git checkout right &&
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left: (5 commits)
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left: (5 commits)
Left #5
Left #4
cat >expected <<-EOF &&
Merge branch ${apos}left${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
cat >expected.log <<-EOF &&
Sync with left
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* ${apos}left${apos} of $(pwd):
Left #5
Left #4
cat >expected <<-EOF
Merge branches ${apos}left${apos} and ${apos}right${apos}
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
Common #2
Common #1
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* tag ${apos}tag-l5${apos}:
Left #5
Left #4
Common #2
Common #1
+ By Another Author (3) and A U Thor (2)
+ via Another Committer
* left:
Left #5
Left #4
test_cmp expect actual
'
+test_expect_success 'listing tags in column' '
+ COLUMNS=40 git tag -l --column=row >actual &&
+ cat >expected <<\EOF &&
+a1 aa1 cba t210 t211
+v0.2.1 v1.0 v1.0.1 v1.1.3
+EOF
+ test_cmp expected actual
+'
+
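+# The same layout can also come from configuration: column.tag (together
+# with the generic column.ui) controls how "git tag -l" is columnized when
+# no --column option is given.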
+test_expect_success 'listing tags in column with column.*' '
+ git config column.tag row &&
+ git config column.ui dense &&
+ COLUMNS=40 git tag -l >actual &&
+ git config --unset column.ui &&
+ git config --unset column.tag &&
+ cat >expected <<\EOF &&
+a1 aa1 cba t210 t211
+v0.2.1 v1.0 v1.0.1 v1.1.3
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'listing tag with -n --column should fail' '
+ test_must_fail git tag --column -n
+'
+
+test_expect_success 'listing tags -n in column with column.ui ignored' '
+ git config column.ui "row dense" &&
+ COLUMNS=40 git tag -l -n >actual &&
+ git config --unset column.ui &&
+ cat >expected <<\EOF &&
+a1 Foo
+aa1 Foo
+cba Foo
+t210 Foo
+t211 Foo
+v0.2.1 Foo
+v1.0 Foo
+v1.0.1 Foo
+v1.1.3 Foo
+EOF
+ test_cmp expected actual
+'
+
# creating and verifying lightweight tags:
test_expect_success \
git checkout -f renamer && git clean -f &&
git checkout renamer^ 2>messages &&
test_i18ngrep "HEAD is now at 7329388" messages &&
- test 1 -eq $(wc -l <messages) &&
+ test_line_count = 1 messages &&
H=$(git rev-parse --verify HEAD) &&
M=$(git show-ref -s --verify refs/heads/master) &&
test "z$H" = "z$M" &&
git checkout -f renamer && git clean -f &&
git checkout renamer^ 2>messages &&
test_i18ngrep "HEAD is now at 7329388" messages &&
- test 1 -lt $(wc -l <messages) &&
+ test_line_count -gt 1 messages &&
H=$(git rev-parse --verify HEAD) &&
M=$(git show-ref -s --verify refs/heads/master) &&
test "z$H" = "z$M" &&
test_expect_success 'status should only print one line' '
git submodule status >lines &&
- test $(wc -l <lines) = 1
+ test_line_count = 1 lines
'
test_expect_success 'setup - fetch commit name from submodule' '
cd "$base_dir"
test_expect_success 'after add: existence of info/alternates' \
-'test `wc -l <super/.git/modules/sub/objects/info/alternates` = 1'
+'test_line_count = 1 super/.git/modules/sub/objects/info/alternates'
cd "$base_dir"
cd "$base_dir"
test_expect_success 'after update: existence of info/alternates' \
-'test `wc -l <super-clone/.git/modules/sub/objects/info/alternates` = 1'
+'test_line_count = 1 super-clone/.git/modules/sub/objects/info/alternates'
cd "$base_dir"
git reset --hard &&
git commit -s -m "hello: kitty" --allow-empty &&
git cat-file commit HEAD | sed -e "1,/^$/d" >actual &&
- test $(wc -l <actual) = 3
+ test_line_count = 3 actual
'
test_i18ngrep "use \"git rm --cached <file>\.\.\.\" to unstage" output
'
+test_expect_success 'status --column' '
+ COLUMNS=50 git status --column="column dense" >output &&
+ cat >expect <<\EOF &&
+# On branch master
+# Changes to be committed:
+# (use "git reset HEAD <file>..." to unstage)
+#
+# new file: dir2/added
+#
+# Changes not staged for commit:
+# (use "git add <file>..." to update what will be committed)
+# (use "git checkout -- <file>..." to discard changes in working directory)
+#
+# modified: dir1/modified
+#
+# Untracked files:
+# (use "git add <file>..." to include in what will be committed)
+#
+# dir1/untracked dir2/untracked untracked
+# dir2/modified output
+EOF
+ test_cmp expect output
+'
+
cat >expect <<\EOF
# On branch master
# Changes to be committed:
Trying simple merge with c3
Trying simple merge with c4
Merge made by the 'octopus' strategy.
- c2.c | 1 +
- c3.c | 1 +
- c4.c | 1 +
+ c2.c | 1 +
+ c3.c | 1 +
+ c4.c | 1 +
3 files changed, 3 insertions(+)
create mode 100644 c2.c
create mode 100644 c3.c
test_expect_success 'merge output uses pretty names' '
git reset --hard c1 &&
git merge c2 c3 c4 >actual &&
- test_cmp actual expected
+ test_i18ncmp expected actual
'
cat >expected <<\EOF
Merge made by the 'recursive' strategy.
- c5.c | 1 +
+ c5.c | 1 +
1 file changed, 1 insertion(+)
create mode 100644 c5.c
EOF
test_expect_success 'merge reduces irrelevant remote heads' '
GIT_MERGE_VERBOSITY=0 git merge c4 c5 >actual &&
- test_cmp expected actual
+ test_i18ncmp expected actual
'
cat >expected <<\EOF
Fast-forwarding to: c1
Trying simple merge with c2
Merge made by the 'octopus' strategy.
- c1.c | 1 +
- c2.c | 1 +
+ c1.c | 1 +
+ c2.c | 1 +
2 files changed, 2 insertions(+)
create mode 100644 c1.c
create mode 100644 c2.c
test_expect_success 'merge fast-forward output uses pretty names' '
git reset --hard c0 &&
git merge c1 c2 >actual &&
- test_cmp actual expected
+ test_i18ncmp expected actual
'
test_done
test_cmp important c1.c
'
+test_expect_failure 'will not overwrite unstaged changes in renamed file' '
+ git reset --hard c1 &&
+ git mv c1.c other.c &&
+ git commit -m rename &&
+ cp important other.c &&
+ git merge c1a &&
+ test_cmp important other.c
+'
+
test_expect_success 'will not overwrite untracked subtree' '
git reset --hard c0 &&
rm -rf sub &&
compare_mtimes < mtimes
'
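+# The test below packs two loose blobs into separate packs, backdates the
+# second pack past the --unpack-unreachable cutoff, and checks that after
+# "git repack -A -d" the recent unreachable object is still retrievable
+# while the old one is dropped instead of being loosened.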
+test_expect_success 'do not bother loosening old objects' '
+ obj1=$(echo one | git hash-object -w --stdin) &&
+ obj2=$(echo two | git hash-object -w --stdin) &&
+ pack1=$(echo $obj1 | git pack-objects .git/objects/pack/pack) &&
+ pack2=$(echo $obj2 | git pack-objects .git/objects/pack/pack) &&
+ git prune-packed &&
+ git cat-file -p $obj1 &&
+ git cat-file -p $obj2 &&
+ test-chmtime =-86400 .git/objects/pack/pack-$pack2.pack &&
+ git repack -A -d --unpack-unreachable=1.hour.ago &&
+ git cat-file -p $obj1 &&
+ test_must_fail git cat-file -p $obj2
+'
+
test_done
test "$diff" = ""
'
+test_expect_success PERL 'difftool forwards arguments to diff' '
+ >for-diff &&
+ git add for-diff &&
+ echo changes>for-diff &&
+ git add for-diff &&
+ diff=$(git difftool --cached --no-prompt -- for-diff) &&
+ test "$diff" = "" &&
+ git reset -- for-diff &&
+ rm for-diff
+'
+
test_expect_success PERL 'difftool honors --gui' '
git config merge.tool bogus-tool &&
git config diff.tool bogus-tool &&
--- /dev/null
+#!/bin/sh
+
+test_description='git column'
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ cat >lista <<\EOF
+one
+two
+three
+four
+five
+six
+seven
+eight
+nine
+ten
+eleven
+EOF
+'
+
+test_expect_success 'never' '
+ git column --indent=Z --mode=never <lista >actual &&
+ test_cmp lista actual
+'
+
+test_expect_success 'always' '
+ cat >expected <<\EOF &&
+Zone
+Ztwo
+Zthree
+Zfour
+Zfive
+Zsix
+Zseven
+Zeight
+Znine
+Zten
+Zeleven
+EOF
+ git column --indent=Z --mode=plain <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '80 columns' '
+ cat >expected <<\EOF &&
+one two three four five six seven eight nine ten eleven
+EOF
+ COLUMNS=80 git column --mode=column <lista >actual &&
+ test_cmp expected actual
+'
+
+cat >expected <<\EOF
+one
+two
+three
+four
+five
+six
+seven
+eight
+nine
+ten
+eleven
+EOF
+
+test_expect_success COLUMNS_CAN_BE_1 'COLUMNS = 1' '
+ COLUMNS=1 git column --mode=column <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'width = 1' '
+ git column --mode=column --width=1 <lista >actual &&
+ test_cmp expected actual
+'
+
+COLUMNS=20
+export COLUMNS
+
+test_expect_success '20 columns' '
+ cat >expected <<\EOF &&
+one seven
+two eight
+three nine
+four ten
+five eleven
+six
+EOF
+ git column --mode=column <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, nodense' '
+ cat >expected <<\EOF &&
+one seven
+two eight
+three nine
+four ten
+five eleven
+six
+EOF
+ git column --mode=column,nodense < lista > actual &&
+ test_cmp expected actual
+'
+
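+# "dense" packs the columns more tightly (each column is sized to its own
+# longest entry), so the same 20-column width fits three columns below
+# instead of the two above.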
+test_expect_success '20 columns, dense' '
+ cat >expected <<\EOF &&
+one five nine
+two six ten
+three seven eleven
+four eight
+EOF
+ git column --mode=column,dense < lista > actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, padding 2' '
+ cat >expected <<\EOF &&
+one seven
+two eight
+three nine
+four ten
+five eleven
+six
+EOF
+ git column --mode=column --padding 2 <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, indented' '
+ cat >expected <<\EOF &&
+ one seven
+ two eight
+ three nine
+ four ten
+ five eleven
+ six
+EOF
+ git column --mode=column --indent=" " <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, row first' '
+ cat >expected <<\EOF &&
+one two
+three four
+five six
+seven eight
+nine ten
+eleven
+EOF
+ git column --mode=row <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, row first, nodense' '
+ cat >expected <<\EOF &&
+one two
+three four
+five six
+seven eight
+nine ten
+eleven
+EOF
+ git column --mode=row,nodense <lista >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success '20 columns, row first, dense' '
+ cat >expected <<\EOF &&
+one two three
+four five six
+seven eight nine
+ten eleven
+EOF
+ git column --mode=row,dense <lista >actual &&
+ test_cmp expected actual
+'
+
+test_done
' - "$1"
}
+verify_packs () {
+ for p in .git/objects/pack/*.pack
+ do
+ git verify-pack "$@" "$p" || return
+ done
+}
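+# verify_packs runs git verify-pack over every packfile in the repository,
+# failing on the first bad one; the tests below use it in place of their
+# old inline loops.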
+
file2_data='file2
second line of EOF'
'A: create pack from stdin' \
'git fast-import --export-marks=marks.out <input &&
git whatchanged master'
-test_expect_success \
- 'A: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'A: verify pack' '
+ verify_packs
+'
cat >expect <<EOF
author $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
'A: verify marks import does not crash' \
'git fast-import --import-marks=marks.out <input &&
git whatchanged verify--import-marks'
-test_expect_success \
- 'A: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'A: verify pack' '
+ verify_packs
+'
+
cat >expect <<EOF
:000000 100755 0000000000000000000000000000000000000000 7123f7f44e39be127c5eb701e5968176ee9d78b1 A copy-of-file2
EOF
'C: incremental import create pack from stdin' \
'git fast-import <input &&
git whatchanged branch'
-test_expect_success \
- 'C: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'C: verify pack' '
+ verify_packs
+'
+
test_expect_success \
'C: validate reuse existing blob' \
'test $newf = `git rev-parse --verify branch:file2/newf` &&
'D: inline data in commit' \
'git fast-import <input &&
git whatchanged branch'
-test_expect_success \
- 'D: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'D: verify pack' '
+ verify_packs
+'
cat >expect <<EOF
:000000 100755 0000000000000000000000000000000000000000 35a59026a33beac1569b1c7f66f3090ce9c09afc A newdir/exec.sh
test_expect_success \
'E: rfc2822 date, --date-format=rfc2822' \
'git fast-import --date-format=rfc2822 <input'
-test_expect_success \
- 'E: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'E: verify pack' '
+ verify_packs
+'
cat >expect <<EOF
author $GIT_AUTHOR_NAME <$GIT_AUTHOR_EMAIL> 1170778938 -0500
fi
fi
'
-test_expect_success \
- 'F: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'F: verify pack' '
+ verify_packs
+'
cat >expect <<EOF
tree `git rev-parse branch~1^{tree}`
test_expect_success \
'G: non-fast-forward update forced' \
'git fast-import --force <input'
-test_expect_success \
- 'G: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'G: verify pack' '
+ verify_packs
+'
+
test_expect_success \
'G: branch changed, but logged' \
'test $old_branch != `git rev-parse --verify branch^0` &&
 'H: deleteall, add 1' \
'git fast-import <input &&
git whatchanged H'
-test_expect_success \
- 'H: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'H: verify pack' '
+ verify_packs
+'
cat >expect <<EOF
:100755 000000 f1fb5da718392694d0076d677d6d0e364c79b0bc 0000000000000000000000000000000000000000 D file2/newf
'Q: commit notes' \
'git fast-import <input &&
git whatchanged notes-test'
-test_expect_success \
- 'Q: verify pack' \
- 'for p in .git/objects/pack/*.pack;do git verify-pack $p||exit;done'
+
+test_expect_success 'Q: verify pack' '
+ verify_packs
+'
commit1=$(git rev-parse notes-test~2)
commit2=$(git rev-parse notes-test^)
'R: blob bigger than threshold' \
'test_create_repo R &&
git --git-dir=R/.git fast-import --big-file-threshold=1 <input'
-test_expect_success \
- 'R: verify created pack' \
- ': >verify &&
- for p in R/.git/objects/pack/*.pack;
- do
- git verify-pack -v $p >>verify || exit;
- done'
+
+test_expect_success 'R: verify created pack' '
+ (
+ cd R &&
+ verify_packs -v > ../verify
+ )
+'
+
test_expect_success \
'R: verify written objects' \
'git --git-dir=R/.git cat-file blob big-file:big1 >actual &&
git checkout -b marks master &&
git fast-export --export-marks=tmp-marks HEAD &&
test -s tmp-marks &&
- test $(wc -l < tmp-marks) -eq 3 &&
+ test_line_count = 3 tmp-marks &&
test $(
git fast-export --import-marks=tmp-marks\
--export-marks=tmp-marks HEAD |
grep ^commit\ |
wc -l) \
-eq 1 &&
- test $(wc -l < tmp-marks) -eq 4
+ test_line_count = 4 tmp-marks
'
cd cvswork &&
GIT_CONFIG="$git_config" cvs update &&
GIT_CONFIG="$git_config" cvs status | grep "^File: status.file" >../out &&
- test $(wc -l <../out) = 2
+ test_line_count = 2 ../out
'
cd "$WORKDIR"
test_expect_success 'cvs status (nonrecursive)' '
cd cvswork &&
GIT_CONFIG="$git_config" cvs status -l | grep "^File: status.file" >../out &&
- test $(wc -l <../out) = 1
+ test_line_count = 1 ../out
'
cd "$WORKDIR"
cd "$WORKDIR"
test_expect_success 'cvs co -c (shows module database)' '
GIT_CONFIG="$git_config" cvs co -c > out &&
- grep "^master[ ]\+master$" < out &&
- ! grep -v "^master[ ]\+master$" < out
+ grep "^master[ ][ ]*master$" <out &&
+ ! grep -v "^master[ ][ ]*master$" <out
'
#------------
test_expect_success 'snapshots: bad tree-ish id (tagged object)' '
echo object > tag-object &&
git add tag-object &&
- git commit -m "Object to be tagged" &&
+ test_tick && git commit -m "Object to be tagged" &&
git tag tagged-object `git hash-object tag-object` &&
gitweb_run "p=.git;a=snapshot;h=tagged-object;sf=tgz" &&
grep "400 - Object is not a tree-ish" gitweb.output
'
test_debug 'cat gitweb.output'
+# ----------------------------------------------------------------------
+# modification times (Last-Modified and If-Modified-Since)
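+# gitweb receives the client header through the standard HTTP_IF_MODIFIED_SINCE
+# CGI variable, which is why the tests below export it before calling gitweb_run.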
+
+test_expect_success 'modification: feed last-modified' '
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ grep "Last-modified: Thu, 7 Apr 2005 22:14:13 +0000" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: feed if-modified-since (modified)' '
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 200 OK" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: feed if-modified-since (unmodified)' '
+ export HTTP_IF_MODIFIED_SINCE="Thu, 7 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=atom;h=master" &&
+ grep "Status: 304 Not Modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot last-modified' '
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ grep "Last-modified: Thu, 7 Apr 2005 22:14:13 +0000" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot if-modified-since (modified)' '
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: snapshot if-modified-since (unmodified)' '
+ export HTTP_IF_MODIFIED_SINCE="Thu, 7 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=master;sf=tgz" &&
+ grep "Status: 304 Not Modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
+
+test_expect_success 'modification: tree snapshot' '
+ ID=`git rev-parse --verify HEAD^{tree}` &&
+ export HTTP_IF_MODIFIED_SINCE="Wed, 6 Apr 2005 22:14:13 +0000" &&
+ test_when_finished "unset HTTP_IF_MODIFIED_SINCE" &&
+ gitweb_run "p=.git;a=snapshot;h=$ID;sf=tgz" &&
+ grep "Status: 200 OK" gitweb.headers &&
+ ! grep -i "last-modified" gitweb.headers
+'
+test_debug 'cat gitweb.headers'
# ----------------------------------------------------------------------
# load checking
#!/bin/sh
-test_description='git-p4 tests'
+test_description='git p4 tests'
. ./lib-git-p4.sh
)
'
-test_expect_success 'basic git-p4 clone' '
- "$GITP4" clone --dest="$git" //depot &&
+test_expect_success 'basic git p4 clone' '
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
)
'
-test_expect_success 'git-p4 clone @all' '
- "$GITP4" clone --dest="$git" //depot@all &&
+test_expect_success 'git p4 clone @all' '
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
)
'
-test_expect_success 'git-p4 sync uninitialized repo' '
+test_expect_success 'git p4 sync uninitialized repo' '
test_create_repo "$git" &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- test_must_fail "$GITP4" sync
+ test_must_fail git p4 sync
)
'
# Create a git repo by hand. Add a commit so that HEAD is valid.
# Test imports a new p4 repository into a new git branch.
#
-test_expect_success 'git-p4 sync new branch' '
+test_expect_success 'git p4 sync new branch' '
test_create_repo "$git" &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_commit head &&
- "$GITP4" sync --branch=refs/remotes/p4/depot //depot@all &&
+ git p4 sync --branch=refs/remotes/p4/depot //depot@all &&
git log --oneline p4/depot >lines &&
test_line_count = 2 lines
)
p4 add sub2/f2 &&
p4 submit -d "sub2/f2"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1 //depot/sub2 &&
+ git p4 clone --dest="$git" //depot/sub1 //depot/sub2 &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub1/f3 &&
p4 submit -d "sub1/f3"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
+ git p4 clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub2/f3 &&
p4 submit -d "sub2/f3"
) &&
- "$GITP4" clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
+ git p4 clone --dest="$git" //depot/sub1@all //depot/sub2@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
exit 1
EOF
chmod 755 "$badp4dir"/p4 &&
- PATH="$badp4dir:$PATH" "$GITP4" clone --dest="$git" //depot >errs 2>&1 ; retval=$? &&
+ PATH="$badp4dir:$PATH" git p4 clone --dest="$git" //depot >errs 2>&1 ; retval=$? &&
test $retval -eq 1 &&
test_must_fail grep -q Traceback errs
'
)
'
-test_expect_success 'wildcard files git-p4 clone' '
- "$GITP4" clone --dest="$git" //depot &&
+test_expect_success 'wildcard files git p4 clone' '
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
)
'
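+# The following tests submit filenames containing the p4 wildcard characters
+# (@, #, * and %), which p4 normally stores %xx-encoded in the depot; the
+# checks in "$cli" make sure the literal names round-trip back intact.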
+test_expect_success 'wildcard files submit back to p4, add' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo git-wild-hash >git-wild#hash &&
+ echo git-wild-star >git-wild\*star &&
+ echo git-wild-at >git-wild@at &&
+ echo git-wild-percent >git-wild%percent &&
+ git add git-wild* &&
+ git commit -m "add some wildcard filenames" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file git-wild#hash &&
+ test_path_is_file git-wild\*star &&
+ test_path_is_file git-wild@at &&
+ test_path_is_file git-wild%percent
+ )
+'
+
+test_expect_success 'wildcard files submit back to p4, modify' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo new-line >>git-wild#hash &&
+ echo new-line >>git-wild\*star &&
+ echo new-line >>git-wild@at &&
+ echo new-line >>git-wild%percent &&
+ git add git-wild* &&
+ git commit -m "modify the wildcard files" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_line_count = 2 git-wild#hash &&
+ test_line_count = 2 git-wild\*star &&
+ test_line_count = 2 git-wild@at &&
+ test_line_count = 2 git-wild%percent
+ )
+'
+
+test_expect_success 'wildcard files submit back to p4, copy' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ cp file2 git-wild-cp#hash &&
+ git add git-wild-cp#hash &&
+ cp git-wild\*star file-wild-3 &&
+ git add file-wild-3 &&
+ git commit -m "wildcard copies" &&
+ git config git-p4.detectCopies true &&
+ git config git-p4.detectCopiesHarder true &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file git-wild-cp#hash &&
+ test_path_is_file file-wild-3
+ )
+'
+
+test_expect_success 'wildcard files submit back to p4, rename' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git mv git-wild@at file-wild-4 &&
+ git mv file-wild-3 git-wild-cp%percent &&
+ git commit -m "wildcard renames" &&
+ git config git-p4.detectRenames true &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_missing git-wild@at &&
+ test_path_is_file git-wild-cp%percent
+ )
+'
+
+test_expect_success 'wildcard files submit back to p4, delete' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git rm git-wild* &&
+ git commit -m "delete the wildcard files" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_missing git-wild#hash &&
+ test_path_is_missing git-wild\*star &&
+ test_path_is_missing git-wild@at &&
+ test_path_is_missing git-wild%percent
+ )
+'
+
test_expect_success 'clone bare' '
- "$GITP4" clone --dest="$git" --bare //depot &&
+ git p4 clone --dest="$git" --bare //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4_add_user alice Alice &&
p4_add_user bob Bob &&
p4_grant_admin alice &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Alice <alice@localhost>" -m "a change by alice" file1 &&
git commit --author "Bob <bob@localhost>" -m "a change by bob" file2 &&
git config git-p4.skipSubmitEditCheck true &&
- P4EDITOR=touch P4USER=alice P4PASSWD=secret "$GITP4" commit --preserve-user &&
+ P4EDITOR=touch P4USER=alice P4PASSWD=secret git p4 commit --preserve-user &&
p4_check_commit_author file1 alice &&
p4_check_commit_author file2 bob
)
# Test username support, submitting as bob, who lacks admin rights. Should
# not submit change to p4 (git diff should show deltas).
test_expect_success 'refuse to preserve users without perms' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Alice <alice@localhost>" -m "perms: a change by alice" file1 &&
P4EDITOR=touch P4USER=bob P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- test_must_fail "$GITP4" commit --preserve-user &&
+ test_must_fail git p4 commit --preserve-user &&
! git diff --exit-code HEAD..p4/master
)
'
# What happens with unknown author? Without allowMissingP4Users it should fail.
test_expect_success 'preserve user where author is unknown to p4' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git commit --author "Charlie <charlie@localhost>" -m "preserve: a change by charlie" file1 &&
P4EDITOR=touch P4USER=alice P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- test_must_fail "$GITP4" commit --preserve-user &&
+ test_must_fail git p4 commit --preserve-user &&
! git diff --exit-code HEAD..p4/master &&
echo "$0: repeat with allowMissingP4Users enabled" &&
git config git-p4.allowMissingP4Users true &&
git config git-p4.preserveUser true &&
- "$GITP4" commit &&
+ git p4 commit &&
git diff --exit-code HEAD..p4/master &&
p4_check_commit_author file1 alice
)
'
-# If we're *not* using --preserve-user, git-p4 should warn if we're submitting
+# If we're *not* using --preserve-user, git p4 should warn if we're submitting
# changes that are not all ours.
# Test: user in p4 and user unknown to p4.
# Test: warning disabled and user is the same.
test_expect_success 'not preserving user with mixed authorship' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
make_change_by_user usernamefile3 Derek derek@localhost &&
P4EDITOR=cat P4USER=alice P4PASSWD=secret &&
export P4EDITOR P4USER P4PASSWD &&
- "$GITP4" commit |\
+ git p4 commit |\
grep "git author derek@localhost does not match" &&
make_change_by_user usernamefile3 Charlie charlie@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
grep "git author charlie@localhost does not match" &&
make_change_by_user usernamefile3 alice alice@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
test_must_fail grep "git author.*does not match" &&
git config git-p4.skipUserNameCheck true &&
make_change_by_user usernamefile3 Charlie charlie@localhost &&
- "$GITP4" commit |\
+ git p4 commit |\
test_must_fail grep "git author.*does not match" &&
p4_check_commit_author usernamefile3 alice
p4change=$(p4 -G changes -m 1 //depot/... | marshal_dump change) &&
p4time=$(p4 -G changes -m 1 //depot/... | marshal_dump time) &&
sleep 3 &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
# Repeat, this time with a smaller threshold and confirm that the rename is
# detected in P4.
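# (Both git-p4.detectRenames and git-p4.detectCopies accept either a boolean
# or a numeric similarity threshold, which is what the $level arithmetic in
# these tests exercises.)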
test_expect_success 'detect renames' '
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- git config git-p4.skipSubmitEditCheck true &&
+ git config git-p4.skipSubmitEdit true &&
git mv file1 file4 &&
git commit -a -m "Rename file1 to file4" &&
git diff-tree -r -M HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file4 &&
p4 filelog //depot/file4 | test_must_fail grep -q "branch from" &&
git commit -a -m "Rename file4 to file5" &&
git diff-tree -r -M HEAD &&
git config git-p4.detectRenames true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file5 &&
p4 filelog //depot/file5 | grep -q "branch from //depot/file4" &&
level=$(git diff-tree -r -M HEAD | sed 1d | cut -f1 | cut -d" " -f5 | sed "s/R0*//") &&
test -n "$level" && test "$level" -gt 0 && test "$level" -lt 98 &&
git config git-p4.detectRenames $(($level + 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file6 &&
p4 filelog //depot/file6 | test_must_fail grep -q "branch from" &&
level=$(git diff-tree -r -M HEAD | sed 1d | cut -f1 | cut -d" " -f5 | sed "s/R0*//") &&
test -n "$level" && test "$level" -gt 2 && test "$level" -lt 100 &&
git config git-p4.detectRenames $(($level - 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file7 &&
p4 filelog //depot/file7 | grep -q "branch from //depot/file6"
)
# Modify and copy a file, configure a smaller threshold in detectCopies and
# confirm that copy is detected in P4.
test_expect_success 'detect copies' '
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- git config git-p4.skipSubmitEditCheck true &&
+ git config git-p4.skipSubmitEdit true &&
cp file2 file8 &&
git add file8 &&
git commit -a -m "Copy file2 to file8" &&
git diff-tree -r -C HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file8 &&
p4 filelog //depot/file8 | test_must_fail grep -q "branch from" &&
git commit -a -m "Copy file2 to file9" &&
git diff-tree -r -C HEAD &&
git config git-p4.detectCopies true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file9 &&
p4 filelog //depot/file9 | test_must_fail grep -q "branch from" &&
git add file2 file10 &&
git commit -a -m "Modify and copy file2 to file10" &&
git diff-tree -r -C HEAD &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file10 &&
p4 filelog //depot/file10 | grep -q "branch from //depot/file" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopiesHarder true &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file11 &&
p4 filelog //depot/file11 | grep -q "branch from //depot/file" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopies $(($level + 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file12 &&
p4 filelog //depot/file12 | test_must_fail grep -q "branch from" &&
src=$(git diff-tree -r -C --find-copies-harder HEAD | sed 1d | cut -f2) &&
test "$src" = file10 &&
git config git-p4.detectCopies $(($level - 2)) &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 filelog //depot/file13 &&
p4 filelog //depot/file13 | grep -q "branch from //depot/file"
)
#!/bin/sh
-test_description='git-p4 p4 branching tests'
+test_description='git p4 tests for p4 branches'
. ./lib-git-p4.sh
test_expect_success 'import main, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/main@all &&
+ git p4 clone --dest="$git" //depot/main@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import branch1, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/branch1@all &&
+ git p4 clone --dest="$git" //depot/branch1@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import branch2, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/branch2@all &&
+ git p4 clone --dest="$git" //depot/branch2@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import depot, no branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
git log --oneline --graph --decorate --all &&
test_expect_success 'import depot, branch detection' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" --detect-branches //depot@all &&
+ git p4 clone --dest="$git" --detect-branches //depot@all &&
(
cd "$git" &&
(
cd "$git" &&
git config git-p4.branchList main:branch1 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --oneline --graph --decorate --all &&
# Configure branches through git-config and clone them.
# All files are tested to make sure branches were cloned correctly.
# Finally, make an update to branch1 on P4 side to check if it is imported
-# correctly by git-p4.
-test_expect_success 'git-p4 clone simple branches' '
+# correctly by git p4.
+test_expect_success 'git p4 clone simple branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
cd "$git" &&
git config git-p4.branchList branch1:branch2 &&
git config --add git-p4.branchList branch1:branch3 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch1 &&
test -f file1 &&
p4 submit -d "update file2 in branch3" &&
cd "$git" &&
git reset --hard p4/depot/branch1 &&
- "$GITP4" rebase &&
+ git p4 rebase &&
grep file2_ file2
)
'
# Create a complex branch structure in P4 depot to check if they are correctly
-# cloned. The branches are created from older changelists to check if git-p4 is
+# cloned. The branches are created from older changelists to check if git p4 is
# able to correctly detect them.
# The final expected structure is:
# `branch1
# `- file1
# `- file2
# `- file3
-test_expect_success 'git-p4 add complex branches' '
+test_expect_success 'git p4 add complex branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
)
'
-# Configure branches through git-config and clone them. git-p4 will only be able
+# Configure branches through git-config and clone them. git p4 will only be able
# to clone the original structure if it is able to detect the origin changelist
# of each branch.
-test_expect_success 'git-p4 clone complex branches' '
+test_expect_success 'git p4 clone complex branches' '
test_when_finished cleanup_git &&
test_create_repo "$git" &&
(
git config --add git-p4.branchList branch1:branch3 &&
git config --add git-p4.branchList branch1:branch4 &&
git config --add git-p4.branchList branch1:branch5 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch1 &&
test_path_is_file file1 &&
#!/bin/sh
-test_description='git-p4 p4 filetype tests'
+test_description='git p4 filetype tests'
. ./lib-git-p4.sh
test_expect_success 'utf-16 file test' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
build_smush &&
test_when_finished rm -f k_smush.py ko_smush.py &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
"$PYTHON_PATH" "$TRASH_DIRECTORY/ko_smush.py" <"$cli/k-text-ko" >cli-k-text-ko-smush &&
test_cmp cli-k-text-ko-smush k-text-ko &&
- # utf16, even though p4 expands keywords, git-p4 does not
+ # utf16, even though p4 expands keywords, git p4 does not
# try to undo that
test_cmp "$cli/k-utf16-k" k-utf16-k &&
test_cmp "$cli/k-utf16-ko" k-utf16-ko
p4 submit -d appledouble
) &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot@all &&
+ git p4 clone --dest="$git" //depot@all &&
(
cd "$git" &&
test ! -f double.png
#!/bin/sh
-test_description='git-p4 transparency to shell metachars in filenames'
+test_description='git p4 transparency to shell metachars in filenames'
. ./lib-git-p4.sh
'
test_expect_success 'shell metachars in filenames' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo f2 >"file with spaces" &&
git add "file with spaces" &&
git commit -m "add files" &&
- P4EDITOR=touch "$GITP4" submit
+ P4EDITOR=touch git p4 submit
) &&
(
cd "$cli" &&
'
test_expect_success 'deleting with shell metachars' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git rm foo\$bar &&
git rm file\ with\ spaces &&
git commit -m "remove files" &&
- P4EDITOR=touch "$GITP4" submit
+ P4EDITOR=touch git p4 submit
) &&
(
cd "$cli" &&
cd "$git" &&
git config git-p4.branchList main:branch\$3 &&
- "$GITP4" clone --dest=. --detect-branches //depot@all &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
git log --all --graph --decorate --stat &&
git reset --hard p4/depot/branch\$3 &&
test -f shell_char_branch_file &&
#!/bin/sh
-test_description='git-p4 p4 label tests'
+test_description='git p4 label tests'
. ./lib-git-p4.sh
p4 labels ... &&
- "$GITP4" clone --dest="$git" --detect-labels //depot@all &&
+ git p4 clone --dest="$git" --detect-labels //depot@all &&
cd "$git" &&
git tag &&
p4 labels ... &&
- "$GITP4" clone --dest="$git" --detect-labels //depot@all &&
+ git p4 clone --dest="$git" --detect-labels //depot@all &&
cd "$git" &&
git tag | grep tag_f1 &&
#!/bin/sh
-test_description='git-p4 skipSubmitEdit config variables'
+test_description='git p4 skipSubmitEdit config variables'
. ./lib-git-p4.sh
# this works because EDITOR is set to :
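# (with ":" as the editor the submit template comes back unedited, so git p4
# asks on stdin whether to submit anyway; the tests answer via the pipe)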
test_expect_success 'no config, unedited, say yes' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 2" &&
- echo y | "$GITP4" submit &&
+ echo y | git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 2 wc
)
'
test_expect_success 'no config, unedited, say no' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 3 (not really)" &&
- printf "bad response\nn\n" | "$GITP4" submit &&
+ printf "bad response\nn\n" | git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 2 wc
)
'
test_expect_success 'skipSubmitEdit' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git config core.editor /bin/false &&
echo line >>file1 &&
git commit -a -m "change 3" &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 3 wc
)
'
test_expect_success 'skipSubmitEditCheck' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
git config git-p4.skipSubmitEditCheck true &&
echo line >>file1 &&
git commit -a -m "change 4" &&
- "$GITP4" submit &&
+ git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 4 wc
)
# check the normal case, where the template really is edited
test_expect_success 'no config, edited' '
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
test_when_finished cleanup_git &&
ed="$TRASH_DIRECTORY/ed.sh" &&
test_when_finished "rm \"$ed\"" &&
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 5" &&
- EDITOR="\"$ed\"" "$GITP4" submit &&
+ P4EDITOR="" EDITOR="\"$ed\"" git p4 submit &&
p4 changes //depot/... >wc &&
test_line_count = 5 wc
)
#!/bin/sh
-test_description='git-p4 options'
+test_description='git p4 options'
. ./lib-git-p4.sh
'
test_expect_success 'clone no --git-dir' '
- test_must_fail "$GITP4" clone --git-dir=xx //depot
+ test_must_fail git p4 clone --git-dir=xx //depot
'
test_expect_success 'clone --branch' '
- "$GITP4" clone --branch=refs/remotes/p4/sb --dest="$git" //depot &&
+ git p4 clone --branch=refs/remotes/p4/sb --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
cf="$TRASH_DIRECTORY/cf" &&
test_when_finished "rm \"$cf\"" &&
printf "1\n3\n" >"$cf" &&
- "$GITP4" clone --changesfile="$cf" --dest="$git" //depot &&
+ git p4 clone --changesfile="$cf" --dest="$git" //depot &&
test_when_finished cleanup_git &&
(
cd "$git" &&
cf="$TRASH_DIRECTORY/cf" &&
test_when_finished "rm \"$cf\"" &&
printf "1\n3\n" >"$cf" &&
- test_must_fail "$GITP4" clone --changesfile="$cf" --dest="$git" //depot@all
+ test_must_fail git p4 clone --changesfile="$cf" --dest="$git" //depot@all
'
# imports both master and p4/master in refs/heads
# requires --import-local on sync to find p4 refs/heads
# does not update master on sync, just p4/master
test_expect_success 'clone/sync --import-local' '
- "$GITP4" clone --import-local --dest="$git" //depot@1,2 &&
+ git p4 clone --import-local --dest="$git" //depot@1,2 &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_line_count = 2 lines &&
git log --oneline refs/heads/p4/master >lines &&
test_line_count = 2 lines &&
- test_must_fail "$GITP4" sync &&
+ test_must_fail git p4 sync &&
- "$GITP4" sync --import-local &&
+ git p4 sync --import-local &&
git log --oneline refs/heads/master >lines &&
test_line_count = 2 lines &&
git log --oneline refs/heads/p4/master >lines &&
'
test_expect_success 'clone --max-changes' '
- "$GITP4" clone --dest="$git" --max-changes 2 //depot@all &&
+ git p4 clone --dest="$git" --max-changes 2 //depot@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
p4 add sub/dir/f4 &&
p4 submit -d "change 4"
) &&
- "$GITP4" clone --dest="$git" --keep-path //depot/sub/dir@all &&
+ git p4 clone --dest="$git" --keep-path //depot/sub/dir@all &&
test_when_finished cleanup_git &&
(
cd "$git" &&
test_path_is_file sub/dir/f4
) &&
cleanup_git &&
- "$GITP4" clone --dest="$git" //depot/sub/dir@all &&
+ git p4 clone --dest="$git" //depot/sub/dir@all &&
(
cd "$git" &&
test_path_is_file f4 &&
(
# big usage message
exec >/dev/null &&
- test_must_fail "$GITP4" clone --dest="$git" --use-client-spec
+ test_must_fail git p4 clone --dest="$git" --use-client-spec
) &&
cli2="$TRASH_DIRECTORY/cli2" &&
mkdir -p "$cli2" &&
) &&
P4CLIENT=client2 &&
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" --use-client-spec //depot/... &&
+ git p4 clone --dest="$git" --use-client-spec //depot/... &&
(
cd "$git" &&
test_path_is_file bus/dir/f4 &&
cd "$git" &&
git init &&
git config git-p4.useClientSpec true &&
- "$GITP4" sync //depot/... &&
+ git p4 sync //depot/... &&
git checkout -b master p4/master &&
test_path_is_file bus/dir/f4 &&
test_path_is_missing file1
#!/bin/sh
-test_description='git-p4 submit'
+test_description='git p4 submit'
. ./lib-git-p4.sh
test_expect_success 'submit with no client dir' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo file2 >file2 &&
git commit -m "git commit 2" &&
rm -rf "$cli" &&
git config git-p4.skipSubmitEdit true &&
- "$GITP4" submit
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file file1 &&
+ test_path_is_file file2
)
'
# make two commits, but tell it to apply only from HEAD^
test_expect_success 'submit --origin' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file3" &&
test_commit "file4" &&
git config git-p4.skipSubmitEdit true &&
- "$GITP4" submit --origin=HEAD^
+ git p4 submit --origin=HEAD^
) &&
(
cd "$cli" &&
- p4 sync &&
test_path_is_missing "file3.t" &&
test_path_is_file "file4.t"
)
test_expect_success 'submit with allowSubmit' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file5" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.allowSubmit "nobranch" &&
- test_must_fail "$GITP4" submit &&
+ test_must_fail git p4 submit &&
git config git-p4.allowSubmit "nobranch,master" &&
- "$GITP4" submit
+ git p4 submit
)
'
test_expect_success 'submit with master branch name from argv' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
test_commit "file6" &&
git config git-p4.skipSubmitEdit true &&
- test_must_fail "$GITP4" submit nobranch &&
+ test_must_fail git p4 submit nobranch &&
git branch otherbranch &&
git reset --hard HEAD^ &&
test_commit "file7" &&
- "$GITP4" submit otherbranch
+ git p4 submit otherbranch
) &&
(
cd "$cli" &&
- p4 sync &&
test_path_is_file "file6.t" &&
test_path_is_missing "file7.t"
)
'
+#
+# Basic submit tests, the five handled cases
+#
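+# modify, add, delete, copy and rename; each change is submitted from git
+# and then verified in the p4 client directory "$cli".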
+
+test_expect_success 'submit modify' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ echo line >>file1 &&
+ git add file1 &&
+ git commit -m file1 &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file file1 &&
+ test_line_count = 2 file1
+ )
+'
+
+test_expect_success 'submit add' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ echo file13 >file13 &&
+ git add file13 &&
+ git commit -m file13 &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file file13
+ )
+'
+
+test_expect_success 'submit delete' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ git rm file4.t &&
+ git commit -m "delete file4.t" &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_missing file4.t
+ )
+'
+
+test_expect_success 'submit copy' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ git config git-p4.detectCopies true &&
+ git config git-p4.detectCopiesHarder true &&
+ cp file5.t file5.ta &&
+ git add file5.ta &&
+ git commit -m "copy to file5.ta" &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file file5.ta &&
+ test ! -w file5.ta
+ )
+'
+
+test_expect_success 'submit rename' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ git config git-p4.detectRenames true &&
+ git mv file6.t file6.ta &&
+ git commit -m "rename file6.t to file6.ta" &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_missing file6.t &&
+ test_path_is_file file6.ta &&
+ test ! -w file6.ta
+ )
+'
+
test_expect_success 'kill p4d' '
kill_p4d
'
#!/bin/sh
-test_description='git-p4 relative chdir'
+test_description='git p4 relative chdir'
. ./lib-git-p4.sh
(
P4CONFIG=p4config && export P4CONFIG &&
sane_unset P4PORT P4CLIENT &&
- "$GITP4" clone --verbose --dest="$git" //depot
+ git p4 clone --verbose --dest="$git" //depot
)
'
(
P4CONFIG=p4config && export P4CONFIG &&
sane_unset P4PORT P4CLIENT &&
- "$GITP4" clone --verbose --dest="git" //depot
+ git p4 clone --verbose --dest="git" //depot
)
'
#!/bin/sh
-test_description='git-p4 client view'
+test_description='git p4 client view'
. ./lib-git-p4.sh
test_expect_success 'unsupported view wildcard %%n' '
client_view "//depot/%%%%1/sub/... //client/sub/%%%%1/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'unsupported view wildcard *' '
client_view "//depot/*/bar/... //client/*/bar/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'wildcard ... only supported at end of spec 1' '
client_view "//depot/.../file11 //client/.../file11" &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'wildcard ... only supported at end of spec 2' '
client_view "//depot/.../a/... //client/.../a/..." &&
test_when_finished cleanup_git &&
- test_must_fail "$GITP4" clone --use-client-spec --dest="$git" //depot
+ test_must_fail git p4 clone --use-client-spec --dest="$git" //depot
'
test_expect_success 'basic map' '
files="cli1/file11 cli1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
client_view &&
client_verify &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify
'
files="file11" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
cli2/dir2/file21 cli2/dir2/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli12/file21 cli12/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
"-//depot/dir2/... //client/..." &&
client_verify &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify
'
files="dir1/file11 dir1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="dir1/file11 dir1/file12 dir2/file21" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli/file11 cli/file12 cli/file21 cli/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="cli/file11 cli/file12 cli/file21" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="dir1/file11 dir1/file12 dir2incl/file21 dir2incl/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
client_view "//depot/dir1/... \"//client/cdir 1/...\"" &&
client_verify "cdir 1/file11" "cdir 1/file12" &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify "cdir 1/file11" "cdir 1/file12"
'
test_expect_success 'clone --use-client-spec sets useClientSpec' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
(
cd "$git" &&
git config --bool git-p4.useClientSpec >actual &&
files="dir1/file11 dir1/file12 dir2/file21 dir2/file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
git_verify dir1/file11 dir1/file12
'
test_expect_success 'subdir clone, submit modify' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
echo line >>dir1/file12 &&
git add dir1/file12 &&
git commit -m dir1/file12 &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit add' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
echo file13 >dir1/file13 &&
git add dir1/file13 &&
git commit -m dir1/file13 &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit delete' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git rm dir1/file12 &&
git commit -m "delete dir1/file12" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_expect_success 'subdir clone, submit copy' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
cp dir1/file11 dir1/file11a &&
git add dir1/file11a &&
git commit -m "copy to dir1/file11a" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
- test_path_is_file dir1/file11a
+ test_path_is_file dir1/file11a &&
+ test ! -w dir1/file11a
)
'
test_expect_success 'subdir clone, submit rename' '
client_view "//depot/... //client/..." &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot/dir1 &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.detectRenames true &&
git mv dir1/file13 dir1/file13a &&
git commit -m "rename dir1/file13 to dir1/file13a" &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
test_path_is_missing dir1/file13 &&
- test_path_is_file dir1/file13a
+ test_path_is_file dir1/file13a &&
+ test ! -w dir1/file13a
+ )
+'
+
+# see t9800 for the non-client-spec case, and the rest of the wildcard tests
+test_expect_success 'wildcard files submit back to p4, client-spec case' '
+ client_view "//depot/... //client/..." &&
+ test_when_finished cleanup_git &&
+ git p4 clone --use-client-spec --dest="$git" //depot/dir1 &&
+ (
+ cd "$git" &&
+ echo git-wild-hash >dir1/git-wild#hash &&
+ echo git-wild-star >dir1/git-wild\*star &&
+ echo git-wild-at >dir1/git-wild@at &&
+ echo git-wild-percent >dir1/git-wild%percent &&
+ git add dir1/git-wild* &&
+ git commit -m "add some wildcard filenames" &&
+ git config git-p4.skipSubmitEditCheck true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ test_path_is_file dir1/git-wild#hash &&
+ test_path_is_file dir1/git-wild\*star &&
+ test_path_is_file dir1/git-wild@at &&
+ test_path_is_file dir1/git-wild%percent
+ ) &&
+ (
+ # delete these carefully, cannot just do "p4 delete"
+ # on files with wildcards; but git-p4 knows how
+ cd "$git" &&
+ git rm dir1/git-wild* &&
+ git commit -m "clean up the wildcards" &&
+ git p4 submit
)
'
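The wildcard test above works because git-p4 percent-escapes the characters that Perforce treats as wildcards ('@', '#', '*' and '%') before handing paths to p4; a plain "p4 delete" on the literal names would be refused, which is what the comment in the cleanup step alludes to. As a rough illustration of that escaping (a hypothetical stand-alone helper written for this note, not the actual Python code inside git-p4):

	#include <stdio.h>
	#include <string.h>

	/* Hypothetical sketch of the %-escaping git-p4 applies to Perforce
	 * wildcard characters before running p4 commands on such paths. */
	static void p4_wildcard_encode(const char *in, char *out, size_t outlen)
	{
		size_t o = 0;
		for (; *in && o + 4 < outlen; in++) {
			switch (*in) {
			case '%': memcpy(out + o, "%25", 3); o += 3; break;
			case '*': memcpy(out + o, "%2A", 3); o += 3; break;
			case '#': memcpy(out + o, "%23", 3); o += 3; break;
			case '@': memcpy(out + o, "%40", 3); o += 3; break;
			default: out[o++] = *in; break;
			}
		}
		out[o] = '\0';
	}

	int main(void)
	{
		char buf[64];
		p4_wildcard_encode("dir1/git-wild#hash", buf, sizeof(buf));
		printf("%s\n", buf);	/* dir1/git-wild%23hash */
		return 0;
	}

Decoding is the reverse substitution, which is why the files show up under their literal names in the client workspace while the depot paths carry the encoded form.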
test_expect_success 'reinit depot' '
(
cd "$cli" &&
- p4 sync -f &&
rm files &&
p4 delete */* &&
p4 submit -d "delete all files" &&
client_verify $files &&
test_cmp actual "$cli"/filecollide &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/filecollide
'
client_verify $files &&
test_cmp actual "$cli"/filecollide &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/filecollide
'
files="file11 file12 file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
files="file11 file12 file21 file22" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files
'
echo dir1/colA >actual &&
client_verify $files &&
test_cmp actual "$cli"/colA &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/colA
'
test_cmp actual "$cli"/colA &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_cmp actual "$cli"/colB &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files
echo dir1/colA >actual &&
client_verify $files &&
test_cmp actual "$cli"/colA &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify $files &&
test_cmp actual "$git"/colA
'
test_cmp actual "$cli"/colA &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_cmp actual "$cli"/colB &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
test_when_finished cleanup_git &&
(
cd "$git" &&
- "$GITP4" sync --use-client-spec &&
+ git p4 sync --use-client-spec &&
git merge --ff-only p4/master
) &&
git_verify $files &&
files="cdir1/file11 cdir1/file12" &&
client_verify $files &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
client_verify $files
'
client_view "\"//depot/dir 1/...\" \"//client/cdir 1/...\"" &&
client_verify "cdir 1/file11" "cdir 1/file12" &&
test_when_finished cleanup_git &&
- "$GITP4" clone --use-client-spec --dest="$git" //depot &&
+ git p4 clone --use-client-spec --dest="$git" //depot &&
git_verify "cdir 1/file11" "cdir 1/file12"
'
#
test_expect_success 'edit far away from RCS lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
sed -i "s/^line7/line7 edit/" filek &&
git commit -m "filek line7 edit" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'edit near RCS lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "s/^line4/line4 edit/" filek &&
git commit -m "filek line4 edit" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'edit keyword lines' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "/Revision/d" filek &&
git commit -m "filek remove Revision line" filek &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_k_check filek
)
'
#
test_expect_success 'scrub ko files differently' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
sed -i "s/^line4/line4 edit/" fileko &&
git commit -m "fileko line4 edit" fileko &&
- "$GITP4" submit &&
+ git p4 submit &&
scrub_ko_check fileko &&
! scrub_k_check fileko
)
#
test_expect_success 'do not scrub plain text' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
sed -i "s/^line5/line5 p4 edit/" file_text &&
p4 submit -d "file5 p4 edit"
) &&
- ! "$GITP4" submit &&
+ ! git p4 submit &&
(
# expect something like:
# file_text - file(s) not opened on this client
# even though the change itself would otherwise apply cleanly.
test_expect_success 'cope with rcs keyword expansion damage' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
git config git-p4.skipSubmitEdit true &&
git add kwfile1.c &&
git commit -m "Zap an RCS kw line" &&
- "$GITP4" submit &&
- "$GITP4" rebase &&
+ git p4 submit &&
+ git p4 rebase &&
git diff p4/master &&
- "$GITP4" commit &&
+ git p4 commit &&
echo "try modifying in both" &&
cd "$cli" &&
p4 edit kwfile1.c &&
echo "line from git at the top" | cat - kwfile1.c >kwfile1.c.new &&
mv kwfile1.c.new kwfile1.c &&
git commit -m "Add line in git at the top" kwfile1.c &&
- "$GITP4" rebase &&
- "$GITP4" submit
+ git p4 rebase &&
+ git p4 submit
)
'
cat kwdelfile.c &&
grep 1 kwdelfile.c
) &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
grep Revision kwdelfile.c &&
git commit -m "Delete a file containing RCS keywords" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
# work fine without any special handling.
test_expect_success 'Add keywords in git which match the default p4 values' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo "NewKW: \$Revision\$" >>kwfile1.c &&
git commit -m "Adding RCS keywords in git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
#
test_expect_failure 'Add keywords in git which do not match the default p4 values' '
test_when_finished cleanup_git &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
echo "NewKW2: \$Revision:1\$" >>kwfile1.c &&
git commit -m "Adding RCS keywords in git" &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- "$GITP4" submit
+ git p4 submit
) &&
(
cd "$cli" &&
p4 add -t ktext merge2.c &&
p4 submit -d "add merge test file"
) &&
- "$GITP4" clone --dest="$git" //depot &&
+ git p4 clone --dest="$git" //depot &&
(
cd "$git" &&
sed -e "/Hello/d" merge2.c >merge2.c.tmp &&
test -f merge2.c &&
git config git-p4.skipSubmitEdit true &&
git config git-p4.attemptRCSCleanup true &&
- !(echo "s" | "$GITP4" submit) &&
+ !(echo "s" | git p4 submit) &&
git rebase --skip &&
! test -f merge2.c
)
--- /dev/null
+#!/bin/sh
+
+test_description='git p4 label tests'
+
+. ./lib-git-p4.sh
+
+test_expect_success 'start p4d' '
+ start_p4d
+'
+
+# Basic p4 label import tests.
+#
+test_expect_success 'basic p4 labels' '
+ test_when_finished cleanup_git &&
+ (
+ cd "$cli" &&
+ mkdir -p main &&
+
+ echo f1 >main/f1 &&
+ p4 add main/f1 &&
+ p4 submit -d "main/f1" &&
+
+ echo f2 >main/f2 &&
+ p4 add main/f2 &&
+ p4 submit -d "main/f2" &&
+
+ echo f3 >main/file_with_\$metachar &&
+ p4 add main/file_with_\$metachar &&
+ p4 submit -d "file with metachar" &&
+
+ p4 tag -l TAG_F1_ONLY main/f1 &&
+ p4 tag -l TAG_WITH\$_SHELL_CHAR main/... &&
+ p4 tag -l this_tag_will_be\ skipped main/... &&
+
+ echo f4 >main/f4 &&
+ p4 add main/f4 &&
+ p4 submit -d "main/f4" &&
+
+ p4 label -i <<-EOF &&
+ Label: TAG_LONG_LABEL
+ Description:
+ A Label first line
+ A Label second line
+ View: //depot/...
+ EOF
+
+ p4 tag -l TAG_LONG_LABEL ... &&
+
+ p4 labels ... &&
+
+ git p4 clone --dest="$git" //depot@all &&
+ cd "$git" &&
+ git config git-p4.labelImportRegexp ".*TAG.*" &&
+ git p4 sync --import-labels --verbose &&
+
+ git tag &&
+ git tag >taglist &&
+ test_line_count = 3 taglist &&
+
+ cd main &&
+ git checkout TAG_F1_ONLY &&
+ ! test -f f2 &&
+ git checkout TAG_WITH\$_SHELL_CHAR &&
+ test -f f1 && test -f f2 && test -f file_with_\$metachar &&
+
+ git show TAG_LONG_LABEL | grep -q "A Label second line"
+ )
+'
+# Test some label corner cases:
+#
+# - two tags on the same file; both should be available
+# - a tag that is only on one file; this kind of tag
+# cannot be imported (at least not easily).
+
+test_expect_success 'two labels on the same changelist' '
+ test_when_finished cleanup_git &&
+ (
+ cd "$cli" &&
+ mkdir -p main &&
+
+ p4 edit main/f1 main/f2 &&
+ echo "hello world" >main/f1 &&
+ echo "not in the tag" >main/f2 &&
+ p4 submit -d "main/f[12]: testing two labels" &&
+
+ p4 tag -l TAG_F1_1 main/... &&
+ p4 tag -l TAG_F1_2 main/... &&
+
+ p4 labels ... &&
+
+ git p4 clone --dest="$git" //depot@all &&
+ cd "$git" &&
+ git p4 sync --import-labels &&
+
+ git tag | grep TAG_F1 &&
+ git tag | grep -q TAG_F1_1 &&
+ git tag | grep -q TAG_F1_2 &&
+
+ cd main &&
+
+ git checkout TAG_F1_1 &&
+ ls &&
+ test -f f1 &&
+
+ git checkout TAG_F1_2 &&
+ ls &&
+ test -f f1
+ )
+'
+
+# Export some git tags to p4
+test_expect_success 'export git tags to p4' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot@all &&
+ (
+ cd "$git" &&
+ git tag -m "A tag created in git:xyzzy" GIT_TAG_1 &&
+ echo "hello world" >main/f10 &&
+ git add main/f10 &&
+ git commit -m "Adding file for export test" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit &&
+ git tag -m "Another git tag" GIT_TAG_2 &&
+ git tag LIGHTWEIGHT_TAG &&
+ git p4 rebase --import-labels --verbose &&
+ git p4 submit --export-labels --verbose
+ ) &&
+ (
+ cd "$cli" &&
+ p4 sync ... &&
+ p4 labels ... | grep GIT_TAG_1 &&
+ p4 labels ... | grep GIT_TAG_2 &&
+ p4 labels ... | grep LIGHTWEIGHT_TAG &&
+ p4 label -o GIT_TAG_1 | grep "tag created in git:xyzzy" &&
+ p4 sync ...@GIT_TAG_1 &&
+ ! test -f main/f10 &&
+ p4 sync ...@GIT_TAG_2 &&
+ test -f main/f10
+ )
+'
+
+# Export a tag from git where an affected file is deleted later on
+# Need to create git tags after rebase, since only then can the
+# git commits be mapped to p4 changelists.
+test_expect_success 'export git tags to p4 with deletion' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot@all &&
+ (
+ cd "$git" &&
+ git p4 sync --import-labels &&
+ echo "deleted file" >main/deleted_file &&
+ git add main/deleted_file &&
+ git commit -m "create deleted file" &&
+ git rm main/deleted_file &&
+ echo "new file" >main/f11 &&
+ git add main/f11 &&
+ git commit -m "delete the deleted file" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit &&
+ git p4 rebase --import-labels --verbose &&
+ git tag -m "tag on deleted file" GIT_TAG_ON_DELETED HEAD~1 &&
+ git tag -m "tag after deletion" GIT_TAG_AFTER_DELETION HEAD &&
+ git p4 submit --export-labels --verbose
+ ) &&
+ (
+ cd "$cli" &&
+ p4 sync ... &&
+ p4 sync ...@GIT_TAG_ON_DELETED &&
+ test -f main/deleted_file &&
+ p4 sync ...@GIT_TAG_AFTER_DELETION &&
+ ! test -f main/deleted_file &&
+ echo "checking label contents" &&
+ p4 label -o GIT_TAG_ON_DELETED | grep "tag on deleted file"
+ )
+'
+
+# Create a tag in git that cannot be exported to p4
+test_expect_success 'tag that cannot be exported' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot@all &&
+ (
+ cd "$git" &&
+ git checkout -b a_branch &&
+ echo "hello" >main/f12 &&
+ git add main/f12 &&
+ git commit -m "adding f12" &&
+ git tag -m "tag on a_branch" GIT_TAG_ON_A_BRANCH &&
+ git checkout master &&
+ git p4 submit --export-labels
+ ) &&
+ (
+ cd "$cli" &&
+ p4 sync ... &&
+ !(p4 labels | grep GIT_TAG_ON_A_BRANCH)
+ )
+'
+
+test_expect_success 'kill p4d' '
+ kill_p4d
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2012 Felipe Contreras
+#
+
+if test -n "$BASH" && test -z "$POSIXLY_CORRECT"; then
+ # we are in full-on bash mode
+ true
+elif type bash >/dev/null 2>&1; then
+ # execute in full-on bash mode
+ unset POSIXLY_CORRECT
+ exec bash "$0" "$@"
+else
+ echo '1..0 #SKIP skipping bash completion tests; bash not available'
+ exit 0
+fi
+
+test_description='test bash completion'
+
+. ./test-lib.sh
+
+complete ()
+{
+ # do nothing
+ return 0
+}
+
+. "$GIT_BUILD_DIR/contrib/completion/git-completion.bash"
+
+# We don't need this function to actually join words or do anything special.
+# Also, it's cleaner to avoid touching bash's internal completion variables.
+# So let's override it with a minimal version for testing purposes.
+_get_comp_words_by_ref ()
+{
+ while [ $# -gt 0 ]; do
+ case "$1" in
+ cur)
+ cur=${_words[_cword]}
+ ;;
+ prev)
+ prev=${_words[_cword-1]}
+ ;;
+ words)
+ words=("${_words[@]}")
+ ;;
+ cword)
+ cword=$_cword
+ ;;
+ esac
+ shift
+ done
+}
+
+print_comp ()
+{
+ local IFS=$'\n'
+ echo "${COMPREPLY[*]}" > out
+}
+
+run_completion ()
+{
+ local -a COMPREPLY _words
+ local _cword
+ _words=( $1 )
+ (( _cword = ${#_words[@]} - 1 ))
+ _git && print_comp
+}
+
+test_completion ()
+{
+ test $# -gt 1 && echo "$2" > expected
+ run_completion "$@" &&
+ test_cmp expected out
+}
+
+newline=$'\n'
+
+test_expect_success '__gitcomp - trailing space - options' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ --reuse-message=Z
+ --reedit-message=Z
+ --reset-author Z
+ EOF
+ (
+ local -a COMPREPLY &&
+ cur="--re" &&
+ __gitcomp "--dry-run --reuse-message= --reedit-message=
+ --reset-author" &&
+ IFS="$newline" &&
+ echo "${COMPREPLY[*]}" > out
+ ) &&
+ test_cmp expected out
+'
+
+test_expect_success '__gitcomp - trailing space - config keys' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ branch.Z
+ branch.autosetupmerge Z
+ branch.autosetuprebase Z
+ browser.Z
+ EOF
+ (
+ local -a COMPREPLY &&
+ cur="br" &&
+ __gitcomp "branch. branch.autosetupmerge
+ branch.autosetuprebase browser." &&
+ IFS="$newline" &&
+ echo "${COMPREPLY[*]}" > out
+ ) &&
+ test_cmp expected out
+'
+
+test_expect_success '__gitcomp - option parameter' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ recursive Z
+ resolve Z
+ EOF
+ (
+ local -a COMPREPLY &&
+ cur="--strategy=re" &&
+ __gitcomp "octopus ours recursive resolve subtree
+ " "" "re" &&
+ IFS="$newline" &&
+ echo "${COMPREPLY[*]}" > out
+ ) &&
+ test_cmp expected out
+'
+
+test_expect_success '__gitcomp - prefix' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ branch.maint.merge Z
+ branch.maint.mergeoptions Z
+ EOF
+ (
+ local -a COMPREPLY &&
+ cur="branch.me" &&
+ __gitcomp "remote merge mergeoptions rebase
+ " "branch.maint." "me" &&
+ IFS="$newline" &&
+ echo "${COMPREPLY[*]}" > out
+ ) &&
+ test_cmp expected out
+'
+
+test_expect_success '__gitcomp - suffix' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ branch.master.Z
+ branch.maint.Z
+ EOF
+ (
+ local -a COMPREPLY &&
+ cur="branch.me" &&
+ __gitcomp "master maint next pu
+ " "branch." "ma" "." &&
+ IFS="$newline" &&
+ echo "${COMPREPLY[*]}" > out
+ ) &&
+ test_cmp expected out
+'
+
+test_expect_success 'basic' '
+ run_completion "git \"\"" &&
+ # built-in
+ grep -q "^add \$" out &&
+ # script
+ grep -q "^filter-branch \$" out &&
+ # plumbing
+ ! grep -q "^ls-files \$" out &&
+
+ run_completion "git f" &&
+ ! grep -q -v "^f" out
+'
+
+test_expect_success 'double dash "git" itself' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ --paginate Z
+ --no-pager Z
+ --git-dir=
+ --bare Z
+ --version Z
+ --exec-path Z
+ --exec-path=
+ --html-path Z
+ --info-path Z
+ --work-tree=
+ --namespace=
+ --no-replace-objects Z
+ --help Z
+ EOF
+ test_completion "git --"
+'
+
+test_expect_success 'double dash "git checkout"' '
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ --quiet Z
+ --ours Z
+ --theirs Z
+ --track Z
+ --no-track Z
+ --merge Z
+ --conflict=
+ --orphan Z
+ --patch Z
+ EOF
+ test_completion "git checkout --"
+'
+
+test_expect_success 'general options' '
+ test_completion "git --ver" "--version " &&
+ test_completion "git --hel" "--help " &&
+ sed -e "s/Z$//" >expected <<-\EOF &&
+ --exec-path Z
+ --exec-path=
+ EOF
+ test_completion "git --exe" &&
+ test_completion "git --htm" "--html-path " &&
+ test_completion "git --pag" "--paginate " &&
+ test_completion "git --no-p" "--no-pager " &&
+ test_completion "git --git" "--git-dir=" &&
+ test_completion "git --wor" "--work-tree=" &&
+ test_completion "git --nam" "--namespace=" &&
+ test_completion "git --bar" "--bare " &&
+ test_completion "git --inf" "--info-path " &&
+ test_completion "git --no-r" "--no-replace-objects "
+'
+
+test_expect_success 'general options plus command' '
+ test_completion "git --version check" "checkout " &&
+ test_completion "git --paginate check" "checkout " &&
+ test_completion "git --git-dir=foo check" "checkout " &&
+ test_completion "git --bare check" "checkout " &&
+ test_completion "git --help des" "describe " &&
+ test_completion "git --exec-path=foo check" "checkout " &&
+ test_completion "git --html-path check" "checkout " &&
+ test_completion "git --no-pager check" "checkout " &&
+ test_completion "git --work-tree=foo check" "checkout " &&
+ test_completion "git --namespace=foo check" "checkout " &&
+ test_completion "git --paginate check" "checkout " &&
+ test_completion "git --info-path check" "checkout " &&
+ test_completion "git --no-replace-objects check" "checkout "
+'
+
+test_done
;;
esac
+( COLUMNS=1 && test $COLUMNS = 1 ) && test_set_prereq COLUMNS_CAN_BE_1
test -z "$NO_PERL" && test_set_prereq PERL
test -z "$NO_PYTHON" && test_set_prereq PYTHON
test -n "$USE_LIBPCRE" && test_set_prereq LIBPCRE
static void show_dates(char **argv, struct timeval *now)
{
- char buf[128];
+ struct strbuf buf = STRBUF_INIT;
for (; *argv; argv++) {
time_t t = atoi(*argv);
- show_date_relative(t, 0, now, buf, sizeof(buf));
- printf("%s -> %s\n", *argv, buf);
+ show_date_relative(t, 0, now, &buf);
+ printf("%s -> %s\n", *argv, buf.buf);
}
+ strbuf_release(&buf);
}
static void parse_dates(char **argv, struct timeval *now)
--- /dev/null
+#include "cache.h"
+#include "mergesort.h"
+
+struct line {
+ char *text;
+ struct line *next;
+};
+
+static void *get_next(const void *a)
+{
+ return ((const struct line *)a)->next;
+}
+
+static void set_next(void *a, void *b)
+{
+ ((struct line *)a)->next = b;
+}
+
+static int compare_strings(const void *a, const void *b)
+{
+ const struct line *x = a, *y = b;
+ return strcmp(x->text, y->text);
+}
+
+int main(int argc, const char **argv)
+{
+ struct line *line, *p = NULL, *lines = NULL;
+ struct strbuf sb = STRBUF_INIT;
+
+ for (;;) {
+ if (strbuf_getwholeline(&sb, stdin, '\n'))
+ break;
+ line = xmalloc(sizeof(struct line));
+ line->text = strbuf_detach(&sb, NULL);
+ if (p) {
+ line->next = p->next;
+ p->next = line;
+ } else {
+ line->next = NULL;
+ lines = line;
+ }
+ p = line;
+ }
+
+ lines = llist_mergesort(lines, get_next, set_next, compare_strings);
+
+ while (lines) {
+ printf("%s", lines->text);
+ lines = lines->next;
+ }
+ return 0;
+}
--- /dev/null
+/*
+ * test-revision-walking.c: test revision walking API.
+ *
+ * (C) 2012 Heiko Voigt <hvoigt@hvoigt.net>
+ *
+ * This code is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include "cache.h"
+#include "commit.h"
+#include "diff.h"
+#include "revision.h"
+
+static void print_commit(struct commit *commit)
+{
+ struct strbuf sb = STRBUF_INIT;
+ struct pretty_print_context ctx = {0};
+ ctx.date_mode = DATE_NORMAL;
+ format_commit_message(commit, " %m %s", &sb, &ctx);
+ printf("%s\n", sb.buf);
+ strbuf_release(&sb);
+}
+
+static int run_revision_walk(void)
+{
+ struct rev_info rev;
+ struct commit *commit;
+ const char *argv[] = {NULL, "--all", NULL};
+ int argc = ARRAY_SIZE(argv) - 1;
+ int got_revision = 0;
+
+ init_revisions(&rev, NULL);
+ setup_revisions(argc, argv, &rev, NULL);
+ if (prepare_revision_walk(&rev))
+ die("revision walk setup failed");
+
+ while ((commit = get_revision(&rev)) != NULL) {
+ print_commit(commit);
+ got_revision = 1;
+ }
+
+ reset_revision_walk();
+ return got_revision;
+}
+
+int main(int argc, char **argv)
+{
+ if (argc < 2)
+ return 1;
+
+ if (!strcmp(argv[1], "run-twice")) {
+ printf("1st\n");
+ if (!run_revision_walk())
+ return 1;
+ printf("2nd\n");
+ if (!run_revision_walk())
+ return 1;
+
+ return 0;
+ }
+
+ fprintf(stderr, "check usage\n");
+ return 1;
+}
data->import_marks = strbuf_detach(&arg, NULL);
} else if (mandatory) {
die("Unknown mandatory capability %s. This remote "
- "helper probably needs newer version of Git.\n",
+ "helper probably needs newer version of Git.",
capname);
}
}
status = REF_STATUS_REMOTE_REJECT;
refname = buf->buf + 6;
} else
- die("expected ok/error, helper said '%s'\n", buf->buf);
+ die("expected ok/error, helper said '%s'", buf->buf);
msg = strchr(refname, ' ');
if (msg) {
#include "branch.h"
#include "url.h"
#include "submodule.h"
+#include "string-list.h"
/* rsync support */
{
struct ref *ref;
int n = 0;
+ unsigned char head_sha1[20];
+ char *head;
+
+ head = resolve_refdup("HEAD", head_sha1, 1, NULL);
if (verbose) {
for (ref = refs; ref; ref = ref->next)
ref->status != REF_STATUS_UPTODATE &&
ref->status != REF_STATUS_OK)
n += print_one_push_status(ref, dest, n, porcelain);
- if (ref->status == REF_STATUS_REJECT_NONFASTFORWARD)
- *nonfastforward = 1;
+ if (ref->status == REF_STATUS_REJECT_NONFASTFORWARD &&
+ *nonfastforward != NON_FF_HEAD) {
+ if (!strcmp(head, ref->name))
+ *nonfastforward = NON_FF_HEAD;
+ else
+ *nonfastforward = NON_FF_OTHER;
+ }
}
}
transport->progress = verbosity >= 0 && isatty(2);
}
+static void die_with_unpushed_submodules(struct string_list *needs_pushing)
+{
+ int i;
+
+ fprintf(stderr, "The following submodule paths contain changes that can\n"
+ "not be found on any remote:\n");
+ for (i = 0; i < needs_pushing->nr; i++)
+ printf(" %s\n", needs_pushing->items[i].string);
+ fprintf(stderr, "\nPlease try\n\n"
+ " git push --recurse-submodules=on-demand\n\n"
+ "or cd to the path and use\n\n"
+ " git push\n\n"
+ "to push them to a remote.\n\n");
+
+ string_list_clear(needs_pushing, 0);
+
+ die("Aborting.");
+}
+
int transport_push(struct transport *transport,
int refspec_nr, const char **refspec, int flags,
int *nonfastforward)
flags & TRANSPORT_PUSH_MIRROR,
flags & TRANSPORT_PUSH_FORCE);
- if ((flags & TRANSPORT_RECURSE_SUBMODULES_CHECK) && !is_bare_repository()) {
+ if ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) && !is_bare_repository()) {
struct ref *ref = remote_refs;
for (; ref; ref = ref->next)
if (!is_null_sha1(ref->new_sha1) &&
- check_submodule_needs_pushing(ref->new_sha1,transport->remote->name))
- die("There are unpushed submodules, aborting.");
+ !push_unpushed_submodules(ref->new_sha1,
+ transport->remote->name))
+ die ("Failed to push all needed submodules!");
+ }
+
+ if ((flags & (TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND |
+ TRANSPORT_RECURSE_SUBMODULES_CHECK)) && !is_bare_repository()) {
+ struct ref *ref = remote_refs;
+ struct string_list needs_pushing;
+
+ memset(&needs_pushing, 0, sizeof(struct string_list));
+ needs_pushing.strdup_strings = 1;
+ for (; ref; ref = ref->next)
+ if (!is_null_sha1(ref->new_sha1) &&
+ find_unpushed_submodules(ref->new_sha1,
+ transport->remote->name, &needs_pushing))
+ die_with_unpushed_submodules(&needs_pushing);
}
push_ret = transport->push_refs(transport, remote_refs, flags);
#define TRANSPORT_PUSH_SET_UPSTREAM 32
#define TRANSPORT_RECURSE_SUBMODULES_CHECK 64
#define TRANSPORT_PUSH_PRUNE 128
+#define TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND 256
#define TRANSPORT_SUMMARY_WIDTH (2 * DEFAULT_ABBREV + 3)
void transport_set_verbosity(struct transport *transport, int verbosity,
int force_progress);
+#define NON_FF_HEAD 1
+#define NON_FF_OTHER 2
int transport_push(struct transport *connection,
int refspec_nr, const char **refspec, int flags,
int * nonfastforward);
opts->unpack_rejects[i].strdup_strings = 1;
}
-static void add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
- unsigned int set, unsigned int clear)
+static void do_add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
+ unsigned int set, unsigned int clear)
{
- unsigned int size = ce_size(ce);
- struct cache_entry *new = xmalloc(size);
-
clear |= CE_HASHED | CE_UNHASHED;
if (set & CE_REMOVE)
set |= CE_WT_REMOVE;
+ ce->next = NULL;
+ ce->ce_flags = (ce->ce_flags & ~clear) | set;
+ add_index_entry(&o->result, ce,
+ ADD_CACHE_OK_TO_ADD | ADD_CACHE_OK_TO_REPLACE);
+}
+
+static void add_entry(struct unpack_trees_options *o, struct cache_entry *ce,
+ unsigned int set, unsigned int clear)
+{
+ unsigned int size = ce_size(ce);
+ struct cache_entry *new = xmalloc(size);
+
memcpy(new, ce, size);
- new->next = NULL;
- new->ce_flags = (new->ce_flags & ~clear) | set;
- add_index_entry(&o->result, new, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
+ do_add_entry(o, new, set, clear);
}
/*
for (i = 0; i < n; i++)
if (src[i] && src[i] != o->df_conflict_entry)
- add_entry(o, src[i], 0, 0);
+ do_add_entry(o, src[i], 0, 0);
return 0;
}
if (unpack_nondirectories(n, mask, dirmask, src, names, info) < 0)
return -1;
- if (src[0]) {
+ if (o->merge && src[0]) {
if (ce_stage(src[0]))
mark_ce_used_same_name(src[0], o);
else
o->result.initialized = 1;
o->result.timestamp.sec = o->src_index->timestamp.sec;
o->result.timestamp.nsec = o->src_index->timestamp.nsec;
+ o->result.version = o->src_index->version;
o->merge_size = len;
mark_all_ce_unused(o->src_index);
struct cache_entry *a = src[1];
if (o->merge_size != 1)
- return error("Cannot do a bind merge of %d trees\n",
+ return error("Cannot do a bind merge of %d trees",
o->merge_size);
if (a && old)
return o->gently ? -1 :
--- /dev/null
+#include "varint.h"
+
+uintmax_t decode_varint(const unsigned char **bufp)
+{
+ const unsigned char *buf = *bufp;
+ unsigned char c = *buf++;
+ uintmax_t val = c & 127;
+ while (c & 128) {
+ val += 1;
+ if (!val || MSB(val, 7))
+ return 0; /* overflow */
+ c = *buf++;
+ val = (val << 7) + (c & 127);
+ }
+ *bufp = buf;
+ return val;
+}
+
+int encode_varint(uintmax_t value, unsigned char *buf)
+{
+ unsigned char varint[16];
+ unsigned pos = sizeof(varint) - 1;
+ varint[pos] = value & 127;
+ while (value >>= 7)
+ varint[--pos] = 128 | (--value & 127);
+ if (buf)
+ memcpy(buf, varint + pos, sizeof(varint) - pos);
+ return sizeof(varint) - pos;
+}
--- /dev/null
+#ifndef VARINT_H
+#define VARINT_H
+
+#include "git-compat-util.h"
+
+extern int encode_varint(uintmax_t, unsigned char *);
+extern uintmax_t decode_varint(const unsigned char **);
+
+#endif /* VARINT_H */
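As a quick sanity check of the encoding just added: this is a big-endian base-128 varint with an "offset" twist, in which every continuation byte stores its seven-bit group minus one, so each value has exactly one encoding (two-byte encodings start at 128 rather than overlapping the one-byte range). A minimal sketch, assuming it is compiled inside the git tree so that varint.h is on the include path:

	#include <assert.h>
	#include <stdio.h>
	#include "varint.h"

	int main(void)
	{
		unsigned char buf[16];
		const unsigned char *p = buf;
		int len;

		/* 300 takes two bytes, 0x81 0x2c: the leading byte holds
		 * (300 >> 7) - 1 = 1 plus the continuation bit. */
		len = encode_varint(300, buf);
		assert(len == 2 && buf[0] == 0x81 && buf[1] == 0x2c);

		/* decoding returns the value and advances the cursor */
		assert(decode_varint(&p) == 300);
		assert(p == buf + 2);

		printf("ok\n");
		return 0;
	}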
int ch;
if (!type || t[1] != ' ')
- die("invalid property line: %s\n", t);
+ die("invalid property line: %s", t);
len = atoi(&t[2]);
strbuf_reset(&val);
buffer_read_binary(&input, &val, len);
strbuf_reset(&key);
continue;
default:
- die("invalid property line: %s\n", t);
+ die("invalid property line: %s", t);
}
}
}
static void (*try_to_free_routine)(size_t size) = do_nothing;
+static void memory_limit_check(size_t size)
+{
+ static int limit = -1;
+ if (limit == -1) {
+ const char *env = getenv("GIT_ALLOC_LIMIT");
+ limit = env ? atoi(env) * 1024 : 0;
+ }
+ if (limit && size > limit)
+ die("attempting to allocate %"PRIuMAX" over limit %d",
+ (intmax_t)size, limit);
+}
+
try_to_free_t set_try_to_free_routine(try_to_free_t routine)
{
try_to_free_t old = try_to_free_routine;
void *xmalloc(size_t size)
{
- void *ret = malloc(size);
+ void *ret;
+
+ memory_limit_check(size);
+ ret = malloc(size);
if (!ret && !size)
ret = malloc(1);
if (!ret) {
void *xrealloc(void *ptr, size_t size)
{
- void *ret = realloc(ptr, size);
+ void *ret;
+
+ memory_limit_check(size);
+ ret = realloc(ptr, size);
if (!ret && !size)
ret = realloc(ptr, 1);
if (!ret) {
void *xcalloc(size_t nmemb, size_t size)
{
- void *ret = calloc(nmemb, size);
+ void *ret;
+
+ memory_limit_check(size * nmemb);
+ ret = calloc(nmemb, size);
if (!ret && (!nmemb || !size))
ret = calloc(1, 1);
if (!ret) {
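The new GIT_ALLOC_LIMIT knob is read once, on the first allocation, and is interpreted in kilobytes; leaving it unset (or setting it to 0) disables the check entirely. A minimal sketch of the behaviour, assuming the snippet is compiled and linked inside the git tree so that xmalloc() and die() are available:

	#include <stdlib.h>
	#include "cache.h"	/* declares xmalloc() */

	int main(void)
	{
		/* the limit is cached on the first call, in kilobytes */
		setenv("GIT_ALLOC_LIMIT", "1", 1);

		free(xmalloc(512));	/* under the 1 kB limit: succeeds */
		xmalloc(4 * 1024);	/* over the limit: die()s with
					 * "attempting to allocate ... over limit" */
		return 0;
	}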
#include "remote.h"
#include "refs.h"
#include "submodule.h"
+#include "column.h"
static char default_wt_status_colors[][COLOR_MAXLEN] = {
GIT_COLOR_NORMAL, /* WT_STATUS_HEADER */
{
int i;
struct strbuf buf = STRBUF_INIT;
+ static struct string_list output = STRING_LIST_INIT_DUP;
+ struct column_options copts;
if (!l->nr)
return;
for (i = 0; i < l->nr; i++) {
struct string_list_item *it;
+ const char *path;
it = &(l->items[i]);
+ path = quote_path(it->string, strlen(it->string),
+ &buf, s->prefix);
+ if (column_active(s->colopts)) {
+ string_list_append(&output, path);
+ continue;
+ }
status_printf(s, color(WT_STATUS_HEADER, s), "\t");
status_printf_more(s, color(WT_STATUS_UNTRACKED, s),
- "%s\n", quote_path(it->string, strlen(it->string),
- &buf, s->prefix));
+ "%s\n", path);
}
+
+ strbuf_release(&buf);
+ if (!column_active(s->colopts))
+ return;
+
+ strbuf_addf(&buf, "%s#\t%s",
+ color(WT_STATUS_HEADER, s),
+ color(WT_STATUS_UNTRACKED, s));
+ memset(&copts, 0, sizeof(copts));
+ copts.padding = 1;
+ copts.indent = buf.buf;
+ if (want_color(s->use_color))
+ copts.nl = GIT_COLOR_RESET "\n";
+ print_columns(&output, s->colopts, &copts);
+ string_list_clear(&output, 0);
strbuf_release(&buf);
}
enum untracked_status_type show_untracked_files;
const char *ignore_submodule_arg;
char color_palette[WT_STATUS_MAXSLOT][COLOR_MAXLEN];
+ int colopts;
/* These are computed during processing of the individual sections */
int commitable;
#define XDF_IGNORE_WHITESPACE (1 << 2)
#define XDF_IGNORE_WHITESPACE_CHANGE (1 << 3)
#define XDF_IGNORE_WHITESPACE_AT_EOL (1 << 4)
-#define XDF_PATIENCE_DIFF (1 << 5)
-#define XDF_HISTOGRAM_DIFF (1 << 6)
#define XDF_WHITESPACE_FLAGS (XDF_IGNORE_WHITESPACE | XDF_IGNORE_WHITESPACE_CHANGE | XDF_IGNORE_WHITESPACE_AT_EOL)
-#define XDL_PATCH_NORMAL '-'
-#define XDL_PATCH_REVERSE '+'
-#define XDL_PATCH_MODEMASK ((1 << 8) - 1)
-#define XDL_PATCH_IGNOREBSPACE (1 << 8)
+#define XDF_PATIENCE_DIFF (1 << 5)
+#define XDF_HISTOGRAM_DIFF (1 << 6)
+#define XDF_DIFF_ALGORITHM_MASK (XDF_PATIENCE_DIFF | XDF_HISTOGRAM_DIFF)
+#define XDF_DIFF_ALG(x) ((x) & XDF_DIFF_ALGORITHM_MASK)
#define XDL_EMIT_FUNCNAMES (1 << 0)
#define XDL_EMIT_COMMON (1 << 1)
xdalgoenv_t xenv;
diffdata_t dd1, dd2;
- if (xpp->flags & XDF_PATIENCE_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_PATIENCE_DIFF)
return xdl_do_patience_diff(mf1, mf2, xpp, xe);
- if (xpp->flags & XDF_HISTOGRAM_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF)
return xdl_do_histogram_diff(mf1, mf2, xpp, xe);
if (xdl_prepare_env(mf1, mf2, xpp, xe) < 0) {
int line1, int count1, int line2, int count2)
{
xpparam_t xpp;
- xpp.flags = index->xpp->flags & ~XDF_HISTOGRAM_DIFF;
+ xpp.flags = index->xpp->flags & ~XDF_DIFF_ALGORITHM_MASK;
return xdl_fall_back_diff(index->env, &xpp,
line1, count1, line2, count2);
int line1, int count1, int line2, int count2)
{
xpparam_t xpp;
- xpp.flags = map->xpp->flags & ~XDF_PATIENCE_DIFF;
+ xpp.flags = map->xpp->flags & ~XDF_DIFF_ALGORITHM_MASK;
return xdl_fall_back_diff(map->env, &xpp,
line1, count1, line2, count2);
if (!(recs = (xrecord_t **) xdl_malloc(narec * sizeof(xrecord_t *))))
goto abort;
- if (xpp->flags & XDF_HISTOGRAM_DIFF)
+ if (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF)
hbits = hsize = 0;
else {
hbits = xdl_hashbits((unsigned int) narec);
crec->ha = hav;
recs[nrec++] = crec;
- if (!(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_classify_record(pass, cf, rhash, hbits, crec) < 0)
+ if ((XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF) &&
+ xdl_classify_record(pass, cf, rhash, hbits, crec) < 0)
goto abort;
}
}
* (nrecs) will be updated correctly anyway by
* xdl_prepare_ctx().
*/
- sample = xpp->flags & XDF_HISTOGRAM_DIFF ? XDL_GUESS_NLINES2 : XDL_GUESS_NLINES1;
+ sample = (XDF_DIFF_ALG(xpp->flags) == XDF_HISTOGRAM_DIFF
+ ? XDL_GUESS_NLINES2 : XDL_GUESS_NLINES1);
enl1 = xdl_guess_lines(mf1, sample) + 1;
enl2 = xdl_guess_lines(mf2, sample) + 1;
- if (!(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_init_classifier(&cf, enl1 + enl2 + 1, xpp->flags) < 0) {
-
+ if (XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF &&
+ xdl_init_classifier(&cf, enl1 + enl2 + 1, xpp->flags) < 0)
return -1;
- }
if (xdl_prepare_ctx(1, mf1, enl1, xpp, &cf, &xe->xdf1) < 0) {
return -1;
}
- if (!(xpp->flags & XDF_PATIENCE_DIFF) &&
- !(xpp->flags & XDF_HISTOGRAM_DIFF) &&
- xdl_optimize_ctxs(&cf, &xe->xdf1, &xe->xdf2) < 0) {
+ if ((XDF_DIFF_ALG(xpp->flags) != XDF_PATIENCE_DIFF) &&
+ (XDF_DIFF_ALG(xpp->flags) != XDF_HISTOGRAM_DIFF) &&
+ xdl_optimize_ctxs(&cf, &xe->xdf1, &xe->xdf2) < 0) {
xdl_free_ctx(&xe->xdf2);
xdl_free_ctx(&xe->xdf1);
*
*/
+#include <limits.h>
+#include <assert.h>
#include "xinclude.h"
return ha;
}
+#ifdef XDL_FAST_HASH
+
+#define ONEBYTES 0x0101010101010101ul
+#define NEWLINEBYTES 0x0a0a0a0a0a0a0a0aul
+#define HIGHBITS 0x8080808080808080ul
+
+/* Return the high bit set in the first byte that is a zero */
+static inline unsigned long has_zero(unsigned long a)
+{
+ return ((a - ONEBYTES) & ~a) & HIGHBITS;
+}
+
+static inline long count_masked_bytes(unsigned long mask)
+{
+ if (sizeof(long) == 8) {
+ /*
+ * Jan Achrenius on G+: microoptimized version of
+ * the simpler "(mask & ONEBYTES) * ONEBYTES >> 56"
+ * that works for the bytemasks without having to
+ * mask them first.
+ */
+ return mask * 0x0001020304050608 >> 56;
+ } else {
+ /*
+ * Modified Carl Chatfield G+ version for 32-bit *
+ *
+ * (a) gives us
+ * -1 (0, ff), 0 (ffff) or 1 (ffffff)
+ * (b) gives us
+ * 0 for 0, 1 for (ff ffff ffffff)
+ * (a+b+1) gives us
+ * correct 0-3 bytemask count result
+ */
+ long a = (mask - 256) >> 23;
+ long b = mask & 1;
+ return a + b + 1;
+ }
+}
+
+unsigned long xdl_hash_record(char const **data, char const *top, long flags)
+{
+ unsigned long hash = 5381;
+ unsigned long a = 0, mask = 0;
+ char const *ptr = *data;
+ char const *end = top - sizeof(unsigned long) + 1;
+
+ if (flags & XDF_WHITESPACE_FLAGS)
+ return xdl_hash_record_with_whitespace(data, top, flags);
+
+ ptr -= sizeof(unsigned long);
+ do {
+ hash += hash << 5;
+ hash ^= a;
+ ptr += sizeof(unsigned long);
+ if (ptr >= end)
+ break;
+ a = *(unsigned long *)ptr;
+ /* Do we have any '\n' bytes in this word? */
+ mask = has_zero(a ^ NEWLINEBYTES);
+ } while (!mask);
+
+ if (ptr >= end) {
+ /*
+ * There is only a partial word left at the end of the
+ * buffer. Because we may work with a memory mapping,
+ * we have to grab the rest byte by byte instead of
+ * blindly reading it.
+ *
+ * To avoid problems with masking in a signed value,
+ * we use an unsigned char here.
+ */
+ const char *p;
+ for (p = top - 1; p >= ptr; p--)
+ a = (a << 8) + *((const unsigned char *)p);
+ mask = has_zero(a ^ NEWLINEBYTES);
+ if (!mask)
+ /*
+ * No '\n' found in the partial word. Make a
+ * mask that matches what we read.
+ */
+ mask = 1UL << (8 * (top - ptr) + 7);
+ }
+
+ /* The mask *below* the first high bit set */
+ mask = (mask - 1) & ~mask;
+ mask >>= 7;
+ hash += hash << 5;
+ hash ^= a & mask;
+
+ /* Advance past the last (possibly partial) word */
+ ptr += count_masked_bytes(mask);
+
+ if (ptr < top) {
+ assert(*ptr == '\n');
+ ptr++;
+ }
+
+ *data = ptr;
+
+ return hash;
+}
+
+#else /* XDL_FAST_HASH */
unsigned long xdl_hash_record(char const **data, char const *top, long flags) {
unsigned long ha = 5381;
return ha;
}
+#endif /* XDL_FAST_HASH */
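To make the word-at-a-time scan above a little more concrete: has_zero() leaves the high bit set in every byte position of its argument that is zero, and count_masked_bytes() converts the resulting all-ones byte mask (after the "(mask - 1) & ~mask; mask >>= 7" step in xdl_hash_record()) back into a count of whole bytes before the first '\n'. A small self-contained check of the 64-bit branch, re-declaring the two helpers locally and assuming sizeof(unsigned long) == 8:

	#include <assert.h>
	#include <stdio.h>

	#define ONEBYTES 0x0101010101010101ul
	#define HIGHBITS 0x8080808080808080ul

	/* same trick as above: high bit set for each zero byte of 'a' */
	static unsigned long has_zero(unsigned long a)
	{
		return ((a - ONEBYTES) & ~a) & HIGHBITS;
	}

	/* 64-bit branch of count_masked_bytes(): number of 0xff bytes in mask */
	static long count_masked_bytes(unsigned long mask)
	{
		return mask * 0x0001020304050608ul >> 56;
	}

	int main(void)
	{
		unsigned long a = 0x1122334455660077UL;	/* byte 1 (second-lowest) is zero */
		unsigned long mask = has_zero(a);

		assert(mask == 0x8000UL);	/* high bit of byte 1 */

		/* keep only the bytes below the first zero byte, as an
		 * all-ones mask, exactly as xdl_hash_record() does */
		mask = (mask - 1) & ~mask;
		mask >>= 7;
		assert(mask == 0xffUL);
		assert(count_masked_bytes(mask) == 1);	/* one full byte before it */

		assert(count_masked_bytes(0xffffffUL) == 3);
		printf("ok\n");
		return 0;
	}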
unsigned int xdl_hashbits(unsigned int size) {
unsigned int val = 1, bits = 0;