--- /dev/null
+Git v1.7.7.1 Release Notes
+==========================
+
+Fixes since v1.7.7
+------------------
+
+ * "git diff $tree $path" used to apply the pathspec at the output stage,
+ reading the whole tree, wasting resources.
+
+ * The code to check for updated submodules during a "git fetch" of the
+ superproject had an unnecessary quadratic loop.
+
+ * "git fetch" from a large bundle did not enable the progress output.
+
+ * When "git fsck --lost-and-found" found that an empty blob object in the
+ object store is unreachable, it incorrectly reported an error after
+ writing the lost blob out successfully.
+
+ * "git filter-branch" did not refresh the index before checking that the
+ working tree was clean.
+
+ * "git grep $tree" when run with multiple threads had an unsafe access to
+ the object database that should have been protected with mutex.
+
+ * The "--ancestry-path" option to "git log" and friends misbehaved in a
+ history with complex criss-cross merges and showed an uninteresting
+ side history as well.
+
+ * Test t1304 assumed LOGNAME is always set, which may not be true on
+ some systems.
+
+ * Tests with --valgrind failed to find "mergetool" scriptlets.
+
+ * "git patch-id" miscomputed the patch-id in a patch that has a line longer
+ than 1kB.
+
+ * When an "exec" insn failed after modifying the index and/or the working
+ tree during "rebase -i", we now check and warn that the changes need to
+ be cleaned up.
--- /dev/null
+Git v1.7.8 Release Notes (draft)
+================================
+
+Updates since v1.7.7
+--------------------
+
+ * Some git-svn and git-gui updates.
+
+ * The build procedure has been taught to take advantage of computed
+ dependencies automatically when the compiler supports it.
+
+ * The date parser now accepts timezone designators that lack the minutes
+ part, and also those that use a colon between "hh" and "mm".
+
+ * On some BSD systems, adding +s bit on directories is detrimental
+ (it is not necessary on BSD to begin with). The installation
+ procedure has been updated to take this into account.
+
+ * The contents of the /etc/mailname file, if it exists, are used as the
+ default value of the hostname part of the committer/author e-mail.
+
+ * "git am" learned how to read from patches generated by Hg.
+
+ * "git archive" talking with a remote repository can report errors
+ from the remote side in a more informative way.
+
+ * "git branch" learned an explicit --list option to ask for branches
+ listed, optionally with a glob matching pattern to limit its output.
+
+ * "git check-attr" learned "--cached" option to look at .gitattributes
+ files from the index, not from the working tree.
+
+ * Variants of "git cherry-pick" and "git revert" that take multiple
+ commits learned a "--continue" option.
+
+ * Errors at the network layer are logged by "git daemon".
+
+ * "git diff" learned "--minimal" option to spend extra cycles to come
+ up with a minimal patch output.
+
+ * "git fetch" learned to honor transfer.fsckobjects configuration to
+ validate the objects that were received from the other end, just like
+ "git receive-pack" (the receiving end of "git push") does.
+
+ * "git fetch" makes sure that the set of objects it received from the
+ other end actually completes the history before updating the refs.
+ "git receive-pack" (the receiving end of "git push") learned to do the
+ same.
+
+ * "git for-each-ref" learned "%(contents:subject)", "%(contents:body)"
+ and "%(contents:signature)". The last one is useful for signed tags.
+
+ * "git grep" used to incorrectly pay attention to .gitignore files
+ scattered in the directory it was working in even when "--no-index"
+ option was used. It no longer does this. The "--exclude-standard"
+ option needs to be given to explicitly activate the ignore
+ mechanism.
+
+ * "git grep" learned "--untracked" option, where given patterns are
+ searched in untracked (but not ignored) files as well as tracked
+ files in the working tree, so that matches in new but not yet
+ added files do not get missed.
+
+ * "git ls-remote" learned to respond to "-h"(elp) requests.
+
+ * "git send-email" learned to respond to "-h"(elp) requests.
+
+ * "git send-email" allows the value given to sendemail.aliasfile to begin
+ with "~/" to refer to the $HOME directory.
+
+ * "git send-email" forces use of Authen::SASL::Perl to work around
+ issues between Authen::SASL::Cyrus and AUTH PLAIN/LOGIN.
+
+ * "git stash" learned "--include-untracked" option to stash away
+ untracked/ignored cruft from the working tree.
+
+ * "git submodule update" learned to honor "none" as the value for
+ submodule.<name>.update to specify that the named submodule should
+ not be checked out by default.
+
+ * When populating a new submodule directory with "git submodule init",
+ the $GIT_DIR metainformation directory for submodules is created inside
+ the $GIT_DIR/modules/<name>/ directory of the superproject and referenced
+ via the gitfile mechanism. This is to make it possible to switch
+ between commits in the superproject that have and do not have the
+ submodule in the tree without re-cloning.
+
+ * "mediawiki" remote helper can interact with (surprise!) MediaWiki
+ with "git fetch" & "git push".
+
+ * "gitweb" leaked unescaped control characters from syntax hiliter
+ outputs.
+
+
+Also contains other documentation updates and minor code cleanups.
+
+
+Fixes since v1.7.7
+------------------
+
+Unless otherwise noted, all fixes in the 1.7.7.X maintenance track are
+included in this release.
+
+ * We used to drop error messages from libcurl on certain kinds of
+ errors.
+ (merge be22d92eac8 jn/maint-http-error-message later to maint).
+
+ * The error report from the smart HTTP transport showed a useless
+ message about a corrupt packet when the connection was broken in
+ the middle of a transfer.
+ (merge 6cdf022 sp/smart-http-failure later to maint).
+
+ * Adding many refs to the local repository in one go (e.g. "git fetch"
+ that fetches many tags) and looking up a ref by name in a repository
+ with too many refs were unnecessarily slow.
+ (merge 17d68a54d jp/get-ref-dir-unsorted later to maint).
+
+ * After incorrectly written third-party tools store a tag object in
+ HEAD, git diagnosed it as a repository corruption and refused to
+ proceed in order to avoid spreading the damage. We now gracefully
+ recover from such a situation by pretending as if the commit that
+ is pointed at by the tag were in HEAD.
+ (merge baf18fc nd/maint-autofix-tag-in-head later to maint).
+
+ * "git apply --whitespace=error" did not bother to report the exact
+ line number in the patch that introduced new blank lines at the end
+ of the file.
+ (merge 8557263 jc/apply-blank-at-eof-fix later to maint).
+
+
+ * "git remote rename $a $b" were not careful to match the remote name
+ against $a (i.e. source side of the remote nickname).
+ (merge b52d00aed mz/remote-rename later to maint).
+
+ * "git diff --[num]stat" used to use the number of lines of context
+ different from the default, potentially giving different results from
+ "git diff | diffstat" and confusing the users.
+ (merge f01cae918 jc/maint-diffstat-numstat-context later to maint).
+
+ * "git merge" did not understand ":/<pattern>" as a way to name a commit.
+
+ * "git mergetool" learned to use its arguments as pathspec, not a path to
+ the file that may not even have any conflict.
+ (merge 6d9990a jm/mergetool-pathspec later to maint).
+
+ * "gitweb" used to produce a non-working link while showing the contents
+ of a blob, when JavaScript actions are enabled.
+ (merge 2b07ff3ff ps/gitweb-js-with-lineno later to maint).
+
+---
+exec >/var/tmp/1
+O=v1.7.7-324-g47d45a5
+echo O=$(git describe --always master)
+git log --first-parent --oneline --reverse ^$O master
+echo
+git shortlog --no-merges ^$O master
take effect.
-h::
---help::
Show help message.
core.fileMode::
If false, the executable bit differences between the index and
- the working copy are ignored; useful on broken filesystems like FAT.
+ the working tree are ignored; useful on broken filesystems like FAT.
See linkgit:git-update-index[1].
+
The default is true, except linkgit:git-clone[1] or linkgit:git-init[1]
core.trustctime::
If false, the ctime differences between the index and the
- working copy are ignored; useful when the inode change time
+ working tree are ignored; useful when the inode change time
is regularly modified by something outside Git (file system
crawlers and some backup systems).
See linkgit:git-update-index[1]. True by default.
If true, commands which modify both the working tree and the index
will mark the updated paths with the "assume unchanged" bit in the
index. These marked files are then assumed to stay unchanged in the
- working copy, until you mark them otherwise manually - Git will not
+ working tree, until you mark them otherwise manually - Git will not
detect the file changes by lstat() calls. This is useful on systems
where those are very slow, such as Microsoft Windows.
See linkgit:git-update-index[1].
when its superproject retrieves a commit that updates the submodule's
reference.
+fetch.fsckObjects::
+ If it is set to true, git-fetch-pack will check all fetched
+ objects. It will abort in the case of a malformed object or a
+ broken link. The result of an abort are only dangling objects.
+ Defaults to false. If not set, the value of `transfer.fsckObjects`
+ is used instead.
+
fetch.unpackLimit::
If the number of objects fetched over the git native
transfer is below this
You may also specify this configuration several times.
+
Does not have a default value; you must configure this variable to
-enable note rewriting.
+enable note rewriting. Set it to `refs/notes/commits` to enable
+rewriting for the default commit notes.
+
This setting can be overridden with the `GIT_NOTES_REWRITE_REF`
environment variable, which must be a colon separated list of refs or
If it is set to true, git-receive-pack will check all received
objects. It will abort in the case of a malformed object or a
broken link. The result of an abort are only dangling objects.
- Defaults to false.
+ Defaults to false. If not set, the value of `transfer.fsckObjects`
+ is used instead.
receive.unpackLimit::
If the number of objects received in a push is below this
archiving user's umask will be used instead. See umask(2) and
linkgit:git-archive[1].
+transfer.fsckObjects::
+ When `fetch.fsckObjects` or `receive.fsckObjects` are
+ not set, the value of this variable is used instead.
+ Defaults to false.
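++
+For example, a minimal way to validate every object that enters this
+repository via "git fetch" or "git push" (an illustrative snippet; the
+per-direction variables above can still override it) is:
++
+-------------------------------------
+$ git config transfer.fsckObjects true
+-------------------------------------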
+
transfer.unpackLimit::
When `fetch.unpackLimit` or `receive.unpackLimit` are
not set, the value of this variable is used instead.
Synonym for `-p --raw`.
endif::git-format-patch[]
+--minimal::
+ Spend extra time to make sure the smallest possible
+ diff is produced.
+
--patience::
Generate a diff using the "patience diff" algorithm.
--------
[verse]
'git branch' [--color[=<when>] | --no-color] [-r | -a]
- [-v [--abbrev=<length> | --no-abbrev]]
- [(--merged | --no-merged | --contains) [<commit>]]
+ [--list] [-v [--abbrev=<length> | --no-abbrev]]
+ [(--merged | --no-merged | --contains) [<commit>]] [<pattern>...]
'git branch' [--set-upstream | --track | --no-track] [-l] [-f] <branchname> [<start-point>]
'git branch' (-m | -M) [<oldbranch>] <newbranch>
'git branch' (-d | -D) [-r] <branchname>...
With no arguments, existing branches are listed and the current branch will
be highlighted with an asterisk. Option `-r` causes the remote-tracking
-branches to be listed, and option `-a` shows both.
+branches to be listed, and option `-a` shows both. This list mode is also
+activated by the `--list` option (see below).
+<pattern> restricts the output to matching branches, where the pattern is a
+shell wildcard (i.e., matched using fnmatch(3)).
+Multiple patterns may be given; if any of them matches, the branch is shown.
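+
+For example, to list only the maintenance branches (a usage sketch; the
+pattern and the branch names shown are made up):
+
+------------
+$ git branch --list 'maint-*'
+  maint-1.7.6
+  maint-1.7.7
+------------
+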
With `--contains`, shows only the branches that contain the named commit
(in other words, the branches whose tip commits are descendants of the
OPTIONS
-------
-d::
+--delete::
Delete a branch. The branch must be fully merged in its
upstream branch, or in `HEAD` if no upstream was set with
`--track` or `--set-upstream`.
Delete a branch irrespective of its merged status.
-l::
+--create-reflog::
Create the branch's reflog. This activates recording of
all changes made to the branch ref, enabling use of date
based sha1 expressions such as "<branchname>@\{yesterday}".
already. Without `-f` 'git branch' refuses to change an existing branch.
-m::
+--move::
Move/rename a branch and the corresponding reflog.
-M::
Same as `--color=never`.
-r::
+--remotes::
List or delete (if used with -d) the remote-tracking branches.
-a::
+--all::
List both remote-tracking branches and local branches.
+--list::
+ Activate the list mode. `git branch <pattern>` would try to create a branch;
+ use `git branch --list <pattern>` to list matching branches instead.
+
-v::
--verbose::
- Show sha1 and commit subject line for each head, along with
+ When in list mode,
+ show sha1 and commit subject line for each head, along with
relationship to upstream branch (if any). If given twice, print
the name of the upstream branch, as well.
paths. If this option is used, then 'unspecified' attributes
will not be included in the output.
+--cached::
+ Consider `.gitattributes` in the index only, ignoring the working tree.
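++
+For instance (an illustrative invocation; `Makefile` is only an example
+path), the following reports the attributes recorded for `Makefile` in
+the index, ignoring any local edits to `.gitattributes` files in the
+working tree:
++
+----------------
+$ git check-attr --cached -a -- Makefile
+----------------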
+
--stdin::
Read file names from stdin instead of from the command-line.
SYNOPSIS
--------
[verse]
-'git check-ref-format' <refname>
-'git check-ref-format' --print <refname>
+'git check-ref-format' [--normalize]
+ [--[no-]allow-onelevel] [--refspec-pattern]
+ <refname>
'git check-ref-format' --branch <branchname-shorthand>
DESCRIPTION
. They can include slash `/` for hierarchical (directory)
grouping, but no slash-separated component can begin with a
- dot `.`.
+ dot `.` or end with the sequence `.lock`.
. They must contain at least one `/`. This enforces the presence of a
category like `heads/`, `tags/` etc. but the actual names are not
- restricted.
+ restricted. If the `--allow-onelevel` option is used, this rule
+ is waived.
. They cannot have two consecutive dots `..` anywhere.
. They cannot have ASCII control characters (i.e. bytes whose
values are lower than \040, or \177 `DEL`), space, tilde `~`,
- caret `{caret}`, colon `:`, question-mark `?`, asterisk `*`,
- or open bracket `[` anywhere.
+ caret `{caret}`, or colon `:` anywhere.
-. They cannot end with a slash `/` nor a dot `.`.
+. They cannot have question-mark `?`, asterisk `{asterisk}`, or open
+ bracket `[` anywhere. See the `--refspec-pattern` option below for
+ an exception to this rule.
-. They cannot end with the sequence `.lock`.
+. They cannot begin or end with a slash `/` or contain multiple
+ consecutive slashes (see the `--normalize` option below for an
+ exception to this rule)
+
+. They cannot end with a dot `.`.
. They cannot contain a sequence `@{`.
. at-open-brace `@{` is used as a notation to access a reflog entry.
-With the `--print` option, if 'refname' is acceptable, it prints the
-canonicalized name of a hypothetical reference with that name. That is,
-it prints 'refname' with any extra `/` characters removed.
-
With the `--branch` option, it expands the ``previous branch syntax''
`@{-n}`. For example, `@{-1}` is a way to refer the last branch you
were on. This option should be used by porcelains to accept this
syntax anywhere a branch name is expected, so they can act as if you
typed the branch name.
+OPTIONS
+-------
+--allow-onelevel::
+--no-allow-onelevel::
+ Controls whether one-level refnames are accepted (i.e.,
+ refnames that do not contain multiple `/`-separated
+ components). The default is `--no-allow-onelevel`.
+
+--refspec-pattern::
+ Interpret <refname> as a reference name pattern for a refspec
+ (as used with remote repositories). If this option is
+ enabled, <refname> is allowed to contain a single `{asterisk}`
+ in place of one full pathname component (e.g.,
+ `foo/{asterisk}/bar` but not `foo/bar{asterisk}`).
+
+--normalize::
+ Normalize 'refname' by removing any leading slash (`/`)
+ characters and collapsing runs of adjacent slashes between
+ name components into a single slash. Iff the normalized
+ refname is valid then print it to standard output and exit
+ with a status of 0. (`--print` is a deprecated way to spell
+ `--normalize`.)
+
+
EXAMPLES
--------
* Determine the reference name to use for a new branch:
+
------------
-$ ref=$(git check-ref-format --print "refs/heads/$newbranch") ||
+$ ref=$(git check-ref-format --normalize "refs/heads/$newbranch") ||
die "we do not like '$newbranch' as a branch name."
------------
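+
+* Check whether a proposed refspec source pattern is well formed (an
+  illustrative sketch):
++
+------------
+$ git check-ref-format --refspec-pattern 'refs/heads/*' &&
+echo "refs/heads/* is usable as the source side of a refspec"
+------------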
--------
[verse]
'git cherry-pick' [--edit] [-n] [-m parent-number] [-s] [-x] [--ff] <commit>...
+'git cherry-pick' --reset
+'git cherry-pick' --continue
DESCRIPTION
-----------
Pass the merge strategy-specific option through to the
merge strategy. See linkgit:git-merge[1] for details.
+SEQUENCER SUBCOMMANDS
+---------------------
+include::sequencer.txt[]
+
EXAMPLES
--------
`git cherry-pick master`::
In case (some of) these environment variables are not set, the information
is taken from the configuration items user.name and user.email, or, if not
-present, system user name and fully qualified hostname.
+present, system user name and the hostname used for outgoing mail (taken
+from `/etc/mailname` and falling back to the fully qualified hostname when
+that file does not exist).
A commit comment is read from stdin. If a changelog
entry is not provided via "<" redirection, 'git commit-tree' will just wait
include::i18n.txt[]
+FILES
+-----
+/etc/mailname
+
SEE ALSO
--------
linkgit:git-write-tree[1]
`committer`, and `tagger`) can be suffixed with `name`, `email`,
and `date` to extract the named component.
-The first line of the message in a commit and tag object is
-`subject`, the remaining lines are `body`. The whole message
-is `contents`.
+The complete message in a commit and tag object is `contents`.
+Its first line is `contents:subject`, the remaining lines
+are `contents:body` and the optional GPG signature
+is `contents:signature`.
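+
+For example, the following (an illustrative invocation) prints each tag
+together with the subject line of its message:
+
+------------
+$ git for-each-ref --format='%(refname) %(contents:subject)' refs/tags
+------------
+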
For sorting purposes, fields with numeric values sort in numeric
order (`objectsize`, `authordate`, `committerdate`, `taggerdate`).
[-A <post-context>] [-B <pre-context>] [-C <context>]
[-f <file>] [-e] <pattern>
[--and|--or|--not|(|)|-e <pattern>...]
- [--cached | --no-index | <tree>...]
+ [ [--exclude-standard] [--cached | --no-index | --untracked] | <tree>...]
[--] [<pathspec>...]
DESCRIPTION
blobs registered in the index file.
--no-index::
- Search files in the current directory, not just those tracked by git.
+ Search files in the current directory even when it is not managed by git.
+
+--untracked::
+ In addition to searching in the tracked files in the working
+ tree, search also in untracked files.
+
+--no-exclude-standard::
+ Also search in ignored files by not honoring the `.gitignore`
+ mechanism. Only useful with `--untracked`.
+
+--exclude-standard::
+ Do not pay attention to ignored files specified via the `.gitignore`
+ mechanism. Only useful when searching files in the current directory
+ with `--no-index`.
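++
+For example (a sketch; the pattern `TODO` is arbitrary):
++
+----------------
+# tracked plus untracked (but not ignored) files in the working tree
+$ git grep --untracked TODO
+
+# search everything in the current directory, but skip ignored files
+$ git grep --no-index --exclude-standard TODO
+----------------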
-a::
--text::
merge conflicts. It is typically run after 'git merge'.
If one or more <file> parameters are given, the merge tool program will
-be run to resolve differences on each file. If no <file> names are
-specified, 'git mergetool' will run the merge tool program on every file
-with merge conflicts.
+be run to resolve differences on each file (skipping those without
+conflicts). Specifying a directory will include all unresolved files in
+that path. If no <file> names are specified, 'git mergetool' will run
+the merge tool program on every file with merge conflicts.
OPTIONS
-------
-i::
Usually a merge requires the index file as well as the
- files in the working tree are up to date with the
+ files in the working tree to be up to date with the
current head commit, in order not to lose local
changes. This flag disables the check with the working
tree and is meant to be used when creating a merge of
--aggressive::
Usually a three-way merge by 'git read-tree' resolves
the merge for really trivial cases and leaves other
- cases unresolved in the index, so that Porcelains can
+ cases unresolved in the index, so that porcelains can
implement different merge policies. This flag makes the
- command to resolve a few more cases internally:
+ command resolve a few more cases internally:
+
* when one side removes a path and the other side leaves the path
unmodified. The resolution is to remove that path.
* when both sides remove a path. The resolution is to remove that path.
-* when both sides adds a path identically. The resolution
+* when both sides add a path identically. The resolution
is to add that path.
--prefix=<prefix>/::
Keep the current index contents, and read the contents
- of named tree-ish under directory at `<prefix>`. The
+ of the named tree-ish under the directory at `<prefix>`. The
original index file cannot have anything at the path
- `<prefix>` itself, and have nothing in `<prefix>/`
+ `<prefix>` itself, nor anything in the `<prefix>/`
directory. Note that the `<prefix>/` value must end
with a slash.
Sparse checkout
---------------
-"Sparse checkout" allows to sparsely populate working directory.
-It uses skip-worktree bit (see linkgit:git-update-index[1]) to tell
-Git whether a file on working directory is worth looking at.
+"Sparse checkout" allows populating the working directory sparsely.
+It uses the skip-worktree bit (see linkgit:git-update-index[1]) to tell
+Git whether a file in the working directory is worth looking at.
-"git read-tree" and other merge-based commands ("git merge", "git
-checkout"...) can help maintaining skip-worktree bitmap and working
+'git read-tree' and other merge-based commands ('git merge', 'git
+checkout'...) can help maintaining the skip-worktree bitmap and working
directory update. `$GIT_DIR/info/sparse-checkout` is used to
-define the skip-worktree reference bitmap. When "git read-tree" needs
-to update working directory, it will reset skip-worktree bit in index
+define the skip-worktree reference bitmap. When 'git read-tree' needs
+to update the working directory, it resets the skip-worktree bit in the index
based on this file, which uses the same syntax as .gitignore files.
-If an entry matches a pattern in this file, skip-worktree will be
-set on that entry. Otherwise, skip-worktree will be unset.
+If an entry matches a pattern in this file, skip-worktree will not be
+set on that entry. Otherwise, skip-worktree will be set.
Then it compares the new skip-worktree value with the previous one. If
-skip-worktree turns from unset to set, it will add the corresponding
-file back. If it turns from set to unset, that file will be removed.
+skip-worktree turns from set to unset, it will add the corresponding
+file back. If it turns from unset to set, that file will be removed.
While `$GIT_DIR/info/sparse-checkout` is usually used to specify what
-files are in. You can also specify what files are _not_ in, using
-negate patterns. For example, to remove file "unwanted":
+files are in, you can also specify what files are _not_ in, using
+negate patterns. For example, to remove the file `unwanted`:
----------------
-*
+/*
!unwanted
----------------
-Another tricky thing is fully repopulating working directory when you
+Another tricky thing is fully repopulating the working directory when you
no longer want sparse checkout. You cannot just disable "sparse
-checkout" because skip-worktree are still in the index and you working
-directory is still sparsely populated. You should re-populate working
+checkout" because skip-worktree bits are still in the index and your working
+directory is still sparsely populated. You should re-populate the working
directory with the `$GIT_DIR/info/sparse-checkout` file content as
follows:
----------------
-*
+/*
----------------
-Then you can disable sparse checkout. Sparse checkout support in "git
-read-tree" and similar commands is disabled by default. You need to
+Then you can disable sparse checkout. Sparse checkout support in 'git
+read-tree' and similar commands is disabled by default. You need to
turn `core.sparseCheckout` on in order to have sparse checkout
support.
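+
+For example, a minimal way to try sparse checkout (an illustrative
+sequence; "subdir/" stands for whatever paths you want populated, and
+`.git` is assumed to be the repository's $GIT_DIR):
+
+----------------
+$ git config core.sparseCheckout true
+$ echo "subdir/" >.git/info/sparse-checkout
+$ git read-tree -mu HEAD
+----------------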
<args>...::
Flags and parameters to be parsed.
+--resolve-git-dir <path>::
+ Check if <path> is a valid git-dir or a git-file pointing to a valid
+ git-dir. If <path> is a valid git-dir the resolved path to git-dir will
+ be printed.
include::revisions.txt[]
--------
[verse]
'git revert' [--edit | --no-edit] [-n] [-m parent-number] [-s] <commit>...
+'git revert' --reset
+'git revert' --continue
DESCRIPTION
-----------
Pass the merge strategy-specific option through to the
merge strategy. See linkgit:git-merge[1] for details.
+SEQUENCER SUBCOMMANDS
+---------------------
+include::sequencer.txt[]
+
EXAMPLES
--------
`git revert HEAD~3`::
init::
Initialize the submodules, i.e. register each submodule name
and url found in .gitmodules into .git/config.
+ It will also copy the value of `submodule.$name.update` into
+ .git/config.
The key used in .git/config is `submodule.$name.url`.
This command does not alter existing information in .git/config.
You can then customize the submodule clone URLs in .git/config
checkout the commit specified in the index of the containing repository.
This will make the submodules HEAD be detached unless `--rebase` or
`--merge` is specified or the key `submodule.$name.update` is set to
- `rebase` or `merge`.
+ `rebase`, `merge` or `none`.
+
If the submodule is not yet initialized, and you just want to use the
setting as stored in .gitmodules, you can automatically initialize the
+
If `--recursive` is specified, this command will recurse into the
registered submodules, and update any nested submodules within.
++
+If the configuration key `submodule.$name.update` is set to `none` the
+submodule with name `$name` will not be updated by default. This can be
+overridden by adding `--checkout` to the command.
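++
+For example (a sketch; the submodule name "plugins" is made up):
++
+--------------------------------------------------
+$ git config submodule.plugins.update none
+$ git submodule update                    # "plugins" is left untouched
+$ git submodule update --checkout plugins # check it out anyway
+--------------------------------------------------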
summary::
Show commit summary between the given commit (defaults to HEAD) and
only be done when dcommitting non-fast-forward merges where all parents but the
first have already been pushed into SVN.
+--interactive;;
+ Ask the user to confirm that a patch set should actually be sent to SVN.
+ For each patch, one may answer "yes" (accept this patch), "no" (discard this
+ patch), "all" (accept all patches), or "quit".
+ +
+ 'git svn dcommit' returns immediately if the answer is "no" or "quit", without
+ committing anything to SVN.
+
'branch'::
Create a branch in the SVN repository.
Show what revision and author last modified each line of a file. The
output of this mode is format-compatible with the output of
`svn blame' by default. Like the SVN blame command,
- local uncommitted changes in the working copy are ignored;
+ local uncommitted changes in the working tree are ignored;
the version of the file in the HEAD revision is annotated. Unknown
arguments are passed directly to 'git blame'.
+
OPTIONS
-------
-a::
+--annotate::
Make an unsigned, annotated tag object
-s::
+--sign::
Make a GPG-signed tag, using the default e-mail address's key
-u <key-id>::
+--local-user=<key-id>::
Make a GPG-signed tag, using the given key
-f::
Replace an existing tag with the given name (instead of failing)
-d::
+--delete::
Delete existing tags with the given names.
-v::
+--verify::
Verify the gpg signature of the given tag names.
-n<num>::
If the tag is not annotated, the commit message is displayed instead.
-l <pattern>::
+--list <pattern>::
List tags with names that match the given pattern (or all if no
pattern is given). Running "git tag" without arguments also
lists all tags. The pattern is a shell wildcard (i.e., matched
Only list tags which contain the specified commit.
-m <msg>::
+--message=<msg>::
Use the given tag message (instead of prompting).
If multiple `-m` options are given, their values are
concatenated as separate paragraphs.
is given.
-F <file>::
+--file=<file>::
Take the tag message from the given file. Use '-' to
read the message from the standard input.
Implies `-a` if none of `-a`, `-s`, or `-u <key-id>`
"assume unchanged" bit, either before or after you modify them.
In order to set "assume unchanged" bit, use `--assume-unchanged`
-option. To unset, use `--no-assume-unchanged`.
+option. To unset, use `--no-assume-unchanged`. To see which files
+have the "assume unchanged" bit set, use `git ls-files -v`
+(see linkgit:git-ls-files[1]).
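+
+For example (an illustrative session; `Makefile` is only a sample path,
+and the lowercase "h" tag marks the assume-unchanged entry):
+
+----------------
+$ git update-index --assume-unchanged Makefile
+$ git ls-files -v Makefile
+h Makefile
+----------------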
The command looks at `core.ignorestat` configuration variable. When
this is true, paths updated with `git update-index paths...` and
SEE ALSO
--------
linkgit:git-config[1],
-linkgit:git-add[1]
+linkgit:git-add[1],
+linkgit:git-ls-files[1]
GIT
---
--- /dev/null
+--reset::
+ Forget about the current operation in progress. Can be used
+ to clear the sequencer state after a failed cherry-pick or
+ revert.
+
+--continue::
+ Continue the operation in progress using the information in
+ '.git/sequencer'. Can be used to continue after resolving
+ conflicts in a failed cherry-pick or revert.
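++
+A typical conflicted run might look like this (an illustrative sketch;
+the path `hello.c` is made up):
++
+------------
+$ git cherry-pick topic~2..topic      # stops at a conflicting commit
+$ edit hello.c                        # resolve the conflict
+$ git add hello.c
+$ git cherry-pick --continue          # replay the remaining commits
+------------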
--- /dev/null
+argv-array API
+==============
+
+The argv-array API allows one to dynamically build and store
+NULL-terminated lists. An argv-array maintains the invariant that the
+`argv` member always points to a non-NULL array, and that the array is
+always NULL-terminated at the element pointed to by `argv[argc]`. This
+makes the result suitable for passing to functions expecting to receive
+argv from main(), or the link:api-run-command.html[run-command API].
+
+The link:api-string-list.html[string-list API] is similar, but cannot be
+used for these purposes; instead of storing a straight string pointer,
+it contains an item structure with a `util` field that is not compatible
+with the traditional argv interface.
+
+Each `argv_array` manages its own memory. Any strings pushed into the
+array are duplicated, and all memory is freed by argv_array_clear().
+
+Data Structures
+---------------
+
+`struct argv_array`::
+
+ A single array. This should be initialized by assignment from
+ `ARGV_ARRAY_INIT`, or by calling `argv_array_init`. The `argv`
+ member contains the actual array; the `argc` member contains the
+ number of elements in the array, not including the terminating
+ NULL.
+
+Functions
+---------
+
+`argv_array_init`::
+ Initialize an array. This is no different than assigning from
+ `ARGV_ARRAY_INIT`.
+
+`argv_array_push`::
+ Push a copy of a string onto the end of the array.
+
+`argv_array_pushf`::
+ Format a string and push it onto the end of the array. This is a
+ convenience wrapper combining `strbuf_addf` and `argv_array_push`.
+
+`argv_array_clear`::
+ Free all memory associated with the array and return it to the
+ initial, empty state.
describes the group or an empty string.
Start the description with an upper-case letter.
-`OPT_BOOLEAN(short, long, &int_var, description)`::
- Introduce a boolean option.
- `int_var` is incremented on each use.
+`OPT_BOOL(short, long, &int_var, description)`::
+ Introduce a boolean option. `int_var` is set to one with
+ `--option` and set to zero with `--no-option`.
+
+`OPT_COUNTUP(short, long, &int_var, description)`::
+ Introduce a count-up option.
+ `int_var` is incremented on each use of `--option`, and
+ reset to zero with `--no-option`.
`OPT_BIT(short, long, &int_var, description, mask)`::
Introduce a boolean option.
If used, `int_var` is bitwise-anded with the inverted `mask`.
`OPT_SET_INT(short, long, &int_var, description, integer)`::
- Introduce a boolean option.
- If used, set `int_var` to `integer`.
+ Introduce an integer option.
+ `int_var` is set to `integer` with `--option`, and
+ reset to zero with `--no-option`.
`OPT_SET_PTR(short, long, &ptr_var, description, ptr)`::
Introduce a boolean option.
"auto", set `int_var` to 1 if stdout is a tty or a pager,
0 otherwise.
+`OPT_NOOP_NOARG(short, long)`::
+ Introduce an option that has no effect and takes no arguments.
+ Use it to hide deprecated options that are still to be recognized
+ and ignored silently.
+
The last element of the array must be `OPT_END()`.
--- /dev/null
+sha1-array API
+==============
+
+The sha1-array API provides storage and manipulation of sets of SHA1
+identifiers. The emphasis is on storage and processing efficiency,
+making them suitable for large lists. Note that the ordering of items is
+not preserved over some operations.
+
+Data Structures
+---------------
+
+`struct sha1_array`::
+
+ A single array of SHA1 hashes. This should be initialized by
+ assignment from `SHA1_ARRAY_INIT`. The `sha1` member contains
+ the actual data. The `nr` member contains the number of items in
+ the set. The `alloc` and `sorted` members are used internally,
+ and should not be needed by API callers.
+
+Functions
+---------
+
+`sha1_array_append`::
+ Add an item to the set. The sha1 will be placed at the end of
+ the array (but note that some operations below may lose this
+ ordering).
+
+`sha1_array_sort`::
+ Sort the elements in the array.
+
+`sha1_array_lookup`::
+ Perform a binary search of the array for a specific sha1.
+ If found, returns the offset (in number of elements) of the
+ sha1. If not found, returns a negative integer. If the array is
+ not sorted, this function has the side effect of sorting it.
+
+`sha1_array_clear`::
+ Free all memory associated with the array and return it to the
+ initial, empty state.
+
+`sha1_array_for_each_unique`::
+ Efficiently iterate over each unique element of the list,
+ executing the callback function for each one. If the array is
+ not sorted, this function has the side effect of sorting it.
+
+Examples
+--------
+
+-----------------------------------------
+void print_callback(const unsigned char sha1[20],
+ void *data)
+{
+ printf("%s\n", sha1_to_hex(sha1));
+}
+
+void some_func(void)
+{
+ struct sha1_array hashes = SHA1_ARRAY_INIT;
+ unsigned char sha1[20];
+
+ /* Read objects into our set */
+ while (read_object_from_stdin(sha1))
+ sha1_array_append(&hashes, sha1);
+
+ /* Check if some objects are in our set */
+ while (read_object_from_stdin(sha1)) {
+ if (sha1_array_lookup(&hashes, sha1) >= 0)
+ printf("it's in there!\n");
+
+ /*
+ * Print the unique set of objects. We could also have
+ * avoided adding duplicate objects in the first place,
+ * but we would end up re-sorting the array repeatedly.
+ * Instead, this will sort once and then skip duplicates
+ * in linear time.
+ */
+ sha1_array_for_each_unique(&hashes, print_callback, NULL);
+}
+-----------------------------------------
"0039git-upload-pack /schacon/gitbook.git\0host=example.com\0" |
nc -v example.com 9418
+If the server refuses the request for some reason, it can abort
+gracefully with an error message.
+
+----
+ error-line = PKT-LINE("ERR" SP explanation-text)
+----
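+
+For example, a server that does not export the requested repository
+might answer with a single packet like this (the repository name is
+hypothetical; the first four characters are the pkt-line length):
+
+----
+   "0022ERR no such repository foo.git"
+----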
+
SSH Transport
-------------
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.7.7
+DEF_VER=v1.7.7.GIT
LF='
'
# DEFAULT_EDITOR='$GIT_FALLBACK_EDITOR',
# DEFAULT_EDITOR='"C:\Program Files\Vim\gvim.exe" --nofork'
#
-# Define COMPUTE_HEADER_DEPENDENCIES if your compiler supports the -MMD option
-# and you want to avoid rebuilding objects when an unrelated header file
-# changes.
-#
# Define CHECK_HEADER_DEPENDENCIES to check for problems in the hard-coded
# dependency rules.
#
LIB_H += advice.h
LIB_H += archive.h
+LIB_H += argv-array.h
LIB_H += attr.h
LIB_H += blob.h
LIB_H += builtin.h
LIB_H += compat/win32/syslog.h
LIB_H += compat/win32/sys/poll.h
LIB_H += compat/win32/dirent.h
+LIB_H += connected.h
LIB_H += csum-file.h
LIB_H += decorate.h
LIB_H += delta.h
LIB_H += resolve-undo.h
LIB_H += revision.h
LIB_H += run-command.h
+LIB_H += sequencer.h
LIB_H += sha1-array.h
LIB_H += sha1-lookup.h
LIB_H += sideband.h
LIB_OBJS += archive.o
LIB_OBJS += archive-tar.o
LIB_OBJS += archive-zip.o
+LIB_OBJS += argv-array.o
LIB_OBJS += attr.o
LIB_OBJS += base85.o
LIB_OBJS += bisect.o
LIB_OBJS += compat/obstack.o
LIB_OBJS += config.o
LIB_OBJS += connect.o
+LIB_OBJS += connected.o
LIB_OBJS += convert.o
LIB_OBJS += copy.o
LIB_OBJS += csum-file.o
LIB_OBJS += run-command.o
LIB_OBJS += server-info.o
LIB_OBJS += setup.o
+LIB_OBJS += sequencer.o
LIB_OBJS += sha1-array.o
LIB_OBJS += sha1-lookup.o
LIB_OBJS += sha1_file.o
NO_STRLCPY = YesPlease
NO_MKSTEMPS = YesPlease
HAVE_PATHS_H = YesPlease
+ DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease
endif
ifeq ($(uname_S),UnixWare)
CC = cc
ifdef CHECK_HEADER_DEPENDENCIES
COMPUTE_HEADER_DEPENDENCIES =
USE_COMPUTED_HEADER_DEPENDENCIES =
+else
+ifndef COMPUTE_HEADER_DEPENDENCIES
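+# Auto-detect computed header dependency support: try compiling an empty
+# C file with the -MF/-MMD/-MP options and enable the feature only if
+# the compiler exits successfully.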
+dep_check = $(shell $(CC) $(ALL_CFLAGS) \
+ -c -MF /dev/null -MMD -MP -x c /dev/null -o /dev/null 2>&1; \
+ echo $$?)
+ifeq ($(dep_check),0)
+COMPUTE_HEADER_DEPENDENCIES=YesPlease
+endif
+endif
endif
ifdef COMPUTE_HEADER_DEPENDENCIES
ifdef COMPUTE_HEADER_DEPENDENCIES
$(dep_dirs):
- mkdir -p $@
+ @mkdir -p $@
missing_dep_dirs := $(filter-out $(wildcard $(dep_dirs)),$(dep_dirs))
dep_file = $(dir $@).depend/$(notdir $@).d
$(INSTALL) $(install_bindir_programs) '$(DESTDIR_SQ)$(bindir_SQ)'
$(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install
$(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(mergetools_instdir_SQ)'
- (cd mergetools && $(TAR) cf - .) | \
- (cd '$(DESTDIR_SQ)$(mergetools_instdir_SQ)' && umask 022 && $(TAR) xof -)
+ $(INSTALL) -m 644 mergetools/* '$(DESTDIR_SQ)$(mergetools_instdir_SQ)'
ifndef NO_PERL
$(MAKE) -C perl prefix='$(prefix_SQ)' DESTDIR='$(DESTDIR_SQ)' install
$(MAKE) -C gitweb install
-Documentation/RelNotes/1.7.7.txt
\ No newline at end of file
+Documentation/RelNotes/1.7.8.txt
\ No newline at end of file
{ "detachedhead", &advice_detached_head },
};
+void advise(const char *advice, ...)
+{
+ va_list params;
+
+ va_start(params, advice);
+ vreportf("hint: ", advice, params);
+ va_end(params);
+}
+
int git_default_advice_config(const char *var, const char *value)
{
const char *k = skip_prefix(var, "advice.");
return 0;
}
-void NORETURN die_resolve_conflict(const char *me)
+int error_resolve_conflict(const char *me)
{
- if (advice_resolve_conflict)
+ error("'%s' is not possible because you have unmerged files.", me);
+ if (advice_resolve_conflict) {
/*
* Message used both when 'git commit' fails and when
* other commands doing a merge do.
*/
- die("'%s' is not possible because you have unmerged files.\n"
- "Please, fix them up in the work tree, and then use 'git add/rm <file>' as\n"
- "appropriate to mark resolution and make a commit, or use 'git commit -a'.", me);
- else
- die("'%s' is not possible because you have unmerged files.", me);
+ advise("Fix them up in the work tree,");
+ advise("and then use 'git add/rm <file>' as");
+ advise("appropriate to mark resolution and make a commit,");
+ advise("or use 'git commit -a'.");
+ }
+ return -1;
+}
+
+void NORETURN die_resolve_conflict(const char *me)
+{
+ error_resolve_conflict(me);
+ die("Exiting because of an unresolved conflict.");
}
extern int advice_detached_head;
int git_default_advice_config(const char *var, const char *value);
-
+void advise(const char *advice, ...);
+int error_resolve_conflict(const char *me);
extern void NORETURN die_resolve_conflict(const char *me);
#endif /* ADVICE_H */
"prepend prefix to each pathname in the archive"),
OPT_STRING('o', "output", &output, "file",
"write the archive to this file"),
- OPT_BOOLEAN(0, "worktree-attributes", &worktree_attributes,
+ OPT_BOOL(0, "worktree-attributes", &worktree_attributes,
"read .gitattributes in working directory"),
OPT__VERBOSE(&verbose, "report archived files on stderr"),
OPT__COMPR('0', &compression_level, "store only", 0),
OPT__COMPR_HIDDEN('8', &compression_level, 8),
OPT__COMPR('9', &compression_level, "compress better", 9),
OPT_GROUP(""),
- OPT_BOOLEAN('l', "list", &list,
+ OPT_BOOL('l', "list", &list,
"list supported archive formats"),
OPT_GROUP(""),
OPT_STRING(0, "remote", &remote, "repo",
--- /dev/null
+#include "cache.h"
+#include "argv-array.h"
+#include "strbuf.h"
+
+static const char *empty_argv_storage = NULL;
+const char **empty_argv = &empty_argv_storage;
+
+void argv_array_init(struct argv_array *array)
+{
+ array->argv = empty_argv;
+ array->argc = 0;
+ array->alloc = 0;
+}
+
+static void argv_array_push_nodup(struct argv_array *array, const char *value)
+{
+ if (array->argv == empty_argv)
+ array->argv = NULL;
+
+ ALLOC_GROW(array->argv, array->argc + 2, array->alloc);
+ array->argv[array->argc++] = value;
+ array->argv[array->argc] = NULL;
+}
+
+void argv_array_push(struct argv_array *array, const char *value)
+{
+ argv_array_push_nodup(array, xstrdup(value));
+}
+
+void argv_array_pushf(struct argv_array *array, const char *fmt, ...)
+{
+ va_list ap;
+ struct strbuf v = STRBUF_INIT;
+
+ va_start(ap, fmt);
+ strbuf_vaddf(&v, fmt, ap);
+ va_end(ap);
+
+ argv_array_push_nodup(array, strbuf_detach(&v, NULL));
+}
+
+void argv_array_clear(struct argv_array *array)
+{
+ if (array->argv != empty_argv) {
+ int i;
+ for (i = 0; i < array->argc; i++)
+ free((char *)array->argv[i]);
+ free(array->argv);
+ }
+ argv_array_init(array);
+}
--- /dev/null
+#ifndef ARGV_ARRAY_H
+#define ARGV_ARRAY_H
+
+extern const char **empty_argv;
+
+struct argv_array {
+ const char **argv;
+ int argc;
+ int alloc;
+};
+
+#define ARGV_ARRAY_INIT { empty_argv, 0, 0 }
+
+void argv_array_init(struct argv_array *);
+void argv_array_push(struct argv_array *, const char *);
+__attribute__((format (printf,2,3)))
+void argv_array_pushf(struct argv_array *, const char *fmt, ...);
+void argv_array_clear(struct argv_array *);
+
+#endif /* ARGV_ARRAY_H */
#include "cache.h"
#include "exec_cmd.h"
#include "attr.h"
+#include "dir.h"
const char git_attr__true[] = "(builtin)true";
const char git_attr__false[] = "\0(builtin)false";
#define ATTR__UNSET NULL
#define ATTR__UNKNOWN git_attr__unknown
-static const char *attributes_file;
-
/* This is a randomly chosen prime. */
#define HASHSIZE 257
return !git_env_bool("GIT_ATTR_NOSYSTEM", 0);
}
-static int git_attr_config(const char *var, const char *value, void *dummy)
-{
- if (!strcmp(var, "core.attributesfile"))
- return git_config_pathname(&attributes_file, var, value);
-
- return 0;
-}
-
static void bootstrap_attr_stack(void)
{
if (!attr_stack) {
}
}
- git_config(git_attr_config, NULL);
- if (attributes_file) {
- elem = read_attr_from_file(attributes_file, 1);
+ if (git_attributes_file) {
+ elem = read_attr_from_file(git_attributes_file, 1);
if (elem) {
elem->origin = NULL;
elem->prev = attr_stack;
if (!is_bare_repository() || direction == GIT_ATTR_INDEX) {
elem = read_attr(GITATTRIBUTES_FILE, 1);
- elem->origin = strdup("");
+ elem->origin = xstrdup("");
elem->prev = attr_stack;
attr_stack = elem;
debug_push(elem);
{
struct attr_stack *elem, *info;
int dirlen, len;
- struct strbuf pathbuf;
const char *cp;
cp = strrchr(path, '/');
else
dirlen = cp - path;
- strbuf_init(&pathbuf, dirlen+2+strlen(GITATTRIBUTES_FILE));
-
/*
* At the bottom of the attribute stack is the built-in
* set of attribute definitions, followed by the contents
* Read from parent directories and push them down
*/
if (!is_bare_repository() || direction == GIT_ATTR_INDEX) {
- while (1) {
- char *cp;
+ struct strbuf pathbuf = STRBUF_INIT;
+ while (1) {
len = strlen(attr_stack->origin);
if (dirlen <= len)
break;
- strbuf_reset(&pathbuf);
- strbuf_add(&pathbuf, path, dirlen);
+ cp = memchr(path + len + 1, '/', dirlen - len - 1);
+ if (!cp)
+ cp = path + dirlen;
+ strbuf_add(&pathbuf, path, cp - path);
strbuf_addch(&pathbuf, '/');
- cp = strchr(pathbuf.buf + len + 1, '/');
- strcpy(cp + 1, GITATTRIBUTES_FILE);
+ strbuf_addstr(&pathbuf, GITATTRIBUTES_FILE);
elem = read_attr(pathbuf.buf, 0);
- *cp = '\0';
- elem->origin = strdup(pathbuf.buf);
+ strbuf_setlen(&pathbuf, cp - path);
+ elem->origin = strbuf_detach(&pathbuf, NULL);
elem->prev = attr_stack;
attr_stack = elem;
debug_push(elem);
}
- }
- strbuf_release(&pathbuf);
+ strbuf_release(&pathbuf);
+ }
/*
* Finally push the "info" one at the top of the stack.
/* match basename */
const char *basename = strrchr(pathname, '/');
basename = basename ? basename + 1 : pathname;
- return (fnmatch(pattern, basename, 0) == 0);
+ return (fnmatch_icase(pattern, basename, 0) == 0);
}
/*
* match with FNM_PATHNAME; the pattern has base implicitly
return 0;
if (baselen != 0)
baselen++;
- return fnmatch(pattern, pathname + baselen, FNM_PATHNAME) == 0;
+ return fnmatch_icase(pattern, pathname + baselen, FNM_PATHNAME) == 0;
}
static int macroexpand_one(int attr_nr, int rem);
#include "log-tree.h"
#include "bisect.h"
#include "sha1-array.h"
+#include "argv-array.h"
static struct sha1_array good_revs;
static struct sha1_array skipped_revs;
static const unsigned char *current_bad_sha1;
-struct argv_array {
- const char **argv;
- int argv_nr;
- int argv_alloc;
-};
-
static const char *argv_checkout[] = {"checkout", "-q", NULL, "--", NULL};
static const char *argv_show_branch[] = {"show-branch", NULL, NULL};
static const char *argv_update_ref[] = {"update-ref", "--no-deref", "BISECT_HEAD", NULL, NULL};
return best;
}
-static void argv_array_push(struct argv_array *array, const char *string)
-{
- ALLOC_GROW(array->argv, array->argv_nr + 1, array->argv_alloc);
- array->argv[array->argv_nr++] = string;
-}
-
-static void argv_array_push_sha1(struct argv_array *array,
- const unsigned char *sha1,
- const char *format)
-{
- struct strbuf buf = STRBUF_INIT;
- strbuf_addf(&buf, format, sha1_to_hex(sha1));
- argv_array_push(array, strbuf_detach(&buf, NULL));
-}
-
static int register_ref(const char *refname, const unsigned char *sha1,
int flags, void *cb_data)
{
die_errno("Could not open file '%s'", filename);
while (strbuf_getline(&str, fp, '\n') != EOF) {
- char *quoted;
- int res;
-
strbuf_trim(&str);
- quoted = strbuf_detach(&str, NULL);
- res = sq_dequote_to_argv(quoted, &array->argv,
- &array->argv_nr, &array->argv_alloc);
- if (res)
+ if (sq_dequote_to_argv_array(str.buf, array))
die("Badly quoted content in file '%s': %s",
- filename, quoted);
+ filename, str.buf);
}
strbuf_release(&str);
const char *bad_format, const char *good_format,
int read_paths)
{
- struct argv_array rev_argv = { NULL, 0, 0 };
+ struct argv_array rev_argv = ARGV_ARRAY_INIT;
int i;
init_revisions(revs, prefix);
revs->commit_format = CMIT_FMT_UNSPECIFIED;
/* rev_argv.argv[0] will be ignored by setup_revisions */
- argv_array_push(&rev_argv, xstrdup("bisect_rev_setup"));
- argv_array_push_sha1(&rev_argv, current_bad_sha1, bad_format);
+ argv_array_push(&rev_argv, "bisect_rev_setup");
+ argv_array_pushf(&rev_argv, bad_format, sha1_to_hex(current_bad_sha1));
for (i = 0; i < good_revs.nr; i++)
- argv_array_push_sha1(&rev_argv, good_revs.sha1[i],
- good_format);
- argv_array_push(&rev_argv, xstrdup("--"));
+ argv_array_pushf(&rev_argv, good_format,
+ sha1_to_hex(good_revs.sha1[i]));
+ argv_array_push(&rev_argv, "--");
if (read_paths)
read_bisect_paths(&rev_argv);
- argv_array_push(&rev_argv, NULL);
- setup_revisions(rev_argv.argv_nr, rev_argv.argv, revs, NULL);
+ setup_revisions(rev_argv.argc, rev_argv.argv, revs, NULL);
+ /* XXX leak rev_argv, as "revs" may still be pointing to it */
}
static void bisect_common(struct rev_info *revs)
{
struct rev_info revs;
struct object_array pending_copy;
- int i, res;
+ int res;
bisect_rev_setup(&revs, prefix, "^%s", "%s", 0);
/* Save pending objects, so they can be cleaned up later. */
- memset(&pending_copy, 0, sizeof(pending_copy));
- for (i = 0; i < revs.pending.nr; i++)
- add_object_array(revs.pending.objects[i].item,
- revs.pending.objects[i].name,
- &pending_copy);
+ pending_copy = revs.pending;
+ revs.leak_pending = 1;
+ /*
+ * bisect_common calls prepare_revision_walk right away, which
+ * (together with .leak_pending = 1) makes us the sole owner of
+ * the list of pending objects.
+ */
bisect_common(&revs);
res = (revs.commits != NULL);
/* Clean up objects used, as they will be reused. */
- for (i = 0; i < pending_copy.nr; i++) {
- struct object *o = pending_copy.objects[i].item;
- clear_commit_marks((struct commit *)o, ALL_REV_FLAGS);
- }
+ clear_commit_marks_for_object_array(&pending_copy, ALL_REV_FLAGS);
+ free(pending_copy.objects);
return res;
}
#include "refs.h"
#include "remote.h"
#include "commit.h"
+#include "sequencer.h"
struct tracking {
struct refspec spec;
unlink(git_path("MERGE_MSG"));
unlink(git_path("MERGE_MODE"));
unlink(git_path("SQUASH_MSG"));
+ remove_sequencer_state(0);
}
char *old, *oldlines;
struct strbuf newlines;
int new_blank_lines_at_end = 0;
+ int found_new_blank_lines_at_end = 0;
+ int hunk_linenr = frag->linenr;
unsigned long leading, trailing;
int pos, applied_pos;
struct image preimage;
error("invalid start of line: '%c'", first);
return -1;
}
- if (added_blank_line)
+ if (added_blank_line) {
+ if (!new_blank_lines_at_end)
+ found_new_blank_lines_at_end = hunk_linenr;
new_blank_lines_at_end++;
+ }
else if (is_blank_context)
;
else
new_blank_lines_at_end = 0;
patch += len;
size -= len;
+ hunk_linenr++;
}
if (inaccurate_eof &&
old > oldlines && old[-1] == '\n' &&
preimage.nr + applied_pos >= img->nr &&
(ws_rule & WS_BLANK_AT_EOF) &&
ws_error_action != nowarn_ws_error) {
- record_ws_error(WS_BLANK_AT_EOF, "+", 1, frag->linenr);
+ record_ws_error(WS_BLANK_AT_EOF, "+", 1,
+ found_new_blank_lines_at_end);
if (ws_error_action == correct_ws_error) {
while (new_blank_lines_at_end--)
remove_last_line(&postimage);
int i;
int errs = 0;
int is_not_gitdir = !startup_info->have_repository;
- int binary;
int force_apply = 0;
const char *whitespace_option = NULL;
"ignore additions made by the patch"),
OPT_BOOLEAN(0, "stat", &diffstat,
"instead of applying the patch, output diffstat for the input"),
- { OPTION_BOOLEAN, 0, "allow-binary-replacement", &binary,
- NULL, "old option, now no-op",
- PARSE_OPT_HIDDEN | PARSE_OPT_NOARG },
- { OPTION_BOOLEAN, 0, "binary", &binary,
- NULL, "old option, now no-op",
- PARSE_OPT_HIDDEN | PARSE_OPT_NOARG },
+ OPT_NOOP_NOARG(0, "allow-binary-replacement"),
+ OPT_NOOP_NOARG(0, "binary"),
OPT_BOOLEAN(0, "numstat", &numstat,
"shows number of added and deleted lines in decimal notation"),
OPT_BOOLEAN(0, "summary", &summary,
if (strcmp(buf, "ACK")) {
if (len > 5 && !prefixcmp(buf, "NACK "))
die(_("git archive: NACK %s"), buf + 5);
+ if (len > 4 && !prefixcmp(buf, "ERR "))
+ die(_("remote error: %s"), buf + 4);
die(_("git archive: protocol error"));
}
struct append_ref_cb {
struct ref_list *ref_list;
+ const char **pattern;
int ret;
};
+static int match_patterns(const char **pattern, const char *refname)
+{
+ if (!*pattern)
+ return 1; /* no pattern always matches */
+ while (*pattern) {
+ if (!fnmatch(*pattern, refname, 0))
+ return 1;
+ pattern++;
+ }
+ return 0;
+}
+
static int append_ref(const char *refname, const unsigned char *sha1, int flags, void *cb_data)
{
struct append_ref_cb *cb = (struct append_ref_cb *)(cb_data);
if ((kind & ref_list->kinds) == 0)
return 0;
+ if (!match_patterns(cb->pattern, refname))
+ return 0;
+
commit = NULL;
if (ref_list->verbose || ref_list->with_commit || merge_filter != NO_FILTER) {
commit = lookup_commit_reference_gently(sha1, 1);
}
}
-static int print_ref_list(int kinds, int detached, int verbose, int abbrev, struct commit_list *with_commit)
+static int print_ref_list(int kinds, int detached, int verbose, int abbrev, struct commit_list *with_commit, const char **pattern)
{
int i;
struct append_ref_cb cb;
if (merge_filter != NO_FILTER)
init_revisions(&ref_list.revs, NULL);
cb.ref_list = &ref_list;
+ cb.pattern = pattern;
cb.ret = 0;
for_each_rawref(append_ref, &cb);
if (merge_filter != NO_FILTER) {
qsort(ref_list.list, ref_list.index, sizeof(struct ref_item), ref_cmp);
detached = (detached && (kinds & REF_LOCAL_BRANCH));
- if (detached)
+ if (detached && match_patterns(pattern, "HEAD"))
show_detached(&ref_list);
for (i = 0; i < ref_list.index; i++) {
int cmd_branch(int argc, const char **argv, const char *prefix)
{
- int delete = 0, rename = 0, force_create = 0;
+ int delete = 0, rename = 0, force_create = 0, list = 0;
int verbose = 0, abbrev = -1, detached = 0;
int reflog = 0;
enum branch_track track;
OPT_SET_INT( 0, "set-upstream", &track, "change upstream info",
BRANCH_TRACK_OVERRIDE),
OPT__COLOR(&branch_use_color, "use colored output"),
- OPT_SET_INT('r', NULL, &kinds, "act on remote-tracking branches",
+ OPT_SET_INT('r', "remotes", &kinds, "act on remote-tracking branches",
REF_REMOTE_BRANCH),
{
OPTION_CALLBACK, 0, "contains", &with_commit, "commit",
OPT__ABBREV(&abbrev),
OPT_GROUP("Specific git-branch actions:"),
- OPT_SET_INT('a', NULL, &kinds, "list both remote-tracking and local branches",
+ OPT_SET_INT('a', "all", &kinds, "list both remote-tracking and local branches",
REF_REMOTE_BRANCH | REF_LOCAL_BRANCH),
- OPT_BIT('d', NULL, &delete, "delete fully merged branch", 1),
+ OPT_BIT('d', "delete", &delete, "delete fully merged branch", 1),
OPT_BIT('D', NULL, &delete, "delete branch (even if not merged)", 2),
- OPT_BIT('m', NULL, &rename, "move/rename a branch and its reflog", 1),
+ OPT_BIT('m', "move", &rename, "move/rename a branch and its reflog", 1),
OPT_BIT('M', NULL, &rename, "move/rename a branch, even if target exists", 2),
- OPT_BOOLEAN('l', NULL, &reflog, "create the branch's reflog"),
+ OPT_BOOLEAN(0, "list", &list, "list branch names"),
+ OPT_BOOLEAN('l', "create-reflog", &reflog, "create the branch's reflog"),
OPT__FORCE(&force_create, "force creation (when already exists)"),
{
OPTION_CALLBACK, 0, "no-merged", &merge_filter_ref,
argc = parse_options(argc, argv, prefix, options, builtin_branch_usage,
0);
- if (!!delete + !!rename + !!force_create > 1)
+
+ if (!delete && !rename && !force_create && argc == 0)
+ list = 1;
+
+ if (!!delete + !!rename + !!force_create + !!list > 1)
usage_with_options(builtin_branch_usage, options);
if (abbrev == -1)
if (delete)
return delete_branches(argc, argv, delete > 1, kinds);
- else if (argc == 0)
- return print_ref_list(kinds, detached, verbose, abbrev, with_commit);
+ else if (list)
+ return print_ref_list(kinds, detached, verbose, abbrev,
+ with_commit, argv);
else if (rename && (argc == 1))
rename_branch(head, argv[0], rename > 1);
else if (rename && (argc == 2))
} else if (!strcmp(cmd, "unbundle")) {
if (!startup_info->have_repository)
die(_("Need a repository to unbundle."));
- return !!unbundle(&header, bundle_fd) ||
+ return !!unbundle(&header, bundle_fd, 0) ||
list_bundle_refs(&header, argc, argv);
} else
usage(builtin_bundle_usage);
#include "parse-options.h"
static int all_attrs;
+static int cached_attrs;
static int stdin_paths;
static const char * const check_attr_usage[] = {
"git check-attr [-a | --all | attr...] [--] pathname...",
static const struct option check_attr_options[] = {
OPT_BOOLEAN('a', "all", &all_attrs, "report all attributes set on file"),
+ OPT_BOOLEAN(0, "cached", &cached_attrs, "use .gitattributes only from the index"),
OPT_BOOLEAN(0 , "stdin", &stdin_paths, "read file names from stdin"),
OPT_BOOLEAN('z', NULL, &null_term_line,
"input paths are terminated by a null character"),
struct git_attr_check *check;
int cnt, i, doubledash, filei;
+ git_config(git_default_config, NULL);
+
argc = parse_options(argc, argv, prefix, check_attr_options,
check_attr_usage, PARSE_OPT_KEEP_DASHDASH);
die("invalid cache");
}
+ if (cached_attrs)
+ git_attr_set_direction(GIT_ATTR_INDEX, NULL);
+
doubledash = -1;
for (i = 0; doubledash < 0 && i < argc; i++) {
if (!strcmp(argv[i], "--"))
#include "strbuf.h"
static const char builtin_check_ref_format_usage[] =
-"git check-ref-format [--print] <refname>\n"
+"git check-ref-format [--normalize] [options] <refname>\n"
" or: git check-ref-format --branch <branchname-shorthand>";
/*
- * Remove leading slashes and replace each run of adjacent slashes in
- * src with a single slash, and write the result to dst.
+ * Return a copy of refname but with leading slashes removed and runs
+ * of adjacent slashes replaced with single slashes.
*
* This function is similar to normalize_path_copy(), but stripped down
* to meet check_ref_format's simpler needs.
*/
-static void collapse_slashes(char *dst, const char *src)
+static char *collapse_slashes(const char *refname)
{
+ char *ret = xmalloc(strlen(refname) + 1);
char ch;
char prev = '/';
+ char *cp = ret;
- while ((ch = *src++) != '\0') {
+ while ((ch = *refname++) != '\0') {
if (prev == '/' && ch == prev)
continue;
- *dst++ = ch;
+ *cp++ = ch;
prev = ch;
}
- *dst = '\0';
+ *cp = '\0';
+ return ret;
}
static int check_ref_format_branch(const char *arg)
return 0;
}
-static int check_ref_format_print(const char *arg)
-{
- char *refname = xmalloc(strlen(arg) + 1);
-
- if (check_ref_format(arg))
- return 1;
- collapse_slashes(refname, arg);
- printf("%s\n", refname);
- return 0;
-}
-
int cmd_check_ref_format(int argc, const char **argv, const char *prefix)
{
+ int i;
+ int normalize = 0;
+ int flags = 0;
+ const char *refname;
+
if (argc == 2 && !strcmp(argv[1], "-h"))
usage(builtin_check_ref_format_usage);
if (argc == 3 && !strcmp(argv[1], "--branch"))
return check_ref_format_branch(argv[2]);
- if (argc == 3 && !strcmp(argv[1], "--print"))
- return check_ref_format_print(argv[2]);
- if (argc != 2)
+
+ for (i = 1; i < argc && argv[i][0] == '-'; i++) {
+ if (!strcmp(argv[i], "--normalize") || !strcmp(argv[i], "--print"))
+ normalize = 1;
+ else if (!strcmp(argv[i], "--allow-onelevel"))
+ flags |= REFNAME_ALLOW_ONELEVEL;
+ else if (!strcmp(argv[i], "--no-allow-onelevel"))
+ flags &= ~REFNAME_ALLOW_ONELEVEL;
+ else if (!strcmp(argv[i], "--refspec-pattern"))
+ flags |= REFNAME_REFSPEC_PATTERN;
+ else
+ usage(builtin_check_ref_format_usage);
+ }
+ if (! (i == argc - 1))
usage(builtin_check_ref_format_usage);
- return !!check_ref_format(argv[1]);
+
+ refname = argv[i];
+ if (normalize)
+ refname = collapse_slashes(refname);
+ if (check_refname_format(refname, flags))
+ return 1;
+ if (normalize)
+ printf("%s\n", refname);
+
+ return 0;
}
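/*
 * A rough usage sketch of the reworked command line (not exhaustive):
 *
 *   git check-ref-format --normalize 'refs//heads/topic'
 *       prints "refs/heads/topic" and exits 0 when the collapsed name
 *       is well formed ("--print" is kept as a synonym);
 *   git check-ref-format --allow-onelevel FETCH_HEAD
 *       accepts a one-level name that the default check would reject;
 *   --refspec-pattern further relaxes the check for glob refspecs.
 */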
#include "ll-merge.h"
#include "resolve-undo.h"
#include "submodule.h"
+#include "argv-array.h"
static const char * const checkout_usage[] = {
"git checkout [options] <branch>",
hashcpy(ce->sha1, sha1);
memcpy(ce->name, base, baselen);
memcpy(ce->name + baselen, pathname, len - baselen);
- ce->ce_flags = create_ce_flags(len, 0);
+ ce->ce_flags = create_ce_flags(len, 0) | CE_UPDATE;
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD | ADD_CACHE_OK_TO_REPLACE);
return 0;
for (pos = 0; pos < active_nr; pos++) {
struct cache_entry *ce = active_cache[pos];
+ if (source_tree && !(ce->ce_flags & CE_UPDATE))
+ continue;
match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, ps_matched);
}
state.refresh_cache = 1;
for (pos = 0; pos < active_nr; pos++) {
struct cache_entry *ce = active_cache[pos];
+ if (source_tree && !(ce->ce_flags & CE_UPDATE))
+ continue;
if (match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, NULL)) {
if (!ce_stage(ce)) {
errs |= checkout_entry(ce, &state, NULL);
report_tracking(new);
}
-struct rev_list_args {
- int argc;
- int alloc;
- const char **argv;
-};
-
-static void add_one_rev_list_arg(struct rev_list_args *args, const char *s)
-{
- ALLOC_GROW(args->argv, args->argc + 1, args->alloc);
- args->argv[args->argc++] = s;
-}
-
-static int add_one_ref_to_rev_list_arg(const char *refname,
- const unsigned char *sha1,
- int flags,
- void *cb_data)
-{
- add_one_rev_list_arg(cb_data, refname);
- return 0;
-}
-
-static int clear_commit_marks_from_one_ref(const char *refname,
- const unsigned char *sha1,
- int flags,
- void *cb_data)
+static int add_pending_uninteresting_ref(const char *refname,
+ const unsigned char *sha1,
+ int flags, void *cb_data)
{
- struct commit *commit = lookup_commit_reference_gently(sha1, 1);
- if (commit)
- clear_commit_marks(commit, -1);
+ add_pending_sha1(cb_data, refname, sha1, flags | UNINTERESTING);
return 0;
}
*/
static void orphaned_commit_warning(struct commit *commit)
{
- struct rev_list_args args = { 0, 0, NULL };
struct rev_info revs;
-
- add_one_rev_list_arg(&args, "(internal)");
- add_one_rev_list_arg(&args, sha1_to_hex(commit->object.sha1));
- add_one_rev_list_arg(&args, "--not");
- for_each_ref(add_one_ref_to_rev_list_arg, &args);
- add_one_rev_list_arg(&args, "--");
- add_one_rev_list_arg(&args, NULL);
+ struct object *object = &commit->object;
+ struct object_array refs;
init_revisions(&revs, NULL);
- if (setup_revisions(args.argc - 1, args.argv, &revs, NULL) != 1)
- die(_("internal error: only -- alone should have been left"));
+ setup_revisions(0, NULL, &revs, NULL);
+
+ object->flags &= ~UNINTERESTING;
+ add_pending_object(&revs, object, sha1_to_hex(object->sha1));
+
+ for_each_ref(add_pending_uninteresting_ref, &revs);
+
+ refs = revs.pending;
+ revs.leak_pending = 1;
+
if (prepare_revision_walk(&revs))
die(_("internal error in revision walk"));
if (!(commit->object.flags & UNINTERESTING))
else
describe_detached_head(_("Previous HEAD position was"), commit);
- clear_commit_marks(commit, -1);
- for_each_ref(clear_commit_marks_from_one_ref, NULL);
+ clear_commit_marks_for_object_array(&refs, ALL_REV_FLAGS);
+ free(refs.objects);
}
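/*
 * The revs.pending/leak_pending pairing above is the interesting bit:
 * leak_pending asks the revision machinery not to free the pending
 * object array, so the copy saved in "refs" can be reused afterwards to
 * clear the commit marks and is then freed by hand.  This replaces the
 * old approach of building a textual rev-list argument vector and
 * re-parsing it with setup_revisions().
 */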
static int switch_branches(struct checkout_opts *opts, struct branch_info *new)
new->name = arg;
setup_branch_path(new);
- if (check_ref_format(new->path) == CHECK_REF_FORMAT_OK &&
+ if (!check_refname_format(new->path, 0) &&
resolve_ref(new->path, branch_rev, 1, NULL))
hashcpy(rev, branch_rev);
else
m = xcalloc(1, i);
if (with_tree) {
- const char *max_prefix = pathspec_prefix(prefix, pattern);
- overlay_tree_on_cache(with_tree, max_prefix);
+ char *max_prefix = common_prefix(pattern);
+ overlay_tree_on_cache(with_tree, max_prefix ? max_prefix : prefix);
+ free(max_prefix);
}
for (i = 0; i < active_nr; i++) {
if (get_sha1("HEAD", sha1))
current_head = NULL;
else {
- current_head = lookup_commit(sha1);
+ current_head = lookup_commit_or_die(sha1, "HEAD");
if (!current_head || parse_commit(current_head))
die(_("could not parse HEAD commit"));
}
pptr = &commit_list_insert(c->item, pptr)->next;
} else if (whence == FROM_MERGE) {
struct strbuf m = STRBUF_INIT;
+ struct commit *commit;
FILE *fp;
if (!reflog_msg)
unsigned char sha1[20];
if (get_sha1_hex(m.buf, sha1) < 0)
die(_("Corrupt MERGE_HEAD file (%s)"), m.buf);
- pptr = &commit_list_insert(lookup_commit(sha1), pptr)->next;
+ commit = lookup_commit_or_die(sha1, "MERGE_HEAD");
+ pptr = &commit_list_insert(commit, pptr)->next;
}
fclose(fp);
strbuf_release(&m);
const char *vptr = value;
int must_free_vptr = 0;
int dup_error = 0;
+ int must_print_delim = 0;
if (!use_key_regexp && strcmp(key_, key))
return 0;
return 0;
if (show_keys) {
- if (value_)
- printf("%s%c", key_, key_delim);
- else
- printf("%s", key_);
+ printf("%s", key_);
+ must_print_delim = 1;
}
if (seen && !do_all)
dup_error = 1;
} else if (types == TYPE_PATH) {
git_config_pathname(&vptr, key_, value_);
must_free_vptr = 1;
+ } else if (value_) {
+ vptr = value_;
+ } else {
+ /* Just show the key name */
+ vptr = "";
+ must_print_delim = 0;
}
- else
- vptr = value_?value_:"";
seen++;
if (dup_error) {
error("More than one value for the key %s: %s",
key_, vptr);
}
- else
+ else {
+ if (must_print_delim)
+ printf("%c", key_delim);
printf("%s%c", vptr, term);
+ }
if (must_free_vptr)
/* If vptr must be freed, it's a pointer to a
* dynamically allocated buffer, it's safe to cast to
static int fetch_unpack_limit = -1;
static int unpack_limit = 100;
static int prefer_ofs_delta = 1;
-static int no_done = 0;
+static int no_done;
+static int fetch_fsck_objects = -1;
+static int transfer_fsck_objects = -1;
static struct fetch_pack_args args = {
/* .uploadpack = */ "git-upload-pack",
};
for (ref = *refs; ref; ref = next) {
next = ref->next;
if (!memcmp(ref->name, "refs/", 5) &&
- check_ref_format(ref->name + 5))
+ check_refname_format(ref->name + 5, 0))
; /* trash */
else if (args.fetch_all &&
(!args.depth || prefixcmp(ref->name, "refs/tags/") )) {
}
if (*hdr_arg)
*av++ = hdr_arg;
+ if (fetch_fsck_objects >= 0
+ ? fetch_fsck_objects
+ : transfer_fsck_objects >= 0
+ ? transfer_fsck_objects
+ : 0)
+ *av++ = "--strict";
*av++ = NULL;
cmd.in = demux.out;
return 0;
}
+ if (!strcmp(var, "fetch.fsckobjects")) {
+ fetch_fsck_objects = git_config_bool(var, value);
+ return 0;
+ }
+
+ if (!strcmp(var, "transfer.fsckobjects")) {
+ transfer_fsck_objects = git_config_bool(var, value);
+ return 0;
+ }
+
return git_default_config(var, value, cb);
}
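/*
 * Configuration precedence, as implemented by the chained ternary
 * above: an explicit fetch.fsckobjects setting wins, otherwise
 * transfer.fsckobjects is consulted, and when neither is set the check
 * defaults to off and no "--strict" is passed to the
 * unpack-objects/index-pack child.
 */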
#include "sigchain.h"
#include "transport.h"
#include "submodule.h"
+#include "connected.h"
static const char * const builtin_fetch_usage[] = {
"git fetch [<options>] [<repository> [<refspec>...]]",
}
}
+static int iterate_ref_map(void *cb_data, unsigned char sha1[20])
+{
+ struct ref **rm = cb_data;
+ struct ref *ref = *rm;
+
+ if (!ref)
+ return -1; /* end of the list */
+ *rm = ref->next;
+ hashcpy(sha1, ref->old_sha1);
+ return 0;
+}
+
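/*
 * iterate_ref_map() follows the callback contract expected by
 * check_everything_connected(): each call stores the SHA-1 of the next
 * ref to be verified and returns 0, and a return of -1 signals the end
 * of the list.  The same iterator is reused by quickfetch() below.
 */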
static int store_updated_refs(const char *raw_url, const char *remote_name,
struct ref *ref_map)
{
url = transport_anonymize_url(raw_url);
else
url = xstrdup("foreign");
+
+ rm = ref_map;
+ if (check_everything_connected(iterate_ref_map, 0, &rm)) {
+ rc = error(_("%s did not send all necessary objects\n"), url);
+ goto abort;
+ }
+
for (rm = ref_map; rm; rm = rm->next) {
struct ref *ref = NULL;
fprintf(stderr, " %s\n", note);
}
}
- free(url);
- fclose(fp);
+
if (rc & STORE_REF_ERROR_DF_CONFLICT)
error(_("some local refs could not be updated; try running\n"
" 'git remote prune %s' to remove any old, conflicting "
"branches"), remote_name);
+
+ abort:
+ free(url);
+ fclose(fp);
return rc;
}
* We would want to bypass the object transfer altogether if
* everything we are going to fetch already exists and is connected
* locally.
- *
- * The refs we are going to fetch are in ref_map. If running
- *
- * $ git rev-list --objects --stdin --not --all
- *
- * (feeding all the refs in ref_map on its standard input)
- * does not error out, that means everything reachable from the
- * refs we are going to fetch exists and is connected to some of
- * our existing refs.
*/
static int quickfetch(struct ref *ref_map)
{
- struct child_process revlist;
- struct ref *ref;
- int err;
- const char *argv[] = {"rev-list",
- "--quiet", "--objects", "--stdin", "--not", "--all", NULL};
+ struct ref *rm = ref_map;
/*
* If we are deepening a shallow clone we already have these
*/
if (depth)
return -1;
-
- if (!ref_map)
- return 0;
-
- memset(&revlist, 0, sizeof(revlist));
- revlist.argv = argv;
- revlist.git_cmd = 1;
- revlist.no_stdout = 1;
- revlist.no_stderr = 1;
- revlist.in = -1;
-
- err = start_command(&revlist);
- if (err) {
- error(_("could not run rev-list"));
- return err;
- }
-
- /*
- * If rev-list --stdin encounters an unknown commit, it terminates,
- * which will cause SIGPIPE in the write loop below.
- */
- sigchain_push(SIGPIPE, SIG_IGN);
-
- for (ref = ref_map; ref; ref = ref->next) {
- if (write_in_full(revlist.in, sha1_to_hex(ref->old_sha1), 40) < 0 ||
- write_str_in_full(revlist.in, "\n") < 0) {
- if (errno != EPIPE && errno != EINVAL)
- error(_("failed write to rev-list: %s"), strerror(errno));
- err = -1;
- break;
- }
- }
-
- if (close(revlist.in)) {
- error(_("failed to close rev-list's stdin: %s"), strerror(errno));
- err = -1;
- }
-
- sigchain_pop(SIGPIPE);
-
- return finish_command(&revlist) || err;
+ return check_everything_connected(iterate_ref_map, 1, &rm);
}
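/*
 * quickfetch() now reuses the same connectivity check instead of
 * spawning "rev-list --objects --stdin --not --all".  Unlike the call
 * in store_updated_refs(), a failure here is not reported to the user
 * (the differing second argument presumably selects a quieter mode);
 * it merely means some objects are missing locally and the normal
 * fetch path has to take over.
 */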
static int fetch_refs(struct transport *transport, struct ref *ref_map)
struct commit *head;
struct rev_info rev;
- head = lookup_commit(head_sha1);
+ head = lookup_commit_or_die(head_sha1, "HEAD");
init_revisions(&rev, NULL);
rev.commit_format = CMIT_FMT_ONELINE;
rev.ignore_merges = 1;
{ "subject" },
{ "body" },
{ "contents" },
+ { "contents:subject" },
+ { "contents:body" },
+ { "contents:signature" },
{ "upstream" },
{ "symref" },
{ "flag" },
return xmemdupz(email, eoemail + 1 - email);
}
+static char *copy_subject(const char *buf, unsigned long len)
+{
+ char *r = xmemdupz(buf, len);
+ int i;
+
+ for (i = 0; i < len; i++)
+ if (r[i] == '\n')
+ r[i] = ' ';
+
+ return r;
+}
+
static void grab_date(const char *buf, struct atom_value *v, const char *atomname)
{
const char *eoemail = strstr(buf, "> ");
}
}
-static void find_subpos(const char *buf, unsigned long sz, const char **sub, const char **body)
+static void find_subpos(const char *buf, unsigned long sz,
+ const char **sub, unsigned long *sublen,
+ const char **body, unsigned long *bodylen,
+ unsigned long *nonsiglen,
+ const char **sig, unsigned long *siglen)
{
- while (*buf) {
- const char *eol = strchr(buf, '\n');
- if (!eol)
- return;
- if (eol[1] == '\n') {
- buf = eol + 1;
- break; /* found end of header */
- }
- buf = eol + 1;
+ const char *eol;
+	/* skip past the header until we hit an empty line */
+ while (*buf && *buf != '\n') {
+ eol = strchrnul(buf, '\n');
+ if (*eol)
+ eol++;
+ buf = eol;
}
+ /* skip any empty lines */
while (*buf == '\n')
buf++;
- if (!*buf)
- return;
- *sub = buf; /* first non-empty line */
- buf = strchr(buf, '\n');
- if (!buf) {
- *body = "";
- return; /* no body */
+
+ /* parse signature first; we might not even have a subject line */
+ *sig = buf + parse_signature(buf, strlen(buf));
+ *siglen = strlen(*sig);
+
+ /* subject is first non-empty line */
+ *sub = buf;
+ /* subject goes to first empty line */
+ while (buf < *sig && *buf && *buf != '\n') {
+ eol = strchrnul(buf, '\n');
+ if (*eol)
+ eol++;
+ buf = eol;
}
+ *sublen = buf - *sub;
+ /* drop trailing newline, if present */
+ if (*sublen && (*sub)[*sublen - 1] == '\n')
+ *sublen -= 1;
+
+ /* skip any empty lines */
while (*buf == '\n')
- buf++; /* skip blank between subject and body */
+ buf++;
*body = buf;
+ *bodylen = strlen(buf);
+ *nonsiglen = *sig - buf;
}
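/*
 * As a rough example, a signed tag whose payload looks like
 *
 *   subject line
 *
 *   body text...
 *   -----BEGIN PGP SIGNATURE-----
 *   ...
 *   -----END PGP SIGNATURE-----
 *
 * is carved up so that %(contents:subject) gets the first paragraph
 * (flattened to one line), %(contents:body) the body text without the
 * signature, %(contents:signature) the PGP block, and plain
 * %(contents) still carries everything from the subject onwards.
 */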
/* See grab_values */
static void grab_sub_body_contents(struct atom_value *val, int deref, struct object *obj, void *buf, unsigned long sz)
{
int i;
- const char *subpos = NULL, *bodypos = NULL;
+ const char *subpos = NULL, *bodypos = NULL, *sigpos = NULL;
+ unsigned long sublen = 0, bodylen = 0, nonsiglen = 0, siglen = 0;
for (i = 0; i < used_atom_cnt; i++) {
const char *name = used_atom[i];
name++;
if (strcmp(name, "subject") &&
strcmp(name, "body") &&
- strcmp(name, "contents"))
+ strcmp(name, "contents") &&
+ strcmp(name, "contents:subject") &&
+ strcmp(name, "contents:body") &&
+ strcmp(name, "contents:signature"))
continue;
if (!subpos)
- find_subpos(buf, sz, &subpos, &bodypos);
- if (!subpos)
- return;
+ find_subpos(buf, sz,
+ &subpos, &sublen,
+ &bodypos, &bodylen, &nonsiglen,
+ &sigpos, &siglen);
if (!strcmp(name, "subject"))
- v->s = copy_line(subpos);
+ v->s = copy_subject(subpos, sublen);
+ else if (!strcmp(name, "contents:subject"))
+ v->s = copy_subject(subpos, sublen);
else if (!strcmp(name, "body"))
- v->s = xstrdup(bodypos);
+ v->s = xmemdupz(bodypos, bodylen);
+ else if (!strcmp(name, "contents:body"))
+ v->s = xmemdupz(bodypos, nonsiglen);
+ else if (!strcmp(name, "contents:signature"))
+ v->s = xmemdupz(sigpos, siglen);
else if (!strcmp(name, "contents"))
v->s = xstrdup(subpos);
}
unsigned long size;
char *buf = read_sha1_file(obj->sha1,
&type, &size);
- if (buf) {
- if (fwrite(buf, size, 1, f) != 1)
- die_errno("Could not write '%s'",
- filename);
- free(buf);
- }
+ if (buf && fwrite(buf, 1, size, f) != size)
+ die_errno("Could not write '%s'", filename);
+ free(buf);
} else
fprintf(f, "%s\n", sha1_to_hex(obj->sha1));
if (fclose(f))
struct strbuf base;
int hit, len;
+ read_sha1_lock();
data = read_object_with_reference(obj->sha1, tree_type,
&size, NULL);
+ read_sha1_unlock();
+
if (!data)
die(_("unable to read tree (%s)"), sha1_to_hex(obj->sha1));
return hit;
}
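/*
 * The read_sha1_lock()/read_sha1_unlock() pair serializes access to the
 * object database while grep worker threads are running; reading the
 * tree object without it was the unsafe access being fixed here,
 * presumably mirroring the locking already done around other object
 * reads in threaded grep.
 */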
-static int grep_directory(struct grep_opt *opt, const struct pathspec *pathspec)
+static int grep_directory(struct grep_opt *opt, const struct pathspec *pathspec,
+ int exc_std)
{
struct dir_struct dir;
int i, hit = 0;
memset(&dir, 0, sizeof(dir));
- setup_standard_excludes(&dir);
+ if (exc_std)
+ setup_standard_excludes(&dir);
fill_directory(&dir, pathspec->raw);
for (i = 0; i < dir.nr; i++) {
int cmd_grep(int argc, const char **argv, const char *prefix)
{
int hit = 0;
- int cached = 0;
+ int cached = 0, untracked = 0, opt_exclude = -1;
int seen_dashdash = 0;
int external_grep_allowed__ignored;
const char *show_in_pager = NULL, *default_pager = "dummy";
struct option options[] = {
OPT_BOOLEAN(0, "cached", &cached,
"search in index instead of in the work tree"),
- OPT_BOOLEAN(0, "index", &use_index,
- "--no-index finds in contents not managed by git"),
+ { OPTION_BOOLEAN, 0, "index", &use_index, NULL,
+ "finds in contents not managed by git",
+ PARSE_OPT_NOARG | PARSE_OPT_NEGHELP },
+ OPT_BOOLEAN(0, "untracked", &untracked,
+ "search in both tracked and untracked files"),
+ OPT_SET_INT(0, "exclude-standard", &opt_exclude,
+ "search also in ignored files", 1),
OPT_GROUP(""),
OPT_BOOLEAN('v', "invert-match", &opt.invert,
"show non-matching lines"),
if (!show_in_pager)
setup_pager();
+ if (!use_index && (untracked || cached))
+ die(_("--cached or --untracked cannot be used with --no-index."));
- if (!use_index) {
- if (cached)
- die(_("--cached cannot be used with --no-index."));
+ if (!use_index || untracked) {
+ int use_exclude = (opt_exclude < 0) ? use_index : !!opt_exclude;
if (list.nr)
- die(_("--no-index cannot be used with revs."));
- hit = grep_directory(&opt, &pathspec);
+ die(_("--no-index or --untracked cannot be used with revs."));
+ hit = grep_directory(&opt, &pathspec, use_exclude);
+ } else if (0 <= opt_exclude) {
+ die(_("--[no-]exclude-standard cannot be used for tracked contents."));
} else if (!list.nr) {
if (!cached)
setup_work_tree();
strip_trailing_slash_from_submodules();
/* Find common prefix for all pathspec's */
- max_prefix = pathspec_prefix(prefix, pathspec);
+ max_prefix = common_prefix(pathspec);
max_prefix_len = max_prefix ? strlen(max_prefix) : 0;
/* Treat unmatching pathspec elements as errors */
struct transport *transport;
const struct ref *ref;
+ if (argc == 2 && !strcmp("-h", argv[1]))
+ usage(ls_remote_usage);
+
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
static int allow_trivial = 1, have_message;
static struct strbuf merge_msg;
static struct commit_list *remoteheads;
-static unsigned char head[20], stash[20];
static struct strategy **use_strategies;
static size_t use_strategies_nr, use_strategies_alloc;
static const char **xopts;
unlink(git_path("MERGE_MODE"));
}
-static void save_state(void)
+static int save_state(unsigned char *stash)
{
int len;
struct child_process cp;
if (finish_command(&cp) || len < 0)
die(_("stash failed"));
- else if (!len)
- return;
+ else if (!len) /* no changes */
+ return -1;
strbuf_setlen(&buffer, buffer.len-1);
if (get_sha1(buffer.buf, stash))
die(_("not a valid object: %s"), buffer.buf);
+ return 0;
}
static void read_empty(unsigned const char *sha1, int verbose)
die(_("read-tree failed"));
}
-static void restore_state(void)
+static void restore_state(const unsigned char *head,
+ const unsigned char *stash)
{
struct strbuf sb = STRBUF_INIT;
const char *args[] = { "stash", "apply", NULL, NULL };
drop_save();
}
-static void squash_message(void)
+static void squash_message(struct commit *commit)
{
struct rev_info rev;
- struct commit *commit;
struct strbuf out = STRBUF_INIT;
struct commit_list *j;
int fd;
rev.ignore_merges = 1;
rev.commit_format = CMIT_FMT_MEDIUM;
- commit = lookup_commit(head);
commit->object.flags |= UNINTERESTING;
add_pending_object(&rev, &commit->object, NULL);
strbuf_release(&out);
}
-static void finish(const unsigned char *new_head, const char *msg)
+static void finish(struct commit *head_commit,
+ const unsigned char *new_head, const char *msg)
{
struct strbuf reflog_message = STRBUF_INIT;
+ const unsigned char *head = head_commit->object.sha1;
if (!msg)
strbuf_addstr(&reflog_message, getenv("GIT_REFLOG_ACTION"));
getenv("GIT_REFLOG_ACTION"), msg);
}
if (squash) {
- squash_message();
+ squash_message(head_commit);
} else {
if (verbosity >= 0 && !merge_msg.len)
printf(_("No merge message -- not updating HEAD\n"));
strbuf_release(&reflog_message);
}
+static struct object *want_commit(const char *name)
+{
+ struct object *obj;
+ unsigned char sha1[20];
+ if (get_sha1(name, sha1))
+ return NULL;
+ obj = parse_object(sha1);
+ return peel_to_type(name, 0, obj, OBJ_COMMIT);
+}
+
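/*
 * want_commit() is a small convenience wrapper: resolve the name, parse
 * the object, and peel it (e.g. an annotated tag) down to a commit,
 * returning NULL on any failure so that callers can produce their own
 * "not something we can merge" style diagnostics.
 */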
/* Get the name for the merge commit's message. */
static void merge_name(const char *remote, struct strbuf *msg)
{
remote = bname.buf;
memset(branch_head, 0, sizeof(branch_head));
- remote_head = peel_to_type(remote, 0, NULL, OBJ_COMMIT);
+ remote_head = want_commit(remote);
if (!remote_head)
die(_("'%s' does not point to a commit"), remote);
}
static int try_merge_strategy(const char *strategy, struct commit_list *common,
- const char *head_arg)
+ struct commit *head, const char *head_arg)
{
int index_fd;
struct lock_file *lock = xcalloc(1, sizeof(struct lock_file));
commit_list_insert(j->item, &reversed);
index_fd = hold_locked_index(lock, 1);
- clean = merge_recursive(&o, lookup_commit(head),
+ clean = merge_recursive(&o, head,
remoteheads->item, reversed, &result);
if (active_cache_changed &&
(write_cache(index_fd, active_cache, active_nr) ||
read_merge_msg();
}
-static int merge_trivial(void)
+static int merge_trivial(struct commit *head)
{
unsigned char result_tree[20], result_commit[20];
struct commit_list *parent = xmalloc(sizeof(*parent));
write_tree_trivial(result_tree);
printf(_("Wonderful.\n"));
- parent->item = lookup_commit(head);
+ parent->item = head;
parent->next = xmalloc(sizeof(*parent->next));
parent->next->item = remoteheads->item;
parent->next->next = NULL;
run_prepare_commit_msg();
commit_tree(merge_msg.buf, result_tree, parent, result_commit, NULL);
- finish(result_commit, "In-index merge");
+ finish(head, result_commit, "In-index merge");
drop_save();
return 0;
}
-static int finish_automerge(struct commit_list *common,
+static int finish_automerge(struct commit *head,
+ struct commit_list *common,
unsigned char *result_tree,
const char *wt_strategy)
{
free_commit_list(common);
if (allow_fast_forward) {
parents = remoteheads;
- commit_list_insert(lookup_commit(head), &parents);
+ commit_list_insert(head, &parents);
parents = reduce_heads(parents);
} else {
struct commit_list **pptr = &parents;
- pptr = &commit_list_insert(lookup_commit(head),
+ pptr = &commit_list_insert(head,
pptr)->next;
for (j = remoteheads; j; j = j->next)
pptr = &commit_list_insert(j->item, pptr)->next;
run_prepare_commit_msg();
commit_tree(merge_msg.buf, result_tree, parents, result_commit, NULL);
strbuf_addf(&buf, "Merge made by the '%s' strategy.", wt_strategy);
- finish(result_commit, buf.buf);
+ finish(head, result_commit, buf.buf);
strbuf_release(&buf);
drop_save();
return 0;
return 1;
}
-static struct commit *is_old_style_invocation(int argc, const char **argv)
+static struct commit *is_old_style_invocation(int argc, const char **argv,
+ const unsigned char *head)
{
struct commit *second_token = NULL;
if (argc > 2) {
int cmd_merge(int argc, const char **argv, const char *prefix)
{
unsigned char result_tree[20];
+ unsigned char stash[20];
+ unsigned char head_sha1[20];
+ struct commit *head_commit;
struct strbuf buf = STRBUF_INIT;
const char *head_arg;
- int flag, head_invalid = 0, i;
+ int flag, i;
int best_cnt = -1, merge_was_ok = 0, automerge_was_ok = 0;
struct commit_list *common = NULL;
const char *best_strategy = NULL, *wt_strategy = NULL;
* Check if we are _not_ on a detached HEAD, i.e. if there is a
* current branch.
*/
- branch = resolve_ref("HEAD", head, 0, &flag);
+ branch = resolve_ref("HEAD", head_sha1, 0, &flag);
if (branch && !prefixcmp(branch, "refs/heads/"))
branch += 11;
- if (is_null_sha1(head))
- head_invalid = 1;
+ if (!branch || is_null_sha1(head_sha1))
+ head_commit = NULL;
+ else
+ head_commit = lookup_commit_or_die(head_sha1, "HEAD");
git_config(git_merge_config, NULL);
* additional safety measure to check for it.
*/
- if (!have_message && is_old_style_invocation(argc, argv)) {
+ if (!have_message && head_commit &&
+ is_old_style_invocation(argc, argv, head_commit->object.sha1)) {
strbuf_addstr(&merge_msg, argv[0]);
head_arg = argv[1];
argv += 2;
argc -= 2;
- } else if (head_invalid) {
+ } else if (!head_commit) {
struct object *remote_head;
/*
* If the merged head is a valid one there is no reason
if (!allow_fast_forward)
die(_("Non-fast-forward commit does not make sense into "
"an empty head"));
- remote_head = peel_to_type(argv[0], 0, NULL, OBJ_COMMIT);
+ remote_head = want_commit(argv[0]);
if (!remote_head)
die(_("%s - not something we can merge"), argv[0]);
read_empty(remote_head->sha1, 0);
}
}
- if (head_invalid || !argc)
+ if (!head_commit || !argc)
usage_with_options(builtin_merge_usage,
builtin_merge_options);
struct object *o;
struct commit *commit;
- o = peel_to_type(argv[i], 0, NULL, OBJ_COMMIT);
+ o = want_commit(argv[i]);
if (!o)
die(_("%s - not something we can merge"), argv[i]);
commit = lookup_commit(o->sha1);
}
if (!remoteheads->next)
- common = get_merge_bases(lookup_commit(head),
- remoteheads->item, 1);
+ common = get_merge_bases(head_commit, remoteheads->item, 1);
else {
struct commit_list *list = remoteheads;
- commit_list_insert(lookup_commit(head), &list);
+ commit_list_insert(head_commit, &list);
common = get_octopus_merge_bases(list);
free(list);
}
- update_ref("updating ORIG_HEAD", "ORIG_HEAD", head, NULL, 0,
- DIE_ON_ERR);
+ update_ref("updating ORIG_HEAD", "ORIG_HEAD", head_commit->object.sha1,
+ NULL, 0, DIE_ON_ERR);
if (!common)
; /* No common ancestors found. We need a real merge. */
return 0;
} else if (allow_fast_forward && !remoteheads->next &&
!common->next &&
- !hashcmp(common->item->object.sha1, head)) {
+ !hashcmp(common->item->object.sha1, head_commit->object.sha1)) {
/* Again the most common case of merging one remote. */
struct strbuf msg = STRBUF_INIT;
struct object *o;
char hex[41];
- strcpy(hex, find_unique_abbrev(head, DEFAULT_ABBREV));
+ strcpy(hex, find_unique_abbrev(head_commit->object.sha1, DEFAULT_ABBREV));
if (verbosity >= 0)
printf(_("Updating %s..%s\n"),
if (have_message)
strbuf_addstr(&msg,
" (no commit created; -m option ignored)");
- o = peel_to_type(sha1_to_hex(remoteheads->item->object.sha1),
- 0, NULL, OBJ_COMMIT);
+ o = want_commit(sha1_to_hex(remoteheads->item->object.sha1));
if (!o)
return 1;
- if (checkout_fast_forward(head, remoteheads->item->object.sha1))
+ if (checkout_fast_forward(head_commit->object.sha1, remoteheads->item->object.sha1))
return 1;
- finish(o->sha1, msg.buf);
+ finish(head_commit, o->sha1, msg.buf);
drop_save();
return 0;
} else if (!remoteheads->next && common->next)
git_committer_info(IDENT_ERROR_ON_NO_NAME);
printf(_("Trying really trivial in-index merge...\n"));
if (!read_tree_trivial(common->item->object.sha1,
- head, remoteheads->item->object.sha1))
- return merge_trivial();
+ head_commit->object.sha1, remoteheads->item->object.sha1))
+ return merge_trivial(head_commit);
printf(_("Nope.\n"));
}
} else {
* merge_bases again, otherwise "git merge HEAD^
* HEAD^^" would be missed.
*/
- common_one = get_merge_bases(lookup_commit(head),
- j->item, 1);
+ common_one = get_merge_bases(head_commit, j->item, 1);
if (hashcmp(common_one->item->object.sha1,
j->item->object.sha1)) {
up_to_date = 0;
* sync with the head commit. The strategies are responsible
* to ensure this.
*/
- if (use_strategies_nr != 1) {
- /*
- * Stash away the local changes so that we can try more
- * than one.
- */
- save_state();
- } else {
- memcpy(stash, null_sha1, 20);
- }
+ if (use_strategies_nr == 1 ||
+ /*
+ * Stash away the local changes so that we can try more than one.
+ */
+ save_state(stash))
+ hashcpy(stash, null_sha1);
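/*
 * In other words: with a single strategy, or when save_state() reports
 * that there was nothing worth stashing, "stash" is left as the null
 * SHA-1, which restore_state() later takes to mean there is nothing to
 * restore.
 */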
for (i = 0; i < use_strategies_nr; i++) {
int ret;
if (i) {
printf(_("Rewinding the tree to pristine...\n"));
- restore_state();
+ restore_state(head_commit->object.sha1, stash);
}
if (use_strategies_nr != 1)
printf(_("Trying merge strategy %s...\n"),
wt_strategy = use_strategies[i]->name;
ret = try_merge_strategy(use_strategies[i]->name,
- common, head_arg);
+ common, head_commit, head_arg);
if (!option_commit && !ret) {
merge_was_ok = 1;
/*
* auto resolved the merge cleanly.
*/
if (automerge_was_ok)
- return finish_automerge(common, result_tree, wt_strategy);
+ return finish_automerge(head_commit, common, result_tree,
+ wt_strategy);
/*
* Pick the result from the best strategy and have the user fix
* it up.
*/
if (!best_strategy) {
- restore_state();
+ restore_state(head_commit->object.sha1, stash);
if (use_strategies_nr > 1)
fprintf(stderr,
_("No merge strategy handled the merge.\n"));
; /* We already have its result in the working tree. */
else {
printf(_("Rewinding the tree to pristine...\n"));
- restore_state();
+ restore_state(head_commit->object.sha1, stash);
printf(_("Using the %s to prepare resolving by hand.\n"),
best_strategy);
- try_merge_strategy(best_strategy, common, head_arg);
+ try_merge_strategy(best_strategy, common, head_commit, head_arg);
}
if (squash)
- finish(NULL, NULL);
+ finish(head_commit, NULL, NULL);
else {
int fd;
struct commit_list *j;
to_copy--;
if (to_copy != length || base_name) {
char *it = xmemdupz(result[i], to_copy);
- result[i] = base_name ? strdup(basename(it)) : it;
+ if (base_name) {
+ result[i] = xstrdup(basename(it));
+ free(it);
+ } else
+ result[i] = it;
}
}
return get_pathspec(prefix, result);
}
static char const * const name_rev_usage[] = {
- "git name-rev [options] ( --all | --stdin | <commit>... )",
+ "git name-rev [options] <commit>...",
+ "git name-rev [options] --all",
+ "git name-rev [options] --stdin",
NULL
};
commit->object.flags |= OBJECT_ADDED;
}
-static void show_object(struct object *obj, const struct name_path *path, const char *last)
+static void show_object(struct object *obj,
+ const struct name_path *path, const char *last,
+ void *data)
{
char *name = path_name(path, last);
return 1;
}
-static int get_one_patchid(unsigned char *next_sha1, git_SHA_CTX *ctx)
+static int get_one_patchid(unsigned char *next_sha1, git_SHA_CTX *ctx, struct strbuf *line_buf)
{
- static char line[1000];
int patchlen = 0, found_next = 0;
int before = -1, after = -1;
- while (fgets(line, sizeof(line), stdin) != NULL) {
+ while (strbuf_getwholeline(line_buf, stdin, '\n') != EOF) {
+ char *line = line_buf->buf;
char *p = line;
int len;
unsigned char sha1[20], n[20];
git_SHA_CTX ctx;
int patchlen;
+ struct strbuf line_buf = STRBUF_INIT;
git_SHA1_Init(&ctx);
hashclr(sha1);
while (!feof(stdin)) {
- patchlen = get_one_patchid(n, &ctx);
+ patchlen = get_one_patchid(n, &ctx, &line_buf);
flush_current_id(patchlen, sha1, &ctx);
hashcpy(sha1, n);
}
+ strbuf_release(&line_buf);
}
static const char patch_id_usage[] = "git patch-id < patch";
#include "transport.h"
#include "string-list.h"
#include "sha1-array.h"
+#include "connected.h"
static const char receive_pack_usage[] = "git receive-pack <git-dir>";
static int deny_non_fast_forwards;
static enum deny_action deny_current_branch = DENY_UNCONFIGURED;
static enum deny_action deny_delete_current = DENY_UNCONFIGURED;
-static int receive_fsck_objects;
+static int receive_fsck_objects = -1;
+static int transfer_fsck_objects = -1;
static int receive_unpack_limit = -1;
static int transfer_unpack_limit = -1;
static int unpack_limit = 100;
return 0;
}
+ if (strcmp(var, "transfer.fsckobjects") == 0) {
+ transfer_fsck_objects = git_config_bool(var, value);
+ return 0;
+ }
+
if (!strcmp(var, "receive.denycurrentbranch")) {
deny_current_branch = parse_deny_action(var, value);
return 0;
struct command {
struct command *next;
const char *error_string;
- unsigned int skip_update;
+ unsigned int skip_update:1,
+ did_not_exist:1;
unsigned char old_sha1[20];
unsigned char new_sha1[20];
char ref_name[FLEX_ARRAY]; /* more */
return 0;
}
-static int run_receive_hook(struct command *commands, const char *hook_name)
+typedef int (*feed_fn)(void *, const char **, size_t *);
+static int run_and_feed_hook(const char *hook_name, feed_fn feed, void *feed_state)
{
- static char buf[sizeof(commands->old_sha1) * 2 + PATH_MAX + 4];
- struct command *cmd;
struct child_process proc;
struct async muxer;
const char *argv[2];
- int have_input = 0, code;
-
- for (cmd = commands; !have_input && cmd; cmd = cmd->next) {
- if (!cmd->error_string)
- have_input = 1;
- }
+ int code;
- if (!have_input || access(hook_name, X_OK) < 0)
+ if (access(hook_name, X_OK) < 0)
return 0;
argv[0] = hook_name;
return code;
}
- for (cmd = commands; cmd; cmd = cmd->next) {
- if (!cmd->error_string) {
- size_t n = snprintf(buf, sizeof(buf), "%s %s %s\n",
- sha1_to_hex(cmd->old_sha1),
- sha1_to_hex(cmd->new_sha1),
- cmd->ref_name);
- if (write_in_full(proc.in, buf, n) != n)
- break;
- }
+ while (1) {
+ const char *buf;
+ size_t n;
+ if (feed(feed_state, &buf, &n))
+ break;
+ if (write_in_full(proc.in, buf, n) != n)
+ break;
}
close(proc.in);
if (use_sideband)
return finish_command(&proc);
}
+struct receive_hook_feed_state {
+ struct command *cmd;
+ int skip_broken;
+ struct strbuf buf;
+};
+
+static int feed_receive_hook(void *state_, const char **bufp, size_t *sizep)
+{
+ struct receive_hook_feed_state *state = state_;
+ struct command *cmd = state->cmd;
+
+ while (cmd &&
+ state->skip_broken && (cmd->error_string || cmd->did_not_exist))
+ cmd = cmd->next;
+ if (!cmd)
+ return -1; /* EOF */
+ strbuf_reset(&state->buf);
+ strbuf_addf(&state->buf, "%s %s %s\n",
+ sha1_to_hex(cmd->old_sha1), sha1_to_hex(cmd->new_sha1),
+ cmd->ref_name);
+ state->cmd = cmd->next;
+ if (bufp) {
+ *bufp = state->buf.buf;
+ *sizep = state->buf.len;
+ }
+ return 0;
+}
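/*
 * feed_receive_hook() implements the feed_fn protocol: fill *bufp and
 * *sizep with one "old-sha1 new-sha1 refname\n" record and return 0, or
 * return -1 once the (possibly filtered) command list is exhausted.
 * run_receive_hook() below first calls it with NULL buffers purely to
 * find out whether anything would be fed at all before spawning the
 * hook.
 */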
+
+static int run_receive_hook(struct command *commands, const char *hook_name,
+ int skip_broken)
+{
+ struct receive_hook_feed_state state;
+ int status;
+
+ strbuf_init(&state.buf, 0);
+ state.cmd = commands;
+ state.skip_broken = skip_broken;
+ if (feed_receive_hook(&state, NULL, NULL))
+ return 0;
+ state.cmd = commands;
+ status = run_and_feed_hook(hook_name, feed_receive_hook, &state);
+ strbuf_release(&state.buf);
+ return status;
+}
+
static int run_update_hook(struct command *cmd)
{
static const char update_hook[] = "hooks/update";
struct ref_lock *lock;
/* only refs/... are allowed */
- if (prefixcmp(name, "refs/") || check_ref_format(name + 5)) {
+ if (prefixcmp(name, "refs/") || check_refname_format(name + 5, 0)) {
rp_error("refusing to create funny ref '%s' remotely", name);
return "funny refname";
}
if (is_null_sha1(new_sha1)) {
if (!parse_object(old_sha1)) {
- rp_warning("Allowing deletion of corrupt ref.");
old_sha1 = NULL;
+ if (ref_exists(name)) {
+ rp_warning("Allowing deletion of corrupt ref.");
+ } else {
+ rp_warning("Deleting a non-existent ref.");
+ cmd->did_not_exist = 1;
+ }
}
if (delete_ref(namespaced_name, old_sha1, 0)) {
rp_error("failed to delete %s", name);
struct child_process proc;
for (argc = 0, cmd = commands; cmd; cmd = cmd->next) {
- if (cmd->error_string)
+ if (cmd->error_string || cmd->did_not_exist)
continue;
argc++;
}
for (argc = 1, cmd = commands; cmd; cmd = cmd->next) {
char *p;
- if (cmd->error_string)
+ if (cmd->error_string || cmd->did_not_exist)
continue;
p = xmalloc(strlen(cmd->ref_name) + 1);
strcpy(p, cmd->ref_name);
string_list_clear(&ref_list, 0);
}
+static int command_singleton_iterator(void *cb_data, unsigned char sha1[20])
+{
+ struct command **cmd_list = cb_data;
+ struct command *cmd = *cmd_list;
+
+ if (!cmd)
+ return -1; /* end of list */
+ *cmd_list = NULL; /* this returns only one */
+ hashcpy(sha1, cmd->new_sha1);
+ return 0;
+}
+
+static void set_connectivity_errors(struct command *commands)
+{
+ struct command *cmd;
+
+ for (cmd = commands; cmd; cmd = cmd->next) {
+ struct command *singleton = cmd;
+ if (!check_everything_connected(command_singleton_iterator,
+ 0, &singleton))
+ continue;
+ cmd->error_string = "missing necessary objects";
+ }
+}
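/*
 * Note that set_connectivity_errors() re-checks each update on its own,
 * via the single-entry iterator above, so only the refs whose new
 * objects are actually missing are flagged instead of failing every
 * update in the push.
 */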
+
+static int iterate_receive_command_list(void *cb_data, unsigned char sha1[20])
+{
+ struct command **cmd_list = cb_data;
+ struct command *cmd = *cmd_list;
+
+ if (!cmd)
+ return -1; /* end of list */
+ *cmd_list = cmd->next;
+ hashcpy(sha1, cmd->new_sha1);
+ return 0;
+}
+
static void execute_commands(struct command *commands, const char *unpacker_error)
{
struct command *cmd;
return;
}
- if (run_receive_hook(commands, pre_receive_hook)) {
+ cmd = commands;
+ if (check_everything_connected(iterate_receive_command_list,
+ 0, &cmd))
+ set_connectivity_errors(commands);
+
+ if (run_receive_hook(commands, pre_receive_hook, 0)) {
for (cmd = commands; cmd; cmd = cmd->next)
cmd->error_string = "pre-receive hook declined";
return;
struct pack_header hdr;
const char *hdr_err;
char hdr_arg[38];
+ int fsck_objects = (receive_fsck_objects >= 0
+ ? receive_fsck_objects
+ : transfer_fsck_objects >= 0
+ ? transfer_fsck_objects
+ : 0);
hdr_err = parse_pack_header(&hdr);
if (hdr_err)
int code, i = 0;
const char *unpacker[4];
unpacker[i++] = "unpack-objects";
- if (receive_fsck_objects)
+ if (fsck_objects)
unpacker[i++] = "--strict";
unpacker[i++] = hdr_arg;
unpacker[i++] = NULL;
keeper[i++] = "index-pack";
keeper[i++] = "--stdin";
- if (receive_fsck_objects)
+ if (fsck_objects)
keeper[i++] = "--strict";
keeper[i++] = "--fix-thin";
keeper[i++] = hdr_arg;
unlink_or_warn(pack_lockfile);
if (report_status)
report(commands, unpack_status);
- run_receive_hook(commands, post_receive_hook);
+ run_receive_hook(commands, post_receive_hook, 1);
run_update_post_hook(commands);
if (auto_gc) {
const char *argv_gc_auto[] = {
unsigned char orig_sha1[20];
const char *symref;
- strbuf_addf(&buf, "refs/remotes/%s", rename->old);
+ strbuf_addf(&buf, "refs/remotes/%s/", rename->old);
if (!prefixcmp(refname, buf.buf)) {
item = string_list_append(rename->remote_branches, xstrdup(refname));
symref = resolve_ref(refname, orig_sha1, 1, &flag);
OPT_END()
};
struct remote *oldremote, *newremote;
- struct strbuf buf = STRBUF_INIT, buf2 = STRBUF_INIT, buf3 = STRBUF_INIT;
+ struct strbuf buf = STRBUF_INIT, buf2 = STRBUF_INIT, buf3 = STRBUF_INIT,
+ old_remote_context = STRBUF_INIT;
struct string_list remote_branches = STRING_LIST_INIT_NODUP;
struct rename_info rename;
- int i;
+ int i, refspec_updated = 0;
if (argc != 3)
usage_with_options(builtin_remote_rename_usage, options);
strbuf_addf(&buf, "remote.%s.fetch", rename.new);
if (git_config_set_multivar(buf.buf, NULL, NULL, 1))
return error("Could not remove config section '%s'", buf.buf);
+ strbuf_addf(&old_remote_context, ":refs/remotes/%s/", rename.old);
for (i = 0; i < oldremote->fetch_refspec_nr; i++) {
char *ptr;
strbuf_reset(&buf2);
strbuf_addstr(&buf2, oldremote->fetch_refspec[i]);
- ptr = strstr(buf2.buf, rename.old);
- if (ptr)
- strbuf_splice(&buf2, ptr-buf2.buf, strlen(rename.old),
- rename.new, strlen(rename.new));
+ ptr = strstr(buf2.buf, old_remote_context.buf);
+ if (ptr) {
+ refspec_updated = 1;
+ strbuf_splice(&buf2,
+ ptr-buf2.buf + strlen(":refs/remotes/"),
+ strlen(rename.old), rename.new,
+ strlen(rename.new));
+ } else
+			warning("Not updating non-default fetch refspec\n"
+ "\t%s\n"
+ "\tPlease update the configuration manually if necessary.",
+ buf2.buf);
+
if (git_config_set_multivar(buf.buf, buf2.buf, "^$", 0))
return error("Could not append '%s'", buf.buf);
}
}
}
+ if (!refspec_updated)
+ return 0;
+
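/*
 * For example, a stock refspec such as
 *
 *   +refs/heads/*:refs/remotes/<old>/*
 *
 * has "<old>" spliced out and replaced with the new name, while a
 * hand-crafted refspec that does not contain ":refs/remotes/<old>/" is
 * left untouched and only triggers the warning above.
 */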
/*
* First remove symrefs, then rename the rest, finally create
* the new symrefs.
"refs/replace/%s",
sha1_to_hex(object)) > sizeof(ref) - 1)
die("replace ref name too long: %.*s...", 50, ref);
- if (check_ref_format(ref))
+ if (check_refname_format(ref, 0))
die("'%s' is not a valid ref name.", ref);
if (!resolve_ref(ref, prev, 1, NULL))
commit->buffer = NULL;
}
-static void finish_object(struct object *obj, const struct name_path *path, const char *name)
+static void finish_object(struct object *obj,
+ const struct name_path *path, const char *name,
+ void *cb_data)
{
if (obj->type == OBJ_BLOB && !has_sha1_file(obj->sha1))
die("missing blob object '%s'", sha1_to_hex(obj->sha1));
}
-static void show_object(struct object *obj, const struct name_path *path, const char *component)
+static void show_object(struct object *obj,
+ const struct name_path *path, const char *component,
+ void *cb_data)
{
- char *name = path_name(path, component);
- /* An object with name "foo\n0000000..." can be used to
- * confuse downstream "git pack-objects" very badly.
- */
- const char *ep = strchr(name, '\n');
+ struct rev_info *info = cb_data;
- finish_object(obj, path, name);
- if (ep) {
- printf("%s %.*s\n", sha1_to_hex(obj->sha1),
- (int) (ep - name),
- name);
- }
- else
- printf("%s %s\n", sha1_to_hex(obj->sha1), name);
- free(name);
+ finish_object(obj, path, component, cb_data);
+ if (info->verify_objects && !obj->parsed && obj->type != OBJ_COMMIT)
+ parse_object(obj->sha1);
+ show_object_with_name(stdout, obj, path, component);
}
static void show_edge(struct commit *commit)
return 0;
}
+ if (argc > 2 && !strcmp(argv[1], "--resolve-git-dir")) {
+ const char *gitdir = resolve_gitdir(argv[2]);
+ if (!gitdir)
+ die("not a gitdir '%s'", argv[2]);
+ puts(gitdir);
+ return 0;
+ }
+
if (argc > 1 && !strcmp("-h", argv[1]))
usage(builtin_rev_parse_usage);
#include "rerere.h"
#include "merge-recursive.h"
#include "refs.h"
+#include "dir.h"
+#include "sequencer.h"
/*
* This implements the builtins revert and cherry-pick.
static const char * const revert_usage[] = {
"git revert [options] <commit-ish>",
+ "git revert <subcommand>",
NULL
};
static const char * const cherry_pick_usage[] = {
"git cherry-pick [options] <commit-ish>",
+ "git cherry-pick <subcommand>",
NULL
};
-static int edit, no_replay, no_commit, mainline, signoff, allow_ff;
-static enum { REVERT, CHERRY_PICK } action;
-static struct commit *commit;
-static int commit_argc;
-static const char **commit_argv;
-static int allow_rerere_auto;
-
-static const char *me;
-
-/* Merge strategy. */
-static const char *strategy;
-static const char **xopts;
-static size_t xopts_nr, xopts_alloc;
+enum replay_action { REVERT, CHERRY_PICK };
+enum replay_subcommand { REPLAY_NONE, REPLAY_RESET, REPLAY_CONTINUE };
+
+struct replay_opts {
+ enum replay_action action;
+ enum replay_subcommand subcommand;
+
+ /* Boolean options */
+ int edit;
+ int record_origin;
+ int no_commit;
+ int signoff;
+ int allow_ff;
+ int allow_rerere_auto;
+
+ int mainline;
+ int commit_argc;
+ const char **commit_argv;
+
+ /* Merge strategy */
+ const char *strategy;
+ const char **xopts;
+ size_t xopts_nr, xopts_alloc;
+};
#define GIT_REFLOG_ACTION "GIT_REFLOG_ACTION"
+static const char *action_name(const struct replay_opts *opts)
+{
+ return opts->action == REVERT ? "revert" : "cherry-pick";
+}
+
static char *get_encoding(const char *message);
-static const char * const *revert_or_cherry_pick_usage(void)
+static const char * const *revert_or_cherry_pick_usage(struct replay_opts *opts)
{
- return action == REVERT ? revert_usage : cherry_pick_usage;
+ return opts->action == REVERT ? revert_usage : cherry_pick_usage;
}
static int option_parse_x(const struct option *opt,
const char *arg, int unset)
{
+ struct replay_opts **opts_ptr = opt->value;
+ struct replay_opts *opts = *opts_ptr;
+
if (unset)
return 0;
- ALLOC_GROW(xopts, xopts_nr + 1, xopts_alloc);
- xopts[xopts_nr++] = xstrdup(arg);
+ ALLOC_GROW(opts->xopts, opts->xopts_nr + 1, opts->xopts_alloc);
+ opts->xopts[opts->xopts_nr++] = xstrdup(arg);
return 0;
}
-static void parse_args(int argc, const char **argv)
+static void verify_opt_compatible(const char *me, const char *base_opt, ...)
+{
+ const char *this_opt;
+ va_list ap;
+
+ va_start(ap, base_opt);
+ while ((this_opt = va_arg(ap, const char *))) {
+ if (va_arg(ap, int))
+ break;
+ }
+ va_end(ap);
+
+ if (this_opt)
+ die(_("%s: %s cannot be used with %s"), me, this_opt, base_opt);
+}
+
+static void verify_opt_mutually_compatible(const char *me, ...)
+{
+ const char *opt1, *opt2 = NULL;
+ va_list ap;
+
+ va_start(ap, me);
+ while ((opt1 = va_arg(ap, const char *))) {
+ if (va_arg(ap, int))
+ break;
+ }
+ if (opt1) {
+ while ((opt2 = va_arg(ap, const char *))) {
+ if (va_arg(ap, int))
+ break;
+ }
+ }
+
+ if (opt1 && opt2)
+ die(_("%s: %s cannot be used with %s"), me, opt1, opt2);
+}
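/*
 * Both helpers above take their options as (name, flag) pairs
 * terminated by a NULL name, where the flag says whether that option
 * was actually given: verify_opt_compatible() rejects any flagged
 * option used alongside base_opt, and verify_opt_mutually_compatible()
 * rejects any two flagged options appearing together.
 */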
+
+static void parse_args(int argc, const char **argv, struct replay_opts *opts)
{
- const char * const * usage_str = revert_or_cherry_pick_usage();
- int noop;
+ const char * const * usage_str = revert_or_cherry_pick_usage(opts);
+ const char *me = action_name(opts);
+ int reset = 0;
+ int contin = 0;
struct option options[] = {
- OPT_BOOLEAN('n', "no-commit", &no_commit, "don't automatically commit"),
- OPT_BOOLEAN('e', "edit", &edit, "edit the commit message"),
- { OPTION_BOOLEAN, 'r', NULL, &noop, NULL, "no-op (backward compatibility)",
- PARSE_OPT_NOARG | PARSE_OPT_HIDDEN, NULL, 0 },
- OPT_BOOLEAN('s', "signoff", &signoff, "add Signed-off-by:"),
- OPT_INTEGER('m', "mainline", &mainline, "parent number"),
- OPT_RERERE_AUTOUPDATE(&allow_rerere_auto),
- OPT_STRING(0, "strategy", &strategy, "strategy", "merge strategy"),
- OPT_CALLBACK('X', "strategy-option", &xopts, "option",
+ OPT_BOOLEAN(0, "reset", &reset, "forget the current operation"),
+ OPT_BOOLEAN(0, "continue", &contin, "continue the current operation"),
+ OPT_BOOLEAN('n', "no-commit", &opts->no_commit, "don't automatically commit"),
+ OPT_BOOLEAN('e', "edit", &opts->edit, "edit the commit message"),
+ OPT_NOOP_NOARG('r', NULL),
+ OPT_BOOLEAN('s', "signoff", &opts->signoff, "add Signed-off-by:"),
+ OPT_INTEGER('m', "mainline", &opts->mainline, "parent number"),
+ OPT_RERERE_AUTOUPDATE(&opts->allow_rerere_auto),
+ OPT_STRING(0, "strategy", &opts->strategy, "strategy", "merge strategy"),
+ OPT_CALLBACK('X', "strategy-option", &opts, "option",
"option for merge strategy", option_parse_x),
OPT_END(),
OPT_END(),
OPT_END(),
};
- if (action == CHERRY_PICK) {
+ if (opts->action == CHERRY_PICK) {
struct option cp_extra[] = {
- OPT_BOOLEAN('x', NULL, &no_replay, "append commit name"),
- OPT_BOOLEAN(0, "ff", &allow_ff, "allow fast-forward"),
+ OPT_BOOLEAN('x', NULL, &opts->record_origin, "append commit name"),
+ OPT_BOOLEAN(0, "ff", &opts->allow_ff, "allow fast-forward"),
OPT_END(),
};
if (parse_options_concat(options, ARRAY_SIZE(options), cp_extra))
die(_("program error"));
}
- commit_argc = parse_options(argc, argv, NULL, options, usage_str,
- PARSE_OPT_KEEP_ARGV0 |
- PARSE_OPT_KEEP_UNKNOWN);
- if (commit_argc < 2)
+ opts->commit_argc = parse_options(argc, argv, NULL, options, usage_str,
+ PARSE_OPT_KEEP_ARGV0 |
+ PARSE_OPT_KEEP_UNKNOWN);
+
+ /* Check for incompatible subcommands */
+ verify_opt_mutually_compatible(me,
+ "--reset", reset,
+ "--continue", contin,
+ NULL);
+
+ /* Set the subcommand */
+ if (reset)
+ opts->subcommand = REPLAY_RESET;
+ else if (contin)
+ opts->subcommand = REPLAY_CONTINUE;
+ else
+ opts->subcommand = REPLAY_NONE;
+
+ /* Check for incompatible command line arguments */
+ if (opts->subcommand != REPLAY_NONE) {
+ char *this_operation;
+ if (opts->subcommand == REPLAY_RESET)
+ this_operation = "--reset";
+ else
+ this_operation = "--continue";
+
+ verify_opt_compatible(me, this_operation,
+ "--no-commit", opts->no_commit,
+ "--signoff", opts->signoff,
+ "--mainline", opts->mainline,
+ "--strategy", opts->strategy ? 1 : 0,
+ "--strategy-option", opts->xopts ? 1 : 0,
+ "-x", opts->record_origin,
+ "--ff", opts->allow_ff,
+ NULL);
+ }
+
+ else if (opts->commit_argc < 2)
usage_with_options(usage_str, options);
- commit_argv = argv;
+ if (opts->allow_ff)
+ verify_opt_compatible(me, "--ff",
+ "--signoff", opts->signoff,
+ "--no-commit", opts->no_commit,
+ "-x", opts->record_origin,
+ "--edit", opts->edit,
+ NULL);
+ opts->commit_argv = argv;
}
struct commit_message {
const char *message;
};
-static int get_message(const char *raw_message, struct commit_message *out)
+static int get_message(struct commit *commit, struct commit_message *out)
{
const char *encoding;
const char *abbrev, *subject;
int abbrev_len, subject_len;
char *q;
- if (!raw_message)
+ if (!commit->buffer)
return -1;
- encoding = get_encoding(raw_message);
+ encoding = get_encoding(commit->buffer);
if (!encoding)
encoding = "UTF-8";
if (!git_commit_encoding)
git_commit_encoding = "UTF-8";
out->reencoded_message = NULL;
- out->message = raw_message;
+ out->message = commit->buffer;
if (strcmp(encoding, git_commit_encoding))
- out->reencoded_message = reencode_string(raw_message,
+ out->reencoded_message = reencode_string(commit->buffer,
git_commit_encoding, encoding);
if (out->reencoded_message)
out->message = out->reencoded_message;
{
const char *p = message, *eol;
- if (!p)
- die (_("Could not read commit message of %s"),
- sha1_to_hex(commit->object.sha1));
while (*p && *p != '\n') {
for (eol = p + 1; *eol && *eol != '\n'; eol++)
; /* do nothing */
return NULL;
}
-static void add_message_to_msg(struct strbuf *msgbuf, const char *message)
-{
- const char *p = message;
- while (*p && (*p != '\n' || p[1] != '\n'))
- p++;
-
- if (!*p)
- strbuf_addstr(msgbuf, sha1_to_hex(commit->object.sha1));
-
- p += 2;
- strbuf_addstr(msgbuf, p);
-}
-
-static void write_cherry_pick_head(void)
+static void write_cherry_pick_head(struct commit *commit)
{
int fd;
struct strbuf buf = STRBUF_INIT;
strbuf_release(&buf);
}
-static void advise(const char *advice, ...)
-{
- va_list params;
-
- va_start(params, advice);
- vreportf("hint: ", advice, params);
- va_end(params);
-}
-
static void print_advice(void)
{
char *msg = getenv("GIT_CHERRY_PICK_HELP");
return lookup_tree((const unsigned char *)EMPTY_TREE_SHA1_BIN);
}
-static NORETURN void die_dirty_index(const char *me)
+static int error_dirty_index(struct replay_opts *opts)
{
- if (read_cache_unmerged()) {
- die_resolve_conflict(me);
- } else {
- if (advice_commit_before_merge) {
- if (action == REVERT)
- die(_("Your local changes would be overwritten by revert.\n"
- "Please, commit your changes or stash them to proceed."));
- else
- die(_("Your local changes would be overwritten by cherry-pick.\n"
- "Please, commit your changes or stash them to proceed."));
- } else {
- if (action == REVERT)
- die(_("Your local changes would be overwritten by revert.\n"));
- else
- die(_("Your local changes would be overwritten by cherry-pick.\n"));
- }
- }
+ if (read_cache_unmerged())
+ return error_resolve_conflict(action_name(opts));
+
+ /* Different translation strings for cherry-pick and revert */
+ if (opts->action == CHERRY_PICK)
+ error(_("Your local changes would be overwritten by cherry-pick."));
+ else
+ error(_("Your local changes would be overwritten by revert."));
+
+ if (advice_commit_before_merge)
+ advise(_("Commit your changes or stash them to proceed."));
+ return -1;
}
static int fast_forward_to(const unsigned char *to, const unsigned char *from)
static int do_recursive_merge(struct commit *base, struct commit *next,
const char *base_label, const char *next_label,
- unsigned char *head, struct strbuf *msgbuf)
+ unsigned char *head, struct strbuf *msgbuf,
+ struct replay_opts *opts)
{
struct merge_options o;
struct tree *result, *next_tree, *base_tree, *head_tree;
next_tree = next ? next->tree : empty_tree();
base_tree = base ? base->tree : empty_tree();
- for (xopt = xopts; xopt != xopts + xopts_nr; xopt++)
+ for (xopt = opts->xopts; xopt != opts->xopts + opts->xopts_nr; xopt++)
parse_merge_opt(&o, *xopt);
clean = merge_trees(&o,
(write_cache(index_fd, active_cache, active_nr) ||
commit_locked_index(&index_lock)))
/* TRANSLATORS: %s will be "revert" or "cherry-pick" */
- die(_("%s: Unable to write new index file"), me);
+ die(_("%s: Unable to write new index file"), action_name(opts));
rollback_lock_file(&index_lock);
if (!clean) {
* If we are revert, or if our cherry-pick results in a hand merge,
* we had better say that the current user is responsible for that.
*/
-static int run_git_commit(const char *defmsg)
+static int run_git_commit(const char *defmsg, struct replay_opts *opts)
{
/* 6 is max possible length of our args array including NULL */
const char *args[6];
args[i++] = "commit";
args[i++] = "-n";
- if (signoff)
+ if (opts->signoff)
args[i++] = "-s";
- if (!edit) {
+ if (!opts->edit) {
args[i++] = "-F";
args[i++] = defmsg;
}
return run_command_v_opt(args, RUN_GIT_CMD);
}
-static int do_pick_commit(void)
+static int do_pick_commit(struct commit *commit, struct replay_opts *opts)
{
unsigned char head[20];
struct commit *base, *next, *parent;
struct strbuf msgbuf = STRBUF_INIT;
int res;
- if (no_commit) {
+ if (opts->no_commit) {
/*
* We do not intend to commit immediately. We just want to
* merge the differences in, so let's compute the tree
die (_("Your index file is unmerged."));
} else {
if (get_sha1("HEAD", head))
- die (_("You do not have a valid HEAD"));
+ return error(_("You do not have a valid HEAD"));
if (index_differs_from("HEAD", 0))
- die_dirty_index(me);
+ return error_dirty_index(opts);
}
discard_cache();
int cnt;
struct commit_list *p;
- if (!mainline)
- die(_("Commit %s is a merge but no -m option was given."),
- sha1_to_hex(commit->object.sha1));
+ if (!opts->mainline)
+ return error(_("Commit %s is a merge but no -m option was given."),
+ sha1_to_hex(commit->object.sha1));
for (cnt = 1, p = commit->parents;
- cnt != mainline && p;
+ cnt != opts->mainline && p;
cnt++)
p = p->next;
- if (cnt != mainline || !p)
- die(_("Commit %s does not have parent %d"),
- sha1_to_hex(commit->object.sha1), mainline);
+ if (cnt != opts->mainline || !p)
+ return error(_("Commit %s does not have parent %d"),
+ sha1_to_hex(commit->object.sha1), opts->mainline);
parent = p->item;
- } else if (0 < mainline)
- die(_("Mainline was specified but commit %s is not a merge."),
- sha1_to_hex(commit->object.sha1));
+ } else if (0 < opts->mainline)
+ return error(_("Mainline was specified but commit %s is not a merge."),
+ sha1_to_hex(commit->object.sha1));
else
parent = commit->parents->item;
- if (allow_ff && parent && !hashcmp(parent->object.sha1, head))
+ if (opts->allow_ff && parent && !hashcmp(parent->object.sha1, head))
return fast_forward_to(commit->object.sha1, head);
if (parent && parse_commit(parent) < 0)
/* TRANSLATORS: The first %s will be "revert" or
"cherry-pick", the second %s a SHA1 */
- die(_("%s: cannot parse parent commit %s"),
- me, sha1_to_hex(parent->object.sha1));
+ return error(_("%s: cannot parse parent commit %s"),
+ action_name(opts), sha1_to_hex(parent->object.sha1));
- if (get_message(commit->buffer, &msg) != 0)
- die(_("Cannot get commit message for %s"),
- sha1_to_hex(commit->object.sha1));
+ if (get_message(commit, &msg) != 0)
+ return error(_("Cannot get commit message for %s"),
+ sha1_to_hex(commit->object.sha1));
/*
* "commit" is an existing commit. We would want to apply
defmsg = git_pathdup("MERGE_MSG");
- if (action == REVERT) {
+ if (opts->action == REVERT) {
base = commit;
base_label = msg.label;
next = parent;
}
strbuf_addstr(&msgbuf, ".\n");
} else {
+ const char *p;
+
base = parent;
base_label = msg.parent_label;
next = commit;
next_label = msg.label;
- add_message_to_msg(&msgbuf, msg.message);
- if (no_replay) {
+
+ /*
+ * Append the commit log message to msgbuf; it starts
+ * after the tree, parent, author, committer
+ * information followed by "\n\n".
+ */
+ p = strstr(msg.message, "\n\n");
+ if (p) {
+ p += 2;
+ strbuf_addstr(&msgbuf, p);
+ }
+
+ if (opts->record_origin) {
strbuf_addstr(&msgbuf, "(cherry picked from commit ");
strbuf_addstr(&msgbuf, sha1_to_hex(commit->object.sha1));
strbuf_addstr(&msgbuf, ")\n");
}
- if (!no_commit)
- write_cherry_pick_head();
+ if (!opts->no_commit)
+ write_cherry_pick_head(commit);
}
- if (!strategy || !strcmp(strategy, "recursive") || action == REVERT) {
+ if (!opts->strategy || !strcmp(opts->strategy, "recursive") || opts->action == REVERT) {
res = do_recursive_merge(base, next, base_label, next_label,
- head, &msgbuf);
+ head, &msgbuf, opts);
write_message(&msgbuf, defmsg);
} else {
struct commit_list *common = NULL;
commit_list_insert(base, &common);
commit_list_insert(next, &remotes);
- res = try_merge_command(strategy, xopts_nr, xopts, common,
- sha1_to_hex(head), remotes);
+ res = try_merge_command(opts->strategy, opts->xopts_nr, opts->xopts,
+ common, sha1_to_hex(head), remotes);
free_commit_list(common);
free_commit_list(remotes);
}
if (res) {
- error(action == REVERT
+ error(opts->action == REVERT
? _("could not revert %s... %s")
: _("could not apply %s... %s"),
find_unique_abbrev(commit->object.sha1, DEFAULT_ABBREV),
msg.subject);
print_advice();
- rerere(allow_rerere_auto);
+ rerere(opts->allow_rerere_auto);
} else {
- if (!no_commit)
- res = run_git_commit(defmsg);
+ if (!opts->no_commit)
+ res = run_git_commit(defmsg, opts);
}
free_message(&msg);
return res;
}
-static void prepare_revs(struct rev_info *revs)
+static void prepare_revs(struct rev_info *revs, struct replay_opts *opts)
{
int argc;
init_revisions(revs, NULL);
revs->no_walk = 1;
- if (action != REVERT)
+ if (opts->action != REVERT)
revs->reverse = 1;
- argc = setup_revisions(commit_argc, commit_argv, revs, NULL);
+ argc = setup_revisions(opts->commit_argc, opts->commit_argv, revs, NULL);
if (argc > 1)
- usage(*revert_or_cherry_pick_usage());
+ usage(*revert_or_cherry_pick_usage(opts));
if (prepare_revision_walk(revs))
die(_("revision walk setup failed"));
die(_("empty commit set passed"));
}
-static void read_and_refresh_cache(const char *me)
+static void read_and_refresh_cache(struct replay_opts *opts)
{
static struct lock_file index_lock;
int index_fd = hold_locked_index(&index_lock, 0);
if (read_index_preload(&the_index, NULL) < 0)
- die(_("git %s: failed to read the index"), me);
+ die(_("git %s: failed to read the index"), action_name(opts));
refresh_index(&the_index, REFRESH_QUIET|REFRESH_UNMERGED, NULL, NULL, NULL);
if (the_index.cache_changed) {
if (write_index(&the_index, index_fd) ||
commit_locked_index(&index_lock))
- die(_("git %s: failed to refresh the index"), me);
+ die(_("git %s: failed to refresh the index"), action_name(opts));
}
rollback_lock_file(&index_lock);
}
-static int revert_or_cherry_pick(int argc, const char **argv)
+/*
+ * Append a commit to the end of the commit_list.
+ *
+ * next starts by pointing to the variable that holds the head of an
+ * empty commit_list, and is updated to point to the "next" field of
+ * the last item on the list as new commits are appended.
+ *
+ * Usage example:
+ *
+ * struct commit_list *list;
+ * struct commit_list **next = &list;
+ *
+ * next = commit_list_append(c1, next);
+ * next = commit_list_append(c2, next);
+ * assert(commit_list_count(list) == 2);
+ * return list;
+ */
+static struct commit_list **commit_list_append(struct commit *commit,
+ struct commit_list **next)
+{
+ struct commit_list *new = xmalloc(sizeof(struct commit_list));
+ new->item = commit;
+ *next = new;
+ new->next = NULL;
+ return &new->next;
+}
+
+static int format_todo(struct strbuf *buf, struct commit_list *todo_list,
+ struct replay_opts *opts)
+{
+ struct commit_list *cur = NULL;
+ struct commit_message msg = { NULL, NULL, NULL, NULL, NULL };
+ const char *sha1_abbrev = NULL;
+ const char *action_str = opts->action == REVERT ? "revert" : "pick";
+
+ for (cur = todo_list; cur; cur = cur->next) {
+ sha1_abbrev = find_unique_abbrev(cur->item->object.sha1, DEFAULT_ABBREV);
+ if (get_message(cur->item, &msg))
+ return error(_("Cannot get commit message for %s"), sha1_abbrev);
+ strbuf_addf(buf, "%s %s %s\n", action_str, sha1_abbrev, msg.subject);
+ }
+ return 0;
+}
+
+static struct commit *parse_insn_line(char *start, struct replay_opts *opts)
+{
+ unsigned char commit_sha1[20];
+ char sha1_abbrev[40];
+ enum replay_action action;
+ int insn_len = 0;
+ char *p, *q;
+
+ if (!prefixcmp(start, "pick ")) {
+ action = CHERRY_PICK;
+ insn_len = strlen("pick");
+ p = start + insn_len + 1;
+ } else if (!prefixcmp(start, "revert ")) {
+ action = REVERT;
+ insn_len = strlen("revert");
+ p = start + insn_len + 1;
+ } else
+ return NULL;
+
+ q = strchr(p, ' ');
+ if (!q)
+ return NULL;
+ q++;
+
+ strlcpy(sha1_abbrev, p, q - p);
+
+ /*
+ * Verify that the action matches up with the one in
+ * opts; we don't support arbitrary instructions
+ */
+ if (action != opts->action) {
+ const char *action_str;
+ action_str = action == REVERT ? "revert" : "cherry-pick";
+ error(_("Cannot %s during a %s"), action_str, action_name(opts));
+ return NULL;
+ }
+
+ if (get_sha1(sha1_abbrev, commit_sha1) < 0)
+ return NULL;
+
+ return lookup_commit_reference(commit_sha1);
+}
+
+static int parse_insn_buffer(char *buf, struct commit_list **todo_list,
+ struct replay_opts *opts)
+{
+ struct commit_list **next = todo_list;
+ struct commit *commit;
+ char *p = buf;
+ int i;
+
+ for (i = 1; *p; i++) {
+ commit = parse_insn_line(p, opts);
+ if (!commit)
+ return error(_("Could not parse line %d."), i);
+ next = commit_list_append(commit, next);
+ p = strchrnul(p, '\n');
+ if (*p)
+ p++;
+ }
+ if (!*todo_list)
+ return error(_("No commits parsed."));
+ return 0;
+}
+
+static void read_populate_todo(struct commit_list **todo_list,
+ struct replay_opts *opts)
+{
+ const char *todo_file = git_path(SEQ_TODO_FILE);
+ struct strbuf buf = STRBUF_INIT;
+ int fd, res;
+
+ fd = open(todo_file, O_RDONLY);
+ if (fd < 0)
+ die_errno(_("Could not open %s."), todo_file);
+ if (strbuf_read(&buf, fd, 0) < 0) {
+ close(fd);
+ strbuf_release(&buf);
+ die(_("Could not read %s."), todo_file);
+ }
+ close(fd);
+
+ res = parse_insn_buffer(buf.buf, todo_list, opts);
+ strbuf_release(&buf);
+ if (res)
+ die(_("Unusable instruction sheet: %s"), todo_file);
+}
+
+static int populate_opts_cb(const char *key, const char *value, void *data)
+{
+ struct replay_opts *opts = data;
+ int error_flag = 1;
+
+ if (!value)
+ error_flag = 0;
+ else if (!strcmp(key, "options.no-commit"))
+ opts->no_commit = git_config_bool_or_int(key, value, &error_flag);
+ else if (!strcmp(key, "options.edit"))
+ opts->edit = git_config_bool_or_int(key, value, &error_flag);
+ else if (!strcmp(key, "options.signoff"))
+ opts->signoff = git_config_bool_or_int(key, value, &error_flag);
+ else if (!strcmp(key, "options.record-origin"))
+ opts->record_origin = git_config_bool_or_int(key, value, &error_flag);
+ else if (!strcmp(key, "options.allow-ff"))
+ opts->allow_ff = git_config_bool_or_int(key, value, &error_flag);
+ else if (!strcmp(key, "options.mainline"))
+ opts->mainline = git_config_int(key, value);
+ else if (!strcmp(key, "options.strategy"))
+ git_config_string(&opts->strategy, key, value);
+ else if (!strcmp(key, "options.strategy-option")) {
+ ALLOC_GROW(opts->xopts, opts->xopts_nr + 1, opts->xopts_alloc);
+ opts->xopts[opts->xopts_nr++] = xstrdup(value);
+ } else
+ return error(_("Invalid key: %s"), key);
+
+ if (!error_flag)
+ return error(_("Invalid value for %s: %s"), key, value);
+
+ return 0;
+}
+
+static void read_populate_opts(struct replay_opts **opts_ptr)
+{
+ const char *opts_file = git_path(SEQ_OPTS_FILE);
+
+ if (!file_exists(opts_file))
+ return;
+ if (git_config_from_file(populate_opts_cb, opts_file, *opts_ptr) < 0)
+ die(_("Malformed options sheet: %s"), opts_file);
+}
+
+static void walk_revs_populate_todo(struct commit_list **todo_list,
+ struct replay_opts *opts)
{
struct rev_info revs;
+ struct commit *commit;
+ struct commit_list **next;
- git_config(git_default_config, NULL);
- me = action == REVERT ? "revert" : "cherry-pick";
- setenv(GIT_REFLOG_ACTION, me, 0);
- parse_args(argc, argv);
-
- if (allow_ff) {
- if (signoff)
- die(_("cherry-pick --ff cannot be used with --signoff"));
- if (no_commit)
- die(_("cherry-pick --ff cannot be used with --no-commit"));
- if (no_replay)
- die(_("cherry-pick --ff cannot be used with -x"));
- if (edit)
- die(_("cherry-pick --ff cannot be used with --edit"));
+ prepare_revs(&revs, opts);
+
+ next = todo_list;
+ while ((commit = get_revision(&revs)))
+ next = commit_list_append(commit, next);
+}
+
+static int create_seq_dir(void)
+{
+ const char *seq_dir = git_path(SEQ_DIR);
+
+ if (file_exists(seq_dir))
+ return error(_("%s already exists."), seq_dir);
+ else if (mkdir(seq_dir, 0777) < 0)
+ die_errno(_("Could not create sequencer directory '%s'."), seq_dir);
+ return 0;
+}
+
+static void save_head(const char *head)
+{
+ const char *head_file = git_path(SEQ_HEAD_FILE);
+ static struct lock_file head_lock;
+ struct strbuf buf = STRBUF_INIT;
+ int fd;
+
+ fd = hold_lock_file_for_update(&head_lock, head_file, LOCK_DIE_ON_ERROR);
+ strbuf_addf(&buf, "%s\n", head);
+ if (write_in_full(fd, buf.buf, buf.len) < 0)
+ die_errno(_("Could not write to %s."), head_file);
+ if (commit_lock_file(&head_lock) < 0)
+ die(_("Error wrapping up %s."), head_file);
+}
+
+static void save_todo(struct commit_list *todo_list, struct replay_opts *opts)
+{
+ const char *todo_file = git_path(SEQ_TODO_FILE);
+ static struct lock_file todo_lock;
+ struct strbuf buf = STRBUF_INIT;
+ int fd;
+
+ fd = hold_lock_file_for_update(&todo_lock, todo_file, LOCK_DIE_ON_ERROR);
+ if (format_todo(&buf, todo_list, opts) < 0)
+ die(_("Could not format %s."), todo_file);
+ if (write_in_full(fd, buf.buf, buf.len) < 0) {
+ strbuf_release(&buf);
+ die_errno(_("Could not write to %s."), todo_file);
+ }
+ if (commit_lock_file(&todo_lock) < 0) {
+ strbuf_release(&buf);
+ die(_("Error wrapping up %s."), todo_file);
}
+ strbuf_release(&buf);
+}
- read_and_refresh_cache(me);
+static void save_opts(struct replay_opts *opts)
+{
+ const char *opts_file = git_path(SEQ_OPTS_FILE);
+
+ if (opts->no_commit)
+ git_config_set_in_file(opts_file, "options.no-commit", "true");
+ if (opts->edit)
+ git_config_set_in_file(opts_file, "options.edit", "true");
+ if (opts->signoff)
+ git_config_set_in_file(opts_file, "options.signoff", "true");
+ if (opts->record_origin)
+ git_config_set_in_file(opts_file, "options.record-origin", "true");
+ if (opts->allow_ff)
+ git_config_set_in_file(opts_file, "options.allow-ff", "true");
+ if (opts->mainline) {
+ struct strbuf buf = STRBUF_INIT;
+ strbuf_addf(&buf, "%d", opts->mainline);
+ git_config_set_in_file(opts_file, "options.mainline", buf.buf);
+ strbuf_release(&buf);
+ }
+ if (opts->strategy)
+ git_config_set_in_file(opts_file, "options.strategy", opts->strategy);
+ if (opts->xopts) {
+ int i;
+ for (i = 0; i < opts->xopts_nr; i++)
+ git_config_set_multivar_in_file(opts_file,
+ "options.strategy-option",
+ opts->xopts[i], "^$", 0);
+ }
+}
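Because save_opts() goes through git_config_set_in_file(), the options
sheet kept under .git/sequencer is an ordinary config-style file. For a
run where the user asked for sign-off and passed one merge-strategy
option, it might look roughly like this (illustrative only):

	[options]
		signoff = true
		strategy-option = patience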
- prepare_revs(&revs);
+static int pick_commits(struct commit_list *todo_list, struct replay_opts *opts)
+{
+ struct commit_list *cur;
+ int res;
- while ((commit = get_revision(&revs))) {
- int res = do_pick_commit();
- if (res)
+ setenv(GIT_REFLOG_ACTION, action_name(opts), 0);
+ if (opts->allow_ff)
+ assert(!(opts->signoff || opts->no_commit ||
+ opts->record_origin || opts->edit));
+ read_and_refresh_cache(opts);
+
+ for (cur = todo_list; cur; cur = cur->next) {
+ save_todo(cur, opts);
+ res = do_pick_commit(cur->item, opts);
+ if (res) {
+ if (!cur->next)
+ /*
+ * An error was encountered while
+ * picking the last commit; the
+ * sequencer state is useless now --
+ * the user simply needs to resolve
+ * the conflict and commit
+ */
+ remove_sequencer_state(0);
return res;
+ }
}
+ /*
+ * Sequence of picks finished successfully; clean up by
+ * removing the .git/sequencer directory
+ */
+ remove_sequencer_state(1);
return 0;
}
+static int pick_revisions(struct replay_opts *opts)
+{
+ struct commit_list *todo_list = NULL;
+ unsigned char sha1[20];
+
+ read_and_refresh_cache(opts);
+
+ /*
+ * Decide what to do depending on the arguments; a fresh
+ * cherry-pick should be handled differently from an existing
+ * one that is being continued
+ */
+ if (opts->subcommand == REPLAY_RESET) {
+ remove_sequencer_state(1);
+ return 0;
+ } else if (opts->subcommand == REPLAY_CONTINUE) {
+ if (!file_exists(git_path(SEQ_TODO_FILE)))
+ goto error;
+ read_populate_opts(&opts);
+ read_populate_todo(&todo_list, opts);
+
+ /* Verify that the conflict has been resolved */
+ if (!index_differs_from("HEAD", 0))
+ todo_list = todo_list->next;
+ } else {
+ /*
+ * Start a new cherry-pick/revert sequence; but
+ * first, make sure that an existing one isn't in
+ * progress
+ */
+
+ walk_revs_populate_todo(&todo_list, opts);
+ if (create_seq_dir() < 0) {
+ error(_("A cherry-pick or revert is in progress."));
+ advise(_("Use --continue to continue the operation"));
+ advise(_("or --reset to forget about it"));
+ return -1;
+ }
+ if (get_sha1("HEAD", sha1)) {
+ if (opts->action == REVERT)
+ return error(_("Can't revert as initial commit"));
+ return error(_("Can't cherry-pick into empty head"));
+ }
+ save_head(sha1_to_hex(sha1));
+ save_opts(opts);
+ }
+ return pick_commits(todo_list, opts);
+error:
+ return error(_("No %s in progress"), action_name(opts));
+}
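On the command line, the three branches above correspond to starting a
fresh sequence, continuing one after a conflict, or abandoning it; an
illustrative session might be:

	git cherry-pick maint..master    # stops at a conflicting commit
	# ... resolve the conflict and commit ...
	git cherry-pick --continue       # pick the remaining commits
	git cherry-pick --reset          # or: forget the interrupted sequence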
+
int cmd_revert(int argc, const char **argv, const char *prefix)
{
+ struct replay_opts opts;
+ int res;
+
+ memset(&opts, 0, sizeof(opts));
if (isatty(0))
- edit = 1;
- action = REVERT;
- return revert_or_cherry_pick(argc, argv);
+ opts.edit = 1;
+ opts.action = REVERT;
+ git_config(git_default_config, NULL);
+ parse_args(argc, argv, &opts);
+ res = pick_revisions(&opts);
+ if (res < 0)
+ die(_("revert failed"));
+ return res;
}
int cmd_cherry_pick(int argc, const char **argv, const char *prefix)
{
- action = CHERRY_PICK;
- return revert_or_cherry_pick(argc, argv);
+ struct replay_opts opts;
+ int res;
+
+ memset(&opts, 0, sizeof(opts));
+ opts.action = CHERRY_PICK;
+ git_config(git_default_config, NULL);
+ parse_args(argc, argv, &opts);
+ res = pick_revisions(&opts);
+ if (res < 0)
+ die(_("cherry-pick failed"));
+ return res;
}
if (strncmp(ref, match, matchlen))
continue;
}
- if (check_ref_format(ref)) {
+ if (check_refname_format(ref, 0)) {
warning("ref '%s' ignored", ref);
continue;
}
static int strbuf_check_tag_ref(struct strbuf *sb, const char *name)
{
if (name[0] == '-')
- return CHECK_REF_FORMAT_ERROR;
+ return -1;
strbuf_reset(sb);
strbuf_addf(sb, "refs/tags/%s", name);
- return check_ref_format(sb->buf);
+ return check_refname_format(sb->buf, 0);
}
int cmd_tag(int argc, const char **argv, const char *prefix)
struct msg_arg msg = { 0, STRBUF_INIT };
struct commit_list *with_commit = NULL;
struct option options[] = {
- OPT_BOOLEAN('l', NULL, &list, "list tag names"),
+ OPT_BOOLEAN('l', "list", &list, "list tag names"),
{ OPTION_INTEGER, 'n', NULL, &lines, "n",
"print <n> lines of each tag message",
PARSE_OPT_OPTARG, NULL, 1 },
- OPT_BOOLEAN('d', NULL, &delete, "delete tags"),
- OPT_BOOLEAN('v', NULL, &verify, "verify tags"),
+ OPT_BOOLEAN('d', "delete", &delete, "delete tags"),
+ OPT_BOOLEAN('v', "verify", &verify, "verify tags"),
OPT_GROUP("Tag creation options"),
- OPT_BOOLEAN('a', NULL, &annotate,
+ OPT_BOOLEAN('a', "annotate", &annotate,
"annotated tag, needs a message"),
- OPT_CALLBACK('m', NULL, &msg, "message",
+ OPT_CALLBACK('m', "message", &msg, "message",
"tag message", parse_msg_arg),
- OPT_FILENAME('F', NULL, &msgfile, "read message from file"),
- OPT_BOOLEAN('s', NULL, &sign, "annotated and GPG-signed tag"),
- OPT_STRING('u', NULL, &keyid, "key-id",
+ OPT_FILENAME('F', "file", &msgfile, "read message from file"),
+ OPT_BOOLEAN('s', "sign", &sign, "annotated and GPG-signed tag"),
+ OPT_STRING('u', "local-user", &keyid, "key-id",
"use another key to sign the tag"),
OPT__FORCE(&force, "replace the tag if exists"),
req_nr = revs.pending.nr;
setup_revisions(2, argv, &revs, NULL);
- memset(&refs, 0, sizeof(struct object_array));
- for (i = 0; i < revs.pending.nr; i++) {
- struct object_array_entry *e = revs.pending.objects + i;
- add_object_array(e->item, e->name, &refs);
- }
+ refs = revs.pending;
+ revs.leak_pending = 1;
if (prepare_revision_walk(&revs))
die("revision walk setup failed");
refs.objects[i].name);
}
- for (i = 0; i < refs.nr; i++)
- clear_commit_marks((struct commit *)refs.objects[i].item, -1);
+ clear_commit_marks_for_object_array(&refs, ALL_REV_FLAGS);
+ free(refs.objects);
if (verbose) {
struct ref_list *r;
return 0;
}
-int unbundle(struct bundle_header *header, int bundle_fd)
+int unbundle(struct bundle_header *header, int bundle_fd, int flags)
{
const char *argv_index_pack[] = {"index-pack",
- "--fix-thin", "--stdin", NULL};
+ "--fix-thin", "--stdin", NULL, NULL};
struct child_process ip;
+ if (flags & BUNDLE_VERBOSE)
+ argv_index_pack[3] = "-v";
+
if (verify_bundle(header, 0))
return -1;
memset(&ip, 0, sizeof(ip));
int create_bundle(struct bundle_header *header, const char *path,
int argc, const char **argv);
int verify_bundle(struct bundle_header *header, int verbose);
-int unbundle(struct bundle_header *header, int bundle_fd);
+#define BUNDLE_VERBOSE 1
+int unbundle(struct bundle_header *header, int bundle_fd, int flags);
int list_bundle_refs(struct bundle_header *header,
int argc, const char **argv);
unsigned int ce_flags;
unsigned char sha1[20];
struct cache_entry *next;
+ struct cache_entry *dir_next;
char name[FLEX_ARRAY]; /* more */
};
extern const char *strip_namespace(const char *namespaced_ref);
extern const char *get_git_work_tree(void);
extern const char *read_gitfile(const char *path);
+extern const char *resolve_gitdir(const char *suspect);
extern void set_git_work_tree(const char *tree);
#define ALTERNATE_DB_ENVIRONMENT "GIT_ALTERNATE_OBJECT_DIRECTORIES"
extern const char **get_pathspec(const char *prefix, const char **pathspec);
-extern const char *pathspec_prefix(const char *prefix, const char **pathspec);
extern void setup_work_tree(void);
extern const char *setup_git_directory_gently(int *);
extern const char *setup_git_directory(void);
extern int shared_repository;
extern const char *apply_default_whitespace;
extern const char *apply_default_ignorewhitespace;
+extern const char *git_attributes_file;
extern int zlib_compression_level;
extern int core_compression_level;
extern int core_compression_seen;
{
return get_sha1_with_context_1(str, sha1, orc, 0, NULL);
}
+
+/*
+ * Try to read a SHA1 in hexadecimal format from the 40 characters
+ * starting at hex. Write the 20-byte result to sha1 in binary form.
+ * Return 0 on success. Reading stops if a NUL is encountered in the
+ * input, so it is safe to pass this function an arbitrary
+ * null-terminated string.
+ */
extern int get_sha1_hex(const char *hex, unsigned char *sha1);
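A minimal usage sketch (the hexadecimal string is an arbitrary made-up
example, not a real object name):

	unsigned char sha1[20];

	if (get_sha1_hex("2b93558cd5cb0a6b13b559fbdb9c45f86d1b3a73", sha1))
		die("not a valid hexadecimal object name");
	printf("%s\n", sha1_to_hex(sha1)); /* prints the same 40-hex string */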
+
extern char *sha1_to_hex(const unsigned char *sha1); /* static buffer result! */
extern int read_ref(const char *filename, unsigned char *sha1);
-extern const char *resolve_ref(const char *path, unsigned char *sha1, int, int *);
+
+/*
+ * Resolve a reference, recursively following symbolic references.
+ *
+ * Store the referred-to object's name in sha1 and return the name of
+ * the non-symbolic reference that ultimately pointed at it. The
+ * return value, if not NULL, is a pointer into either a static buffer
+ * or the input ref.
+ *
+ * If the reference cannot be resolved to an object, the behavior
+ * depends on the "reading" argument:
+ *
+ * - If reading is set, return NULL.
+ *
+ * - If reading is not set, clear sha1 and return the name of the last
+ * reference name in the chain, which will either be a non-symbolic
+ * reference or an undefined reference. If this is a prelude to
+ * "writing" to the ref, the return value is the name of the ref
+ * that will actually be created or changed.
+ *
+ * If flag is non-NULL, set the value that it points to to the
+ * combination of REF_ISPACKED (if the reference was found among the
+ * packed references) and REF_ISSYMREF (if the initial reference was a
+ * symbolic reference).
+ *
+ * If ref is not a properly-formatted, normalized reference, return
+ * NULL. If more than MAXDEPTH recursive symbolic lookups are needed,
+ * give up and return NULL.
+ *
+ * errno is sometimes set on errors, but not always.
+ */
+extern const char *resolve_ref(const char *ref, unsigned char *sha1, int reading, int *flag);
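As a rough usage sketch (the messages are illustrative, not taken from
any caller in this patch), a read-only lookup could be written as:

	unsigned char sha1[20];
	int flag;
	const char *refname;

	refname = resolve_ref("HEAD", sha1, 1, &flag);
	if (!refname)
		die("HEAD does not point at a valid object");
	if (flag & REF_ISSYMREF)
		printf("HEAD is a symbolic ref that resolves to %s\n", refname);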
+
extern int dwim_ref(const char *str, int len, unsigned char *sha1, char **ref);
extern int dwim_log(const char *str, int len, unsigned char *sha1, char **ref);
extern int interpret_branch_name(const char *str, struct strbuf *);
extern int git_config_maybe_bool(const char *, const char *);
extern int git_config_string(const char **, const char *, const char *);
extern int git_config_pathname(const char **, const char *, const char *);
+extern int git_config_set_in_file(const char *, const char *, const char *);
extern int git_config_set(const char *, const char *);
extern int git_config_parse_key(const char *, char **, int *);
extern int git_config_set_multivar(const char *, const char *, const char *, int);
+extern int git_config_set_multivar_in_file(const char *, const char *, const char *, const char *, int);
extern int git_config_rename_section(const char *, const char *);
extern const char *git_etc_gitconfig(void);
extern int check_repository_format_version(const char *var, const char *value, void *cb);
return lookup_commit_reference_gently(sha1, 0);
}
+struct commit *lookup_commit_or_die(const unsigned char *sha1, const char *ref_name)
+{
+ struct commit *c = lookup_commit_reference(sha1);
+ if (!c)
+ die(_("could not parse %s"), ref_name);
+ if (hashcmp(sha1, c->object.sha1)) {
+ warning(_("%s %s is not a commit!"),
+ ref_name, sha1_to_hex(sha1));
+ }
+ return c;
+}
+
struct commit *lookup_commit(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
}
}
+void clear_commit_marks_for_object_array(struct object_array *a, unsigned mark)
+{
+ struct object *object;
+ struct commit *commit;
+ unsigned int i;
+
+ for (i = 0; i < a->nr; i++) {
+ object = a->objects[i].item;
+ commit = lookup_commit_reference_gently(object->sha1, 1);
+ if (commit)
+ clear_commit_marks(commit, mark);
+ }
+}
+
struct commit *pop_commit(struct commit_list **stack)
{
struct commit_list *top = *stack;
int quiet);
struct commit *lookup_commit_reference_by_name(const char *name);
+/*
+ * Look up object named by "sha1", dereference tag as necessary,
+ * get a commit and return it. If "sha1" does not dereference to
+ * a commit, use ref_name to report an error and die.
+ */
+struct commit *lookup_commit_or_die(const unsigned char *sha1, const char *ref_name);
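A short sketch of the intended calling pattern (the ref name is only an
example):

	unsigned char sha1[20];
	struct commit *commit;

	if (get_sha1("refs/heads/master", sha1))
		die("refs/heads/master not found");
	/* dies with a message naming the ref if sha1 is not a commit */
	commit = lookup_commit_or_die(sha1, "refs/heads/master");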
+
int parse_commit_buffer(struct commit *item, const void *buffer, unsigned long size);
int parse_commit(struct commit *item);
struct commit *pop_commit(struct commit_list **stack);
void clear_commit_marks(struct commit *commit, unsigned int mark);
+void clear_commit_marks_for_object_array(struct object_array *a, unsigned mark);
/*
* Performs an in-place topological sort of list supplied.
}
ai->ai_addrlen = sizeof(struct sockaddr_in);
if (hints && (hints->ai_flags & AI_CANONNAME))
- ai->ai_canonname = h ? strdup(h->h_name) : NULL;
+ ai->ai_canonname = h ? xstrdup(h->h_name) : NULL;
else
ai->ai_canonname = NULL;
alignment - 1);
h->chunk_limit = chunk->limit
= (char *) chunk + h->chunk_size;
- chunk->prev = 0;
+ chunk->prev = NULL;
/* The initial chunk now contains no empty object. */
h->maybe_empty_object = 0;
h->alloc_failed = 0;
alignment - 1);
h->chunk_limit = chunk->limit
= (char *) chunk + h->chunk_size;
- chunk->prev = 0;
+ chunk->prev = NULL;
/* The initial chunk now contains no empty object. */
h->maybe_empty_object = 0;
h->alloc_failed = 0;
/* We use >= rather than > since the object cannot be exactly at
the beginning of the chunk but might be an empty object exactly
at the end of an adjacent chunk. */
- while (lp != 0 && ((void *) lp >= obj || (void *) (lp)->limit < obj))
+ while (lp != NULL && ((void *) lp >= obj || (void *) (lp)->limit < obj))
{
plp = lp->prev;
lp = plp;
}
- return lp != 0;
+ return lp != NULL;
}
\f
/* Free objects in obstack H, including OBJ and everything allocated
/* We use >= because there cannot be an object at the beginning of a chunk.
But there can be an empty object at that address
at the end of another chunk. */
- while (lp != 0 && ((void *) lp >= obj || (void *) (lp)->limit < obj))
+ while (lp != NULL && ((void *) lp >= obj || (void *) (lp)->limit < obj))
{
plp = lp->prev;
CALL_FREEFUN (h, lp);
h->chunk_limit = lp->limit;
h->chunk = lp;
}
- else if (obj != 0)
+ else if (obj != NULL)
/* obj is not in any of the chunks! */
abort ();
}
register struct _obstack_chunk* lp;
register int nbytes = 0;
- for (lp = h->chunk; lp != 0; lp = lp->prev)
+ for (lp = h->chunk; lp != NULL; lp = lp->prev)
{
nbytes += lp->limit - (char *) lp;
}
# endif
static void
-__attribute__ ((noreturn))
print_and_abort (void)
{
/* Don't change any of these strings. Yes, it would be possible to add
msort_with_tmp(b, n, s, cmp, buf);
} else {
/* It's somewhat large, so malloc it. */
- char *tmp = malloc(size);
+ char *tmp = xmalloc(size);
msort_with_tmp(b, n, s, cmp, tmp);
free(tmp);
}
#include "../../git-compat-util.h"
-#include "../../strbuf.h"
static HANDLE ms_eventlog;
void syslog(int priority, const char *fmt, ...)
{
- struct strbuf sb = STRBUF_INIT;
- struct strbuf_expand_dict_entry dict[] = {
- {"1", "% 1"},
- {NULL, NULL}
- };
WORD logtype;
- char *str;
+ char *str, *pos;
int str_len;
va_list ap;
}
str = malloc(str_len + 1);
+ if (!str) {
+ warning("malloc failed: '%s'", strerror(errno));
+ return;
+ }
+
va_start(ap, fmt);
vsnprintf(str, str_len + 1, fmt, ap);
va_end(ap);
- strbuf_expand(&sb, str, strbuf_expand_dict_cb, &dict);
- free(str);
+
+ while ((pos = strstr(str, "%1")) != NULL) {
+ size_t offset = pos - str;
+ str = realloc(str, ++str_len + 1);
+ if (!str) {
+ warning("realloc failed: '%s'", strerror(errno));
+ return;
+ }
+ /* realloc may have moved the buffer; recompute pos */
+ pos = str + offset;
+ memmove(pos + 2, pos + 1, strlen(pos));
+ pos[1] = ' ';
+ }
switch (priority) {
case LOG_EMERG:
}
ReportEventA(ms_eventlog, logtype, 0, 0, NULL, 1, 0,
- (const char **)&sb.buf, NULL);
-
- strbuf_release(&sb);
+ (const char **)&str, NULL);
+ free(str);
}
return 0;
}
+ if (!strcmp(var, "core.attributesfile"))
+ return git_config_pathname(&git_attributes_file, var, value);
+
if (!strcmp(var, "core.bare")) {
is_bare_repository_cfg = git_config_bool(var, value);
return 0;
return offset;
}
+int git_config_set_in_file(const char *config_filename,
+ const char *key, const char *value)
+{
+ return git_config_set_multivar_in_file(config_filename, key, value, NULL, 0);
+}
+
int git_config_set(const char *key, const char *value)
{
return git_config_set_multivar(key, value, NULL, 0);
* - the config file is removed and the lock file rename()d to it.
*
*/
-int git_config_set_multivar(const char *key, const char *value,
- const char *value_regex, int multi_replace)
+int git_config_set_multivar_in_file(const char *config_filename,
+ const char *key, const char *value,
+ const char *value_regex, int multi_replace)
{
int fd = -1, in_fd;
int ret;
- char *config_filename;
struct lock_file *lock = NULL;
- if (config_exclusive_filename)
- config_filename = xstrdup(config_exclusive_filename);
- else
- config_filename = git_pathdup("config");
-
/* parse-key returns negative; flip the sign to feed exit(3) */
ret = 0 - git_config_parse_key(key, &store.key, &store.baselen);
if (ret)
out_free:
if (lock)
rollback_lock_file(lock);
- free(config_filename);
return ret;
write_err_out:
}
+int git_config_set_multivar(const char *key, const char *value,
+ const char *value_regex, int multi_replace)
+{
+ const char *config_filename;
+ char *buf = NULL;
+ int ret;
+
+ if (config_exclusive_filename)
+ config_filename = config_exclusive_filename;
+ else
+ config_filename = buf = git_pathdup("config");
+
+ ret = git_config_set_multivar_in_file(config_filename, key, value,
+ value_regex, multi_replace);
+ free(buf);
+ return ret;
+}
+
static int section_name_match (const char *buf, const char *name)
{
int i = 0, j = 0, dot = 0;
len -= 5;
/* REF_NORMAL means that we don't want the magic fake tag refs */
- if ((flags & REF_NORMAL) && check_ref_format(name) < 0)
+ if ((flags & REF_NORMAL) && check_refname_format(name, 0))
return 0;
/* REF_HEADS means that we want regular branch heads */
--- /dev/null
+#include "cache.h"
+#include "run-command.h"
+#include "sigchain.h"
+#include "connected.h"
+
+/*
+ * If we feed all the commits we want to verify to this command
+ *
+ * $ git rev-list --verify-objects --stdin --not --all
+ *
+ * and if it does not error out, that means everything reachable from
+ * these commits locally exists and is connected to some of our
+ * existing refs.
+ *
+ * Returns 0 if everything is connected, non-zero otherwise.
+ */
+int check_everything_connected(sha1_iterate_fn fn, int quiet, void *cb_data)
+{
+ struct child_process rev_list;
+ const char *argv[] = {"rev-list", "--verify-objects",
+ "--stdin", "--not", "--all", NULL, NULL};
+ char commit[41];
+ unsigned char sha1[20];
+ int err = 0;
+
+ if (fn(cb_data, sha1))
+ return err;
+
+ if (quiet)
+ argv[5] = "--quiet";
+
+ memset(&rev_list, 0, sizeof(rev_list));
+ rev_list.argv = argv;
+ rev_list.git_cmd = 1;
+ rev_list.in = -1;
+ rev_list.no_stdout = 1;
+ rev_list.no_stderr = quiet;
+ if (start_command(&rev_list))
+ return error(_("Could not run 'git rev-list'"));
+
+ sigchain_push(SIGPIPE, SIG_IGN);
+
+ commit[40] = '\n';
+ do {
+ memcpy(commit, sha1_to_hex(sha1), 40);
+ if (write_in_full(rev_list.in, commit, 41) < 0) {
+ if (errno != EPIPE && errno != EINVAL)
+ error(_("failed write to rev-list: %s"),
+ strerror(errno));
+ err = -1;
+ break;
+ }
+ } while (!fn(cb_data, sha1));
+
+ if (close(rev_list.in)) {
+ error(_("failed to close rev-list's stdin: %s"), strerror(errno));
+ err = -1;
+ }
+
+ sigchain_pop(SIGPIPE);
+ return finish_command(&rev_list) || err;
+}
--- /dev/null
+#ifndef CONNECTED_H
+#define CONNECTED_H
+
+/*
+ * Take callback data, and write the next object name into the buffer.
+ * When called after returning the name for the last object, return -1
+ * to signal EOF, otherwise return 0.
+ */
+typedef int (*sha1_iterate_fn)(void *, unsigned char [20]);
+
+/*
+ * Make sure that our object store has all the commits necessary to
+ * connect the ancestry chain to some of our existing refs, and all
+ * the trees and blobs that these commits use.
+ *
+ * Return 0 if OK, non-zero otherwise (i.e. some objects are missing)
+ */
+extern int check_everything_connected(sha1_iterate_fn, int quiet, void *cb_data);
+
+#endif /* CONNECTED_H */
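A hypothetical caller supplies the iteration callback over whatever set
of tips it wants verified; the helper structure and names below are
made up for illustration:

	struct sha1_iter {
		unsigned char (*sha1)[20];	/* array of object names */
		int nr, pos;
	};

	static int iterate_sha1_array(void *cb_data, unsigned char sha1[20])
	{
		struct sha1_iter *iter = cb_data;
		if (iter->pos >= iter->nr)
			return -1;	/* EOF: no more objects */
		hashcpy(sha1, iter->sha1[iter->pos++]);
		return 0;
	}

	/* ... and later, with "heads" holding nr_heads object names ... */
	struct sha1_iter iter = { heads, nr_heads, 0 };
	if (check_everything_connected(iterate_sha1_array, 0, &iter))
		die("received objects do not connect to our existing refs");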
" "" "${cur##--cleanup=}"
return
;;
- --reuse-message=*)
- __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}"
- return
- ;;
- --reedit-message=*)
- __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}"
+ --reuse-message=*|--reedit-message=*|\
+ --fixup=*|--squash=*)
+ __gitcomp "$(__git_refs)" "" "${cur#*=}"
return
;;
--untracked-files=*)
--dry-run --reuse-message= --reedit-message=
--reset-author --file= --message= --template=
--cleanup= --untracked-files --untracked-files=
- --verbose --quiet
+ --verbose --quiet --fixup= --squash=
"
return
esac
merge="--merge"
fi
case "$cur" in
- --pretty=*)
- __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
- " "" "${cur##--pretty=}"
- return
- ;;
- --format=*)
+ --pretty=*|--format=*)
__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
- " "" "${cur##--format=}"
+ " "" "${cur#*=}"
return
;;
--date=*)
;;
esac
;;
- add,--reuse-message=*|append,--reuse-message=*)
- __gitcomp "$(__git_refs)" "" "${cur##--reuse-message=}"
- ;;
+ add,--reuse-message=*|append,--reuse-message=*|\
add,--reedit-message=*|append,--reedit-message=*)
- __gitcomp "$(__git_refs)" "" "${cur##--reedit-message=}"
+ __gitcomp "$(__git_refs)" "" "${cur#*=}"
;;
add,--*|append,--*)
__gitcomp '--file= --message= --reedit-message=
--*)
__gitcomp "
--all --mirror --tags --dry-run --force --verbose
- --receive-pack= --repo=
+ --receive-pack= --repo= --set-upstream
"
return
;;
__git_has_doubledash && return
case "$cur" in
- --pretty=*)
- __gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
- " "" "${cur##--pretty=}"
- return
- ;;
- --format=*)
+ --pretty=*|--format=*)
__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
- " "" "${cur##--format=}"
+ " "" "${cur#*=}"
return
;;
--*)
--- /dev/null
+#! /usr/bin/perl
+
+# Copyright (C) 2011
+# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
+# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
+# Claire Fousse <claire.fousse@ensimag.imag.fr>
+# David Amouyal <david.amouyal@ensimag.imag.fr>
+# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
+# License: GPL v2 or later
+
+# Gateway between Git and MediaWiki.
+# https://github.com/Bibzball/Git-Mediawiki/wiki
+#
+# Known limitations:
+#
+# - Only wiki pages are managed, no support for [[File:...]]
+# attachments.
+#
+# - Poor performance in the best case: it takes forever to check
+# whether we're up-to-date (on fetch or push) or to fetch a few
+# revisions from a large wiki, because we use exclusively a
+# page-based synchronization. We could switch to a wiki-wide
+# synchronization when the synchronization involves few revisions
+# but the wiki is large.
+#
+# - Git renames could be turned into MediaWiki renames (see TODO
+# below)
+#
+# - login/password support requires the user to write the password
+# cleartext in a file (see TODO below).
+#
+# - No way to import "one page, and all pages included in it"
+#
+# - Multiple remote MediaWikis have not been very well tested.
+
+use strict;
+use MediaWiki::API;
+use DateTime::Format::ISO8601;
+use encoding 'utf8';
+
+# use encoding 'utf8' doesn't change STDERR,
+# but we're going to output UTF-8 filenames to STDERR
+binmode STDERR, ":utf8";
+
+use URI::Escape;
+use warnings;
+
+# MediaWiki filenames can contain forward slashes. This constant defines the string used to replace them.
+use constant SLASH_REPLACEMENT => "%2F";
+
+# It's not always possible to delete pages (may require some
+# privileges). Deleted pages are replaced with this content.
+use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+
+# It's not possible to create empty pages. New empty files in Git are
+# sent with this content instead.
+use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+
+# used to reflect file creation or deletion in diff.
+use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+
+my $remotename = $ARGV[0];
+my $url = $ARGV[1];
+
+# Accept both space-separated and multiple keys in the config file.
+# Spaces in page names should be written as _ anyway, since the list
+# is split on spaces and newlines.
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+chomp(@tracked_pages);
+
+# Just like @tracked_pages, but for MediaWiki categories.
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+chomp(@tracked_categories);
+
+my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+# TODO: ideally, this should be able to read from keyboard, but we're
+# inside a remote helper, so our stdin is connected to git, not to a
+# terminal.
+my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
+my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+chomp($wiki_login);
+chomp($wiki_passwd);
+chomp($wiki_domain);
+
+# Import only the last revisions (both for clone and fetch)
+my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+chomp($shallow_import);
+$shallow_import = ($shallow_import eq "true");
+
+# Dumb push: don't update notes and mediawiki ref to reflect the last push.
+#
+# Configurable with mediawiki.dumbPush, or per-remote with
+# remote.<remotename>.dumbPush.
+#
+# This means the user will have to re-import the just-pushed
+# revisions. On the other hand, this means that the Git revisions
+# corresponding to MediaWiki revisions are all imported from the wiki,
+# regardless of whether they were initially created in Git or from the
+# web interface, hence all users will get the same history (i.e. if
+# the push from Git to MediaWiki loses some information, everybody
+# will get the history with information lost). If the import is
+# deterministic, this means everybody gets the same sha1 for each
+# MediaWiki revision.
+my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
+unless ($dumb_push) {
+ $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+}
+chomp($dumb_push);
+$dumb_push = ($dumb_push eq "true");
+
+my $wiki_name = $url;
+$wiki_name =~ s/[^\/]*:\/\///;
+
+# Commands parser
+my $entry;
+my @cmd;
+while (<STDIN>) {
+ chomp;
+ @cmd = split(/ /);
+ if (defined($cmd[0])) {
+ # Line not blank
+ if ($cmd[0] eq "capabilities") {
+ die("Too many arguments for capabilities") unless (!defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq "list") {
+ die("Too many arguments for list") unless (!defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq "import") {
+ die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq "option") {
+ die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq "push") {
+ mw_push($cmd[1]);
+ } else {
+ print STDERR "Unknown command. Aborting...\n";
+ last;
+ }
+ } else {
+ # blank line: we should terminate
+ last;
+ }
+
+ BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
+ # command is fully processed.
+}
+
+########################## Functions ##############################
+
+# MediaWiki API instance, created lazily.
+my $mediawiki;
+
+sub mw_connect_maybe {
+ if ($mediawiki) {
+ return;
+ }
+ $mediawiki = MediaWiki::API->new;
+ $mediawiki->{config}->{api_url} = "$url/api.php";
+ if ($wiki_login) {
+ if (!$mediawiki->login({
+ lgname => $wiki_login,
+ lgpassword => $wiki_passwd,
+ lgdomain => $wiki_domain,
+ })) {
+ print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n";
+ print STDERR "(error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
+ } else {
+ print STDERR "Logged in with user \"$wiki_login\".\n";
+ }
+ }
+}
+
+sub get_mw_first_pages {
+ my $some_pages = shift;
+ my @some_pages = @{$some_pages};
+
+ my $pages = shift;
+
+ # pattern 'page1|page2|...' required by the API
+ my $titles = join('|', @some_pages);
+
+ my $mw_pages = $mediawiki->api({
+ action => 'query',
+ titles => $titles,
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not query the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
+ if ($id < 0) {
+ print STDERR "Warning: page $page->{title} not found on wiki\n";
+ } else {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+}
+
+sub get_mw_pages {
+ mw_connect_maybe();
+
+ my %pages; # hash on page titles to avoid duplicates
+ my $user_defined;
+ if (@tracked_pages) {
+ $user_defined = 1;
+ # The user provided a list of page titles, but we
+ # still need to query the API to get the page IDs.
+
+ my @some_pages = @tracked_pages;
+ while (@some_pages) {
+ my $last = 50;
+ if ($#some_pages < $last) {
+ $last = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last];
+ get_mw_first_pages(\@slice, \%pages);
+ @some_pages = @some_pages[51..$#some_pages];
+ }
+ }
+ if (@tracked_categories) {
+ $user_defined = 1;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:" . $category;
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
+ foreach my $page (@{$mw_pages}) {
+ $pages{$page->{title}} = $page;
+ }
+ }
+ }
+ if (!$user_defined) {
+ # No user-provided list, get the list of pages from
+ # the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages{$page->{title}} = $page;
+ }
+ }
+ return values(%pages);
+}
+
+sub run_git {
+ open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
+ my $res = do { local $/; <$git> };
+ close($git);
+
+ return $res;
+}
+
+
+sub get_last_local_revision {
+ # Get note regarding last mediawiki revision
+ my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my @note_info = split(/ /, $note);
+
+ my $lastrevision_number;
+ if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
+ print STDERR "No previous mediawiki revision found";
+ $lastrevision_number = 0;
+ } else {
+ # Notes are formatted : mediawiki_revision: #number
+ $lastrevision_number = $note_info[1];
+ chomp($lastrevision_number);
+ print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ }
+ return $lastrevision_number;
+}
+
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
+sub get_last_remote_revision {
+ mw_connect_maybe();
+
+ my @pages = get_mw_pages();
+
+ my $max_rev_num = 0;
+
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids|timestamp',
+ pageids => $id,
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
+
+ $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
+
+ $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
+ }
+
+ print STDERR "Last remote revision found is $max_rev_num.\n";
+ return $max_rev_num;
+}
+
+# Clean content before sending it to MediaWiki
+sub mediawiki_clean {
+ my $string = shift;
+ my $page_created = shift;
+ # MediaWiki does not allow blank space at the end of a page; a page ends with a single \n.
+ # This function right-trims the string and adds a \n at the end to follow this rule.
+ $string =~ s/\s+$//;
+ if ($string eq "" && $page_created) {
+ # Creating empty pages is forbidden.
+ $string = EMPTY_CONTENT;
+ }
+ return $string."\n";
+}
+
+# Filter applied on MediaWiki data before adding them to Git
+sub mediawiki_smudge {
+ my $string = shift;
+ if ($string eq EMPTY_CONTENT) {
+ $string = "";
+ }
+ # This \n is important. This is due to mediawiki's way of handling ends of files.
+ return $string."\n";
+}
+
+sub mediawiki_clean_filename {
+ my $filename = shift;
+ $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. it looks like URL-encoding,
+ # but with _ added to prevent MediaWiki from thinking this is
+ # an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+ # If we had used URI escaping earlier,
+ # we would need to unescape here, before anything else
+
+ return $filename;
+}
+
+sub mediawiki_smudge_filename {
+ my $filename = shift;
+ $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in mediawiki_clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
+ return $filename;
+}
+
+sub literal_data {
+ my ($content) = @_;
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+}
+
+sub mw_capabilities {
+ # Revisions are imported to the private namespace
+ # refs/mediawiki/$remotename/ by the helper and fetched into
+ # refs/remotes/$remotename later by fetch.
+ print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
+ print STDOUT "import\n";
+ print STDOUT "list\n";
+ print STDOUT "push\n";
+ print STDOUT "\n";
+}
+
+sub mw_list {
+ # MediaWiki does not have branches; we consider one branch arbitrarily
+ # called master, with HEAD pointing to it.
+ print STDOUT "? refs/heads/master\n";
+ print STDOUT "\@refs/heads/master HEAD\n";
+ print STDOUT "\n";
+}
+
+sub mw_option {
+ print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
+ print STDOUT "unsupported\n";
+}
+
+sub fetch_mw_revisions_for_page {
+ my $page = shift;
+ my $id = shift;
+ my $fetch_from = shift;
+ my @page_revs = ();
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'ids',
+ rvdir => 'newer',
+ rvstartid => $fetch_from,
+ rvlimit => 500,
+ pageids => $id,
+ };
+
+ my $revnum = 0;
+ # Get 500 revisions at a time due to the mediawiki api limit
+ while (1) {
+ my $result = $mediawiki->api($query);
+
+ # Parse each of those 500 revisions
+ foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
+ my $page_rev_ids;
+ $page_rev_ids->{pageid} = $page->{pageid};
+ $page_rev_ids->{revid} = $revision->{revid};
+ push(@page_revs, $page_rev_ids);
+ $revnum++;
+ }
+ last unless $result->{'query-continue'};
+ $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+ }
+ if ($shallow_import && @page_revs) {
+ print STDERR " Found 1 revision (shallow import).\n";
+ @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
+ return $page_revs[0];
+ }
+ print STDERR " Found ", $revnum, " revision(s).\n";
+ return @page_revs;
+}
+
+sub fetch_mw_revisions {
+ my $pages = shift; my @pages = @{$pages};
+ my $fetch_from = shift;
+
+ my @revisions = ();
+ my $n = 1;
+ foreach my $page (@pages) {
+ my $id = $page->{pageid};
+
+ print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ $n++;
+ my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
+ @revisions = (@page_revs, @revisions);
+ }
+
+ return ($n, @revisions);
+}
+
+sub import_file_revision {
+ my $commit = shift;
+ my %commit = %{$commit};
+ my $full_import = shift;
+ my $n = shift;
+
+ my $title = $commit{title};
+ my $comment = $commit{comment};
+ my $content = $commit{content};
+ my $author = $commit{author};
+ my $date = $commit{date};
+
+ print STDOUT "commit refs/mediawiki/$remotename/master\n";
+ print STDOUT "mark :$n\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data($comment);
+
+ # If it's not a clone, we need to know where to start from
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ }
+ if ($content ne DELETED_CONTENT) {
+ print STDOUT "M 644 inline $title.mw\n";
+ literal_data($content);
+ print STDOUT "\n\n";
+ } else {
+ print STDOUT "D $title.mw\n";
+ }
+
+ # mediawiki revision number in the git note
+ if ($full_import && $n == 1) {
+ print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ }
+ print STDOUT "commit refs/notes/$remotename/mediawiki\n";
+ print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ literal_data("Note added by git-mediawiki during import");
+ if (!$full_import && $n == 1) {
+ print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ }
+ print STDOUT "N inline :$n\n";
+ literal_data("mediawiki_revision: " . $commit{mw_revision});
+ print STDOUT "\n\n";
+}
+
+# parse a sequence of
+# <cmd> <arg1>
+# <cmd> <arg2>
+# \n
+# (as in a batch of import statements or a batch of push statements)
+sub get_more_refs {
+ my $cmd = shift;
+ my @refs;
+ while (1) {
+ my $line = <STDIN>;
+ if ($line =~ m/^$cmd (.*)$/) {
+ push(@refs, $1);
+ } elsif ($line eq "\n") {
+ return @refs;
+ } else {
+ die("Invalid command in a '$cmd' batch: ". $_);
+ }
+ }
+}
+
+sub mw_import {
+ # multiple import commands can follow each other.
+ my @refs = (shift, get_more_refs("import"));
+ foreach my $ref (@refs) {
+ mw_import_ref($ref);
+ }
+ print STDOUT "done\n";
+}
+
+sub mw_import_ref {
+ my $ref = shift;
+ # The remote helper will call "import HEAD" and
+ # "import refs/heads/master".
+ # Since HEAD is a symbolic ref to master (by convention, and
+ # as reflected in the output of the "list" command above),
+ # we don't need to do anything in this case.
+ if ($ref eq "HEAD") {
+ return;
+ }
+
+ mw_connect_maybe();
+
+ my @pages = get_mw_pages();
+
+ print STDERR "Searching revisions...\n";
+ my $last_local = get_last_local_revision();
+ my $fetch_from = $last_local + 1;
+ if ($fetch_from == 1) {
+ print STDERR ", fetching from beginning.\n";
+ } else {
+ print STDERR ", fetching from here.\n";
+ }
+ my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
+
+ # Creation of the fast-import stream
+ print STDERR "Fetching & writing export data...\n";
+
+ $n = 0;
+ my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
+
+ foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
+ # fetch the content of the pages
+ my $query = {
+ action => 'query',
+ prop => 'revisions',
+ rvprop => 'content|timestamp|comment|user|ids',
+ revids => $pagerevid->{revid},
+ };
+
+ my $result = $mediawiki->api($query);
+
+ my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
+
+ $n++;
+
+ my %commit;
+ $commit{author} = $rev->{user} || 'Anonymous';
+ $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
+ $commit{title} = mediawiki_smudge_filename(
+ $result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
+ );
+ $commit{mw_revision} = $pagerevid->{revid};
+ $commit{content} = mediawiki_smudge($rev->{'*'});
+
+ if (!defined($rev->{timestamp})) {
+ $last_timestamp++;
+ } else {
+ $last_timestamp = $rev->{timestamp};
+ }
+ $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
+
+ print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
+
+ import_file_revision(\%commit, ($fetch_from == 1), $n);
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ # Something has to be done remote-helper side. If nothing is done, an error is
+ # thrown saying that HEAD is referring to unknown object 0000000000000000000
+ # and the clone fails.
+ }
+}
+
+sub error_non_fast_forward {
+ my $advice = run_git("config --bool advice.pushNonFastForward");
+ chomp($advice);
+ if ($advice ne "false") {
+ # Native git-push would show this after the summary.
+ # We can't ask it to display it cleanly, so print it
+ # ourselves before.
+ print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ }
+ print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ return 0;
+}
+
+sub mw_push_file {
+ my $diff_info = shift;
+ # $diff_info contains a string in this format:
+ # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
+ my @diff_info_split = split(/[ \t]/, $diff_info);
+
+ # Filename, including .mw extension
+ my $complete_file_name = shift;
+ # Commit message
+ my $summary = shift;
+ # MediaWiki revision number. Keep the previous one by default,
+ # in case there's no edit to perform.
+ my $newrevid = shift;
+
+ my $new_sha1 = $diff_info_split[3];
+ my $old_sha1 = $diff_info_split[2];
+ my $page_created = ($old_sha1 eq NULL_SHA1);
+ my $page_deleted = ($new_sha1 eq NULL_SHA1);
+ $complete_file_name = mediawiki_clean_filename($complete_file_name);
+
+ if (substr($complete_file_name,-3) eq ".mw") {
+ my $title = substr($complete_file_name,0,-3);
+
+ my $file_content;
+ if ($page_deleted) {
+ # Deleting a page usually requires
+ # special privileges. A common
+ # convention is to replace the page
+ # with this content instead:
+ $file_content = DELETED_CONTENT;
+ } else {
+ $file_content = run_git("cat-file blob $new_sha1");
+ }
+
+ mw_connect_maybe();
+
+ my $result = $mediawiki->edit( {
+ action => 'edit',
+ summary => $summary,
+ title => $title,
+ basetimestamp => $basetimestamps{$newrevid},
+ text => mediawiki_clean($file_content, $page_created),
+ }, {
+ skip_encoding => 1 # Helps with names with accented characters
+ });
+ if (!$result) {
+ if ($mediawiki->{error}->{code} == 3) {
+ # edit conflicts, considered as non-fast-forward
+ print STDERR 'Warning: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
+ ".\n";
+ return ($newrevid, "non-fast-forward");
+ } else {
+ # Other errors. Shouldn't happen => just die()
+ die 'Fatal: Error ' .
+ $mediawiki->{error}->{code} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details};
+ }
+ }
+ $newrevid = $result->{edit}->{newrevid};
+ print STDERR "Pushed file: $new_sha1 - $title\n";
+ } else {
+ print STDERR "$complete_file_name not a mediawiki file (not pushable on this version of git-remote-mediawiki).\n";
+ }
+ return ($newrevid, "ok");
+}
+
+sub mw_push {
+ # multiple push statements can follow each other
+ my @refsspecs = (shift, get_more_refs("push"));
+ my $pushed;
+ for my $refspec (@refsspecs) {
+ my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ if ($force) {
+ print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ }
+ if ($local eq "") {
+ print STDERR "Cannot delete remote branch on a MediaWiki\n";
+ print STDOUT "error $remote cannot delete\n";
+ next;
+ }
+ if ($remote ne "refs/heads/master") {
+ print STDERR "Only pushing to the branch 'master' is supported on a MediaWiki\n";
+ print STDOUT "error $remote only master allowed\n";
+ next;
+ }
+ if (mw_push_revision($local, $remote)) {
+ $pushed = 1;
+ }
+ }
+
+ # Notify Git that the push is done
+ print STDOUT "\n";
+
+ if ($pushed && $dumb_push) {
+ print STDERR "Just pushed some revisions to MediaWiki.\n";
+ print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
+ print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
+ print STDERR "\n";
+ print STDERR " git pull --rebase\n";
+ print STDERR "\n";
+ }
+}
+
+sub mw_push_revision {
+ my $local = shift;
+ my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
+ my $last_local_revid = get_last_local_revision();
+ print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ my $last_remote_revid = get_last_remote_revision();
+ my $mw_revision = $last_remote_revid;
+
+ # Get sha1 of commit pointed by local HEAD
+ my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ # Get sha1 of commit pointed by remotes/$remotename/master
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ chomp($remoteorigin_sha1);
+
+ if ($last_local_revid > 0 &&
+ $last_local_revid < $last_remote_revid) {
+ return error_non_fast_forward($remote);
+ }
+
+ if ($HEAD_sha1 eq $remoteorigin_sha1) {
+ # nothing to push
+ return 0;
+ }
+
+ # Get every commit in between HEAD and refs/remotes/origin/master,
+ # including HEAD and refs/remotes/origin/master
+ my @commit_pairs = ();
+ if ($last_local_revid > 0) {
+ my $parsed_sha1 = $remoteorigin_sha1;
+ # Find a path from last MediaWiki commit to pushed commit
+ while ($parsed_sha1 ne $HEAD_sha1) {
+ my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
+ if (!@commit_info) {
+ return error_non_fast_forward($remote);
+ }
+ my @commit_info_split = split(/ |\n/, $commit_info[0]);
+ # $commit_info_split[1] is the sha1 of the commit to export
+ # $commit_info_split[0] is the sha1 of its direct child
+ push(@commit_pairs, \@commit_info_split);
+ $parsed_sha1 = $commit_info_split[1];
+ }
+ } else {
+ # No remote mediawiki revision. Export the whole
+ # history (linearized with --first-parent)
+ print STDERR "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children $local");
+ my @history = split('\n', $history);
+ @history = @history[1..$#history];
+ foreach my $line (reverse @history) {
+ my @commit_info_split = split(/ |\n/, $line);
+ push(@commit_pairs, \@commit_info_split);
+ }
+ }
+
+ foreach my $commit_info_split (@commit_pairs) {
+ my $sha1_child = @{$commit_info_split}[0];
+ my $sha1_commit = @{$commit_info_split}[1];
+ my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ # TODO: we could detect renames, and encode them with a #redirect on the wiki.
+ # TODO: for now, it's just a delete+add
+ my @diff_info_list = split(/\0/, $diff_infos);
+ # Keep the first line of the commit message as mediawiki comment for the revision
+ my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+ chomp($commit_msg);
+ # Push every blob
+ while (@diff_info_list) {
+ my $status;
+ # git diff-tree -z gives an output like
+ # <metadata>\0<filename1>\0
+ # <metadata>\0<filename2>\0
+ # and we've split on \0.
+ my $info = shift(@diff_info_list);
+ my $file = shift(@diff_info_list);
+ ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+ if ($status eq "non-fast-forward") {
+ # we may already have sent part of the
+ # commit to MediaWiki, but it's too
+ # late to cancel it. Stop the push in
+ # the middle, but still give an
+ # accurate error message.
+ return error_non_fast_forward($remote);
+ }
+ if ($status ne "ok") {
+ die("Unknown error from mw_push_file()");
+ }
+ }
+ unless ($dumb_push) {
+ run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+ run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ }
+ }
+
+ print STDOUT "ok $remote\n";
+ return 1;
+}
--- /dev/null
+Git-Mediawiki is a project which aims to create a gateway
+between Git and MediaWiki, allowing Git users to push to and pull
+from a MediaWiki just as they would with a regular Git repository,
+thanks to remote helpers.
+
+For more information, visit the wiki at
+https://github.com/Bibzball/Git-Mediawiki/wiki
if (!enabled && !service->overridable) {
logerror("'%s': service not enabled.", service->name);
errno = EACCES;
- return -1;
+ goto failed;
}
if (!(path = path_ok(dir)))
- return -1;
+ goto failed;
/*
* Security on the cheap.
if (!export_all_trees && access("git-daemon-export-ok", F_OK)) {
logerror("'%s': repository not exported.", path);
errno = EACCES;
- return -1;
+ goto failed;
}
if (service->overridable) {
logerror("'%s': service not enabled for '%s'",
service->name, path);
errno = EACCES;
- return -1;
+ goto failed;
}
/*
signal(SIGTERM, SIG_IGN);
return service->fn();
+
+failed:
+ packet_write(1, "ERR %s: access denied", dir);
+ return -1;
}
static void copy_to_log(int fd)
size_t alloc;
};
+static const char *ip2str(int family, struct sockaddr *sin, socklen_t len)
+{
+#ifdef NO_IPV6
+ static char ip[INET_ADDRSTRLEN];
+#else
+ static char ip[INET6_ADDRSTRLEN];
+#endif
+
+ switch (family) {
+#ifndef NO_IPV6
+ case AF_INET6:
+ inet_ntop(family, &((struct sockaddr_in6*)sin)->sin6_addr, ip, len);
+ break;
+#endif
+ case AF_INET:
+ inet_ntop(family, &((struct sockaddr_in*)sin)->sin_addr, ip, len);
+ break;
+ default:
+ strcpy(ip, "<unknown>");
+ }
+ return ip;
+}
+
#ifndef NO_IPV6
static int setup_named_sock(char *listen_addr, int listen_port, struct socketlist *socklist)
#endif
if (set_reuse_addr(sockfd)) {
+ logerror("Could not set SO_REUSEADDR: %s", strerror(errno));
close(sockfd);
continue;
}
if (bind(sockfd, ai->ai_addr, ai->ai_addrlen) < 0) {
+ logerror("Could not bind to %s: %s",
+ ip2str(ai->ai_family, ai->ai_addr, ai->ai_addrlen),
+ strerror(errno));
close(sockfd);
continue; /* not fatal */
}
if (listen(sockfd, 5) < 0) {
+ logerror("Could not listen to %s: %s",
+ ip2str(ai->ai_family, ai->ai_addr, ai->ai_addrlen),
+ strerror(errno));
close(sockfd);
continue; /* not fatal */
}
return 0;
if (set_reuse_addr(sockfd)) {
+ logerror("Could not set SO_REUSEADDR: %s", strerror(errno));
close(sockfd);
return 0;
}
if ( bind(sockfd, (struct sockaddr *)&sin, sizeof sin) < 0 ) {
+ logerror("Could not listen to %s: %s",
+ ip2str(AF_INET, (struct sockaddr *)&sin, sizeof(sin)),
+ strerror(errno));
close(sockfd);
return 0;
}
if (listen(sockfd, 5) < 0) {
+ logerror("Could not listen to %s: %s",
+ ip2str(AF_INET, (struct sockaddr *)&sin, sizeof(sin)),
+ strerror(errno));
close(sockfd);
return 0;
}
static int match_tz(const char *date, int *offp)
{
char *end;
- int offset = strtoul(date+1, &end, 10);
- int min, hour;
- int n = end - date - 1;
+ int hour = strtoul(date + 1, &end, 10);
+ int n = end - (date + 1);
+ int min = 0;
- min = offset % 100;
- hour = offset / 100;
+ if (n == 4) {
+ /* hhmm */
+ min = hour % 100;
+ hour = hour / 100;
+ } else if (n != 2) {
+ min = 99; /* random crap */
+ } else if (*end == ':') {
+ /* hh:mm? */
+ min = strtoul(end + 1, &end, 10);
+ if (end - (date + 1) != 5)
+ min = 99; /* random crap */
+ } /* otherwise we parsed "hh" */
/*
- * Don't accept any random crap.. At least 3 digits, and
- * a valid minute. We might want to check that the minutes
- * are divisible by 30 or something too.
+ * Don't accept any random crap. Even though some places have
+ * offset larger than 12 hours (e.g. Pacific/Kiritimati is at
+ * UTC+14), there is something wrong if hour part is much
+ * larger than that. We might also want to check that the
+ * minutes are divisible by 15 or something too. (Offset of
+ * Kathmandu, Nepal is UTC+5:45)
*/
- if (min < 60 && n > 2) {
- offset = hour*60+min;
+ if (min < 60 && hour < 24) {
+ int offset = hour * 60 + min;
if (*date == '-')
offset = -offset;
-
*offp = offset;
}
return end - date;
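
For illustration, the following is a minimal standalone sketch of the timezone-designator forms the updated parser accepts ("+hh", "+hhmm" and "+hh:mm"). It is not part of the patch and does not call git's match_tz(), which is static to date.c; the helper name tz_offset_minutes() is invented, and the code merely mirrors the rules shown above.

#include <stdio.h>
#include <stdlib.h>

/* Mirror of the hh / hhmm / hh:mm rules in match_tz(); illustrative only. */
static int tz_offset_minutes(const char *tz)
{
	char *end;
	int hour = strtoul(tz + 1, &end, 10);
	int n = end - (tz + 1);
	int min = 0;

	if (n == 4) {			/* "+hhmm" */
		min = hour % 100;
		hour = hour / 100;
	} else if (n != 2) {
		min = 99;		/* reject */
	} else if (*end == ':') {	/* "+hh:mm" */
		min = strtoul(end + 1, &end, 10);
		if (end - (tz + 1) != 5)
			min = 99;	/* reject */
	}				/* otherwise we parsed "+hh" */

	if (min >= 60 || hour >= 24)
		return 0;		/* treat as unparseable in this sketch */
	return (*tz == '-') ? -(hour * 60 + min) : hour * 60 + min;
}

int main(void)
{
	/* prints: 330 345 -480 */
	printf("%d %d %d\n",
	       tz_offset_minutes("+0530"),
	       tz_offset_minutes("+05:45"),
	       tz_offset_minutes("-08"));
	return 0;
}
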
/*
* New file in the index: it might actually be different in
- * the working copy.
+ * the working tree.
*/
if (get_stat_data(new, &sha1, &mode, cached, match_missing,
&dirty_submodule, &revs->diffopt) < 0)
opts.unpack_data = revs;
opts.src_index = &the_index;
opts.dst_index = NULL;
+ opts.pathspec = &revs->diffopt.pathspec;
init_tree_desc(&t, tree->buffer, tree->size);
return unpack_trees(1, &t, &opts);
memset(&xpp, 0, sizeof(xpp));
memset(&xecfg, 0, sizeof(xecfg));
xpp.flags = o->xdl_opts;
+ xecfg.ctxlen = o->context;
+ xecfg.interhunkctxlen = o->interhunkcontext;
xdi_diff_outf(&mf1, &mf2, diffstat_consume, diffstat,
&xpp, &xecfg);
}
}
/* xdiff options */
+ else if (!strcmp(arg, "--minimal"))
+ DIFF_XDL_SET(options, NEED_MINIMAL);
+ else if (!strcmp(arg, "--no-minimal"))
+ DIFF_XDL_CLR(options, NEED_MINIMAL);
else if (!strcmp(arg, "-w") || !strcmp(arg, "--ignore-all-space"))
DIFF_XDL_SET(options, IGNORE_WHITESPACE);
else if (!strcmp(arg, "-b") || !strcmp(arg, "--ignore-space-change"))
return fnmatch(pattern, string, flags | (ignore_case ? FNM_CASEFOLD : 0));
}
-static int common_prefix(const char **pathspec)
+static size_t common_prefix_len(const char **pathspec)
{
- const char *path, *slash, *next;
- int prefix;
+ const char *n, *first;
+ size_t max = 0;
if (!pathspec)
- return 0;
+ return max;
+
+ first = *pathspec;
+ while ((n = *pathspec++)) {
+ size_t i, len = 0;
+ for (i = 0; first == n || i < max; i++) {
+ char c = n[i];
+ if (!c || c != first[i] || is_glob_special(c))
+ break;
+ if (c == '/')
+ len = i + 1;
+ }
+ if (first == n || len < max) {
+ max = len;
+ if (!max)
+ break;
+ }
+ }
+ return max;
+}
- path = *pathspec;
- slash = strrchr(path, '/');
- if (!slash)
- return 0;
+/*
+ * Returns a copy of the longest leading path common among all
+ * pathspecs.
+ */
+char *common_prefix(const char **pathspec)
+{
+ unsigned long len = common_prefix_len(pathspec);
- /*
- * The first 'prefix' characters of 'path' are common leading
- * path components among the pathspecs we have seen so far,
- * including the trailing slash.
- */
- prefix = slash - path + 1;
- while ((next = *++pathspec) != NULL) {
- int len, last_matching_slash = -1;
- for (len = 0; len < prefix && next[len] == path[len]; len++)
- if (next[len] == '/')
- last_matching_slash = len;
- if (len == prefix)
- continue;
- if (last_matching_slash < 0)
- return 0;
- prefix = last_matching_slash + 1;
- }
- return prefix;
+ return len ? xmemdupz(*pathspec, len) : NULL;
}
int fill_directory(struct dir_struct *dir, const char **pathspec)
{
const char *path;
- int len;
+ size_t len;
/*
* Calculate common prefix for the pathspec, and
* use that to optimize the directory walk
*/
- len = common_prefix(pathspec);
+ len = common_prefix_len(pathspec);
path = "";
if (len)
/* Read the directory and prune it */
read_directory(dir, path, len, pathspec);
+ if (*path)
+ free((char *)path);
return len;
}
#define MATCHED_RECURSIVELY 1
#define MATCHED_FNMATCH 2
#define MATCHED_EXACTLY 3
+extern char *common_prefix(const char **pathspec);
extern int match_pathspec(const char **pathspec, const char *name, int namelen, int prefix, char *seen);
extern int match_pathspec_depth(const struct pathspec *pathspec,
const char *name, int namelen,
int shared_repository = PERM_UMASK;
const char *apply_default_whitespace;
const char *apply_default_ignorewhitespace;
+const char *git_attributes_file;
int zlib_compression_level = Z_BEST_SPEED;
int core_compression_level;
int core_compression_seen;
if (strcmp((*c)->buf, "/") != 0)
strbuf_addf(&buf, "refs/namespaces/%s", (*c)->buf);
strbuf_list_free(components);
- if (check_ref_format(buf.buf) != CHECK_REF_FORMAT_OK)
+ if (check_refname_format(buf.buf, 0))
die("bad git namespace path \"%s\"", raw_namespace);
strbuf_addch(&buf, '/');
return strbuf_detach(&buf, NULL);
if (b)
die("Invalid attempt to create duplicate branch: %s", name);
- switch (check_ref_format(name)) {
- case 0: break; /* its valid */
- case CHECK_REF_FORMAT_ONELEVEL:
- break; /* valid, but too few '/', allow anyway */
- default:
+ if (check_refname_format(name, REFNAME_ALLOW_ONELEVEL))
die("Branch name doesn't conform to GIT standards: %s", name);
- }
b = pool_calloc(1, sizeof(struct branch));
b->name = pool_strdup(name);
/* <committish> */
s = lookup_branch(p);
if (s) {
+ if (is_null_sha1(s->sha1))
+ die("Can't add a note on empty branch.");
hashcpy(commit_sha1, s->sha1);
} else if (*p == ':') {
uintmax_t commit_mark = strtoumax(p + 1, NULL, 10);
from = strchr(command_buf.buf, ' ') + 1;
s = lookup_branch(from);
if (s) {
+ if (is_null_sha1(s->sha1))
+ die("Can't tag an empty branch.");
hashcpy(sha1, s->sha1);
type = OBJ_COMMIT;
} else if (*from == ':') {
this=
msgnum=
;;
+ hg)
+ this=0
+ for hg in "$@"
+ do
+ this=$(( $this + 1 ))
+ msgnum=$(printf "%0${prec}d" $this)
+ # hg stores changeset metadata in #-commented lines preceding
+ # the commit message and diff(s). The only metadata we care about
+ # are the User and Date (Node ID and Parent are hashes which are
+ # only relevant to the hg repository and thus not useful to us)
+ # Since we cannot guarantee that the commit message is in
+ # git-friendly format, we put no Subject: line and just consume
+ # all of the message as the body
+ perl -M'POSIX qw(strftime)' -ne 'BEGIN { $subject = 0 }
+ if ($subject) { print ; }
+ elsif (/^\# User /) { s/\# User/From:/ ; print ; }
+ elsif (/^\# Date /) {
+ my ($hashsign, $str, $time, $tz) = split ;
+ $tz = sprintf "%+05d", (0-$tz)/36;
+ print "Date: " .
+ strftime("%a, %d %b %Y %H:%M:%S ",
+ localtime($time))
+ . "$tz\n";
+ } elsif (/^\# /) { next ; }
+ else {
+ print "\n", $_ ;
+ $subject = 1;
+ }
+ ' <"$hg" >"$dotest/$msgnum" || clean_abort
+ done
+ echo "$this" >"$dotest/last"
+ this=
+ msgnum=
+ ;;
*)
if test -n "$patch_format"
then
start_head=$(cat "$GIT_DIR/BISECT_START")
if test "z$mode" != "z--no-checkout"
then
- git checkout "$start_head" --
+ git checkout "$start_head" -- ||
+ die "$(eval_gettext "Checking out '\$start_head' failed. Try 'git bisect reset <validbranch>'.")"
fi
else
# Get rev from where we start.
" --strict-paths : Don't allow recursing into subdirectories\n".
" --export-all : Don't check for gitcvs.enabled in config\n".
" --version, -V : Print version information and exit\n".
- " --help, -h, -H : Print usage information and exit\n".
+ " -h, -H : Print usage information and exit\n".
"\n".
"<directory> ... is a list of allowed directories. If no directories\n".
"are given, all are allowed. This is an additional restriction, gitcvs\n".
"access still needs to be enabled by the gitcvs.enabled config option.\n".
"Alternately, one directory may be specified in GIT_CVSSERVER_ROOT.\n";
-my @opts = ( 'help|h|H', 'version|V',
+my @opts = ( 'h|H', 'version|V',
'base-path=s', 'strict-paths', 'export-all' );
GetOptions( $state, @opts )
or die $usage;
$prompt = 'yes';
next;
}
- if ($arg eq '-h' || $arg eq '--help') {
+ if ($arg eq '-h') {
usage();
}
push @command, $arg;
. git-sh-setup
if [ "$(is_bare_repository)" = false ]; then
- git diff-files --ignore-submodules --quiet &&
- git diff-index --cached --quiet HEAD -- ||
- die "Cannot rewrite branch(es) with a dirty working directory."
+ require_clean_work_tree 'rewrite branches'
fi
tempdir=.git-rewrite
# TODO: this option should be added to the git-config documentation
set default_config(gui.maxfilesdisplayed) 5000
set default_config(gui.usettk) 1
+set default_config(gui.warndetachedcommit) 1
set font_descs {
{fontui font_ui {mc "Main Font"}}
{fontdiff font_diff {mc "Diff/Console Font"}}
# prepare-commit-msg requires PREPARE_COMMIT_MSG exist. From git-gui
# it will be .git/MERGE_MSG (merge), .git/SQUASH_MSG (squash), or an
- # empty file but existant file.
+ # empty but existent file.
set fd_pcm [open [gitdir PREPARE_COMMIT_MSG] a]
field w_file ; # text column: actual file data
field w_cviewer ; # pane showing commit message
field finder ; # find mini-dialog frame
+field gotoline ; # line goto mini-dialog frame
field status ; # status mega-widget instance
field old_height ; # last known height of $w.file_pane
-column [expr {[llength $w_columns] - 1}] \
]
+ set gotoline [::linebar::new \
+ $w.file_pane.out.lf $w_file \
+ -column [expr {[llength $w_columns] - 1}] \
+ ]
+
set w_cviewer $w.file_pane.cm.t
text $w_cviewer \
-background white \
$w.ctxm add command \
-label [mc "Find Text..."] \
-accelerator F7 \
- -command [list searchbar::show $finder]
+ -command [cb _show_finder]
+ $w.ctxm add command \
+ -label [mc "Goto Line..."] \
+ -accelerator "Ctrl-G" \
+ -command [cb _show_linebar]
menu $w.ctxm.enc
build_encoding_menu $w.ctxm.enc [cb _setencoding]
$w.ctxm add cascade \
bind $w_cviewer <Tab> "[list focus $w_file];break"
bind $w_cviewer <Button-1> [list focus $w_cviewer]
bind $w_file <Visibility> [cb _focus_search $w_file]
- bind $top <F7> [list searchbar::show $finder]
+ bind $top <F7> [cb _show_finder]
+ bind $top <Key-slash> [cb _show_finder]
+ bind $top <Control-Key-s> [cb _show_finder]
bind $top <Escape> [list searchbar::hide $finder]
bind $top <F3> [list searchbar::find_next $finder]
bind $top <Shift-F3> [list searchbar::find_prev $finder]
+ bind $top <Control-Key-g> [cb _show_linebar]
catch { bind $top <Shift-Key-XF86_Switch_VT_3> [list searchbar::find_prev $finder] }
grid configure $w.header -sticky ew
set pos_y [expr {[winfo pointery .] + 10}]
set g "${req_w}x${req_h}"
- if {$pos_x >= 0} {append g +}
+ if {[tk windowingsystem] eq "win32" || $pos_x >= 0} {append g +}
append g $pos_x
- if {$pos_y >= 0} {append g +}
+ if {[tk windowingsystem] eq "win32" || $pos_y >= 0} {append g +}
append g $pos_y
wm geometry $tooltip_wm $g
set old_height $new_height
}
+method _show_finder {} {
+ linebar::hide $gotoline
+ searchbar::show $finder
+}
+
+method _show_linebar {} {
+ searchbar::hide $finder
+ linebar::show $gotoline
+}
+
}
set pos_y [expr {[winfo pointery .] + 10}]
set g "${req_w}x${req_h}"
- if {$pos_x >= 0} {append g +}
+ if {[tk windowingsystem] eq "win32" || $pos_x >= 0} {append g +}
append g $pos_x
- if {$pos_y >= 0} {append g +}
+ if {[tk windowingsystem] eq "win32" || $pos_y >= 0} {append g +}
append g $pos_y
wm geometry $tooltip_wm $g
}
proc commit_commitmsg {curHEAD msg_p} {
+ global is_detached repo_config
global pch_error
+ if {$is_detached && $repo_config(gui.warndetachedcommit)} {
+ set msg [mc "You are about to commit on a detached head.\
+This is a potentially dangerous thing to do because if you switch\
+to another branch you will lose your changes and it can be difficult
+to retrieve them later from the reflog. You should probably cancel this\
+commit and create a new branch to continue.\n\
+\n\
+Do you really want to proceed with your Commit?"]
+ if {[ask_popup $msg] ne yes} {
+ unlock_index
+ return
+ }
+ }
+
# -- Run the commit-msg hook.
#
set fd_ph [githook_read commit-msg $msg_p]
global file_states
set paths [list]
+ set unknown_paths [list]
foreach path [array names file_states] {
switch -glob -- [lindex $file_states($path) 0] {
U? {continue}
?M -
?T -
?D {lappend paths $path}
+ ?O {lappend unknown_paths $path}
+ }
+ }
+ if {[llength $unknown_paths]} {
+ set reply [ask_popup [mc "There are unknown files. Do you also want
+to stage those?"]]
+ if {$reply} {
+ set paths [concat $paths $unknown_paths]
}
}
add_helper {Adding all changed files} $paths
--- /dev/null
+# goto line number
+# based on code from gitk, Copyright (C) Paul Mackerras
+
+class linebar {
+
+field w
+field ctext
+
+field linenum {}
+
+constructor new {i_w i_text args} {
+ global use_ttk NS
+ set w $i_w
+ set ctext $i_text
+
+ ${NS}::frame $w
+ ${NS}::label $w.l -text [mc "Goto Line:"]
+ entry $w.ent \
+ -textvariable ${__this}::linenum \
+ -background lightgreen \
+ -validate key \
+ -validatecommand [cb _validate %P]
+ ${NS}::button $w.bn -text [mc Go] -command [cb _goto]
+
+ pack $w.l -side left
+ pack $w.bn -side right
+ pack $w.ent -side left -expand 1 -fill x
+
+ eval grid conf $w -sticky we $args
+ grid remove $w
+
+ trace add variable linenum write [cb _goto_cb]
+ bind $w.ent <Return> [cb _goto]
+ bind $w.ent <Escape> [cb hide]
+
+ bind $w <Destroy> [list delete_this $this]
+ return $this
+}
+
+method show {} {
+ if {![visible $this]} {
+ grid $w
+ }
+ focus -force $w.ent
+}
+
+method hide {} {
+ if {[visible $this]} {
+ $w.ent delete 0 end
+ focus $ctext
+ grid remove $w
+ }
+}
+
+method visible {} {
+ return [winfo ismapped $w]
+}
+
+method editor {} {
+ return $w.ent
+}
+
+method _validate {P} {
+ # only accept numbers as input
+ string is integer $P
+}
+
+method _goto_cb {name ix op} {
+ after idle [cb _goto 1]
+}
+
+method _goto {{nohide {0}}} {
+ if {$linenum ne {}} {
+ $ctext see $linenum.0
+ if {!$nohide} {
+ hide $this
+ }
+ }
+}
+
+}
grid remove $w
trace add variable searchstring write [cb _incrsearch_cb]
+ bind $w.ent <Return> [cb find_next]
+ bind $w.ent <Shift-Return> [cb find_prev]
bind $w <Destroy> [list delete_this $this]
return $this
}
}
-}
\ No newline at end of file
+}
poedit, GTranslator --- any of them would work well). Please install
them.
-You would then need to clone the git-gui internationalization project
-repository, so that you can work on it:
+You would then need to clone the git-gui project repository and create
+a feature branch to begin working:
- $ git clone mob@repo.or.cz:/srv/git/git-gui/git-gui-i18n.git/
- $ cd git-gui-i18n
- $ git checkout --track -b mob origin/mob
- $ git config remote.origin.push mob
+ $ git clone git://repo.or.cz/git-gui.git
+ $ cd git-gui.git
+ $ git checkout -b my-translation
-The "git checkout" command creates a 'mob' branch from upstream's
-corresponding branch and makes it your current branch. You will be
-working on this branch.
-
-The "git config" command records in your repository configuration file
-that you would push "mob" branch to the upstream when you say "git
-push".
+The "git checkout" command creates a new branch to keep your work
+isolated and to make it simple to post your patch series when
+completed. You will be working on this branch.
2. Starting a new language.
-In the git-gui-i18n directory is a po/ subdirectory. It has a
-handful files whose names end with ".po". Is there a file that has
-messages in your language?
+In the git-gui directory is a po/ subdirectory. It has a handful of
+files whose names end with ".po". Is there a file that has messages
+in your language?
If you do not know what your language should be named, you need to find
it. This currently follows ISO 639-1 two letter codes:
$ make
$ LANG=af ./git-gui.sh
-When you are satisfied with your translation, commit your changes, and
-push it back to the 'mob' branch:
+When you are satisfied with your translation, commit your changes, then submit
+your patch series to the maintainer and the Git mailing list:
$ edit po/af.po
... be sure to update Last-Translator: and
... PO-Revision-Date: lines.
$ git add po/af.po
- $ git commit -m 'Started Afrikaans translation.'
- $ git push
+ $ git commit -s -m 'git-gui: added Afrikaans translation.'
+ $ git send-email --to 'git@vger.kernel.org' \
+ --cc 'Pat Thoyts <patthoyts@users.sourceforge.net>' \
+ --subject 'git-gui: Afrikaans translation' \
+ master..
3. Updating your translation.
In any case, make sure you are up-to-date before starting your work:
+ $ git checkout master
$ git pull
In the former case, you will edit po/af.po (again, replace "af" with
#: lib/index.tcl:30
msgid "Continue"
-msgstr "Forstätt"
+msgstr "Fortsätt"
#: lib/index.tcl:33
msgid "Unlock Index"
# If we do not have enough common material, it is not
# worth trying two-file merge using common subsections.
- expr "$sz0" \< "$sz1" \* 2 >/dev/null || : >$orig
+ expr $sz0 \< $sz1 \* 2 >/dev/null || : >$orig
;;
*)
echo "Auto-merging $4"
last_status=0
rollup_status=0
-rerere=false
-
-files_to_merge() {
- if test "$rerere" = true
- then
- git rerere remaining
- else
- git ls-files -u | sed -e 's/^[^ ]* //' | sort -u
- fi
-}
-
+files=
if test $# -eq 0 ; then
cd_to_toplevel
if test -e "$GIT_DIR/MERGE_RR"
then
- rerere=true
- fi
-
- files=$(files_to_merge)
- if test -z "$files" ; then
- echo "No files need merging"
- exit 0
+ files=$(git rerere remaining)
+ else
+ files=$(git ls-files -u | sed -e 's/^[^ ]* //' | sort -u)
fi
+else
+ files=$(git ls-files -u -- "$@" | sed -e 's/^[^ ]* //' | sort -u)
+fi
- # Save original stdin
- exec 3<&0
+if test -z "$files" ; then
+ echo "No files need merging"
+ exit 0
+fi
- printf "Merging:\n"
- printf "$files\n"
+printf "Merging:\n"
+printf "$files\n"
- files_to_merge |
- while IFS= read i
- do
- if test $last_status -ne 0; then
- prompt_after_failed_merge <&3 || exit 1
- fi
- printf "\n"
- merge_file "$i" <&3
- last_status=$?
- if test $last_status -ne 0; then
- rollup_status=1
- fi
- done
-else
- while test $# -gt 0; do
- if test $last_status -ne 0; then
- prompt_after_failed_merge || exit 1
- fi
- printf "\n"
- merge_file "$1"
- last_status=$?
- if test $last_status -ne 0; then
- rollup_status=1
- fi
- shift
- done
-fi
+IFS='
+'
+for i in $files
+do
+ if test $last_status -ne 0; then
+ prompt_after_failed_merge || exit 1
+ fi
+ printf "\n"
+ merge_file "$i"
+ last_status=$?
+ if test $last_status -ne 0; then
+ rollup_status=1
+ fi
+done
exit $rollup_status
--d|--dr|--dry|--dry-|--dry-r|--dry-ru|--dry-run)
dry_run=--dry-run
;;
- -h|--h|--he|--hel|--help|--help-|--help-a|--help-al|--help-all)
+ -h|--help-all)
usage
;;
*)
git rev-parse --verify HEAD > "$state_dir"/stopped-sha
${SHELL:-@SHELL_PATH@} -c "$rest" # Actual execution
status=$?
+ # Run in subshell because require_clean_work_tree can die.
+ dirty=f
+ (require_clean_work_tree "rebase" 2>/dev/null) || dirty=t
if test "$status" -ne 0
then
warn "Execution failed: $rest"
+ test "$dirty" = f ||
+ warn "and made changes to the index and/or the working tree"
+
warn "You can fix the problem, and then run"
warn
warn " git rebase --continue"
warn
exit "$status"
- fi
- # Run in subshell because require_clean_work_tree can die.
- if ! (require_clean_work_tree "rebase")
+ elif test "$dirty" = t
then
+ warn "Execution succeeded: $rest"
+ warn "but left changes to the index and/or the working tree"
warn "Commit or stash your changes, and then run"
warn
warn " git rebase --continue"
then
: Nothing to commit -- skip this
else
+ if ! test -f "$author_script"
+ then
+ die "You have staged changes in your working tree. If these changes are meant to be
+squashed into the previous commit, run:
+
+ git commit --amend
+
+If they are meant to go into a new commit, run:
+
+ git commit
+
+In both cases, once you're done, continue with:
+
+ git rebase --continue
+"
+ fi
. "$author_script" ||
- die "Cannot find the author identity"
+ die "Error trying to find the author identity to amend commit"
current_head=
if test -f "$amend"
then
"cccmd" => \$cc_cmd,
"aliasfiletype" => \$aliasfiletype,
"bcc" => \@bcclist,
- "aliasesfile" => \@alias_files,
"suppresscc" => \@suppress_cc,
"envelopesender" => \$envelope_sender,
"multiedit" => \$multiedit,
"assume8bitencoding" => \$auto_8bit_encoding,
);
+my %config_path_settings = (
+ "aliasesfile" => \@alias_files,
+);
+
# Help users prepare for 1.7.0
sub chain_reply_to {
if (defined $chain_reply_to &&
# Begin by accumulating all the variables (defined above), that we will end up
# needing, first, from the command line:
-my $rc = GetOptions("sender|from=s" => \$sender,
+my $help;
+my $rc = GetOptions("h" => \$help,
+ "sender|from=s" => \$sender,
"in-reply-to=s" => \$initial_reply_to,
"subject=s" => \$initial_subject,
"to=s" => \@initial_to,
"force" => \$force,
);
+usage() if $help;
unless ($rc) {
usage();
}
$$target = Git::config_bool(@repo, "$prefix.$setting") unless (defined $$target);
}
+ foreach my $setting (keys %config_path_settings) {
+ my $target = $config_path_settings{$setting};
+ if (ref($target) eq "ARRAY") {
+ unless (@$target) {
+ my @values = Git::config_path(@repo, "$prefix.$setting");
+ @$target = @values if (@values && defined $values[0]);
+ }
+ }
+ else {
+ $$target = Git::config_path(@repo, "$prefix.$setting") unless (defined $$target);
+ }
+ }
+
foreach my $setting (keys %config_settings) {
my $target = $config_settings{$setting};
next if $setting eq "to" and defined $no_to;
}
if (defined $smtp_authuser) {
+ # Workaround AUTH PLAIN/LOGIN interaction defect
+ # with Authen::SASL::Cyrus
+ eval {
+ require Authen::SASL;
+ Authen::SASL->import(qw(Perl));
+ };
if (!defined $smtp_authpass) {
fi
case "$1" in
- -h|--h|--he|--hel|--help)
+ -h)
echo "$LONG_USAGE"
exit
esac
if test -n "$patch_mode" && test -n "$untracked"
then
- die "Can't use --patch and ---include-untracked or --all at the same time"
+ die "Can't use --patch and --include-untracked or --all at the same time"
fi
stash_msg="$*"
test "$untracked" = "all" && CLEAN_X_OPTION=-x || CLEAN_X_OPTION=
if test -n "$untracked"
then
- git clean --force --quiet $CLEAN_X_OPTION
+ git clean --force --quiet -d $CLEAN_X_OPTION
fi
if test "$keep_index" = "t" && test -n $i_tree
quiet=-q
fi
- if test -n "$reference"
+ gitdir=
+ gitdir_base=
+ name=$(module_name "$path")
+ base_path=$(dirname "$path")
+
+ gitdir=$(git rev-parse --git-dir)
+ gitdir_base="$gitdir/modules/$base_path"
+ gitdir="$gitdir/modules/$path"
+
+ case $gitdir in
+ /*)
+ a="$(cd_to_toplevel && pwd)/"
+ b=$gitdir
+ while [ "$b" ] && [ "${a%%/*}" = "${b%%/*}" ]
+ do
+ a=${a#*/} b=${b#*/};
+ done
+
+ rel="$a$name"
+ rel=`echo $rel | sed -e 's|[^/]*|..|g'`
+ rel_gitdir="$rel/$b"
+ ;;
+ *)
+ rel=`echo $name | sed -e 's|[^/]*|..|g'`
+ rel_gitdir="$rel/$gitdir"
+ ;;
+ esac
+
+ if test -d "$gitdir"
then
- git-clone $quiet "$reference" -n "$url" "$path"
+ mkdir -p "$path"
+ echo "gitdir: $rel_gitdir" >"$path/.git"
+ rm -f "$gitdir/index"
else
- git-clone $quiet -n "$url" "$path"
- fi ||
- die "$(eval_gettext "Clone of '\$url' into submodule path '\$path' failed")"
+ mkdir -p "$gitdir_base"
+ if test -n "$reference"
+ then
+ git-clone $quiet "$reference" -n "$url" "$path" --separate-git-dir "$gitdir"
+ else
+ git-clone $quiet -n "$url" "$path" --separate-git-dir "$gitdir"
+ fi ||
+ die "$(eval_gettext "Clone of '\$url' into submodule path '\$path' failed")"
+ fi
}
#
--recursive)
recursive=1
;;
+ --checkout)
+ update="checkout"
+ ;;
--)
shift
break
fi
name=$(module_name "$path") || exit
url=$(git config submodule."$name".url)
- update_module=$(git config submodule."$name".update)
+ if ! test -z "$update"
+ then
+ update_module=$update
+ else
+ update_module=$(git config submodule."$name".update)
+ fi
+
+ if test "$update_module" = "none"
+ then
+ echo "Skipping submodule '$path'"
+ continue
+ fi
+
if test -z "$url"
then
# Only mention uninitialized submodules when its
die "$(eval_gettext "Unable to find current revision in submodule path '\$path'")"
fi
- if ! test -z "$update"
- then
- update_module=$update
- fi
-
if test "$subsha1" != "$sha1"
then
subforce=$force
$_version, $_fetch_all, $_no_rebase, $_fetch_parent,
$_merge, $_strategy, $_dry_run, $_local,
$_prefix, $_no_checkout, $_url, $_verbose,
- $_git_format, $_commit_url, $_tag, $_merge_info);
+ $_git_format, $_commit_url, $_tag, $_merge_info, $_interactive);
$Git::SVN::_follow_parent = 1;
$SVN::Git::Fetcher::_placeholder_filename = ".gitignore";
$_q ||= 0;
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
'config-dir=s' => \$Git::SVN::Ra::config_dir,
'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache,
- 'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex );
+ 'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex,
+ 'ignore-refs=s' => \$Git::SVN::Ra::_ignore_refs_regex );
my %fc_opts = ( 'follow-parent|follow!' => \$Git::SVN::_follow_parent,
'authors-file|A=s' => \$_authors,
'authors-prog=s' => \$_authors_prog,
'revision|r=i' => \$_revision,
'no-rebase' => \$_no_rebase,
'mergeinfo=s' => \$_merge_info,
+ 'interactive|i' => \$_interactive,
%cmt_opts, %fc_opts } ],
branch => [ \&cmd_branch,
'Create a branch in the SVN repository',
{} ],
);
+use Term::ReadLine;
+package FakeTerm;
+sub new {
+ my ($class, $reason) = @_;
+ return bless \$reason, shift;
+}
+sub readline {
+ my $self = shift;
+ die "Cannot use readline on FakeTerm: $$self";
+}
+package main;
+
+my $term = eval {
+ $ENV{"GIT_SVN_NOTTY"}
+ ? new Term::ReadLine 'git-svn', \*STDIN, \*STDOUT
+ : new Term::ReadLine 'git-svn';
+};
+if ($@) {
+ $term = new FakeTerm "$@: going non-interactive";
+}
+
my $cmd;
for (my $i = 0; $i < @ARGV; $i++) {
if (defined $cmd{$ARGV[$i]}) {
if ($cmd && ($cmd eq 'log' || $cmd eq 'blame')) {
Getopt::Long::Configure('pass_through');
}
-my $rv = GetOptions(%opts, 'help|H|h' => \$_help, 'version|V' => \$_version,
+my $rv = GetOptions(%opts, 'h|H' => \$_help, 'version|V' => \$_version,
'minimize-connections' => \$Git::SVN::Migration::_minimize,
'id|i=s' => \$Git::SVN::default_ref_id,
'svn-remote|remote|R=s' => sub {
exit 0;
}
+sub ask {
+ my ($prompt, %arg) = @_;
+ my $valid_re = $arg{valid_re};
+ my $default = $arg{default};
+ my $resp;
+ my $i = 0;
+
+ if ( !( defined($term->IN)
+ && defined( fileno($term->IN) )
+ && defined( $term->OUT )
+ && defined( fileno($term->OUT) ) ) ){
+ return defined($default) ? $default : undef;
+ }
+
+ while ($i++ < 10) {
+ $resp = $term->readline($prompt);
+ if (!defined $resp) { # EOF
+ print "\n";
+ return defined $default ? $default : undef;
+ }
+ if ($resp eq '' and defined $default) {
+ return $default;
+ }
+ if (!defined $valid_re or $resp =~ /$valid_re/) {
+ return $resp;
+ }
+ }
+ return undef;
+}
+
sub do_git_init_db {
unless (-d $ENV{GIT_DIR}) {
my @init_db = ('init');
command_noisy('config', "$pfx.$i", $icv{$i});
$set = $i;
}
- my $ignore_regex = \$SVN::Git::Fetcher::_ignore_regex;
- command_noisy('config', "$pfx.ignore-paths", $$ignore_regex)
- if defined $$ignore_regex;
+ my $ignore_paths_regex = \$SVN::Git::Fetcher::_ignore_regex;
+ command_noisy('config', "$pfx.ignore-paths", $$ignore_paths_regex)
+ if defined $$ignore_paths_regex;
+ my $ignore_refs_regex = \$Git::SVN::Ra::_ignore_refs_regex;
+ command_noisy('config', "$pfx.ignore-refs", $$ignore_refs_regex)
+ if defined $$ignore_refs_regex;
if (defined $SVN::Git::Fetcher::_preserve_empty_dirs) {
my $fname = \$SVN::Git::Fetcher::_placeholder_filename;
"If these changes depend on each other, re-running ",
"without --no-rebase may be required."
}
+
+ if (defined $_interactive){
+ my $ask_default = "y";
+ foreach my $d (@$linear_refs){
+ my ($fh, $ctx) = command_output_pipe(qw(show --summary), "$d");
+ while (<$fh>){
+ print $_;
+ }
+ command_close_pipe($fh, $ctx);
+ $_ = ask("Commit this patch to SVN? ([y]es (default)|[n]o|[q]uit|[a]ll): ",
+ valid_re => qr/^(?:yes|y|no|n|quit|q|all|a)/i,
+ default => $ask_default);
+ die "Commit this patch reply required" unless defined $_;
+ if (/^[nq]/i) {
+ exit(0);
+ } elsif (/^a/i) {
+ last;
+ }
+ }
+ }
+
my $expect_url = $url;
my $push_merge_info = eval {
$r->{$1}->{url} = $2;
} elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
$r->{$1}->{pushurl} = $2;
+ } elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
+ $r->{$1}->{ignore_refs_regex} = $2;
} elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
my ($remote, $t, $local_ref, $remote_ref) =
($1, $2, $3, $4);
}
} keys %$r;
+ foreach my $remote (keys %$r) {
+ foreach ( grep { defined $_ }
+ map { $r->{$remote}->{$_} } qw(branches tags) ) {
+ foreach my $rs ( @$_ ) {
+ $rs->{ignore_refs_regex} =
+ $r->{$remote}->{ignore_refs_regex};
+ }
+ }
+ }
+
$r;
}
}
package Git::SVN::Ra;
-use vars qw/@ISA $config_dir $_log_window_size/;
+use vars qw/@ISA $config_dir $_ignore_refs_regex $_log_window_size/;
use strict;
use warnings;
my ($ra_invalid, $can_do_switch, %ignored_err, $RA);
@finalents;
}
+# return value: 0 -- don't ignore, 1 -- ignore
+sub is_ref_ignored {
+ my ($g, $p) = @_;
+ my $refname = $g->{ref}->full_path($p);
+ return 1 if defined($g->{ignore_refs_regex}) &&
+ $refname =~ m!$g->{ignore_refs_regex}!;
+ return 0 unless defined($_ignore_refs_regex);
+ return 1 if $refname =~ m!$_ignore_refs_regex!o;
+ return 0;
+}
+
sub match_globs {
my ($self, $exists, $paths, $globs, $r) = @_;
next unless /$g->{path}->{regex}/;
my $p = $1;
my $pathname = $g->{path}->full_path($p);
+ next if is_ref_ignored($g, $p);
next if $exists->{$pathname};
next if ($self->check_path($pathname, $r) !=
$SVN::Node::dir);
;;
google-chrome|chrome|chromium|chromium-browser)
# No need to specify newTab. It's default in chromium
- eval "$browser_path" "$@" &
+ "$browser_path" "$@" &
;;
konqueror)
case "$(basename "$browser_path")" in
# It's simpler to use kfmclient to open a new tab in konqueror.
browser_path="$(echo "$browser_path" | sed -e 's/konqueror$/kfmclient/')"
type "$browser_path" > /dev/null 2>&1 || die "No '$browser_path' found."
- eval "$browser_path" newTab "$@"
+ "$browser_path" newTab "$@" &
;;
kfmclient)
- eval "$browser_path" newTab "$@"
+ "$browser_path" newTab "$@" &
;;
*)
"$browser_path" "$@" &
esac
;;
w3m|elinks|links|lynx|open)
- eval "$browser_path" "$@"
+ "$browser_path" "$@"
;;
start)
exec "$browser_path" '"web-browse"' "$@"
;;
*)
if test -n "$browser_cmd"; then
- ( eval $browser_cmd "$@" )
+ ( eval "$browser_cmd \"\$@\"" )
fi
;;
esac
# The following is a reimplementation of the git check-ref-format
# command. The rules were derived from the git check-ref-format(1)
# manual page. This code should be replaced by a call to
- # check_ref_format() in the git library, when such is available.
+ # check_refname_format() in the git library, when such is available.
if ref_name.endswith('/') or \
ref_name.startswith('.') or \
ref_name.count('/.') or \
return $str;
}
+# Sanitize for use in XHTML + application/xhtml+xml (valid XML 1.0)
+sub sanitize {
+ my $str = shift;
+
+ return undef unless defined $str;
+
+ $str = to_utf8($str);
+ $str =~ s|([[:cntrl:]])|($1 =~ /[\t\n\r]/ ? $1 : quot_cec($1))|eg;
+ return $str;
+}
+
# Make control characters "printable", using character escape codes (CEC)
sub quot_cec {
my $cntrl = shift;
$nr++;
$line = untabify($line);
printf qq!<div class="pre"><a id="l%i" href="%s#l%i" class="linenr">%4i</a> %s</div>\n!,
- $nr, esc_attr(href(-replay => 1)), $nr, $nr, $syntax ? to_utf8($line) : esc_html($line, -nbsp=>1);
+ $nr, esc_attr(href(-replay => 1)), $nr, $nr,
+ $syntax ? sanitize($line) : esc_html($line, -nbsp=>1);
}
}
close $fd
* and other reasons to not add 'js=1' param at the end of link
* @constant
*/
-var jsExceptionsRe = /[;?]js=[01]$/;
+var jsExceptionsRe = /[;?]js=[01](#.*)?$/;
/**
* Add '?js=1' or ';js=1' to the end of every link in the document
var allLinks = document.getElementsByTagName("a") || document.links;
for (var i = 0, len = allLinks.length; i < len; i++) {
var link = allLinks[i];
- if (!jsExceptionsRe.test(link)) { // =~ /[;?]js=[01]$/;
- link.href +=
- (link.href.indexOf('?') === -1 ? '?' : ';') + 'js=1';
+ if (!jsExceptionsRe.test(link)) {
+ link.href = link.href.replace(/(#|$)/,
+ (link.href.indexOf('?') === -1 ? '?' : ';') + 'js=1$1');
}
}
}
{
int i;
for (i = 0; i < 20; i++) {
- unsigned int val = (hexval(hex[0]) << 4) | hexval(hex[1]);
+ unsigned int val;
+ /*
+ * hex[1]=='\0' is caught when val is checked below,
+ * but if hex[0] is NUL we have to avoid reading
+ * past the end of the string:
+ */
+ if (!hex[0])
+ return -1;
+ val = (hexval(hex[0]) << 4) | hexval(hex[1]);
if (val & ~0xff)
return -1;
*sha1++ = val;
strbuf_release(&buffer);
}
-static int verify_merge_base(unsigned char *head_sha1, unsigned char *branch_sha1)
+static int verify_merge_base(unsigned char *head_sha1, struct ref *remote)
{
- struct commit *head = lookup_commit(head_sha1);
- struct commit *branch = lookup_commit(branch_sha1);
+ struct commit *head = lookup_commit_or_die(head_sha1, "HEAD");
+ struct commit *branch = lookup_commit_or_die(remote->old_sha1, remote->name);
struct commit_list *merge_bases = get_merge_bases(head, branch, 1);
return (merge_bases && !merge_bases->next && merge_bases->item == branch);
return error("Remote branch %s resolves to object %s\nwhich does not exist locally, perhaps you need to fetch?", remote_ref->name, sha1_to_hex(remote_ref->old_sha1));
/* Remote branch must be an ancestor of remote HEAD */
- if (!verify_merge_base(head_sha1, remote_ref->old_sha1)) {
+ if (!verify_merge_base(head_sha1, remote_ref)) {
return error("The branch '%s' is not an ancestor "
"of your current HEAD.\n"
"If you are sure you want to delete it,"
init_curl_http_auth(slot->curl);
ret = HTTP_REAUTH;
}
- } else
+ } else {
+ if (!curl_errorstr[0])
+ strlcpy(curl_errorstr,
+ curl_easy_strerror(results.curl_result),
+ sizeof(curl_errorstr));
ret = HTTP_ERROR;
+ }
} else {
error("Unable to start HTTP request for %s", url);
ret = HTTP_START_FAILED;
{
/* http_request has already handled HTTP_START_FAILED. */
if (ret != HTTP_START_FAILED)
- error("%s while accessing %s\n", curl_errorstr, url);
+ error("%s while accessing %s", curl_errorstr, url);
return ret;
}
}
+static int add_mailname_host(char *buf, size_t len)
+{
+ FILE *mailname;
+
+ mailname = fopen("/etc/mailname", "r");
+ if (!mailname) {
+ if (errno != ENOENT)
+ warning("cannot open /etc/mailname: %s",
+ strerror(errno));
+ return -1;
+ }
+ if (!fgets(buf, len, mailname)) {
+ if (ferror(mailname))
+ warning("cannot read /etc/mailname: %s",
+ strerror(errno));
+ fclose(mailname);
+ return -1;
+ }
+ /* success! */
+ fclose(mailname);
+ return 0;
+}
+
+static void add_domainname(char *buf, size_t len)
+{
+ struct hostent *he;
+ size_t namelen;
+ const char *domainname;
+
+ if (gethostname(buf, len)) {
+ warning("cannot get host name: %s", strerror(errno));
+ strlcpy(buf, "(none)", len);
+ return;
+ }
+ namelen = strlen(buf);
+ if (memchr(buf, '.', namelen))
+ return;
+
+ he = gethostbyname(buf);
+ buf[namelen++] = '.';
+ buf += namelen;
+ len -= namelen;
+ if (he && (domainname = strchr(he->h_name, '.')))
+ strlcpy(buf, domainname + 1, len);
+ else
+ strlcpy(buf, "(none)", len);
+}
+
static void copy_email(const struct passwd *pw)
{
/*
die("Your sysadmin must hate you!");
memcpy(git_default_email, pw->pw_name, len);
git_default_email[len++] = '@';
- gethostname(git_default_email + len, sizeof(git_default_email) - len);
- if (!strchr(git_default_email+len, '.')) {
- struct hostent *he = gethostbyname(git_default_email + len);
- char *domainname;
-
- len = strlen(git_default_email);
- git_default_email[len++] = '.';
- if (he && (domainname = strchr(he->h_name, '.')))
- strlcpy(git_default_email + len, domainname + 1,
- sizeof(git_default_email) - len);
- else
- strlcpy(git_default_email + len, "(none)",
- sizeof(git_default_email) - len);
- }
+
+ if (!add_mailname_host(git_default_email + len,
+ sizeof(git_default_email) - len))
+ return; /* read from "/etc/mailname" (Debian) */
+ add_domainname(git_default_email + len,
+ sizeof(git_default_email) - len);
}
-static void setup_ident(void)
+static void setup_ident(const char **name, const char **emailp)
{
struct passwd *pw = NULL;
/* Get the name ("gecos") */
- if (!git_default_name[0]) {
+ if (!*name && !git_default_name[0]) {
pw = getpwuid(getuid());
if (!pw)
die("You don't exist. Go away!");
copy_gecos(pw, git_default_name, sizeof(git_default_name));
}
+ if (!*name)
+ *name = git_default_name;
- if (!git_default_email[0]) {
+ if (!*emailp && !git_default_email[0]) {
const char *email = getenv("EMAIL");
if (email && email[0]) {
copy_email(pw);
}
}
+ if (!*emailp)
+ *emailp = git_default_email;
/* And set the default date */
if (!git_default_date[0])
int warn_on_no_name = (flag & IDENT_WARN_ON_NO_NAME);
int name_addr_only = (flag & IDENT_NO_DATE);
- setup_ident();
- if (!name)
- name = git_default_name;
- if (!email)
- email = git_default_email;
+ setup_ident(&name, &email);
if (!*name) {
struct passwd *pw;
copy of the preceding main search loops. */
if (lim - mch > kwset->maxd)
lim = mch + kwset->maxd;
- lmch = 0;
+ lmch = NULL;
d = 1;
while (lim - end >= d)
{
struct blob *blob,
show_object_fn show,
struct name_path *path,
- const char *name)
+ const char *name,
+ void *cb_data)
{
struct object *obj = &blob->object;
if (obj->flags & (UNINTERESTING | SEEN))
return;
obj->flags |= SEEN;
- show(obj, path, name);
+ show(obj, path, name, cb_data);
}
/*
const unsigned char *sha1,
show_object_fn show,
struct name_path *path,
- const char *name)
+ const char *name,
+ void *cb_data)
{
/* Nothing to do */
}
show_object_fn show,
struct name_path *path,
struct strbuf *base,
- const char *name)
+ const char *name,
+ void *cb_data)
{
struct object *obj = &tree->object;
struct tree_desc desc;
if (parse_tree(tree) < 0)
die("bad tree object %s", sha1_to_hex(obj->sha1));
obj->flags |= SEEN;
- show(obj, path, name);
+ show(obj, path, name, cb_data);
me.up = path;
me.elem = name;
me.elem_len = strlen(name);
if (S_ISDIR(entry.mode))
process_tree(revs,
lookup_tree(entry.sha1),
- show, &me, base, entry.path);
+ show, &me, base, entry.path,
+ cb_data);
else if (S_ISGITLINK(entry.mode))
process_gitlink(revs, entry.sha1,
- show, &me, entry.path);
+ show, &me, entry.path,
+ cb_data);
else
process_blob(revs,
lookup_blob(entry.sha1),
- show, &me, entry.path);
+ show, &me, entry.path,
+ cb_data);
}
strbuf_setlen(base, baselen);
free(tree->buffer);
continue;
if (obj->type == OBJ_TAG) {
obj->flags |= SEEN;
- show_object(obj, NULL, name);
+ show_object(obj, NULL, name, data);
continue;
}
if (obj->type == OBJ_TREE) {
process_tree(revs, (struct tree *)obj, show_object,
- NULL, &base, name);
+ NULL, &base, name, data);
continue;
}
if (obj->type == OBJ_BLOB) {
process_blob(revs, (struct blob *)obj, show_object,
- NULL, name);
+ NULL, name, data);
continue;
}
die("unknown pending object %s (%s)",
#define LIST_OBJECTS_H
typedef void (*show_commit_fn)(struct commit *, void *);
-typedef void (*show_object_fn)(struct object *, const struct name_path *, const char *);
-typedef void (*show_edge_fn)(struct commit *);
-
+typedef void (*show_object_fn)(struct object *, const struct name_path *, const char *, void *);
void traverse_commit_list(struct rev_info *, show_commit_fn, show_object_fn, void *);
+typedef void (*show_edge_fn)(struct commit *);
void mark_edges_uninteresting(struct commit_list *, struct rev_info *, show_edge_fn);
#endif
struct string_list *entries)
{
/* If there is a D/F conflict and the file for such a conflict
- * currently exist in the working copy, we want to allow it to be
+ * currently exist in the working tree, we want to allow it to be
* removed to make room for the corresponding directory if needed.
* The files underneath the directories of such D/F conflicts will
* be processed before the corresponding file involved in the D/F
if (*ptr == '/') {
++ptr;
hash = hash_name(ce->name, ptr - ce->name);
- if (!lookup_hash(hash, &istate->name_hash)) {
- pos = insert_hash(hash, ce, &istate->name_hash);
- if (pos) {
- ce->next = *pos;
- *pos = ce;
- }
+ pos = insert_hash(hash, ce, &istate->name_hash);
+ if (pos) {
+ ce->dir_next = *pos;
+ *pos = ce;
}
}
}
if (same_name(ce, name, namelen, icase))
return ce;
}
- ce = ce->next;
+ if (icase && name[namelen - 1] == '/')
+ ce = ce->dir_next;
+ else
+ ce = ce->next;
}
/*
/* Dereference o->local_ref into local_sha1 */
if (!resolve_ref(o->local_ref, local_sha1, 0, NULL))
die("Failed to resolve local notes ref '%s'", o->local_ref);
- else if (!check_ref_format(o->local_ref) && is_null_sha1(local_sha1))
+ else if (!check_refname_format(o->local_ref, 0) &&
+ is_null_sha1(local_sha1))
local = NULL; /* local_sha1 == null_sha1 indicates unborn ref */
else if (!(local = lookup_commit_reference(local_sha1)))
die("Could not parse local commit %s (%s)",
* Failed to get remote_sha1. If o->remote_ref looks like an
* unborn ref, perform the merge using an empty notes tree.
*/
- if (!check_ref_format(o->remote_ref)) {
+ if (!check_refname_format(o->remote_ref, 0)) {
hashclr(remote_sha1);
remote = NULL;
} else {
* Finally store the new commit object SHA1 into 'result_sha1'.
*/
struct dir_struct dir;
- const char *path = git_path(NOTES_MERGE_WORKTREE "/");
+ char *path = xstrdup(git_path(NOTES_MERGE_WORKTREE "/"));
int path_len = strlen(path), i;
const char *msg = strstr(partial_commit->buffer, "\n\n");
result_sha1);
OUTPUT(o, 4, "Finalized notes merge commit: %s",
sha1_to_hex(result_sha1));
+ free(path);
return 0;
}
for (i = 0; i < 2; i++) { /* refs/{heads,tags,...}/ */
while (*p && *p != '/')
p++;
- /* tolerate duplicate slashes; see check_ref_format() */
+ /* tolerate duplicate slashes; see check_refname_format() */
while (*p == '/')
p++;
}
string_list_append(v, xstrdup(arg));
return 0;
}
+
+int parse_opt_noop_cb(const struct option *opt, const char *arg, int unset)
+{
+ return 0;
+}
*(int *)opt->value &= ~opt->defval;
return 0;
- case OPTION_BOOLEAN:
+ case OPTION_COUNTUP:
*(int *)opt->value = unset ? 0 : *(int *)opt->value + 1;
return 0;
err |= optbug(opts, "uses feature "
"not supported for dashless options");
switch (opts->type) {
- case OPTION_BOOLEAN:
+ case OPTION_COUNTUP:
case OPTION_BIT:
case OPTION_NEGBIT:
case OPTION_SET_INT:
/* options with no arguments */
OPTION_BIT,
OPTION_NEGBIT,
- OPTION_BOOLEAN, /* _INCR would have been a better name */
+ OPTION_COUNTUP,
OPTION_SET_INT,
OPTION_SET_PTR,
/* options with arguments (usually) */
OPTION_FILENAME
};
+/* Deprecated synonym */
+#define OPTION_BOOLEAN OPTION_COUNTUP
+
enum parse_opt_flags {
PARSE_OPT_KEEP_DASHDASH = 1,
PARSE_OPT_STOP_AT_NON_OPTION = 2,
PARSE_OPT_NOARG, NULL, (b) }
#define OPT_NEGBIT(s, l, v, h, b) { OPTION_NEGBIT, (s), (l), (v), NULL, \
(h), PARSE_OPT_NOARG, NULL, (b) }
-#define OPT_BOOLEAN(s, l, v, h) { OPTION_BOOLEAN, (s), (l), (v), NULL, \
+#define OPT_COUNTUP(s, l, v, h) { OPTION_COUNTUP, (s), (l), (v), NULL, \
(h), PARSE_OPT_NOARG }
#define OPT_SET_INT(s, l, v, h, i) { OPTION_SET_INT, (s), (l), (v), NULL, \
(h), PARSE_OPT_NOARG, NULL, (i) }
+#define OPT_BOOL(s, l, v, h) OPT_SET_INT(s, l, v, h, 1)
#define OPT_SET_PTR(s, l, v, h, p) { OPTION_SET_PTR, (s), (l), (v), NULL, \
(h), PARSE_OPT_NOARG, NULL, (p) }
#define OPT_INTEGER(s, l, v, h) { OPTION_INTEGER, (s), (l), (v), "n", (h) }
{ OPTION_CALLBACK, (s), (l), (v), "when", (h), PARSE_OPT_OPTARG, \
parse_opt_color_flag_cb, (intptr_t)"always" }
+#define OPT_NOOP_NOARG(s, l) \
+ { OPTION_CALLBACK, (s), (l), NULL, NULL, \
+ "no-op (backward compatibility)", \
+ PARSE_OPT_HIDDEN | PARSE_OPT_NOARG, parse_opt_noop_cb }
+
+/* Deprecated synonym */
+#define OPT_BOOLEAN OPT_COUNTUP
/* parse_options() will filter out the processed options and leave the
* non-option arguments in argv[].
extern int parse_opt_with_commit(const struct option *, const char *, int);
extern int parse_opt_tertiary(const struct option *, const char *, int);
extern int parse_opt_string_list(const struct option *, const char *, int);
+extern int parse_opt_noop_cb(const struct option *, const char *, int);
#define OPT__VERBOSE(var, h) OPT_BOOLEAN('v', "verbose", (var), (h))
#define OPT__QUIET(var, h) OPT_BOOLEAN('q', "quiet", (var), (h))
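
To illustrate the behavioural difference between the renamed OPT_COUNTUP (the old OPTION_BOOLEAN semantics, which increments its variable on every use) and the newly added OPT_BOOL (which sets the variable to 1 no matter how often the option is given), here is a rough sketch of a hypothetical builtin; cmd_demo and its options are invented for illustration, and only the macros and the parse_options() entry point declared in this header are assumed.

#include "cache.h"
#include "parse-options.h"

static const char * const demo_usage[] = {
	"git demo [-f] [-v]...",
	NULL
};

int cmd_demo(int argc, const char **argv, const char *prefix)
{
	int force = 0;		/* OPT_BOOL: ends up 0 or 1 */
	int verbose = 0;	/* OPT_COUNTUP: counts repetitions */
	struct option options[] = {
		OPT_BOOL('f', "force", &force, "force the operation"),
		OPT_COUNTUP('v', "verbose", &verbose, "be more verbose"),
		OPT_END()
	};

	argc = parse_options(argc, argv, prefix, options, demo_usage, 0);
	/* after "git demo -v -v -f -f": verbose == 2, force == 1 */
	return 0;
}

The distinction matters for options such as --verbose, where the number of repetitions is meaningful, as opposed to plain on/off flags.
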
};
}
+
+=item config_path ( VARIABLE )
+
+Retrieve the path configuration C<VARIABLE>. The return value
+is an expanded path or C<undef> if it's not defined.
+
+This currently wraps command('config') so it is not so fast.
+
+=cut
+
+sub config_path {
+ my ($self, $var) = _maybe_self(@_);
+
+ try {
+ my @cmd = ('config', '--path');
+ unshift @cmd, $self if $self;
+ if (wantarray) {
+ return command(@cmd, '--get-all', $var);
+ } else {
+ return command_oneline(@cmd, '--get', $var);
+ }
+ } catch Git::Error::Command with {
+ my $E = shift;
+ if ($E->value() == 1) {
+ # Key not found.
+ return undef;
+ } else {
+ throw $E;
+ }
+ };
+}
+
=item config_int ( VARIABLE )
Retrieve the integer configuration C<VARIABLE>. The return value
#include "cache.h"
#include "quote.h"
+#include "argv-array.h"
int quote_path_fully = 1;
return sq_dequote_step(arg, NULL);
}
-int sq_dequote_to_argv(char *arg, const char ***argv, int *nr, int *alloc)
+static int sq_dequote_to_argv_internal(char *arg,
+ const char ***argv, int *nr, int *alloc,
+ struct argv_array *array)
{
char *next = arg;
char *dequoted = sq_dequote_step(next, &next);
if (!dequoted)
return -1;
- ALLOC_GROW(*argv, *nr + 1, *alloc);
- (*argv)[(*nr)++] = dequoted;
+ if (argv) {
+ ALLOC_GROW(*argv, *nr + 1, *alloc);
+ (*argv)[(*nr)++] = dequoted;
+ }
+ if (array)
+ argv_array_push(array, dequoted);
} while (next);
return 0;
}
+int sq_dequote_to_argv(char *arg, const char ***argv, int *nr, int *alloc)
+{
+ return sq_dequote_to_argv_internal(arg, argv, nr, alloc, NULL);
+}
+
+int sq_dequote_to_argv_array(char *arg, struct argv_array *array)
+{
+ return sq_dequote_to_argv_internal(arg, NULL, NULL, NULL, array);
+}
+
/* 1 means: quote as octal
* 0 means: quote as octal if (quote_path_fully)
* -1 means: never quote
/*
* Same as the above, but can be used to unwrap many arguments in the
- * same string separated by space. "next" is changed to point to the
- * next argument that should be passed as first parameter. When there
- * is no more argument to be dequoted, "next" is updated to point to NULL.
+ * same string separated by space. Like sq_quote, it works in place,
+ * modifying arg and appending pointers into it to argv.
*/
extern int sq_dequote_to_argv(char *arg, const char ***argv, int *nr, int *alloc);
+/*
+ * Same as above, but store the unquoted strings in an argv_array. We will
+ * still modify arg in place, but unlike sq_dequote_to_argv, the argv_array
+ * will duplicate and take ownership of the strings.
+ */
+struct argv_array;
+extern int sq_dequote_to_argv_array(char *arg, struct argv_array *);
+
extern int unquote_c_style(struct strbuf *, const char *quoted, const char **endp);
extern size_t quote_c_style(const char *name, struct strbuf *, FILE *, int no_dq);
extern void quote_two_c_style(struct strbuf *, const char *, const char *, int);
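
As a usage sketch for the new helper: the caller below is hypothetical and not part of the patch; it assumes the declaration above together with the argv-array API that quote.c now includes (ARGV_ARRAY_INIT, argv_array_clear, and the argv/argc members), so treat those details as assumptions rather than a definitive interface.

#include "cache.h"
#include "quote.h"
#include "argv-array.h"

/* Hypothetical helper, for illustration only. */
static void show_dequoted(const char *quoted)
{
	struct argv_array args = ARGV_ARRAY_INIT;
	char *copy = xstrdup(quoted);	/* dequoting modifies the buffer */
	int i;

	if (sq_dequote_to_argv_array(copy, &args))
		die("malformed quoting: %s", quoted);

	/* the argv_array holds duplicated strings, so the buffer can go */
	free(copy);

	for (i = 0; i < args.argc; i++)
		printf("arg %d: %s\n", i, args.argv[i]);

	argv_array_clear(&args);	/* frees the duplicated copies */
}

Passing a string such as "'one' 'two words'" would yield the two arguments one and two words, with the argv_array rather than the caller responsible for freeing them.
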
#define REF_KNOWS_PEELED 04
#define REF_BROKEN 010
-struct ref_list {
- struct ref_list *next;
+struct ref_entry {
unsigned char flag; /* ISSYMREF? ISPACKED? */
unsigned char sha1[20];
unsigned char peeled[20];
char name[FLEX_ARRAY];
};
+struct ref_array {
+ int nr, alloc;
+ struct ref_entry **refs;
+};
+
static const char *parse_ref_line(char *line, unsigned char *sha1)
{
/*
return line;
}
-static struct ref_list *add_ref(const char *name, const unsigned char *sha1,
- int flag, struct ref_list *list,
- struct ref_list **new_entry)
+static void add_ref(const char *name, const unsigned char *sha1,
+ int flag, struct ref_array *refs,
+ struct ref_entry **new_entry)
{
int len;
- struct ref_list *entry;
+ struct ref_entry *entry;
/* Allocate it and add it in.. */
len = strlen(name) + 1;
- entry = xmalloc(sizeof(struct ref_list) + len);
+ entry = xmalloc(sizeof(struct ref_entry) + len);
hashcpy(entry->sha1, sha1);
hashclr(entry->peeled);
+ if (check_refname_format(name, REFNAME_ALLOW_ONELEVEL|REFNAME_DOT_COMPONENT))
+ die("Reference has invalid format: '%s'", name);
memcpy(entry->name, name, len);
entry->flag = flag;
- entry->next = list;
if (new_entry)
*new_entry = entry;
- return entry;
+ ALLOC_GROW(refs->refs, refs->nr + 1, refs->alloc);
+ refs->refs[refs->nr++] = entry;
}
-/* merge sort the ref list */
-static struct ref_list *sort_ref_list(struct ref_list *list)
+static int ref_entry_cmp(const void *a, const void *b)
{
- int psize, qsize, last_merge_count, cmp;
- struct ref_list *p, *q, *l, *e;
- struct ref_list *new_list = list;
- int k = 1;
- int merge_count = 0;
+ struct ref_entry *one = *(struct ref_entry **)a;
+ struct ref_entry *two = *(struct ref_entry **)b;
+ return strcmp(one->name, two->name);
+}
- if (!list)
- return list;
+static void sort_ref_array(struct ref_array *array)
+{
+ int i = 0, j = 1;
- do {
- last_merge_count = merge_count;
- merge_count = 0;
+ /* Nothing to sort unless there are at least two entries */
+ if (array->nr < 2)
+ return;
- psize = 0;
+ qsort(array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
+
+ /* Remove any duplicates from the ref_array */
+ for (; j < array->nr; j++) {
+ struct ref_entry *a = array->refs[i];
+ struct ref_entry *b = array->refs[j];
+ if (!strcmp(a->name, b->name)) {
+ if (hashcmp(a->sha1, b->sha1))
+ die("Duplicated ref, and SHA1s don't match: %s",
+ a->name);
+ warning("Duplicated ref: %s", a->name);
+ free(b);
+ continue;
+ }
+ i++;
+ array->refs[i] = array->refs[j];
+ }
+ array->nr = i + 1;
+}
- p = new_list;
- q = new_list;
- new_list = NULL;
- l = NULL;
+static struct ref_entry *search_ref_array(struct ref_array *array, const char *name)
+{
+ struct ref_entry *e, **r;
+ int len;
- while (p) {
- merge_count++;
+ if (name == NULL)
+ return NULL;
- while (psize < k && q->next) {
- q = q->next;
- psize++;
- }
- qsize = k;
-
- while ((psize > 0) || (qsize > 0 && q)) {
- if (qsize == 0 || !q) {
- e = p;
- p = p->next;
- psize--;
- } else if (psize == 0) {
- e = q;
- q = q->next;
- qsize--;
- } else {
- cmp = strcmp(q->name, p->name);
- if (cmp < 0) {
- e = q;
- q = q->next;
- qsize--;
- } else if (cmp > 0) {
- e = p;
- p = p->next;
- psize--;
- } else {
- if (hashcmp(q->sha1, p->sha1))
- die("Duplicated ref, and SHA1s don't match: %s",
- q->name);
- warning("Duplicated ref: %s", q->name);
- e = q;
- q = q->next;
- qsize--;
- free(e);
- e = p;
- p = p->next;
- psize--;
- }
- }
+ if (!array->nr)
+ return NULL;
- e->next = NULL;
+ len = strlen(name) + 1;
+ e = xmalloc(sizeof(struct ref_entry) + len);
+ memcpy(e->name, name, len);
- if (l)
- l->next = e;
- if (!new_list)
- new_list = e;
- l = e;
- }
+ r = bsearch(&e, array->refs, array->nr, sizeof(*array->refs), ref_entry_cmp);
- p = q;
- };
+ free(e);
- k = k * 2;
- } while ((last_merge_count != merge_count) || (last_merge_count != 1));
+ if (r == NULL)
+ return NULL;
- return new_list;
+ return *r;
}
/*
* when doing a full libification.
*/
static struct cached_refs {
+ struct cached_refs *next;
char did_loose;
char did_packed;
- struct ref_list *loose;
- struct ref_list *packed;
-} cached_refs, submodule_refs;
-static struct ref_list *current_ref;
+ struct ref_array loose;
+ struct ref_array packed;
+ /* The submodule name, or "" for the main repo. */
+ char name[FLEX_ARRAY];
+} *cached_refs;
+
+static struct ref_entry *current_ref;
+
+static struct ref_array extra_refs;
-static struct ref_list *extra_refs;
+static void free_ref_array(struct ref_array *array)
+{
+ int i;
+ for (i = 0; i < array->nr; i++)
+ free(array->refs[i]);
+ free(array->refs);
+ array->nr = array->alloc = 0;
+ array->refs = NULL;
+}
+
+static void clear_cached_refs(struct cached_refs *ca)
+{
+ if (ca->did_loose)
+ free_ref_array(&ca->loose);
+ if (ca->did_packed)
+ free_ref_array(&ca->packed);
+ ca->did_loose = ca->did_packed = 0;
+}
-static void free_ref_list(struct ref_list *list)
+static struct cached_refs *create_cached_refs(const char *submodule)
{
- struct ref_list *next;
- for ( ; list; list = next) {
- next = list->next;
- free(list);
+ int len;
+ struct cached_refs *refs;
+ if (!submodule)
+ submodule = "";
+ len = strlen(submodule) + 1;
+ refs = xcalloc(1, sizeof(struct cached_refs) + len);
+ memcpy(refs->name, submodule, len);
+ return refs;
+}
+
+/*
+ * Return a pointer to a cached_refs for the specified submodule. For
+ * the main repository, use submodule==NULL. The returned structure
+ * will be allocated and initialized but not necessarily populated; it
+ * should not be freed.
+ */
+static struct cached_refs *get_cached_refs(const char *submodule)
+{
+ struct cached_refs *refs = cached_refs;
+ if (!submodule)
+ submodule = "";
+ while (refs) {
+ if (!strcmp(submodule, refs->name))
+ return refs;
+ refs = refs->next;
}
+
+ refs = create_cached_refs(submodule);
+ refs->next = cached_refs;
+ cached_refs = refs;
+ return refs;
}
static void invalidate_cached_refs(void)
{
- struct cached_refs *ca = &cached_refs;
-
- if (ca->did_loose && ca->loose)
- free_ref_list(ca->loose);
- if (ca->did_packed && ca->packed)
- free_ref_list(ca->packed);
- ca->loose = ca->packed = NULL;
- ca->did_loose = ca->did_packed = 0;
+ struct cached_refs *refs = cached_refs;
+ while (refs) {
+ clear_cached_refs(refs);
+ refs = refs->next;
+ }
}
-static void read_packed_refs(FILE *f, struct cached_refs *cached_refs)
+static void read_packed_refs(FILE *f, struct ref_array *array)
{
- struct ref_list *list = NULL;
- struct ref_list *last = NULL;
+ struct ref_entry *last = NULL;
char refline[PATH_MAX];
int flag = REF_ISPACKED;
name = parse_ref_line(refline, sha1);
if (name) {
- list = add_ref(name, sha1, flag, list, &last);
+ add_ref(name, sha1, flag, array, &last);
continue;
}
if (last &&
!get_sha1_hex(refline + 1, sha1))
hashcpy(last->peeled, sha1);
}
- cached_refs->packed = sort_ref_list(list);
+ sort_ref_array(array);
}
void add_extra_ref(const char *name, const unsigned char *sha1, int flag)
{
- extra_refs = add_ref(name, sha1, flag, extra_refs, NULL);
+ add_ref(name, sha1, flag, &extra_refs, NULL);
}
void clear_extra_refs(void)
{
- free_ref_list(extra_refs);
- extra_refs = NULL;
+ free_ref_array(&extra_refs);
}
-static struct ref_list *get_packed_refs(const char *submodule)
+static struct ref_array *get_packed_refs(const char *submodule)
{
- const char *packed_refs_file;
- struct cached_refs *refs;
+ struct cached_refs *refs = get_cached_refs(submodule);
- if (submodule) {
- packed_refs_file = git_path_submodule(submodule, "packed-refs");
- refs = &submodule_refs;
- free_ref_list(refs->packed);
- } else {
- packed_refs_file = git_path("packed-refs");
- refs = &cached_refs;
- }
+ if (!refs->did_packed) {
+ const char *packed_refs_file;
+ FILE *f;
- if (!refs->did_packed || submodule) {
- FILE *f = fopen(packed_refs_file, "r");
- refs->packed = NULL;
+ if (submodule)
+ packed_refs_file = git_path_submodule(submodule, "packed-refs");
+ else
+ packed_refs_file = git_path("packed-refs");
+ f = fopen(packed_refs_file, "r");
if (f) {
- read_packed_refs(f, refs);
+ read_packed_refs(f, &refs->packed);
fclose(f);
}
refs->did_packed = 1;
}
- return refs->packed;
+ return &refs->packed;
}
-static struct ref_list *get_ref_dir(const char *submodule, const char *base,
- struct ref_list *list)
+static void get_ref_dir(const char *submodule, const char *base,
+ struct ref_array *array)
{
DIR *dir;
const char *path;
if (stat(refdir, &st) < 0)
continue;
if (S_ISDIR(st.st_mode)) {
- list = get_ref_dir(submodule, ref, list);
+ get_ref_dir(submodule, ref, array);
continue;
}
if (submodule) {
hashclr(sha1);
flag |= REF_BROKEN;
}
- list = add_ref(ref, sha1, flag, list, NULL);
+ add_ref(ref, sha1, flag, array, NULL);
}
free(ref);
closedir(dir);
}
- return sort_ref_list(list);
}
struct warn_if_dangling_data {
for_each_rawref(warn_if_dangling_symref, &data);
}
-static struct ref_list *get_loose_refs(const char *submodule)
+static struct ref_array *get_loose_refs(const char *submodule)
{
- if (submodule) {
- free_ref_list(submodule_refs.loose);
- submodule_refs.loose = get_ref_dir(submodule, "refs", NULL);
- return submodule_refs.loose;
- }
+ struct cached_refs *refs = get_cached_refs(submodule);
- if (!cached_refs.did_loose) {
- cached_refs.loose = get_ref_dir(NULL, "refs", NULL);
- cached_refs.did_loose = 1;
+ if (!refs->did_loose) {
+ get_ref_dir(submodule, "refs", &refs->loose);
+ sort_ref_array(&refs->loose);
+ refs->did_loose = 1;
}
- return cached_refs.loose;
+ return &refs->loose;
}
/* We allow "recursive" symbolic refs. Only within reason, though */
#define MAXDEPTH 5
#define MAXREFLEN (1024)
+/*
+ * Called by resolve_gitlink_ref_recursive() after it failed to read
+ * from "name", which is "module/.git/<refname>". Find <refname> in
+ * the packed-refs file for the submodule.
+ */
static int resolve_gitlink_packed_ref(char *name, int pathlen, const char *refname, unsigned char *result)
{
- FILE *f;
- struct cached_refs refs;
- struct ref_list *ref;
- int retval;
+ int retval = -1;
+ struct ref_entry *ref;
+ struct ref_array *array;
- strcpy(name + pathlen, "packed-refs");
- f = fopen(name, "r");
- if (!f)
- return -1;
- read_packed_refs(f, &refs);
- fclose(f);
- ref = refs.packed;
- retval = -1;
- while (ref) {
- if (!strcmp(ref->name, refname)) {
- retval = 0;
- memcpy(result, ref->sha1, 20);
- break;
- }
- ref = ref->next;
+ /* being defensive: resolve_gitlink_ref() did this for us */
+ if (pathlen < 6 || memcmp(name + pathlen - 6, "/.git/", 6))
+ die("Oops");
+ name[pathlen - 6] = '\0'; /* make it path to the submodule */
+ array = get_packed_refs(name);
+ ref = search_ref_array(array, refname);
+ if (ref != NULL) {
+ memcpy(result, ref->sha1, 20);
+ retval = 0;
}
- free_ref_list(refs.packed);
return retval;
}
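To make the pointer arithmetic above concrete, here is a minimal sketch for a hypothetical submodule "sub" and refname (these are the same calls the function performs, not additional code in the patch):

	/* name = "sub/.git/refs/heads/topic", pathlen = strlen("sub/.git/") */
	name[pathlen - 6] = '\0';		/* name is now "sub" */
	array = get_packed_refs(name);		/* the submodule's packed-refs */
	ref = search_ref_array(array, "refs/heads/topic");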
}
/*
- * If the "reading" argument is set, this function finds out what _object_
- * the ref points at by "reading" the ref. The ref, if it is not symbolic,
- * has to exist, and if it is symbolic, it has to point at an existing ref,
- * because the "read" goes through the symref to the ref it points at.
- *
- * The access that is not "reading" may often be "writing", but does not
- * have to; it can be merely checking _where it leads to_. If it is a
- * prelude to "writing" to the ref, a write to a symref that points at
- * yet-to-be-born ref will create the real ref pointed by the symref.
- * reading=0 allows the caller to check where such a symref leads to.
+ * Try to read ref from the packed references. On success, set sha1
+ * and return 0; otherwise, return -1.
*/
+static int get_packed_ref(const char *ref, unsigned char *sha1)
+{
+ struct ref_array *packed = get_packed_refs(NULL);
+ struct ref_entry *entry = search_ref_array(packed, ref);
+ if (entry) {
+ hashcpy(sha1, entry->sha1);
+ return 0;
+ }
+ return -1;
+}
+
const char *resolve_ref(const char *ref, unsigned char *sha1, int reading, int *flag)
{
int depth = MAXDEPTH;
ssize_t len;
char buffer[256];
static char ref_buffer[256];
+ char path[PATH_MAX];
if (flag)
*flag = 0;
+ if (check_refname_format(ref, REFNAME_ALLOW_ONELEVEL))
+ return NULL;
+
for (;;) {
- char path[PATH_MAX];
struct stat st;
char *buf;
int fd;
return NULL;
git_snpath(path, sizeof(path), "%s", ref);
- /* Special case: non-existing file. */
+
if (lstat(path, &st) < 0) {
- struct ref_list *list = get_packed_refs(NULL);
- while (list) {
- if (!strcmp(ref, list->name)) {
- hashcpy(sha1, list->sha1);
- if (flag)
- *flag |= REF_ISPACKED;
- return ref;
- }
- list = list->next;
+ if (errno != ENOENT)
+ return NULL;
+ /*
+ * The loose reference file does not exist;
+ * check for a packed reference.
+ */
+ if (!get_packed_ref(ref, sha1)) {
+ if (flag)
+ *flag |= REF_ISPACKED;
+ return ref;
}
- if (reading || errno != ENOENT)
+ /* The reference is not a packed reference, either. */
+ if (reading) {
return NULL;
- hashclr(sha1);
- return ref;
+ } else {
+ hashclr(sha1);
+ return ref;
+ }
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
- if (len >= 5 && !memcmp("refs/", buffer, 5)) {
- buffer[len] = 0;
+ if (len < 0)
+ return NULL;
+ buffer[len] = 0;
+ if (!prefixcmp(buffer, "refs/") &&
+ !check_refname_format(buffer, 0)) {
strcpy(ref_buffer, buffer);
ref = ref_buffer;
if (flag)
return NULL;
len = read_in_full(fd, buffer, sizeof(buffer)-1);
close(fd);
+ if (len < 0)
+ return NULL;
+ while (len && isspace(buffer[len-1]))
+ len--;
+ buffer[len] = '\0';
/*
* Is it a symbolic ref?
*/
- if (len < 4 || memcmp("ref:", buffer, 4))
+ if (prefixcmp(buffer, "ref:"))
break;
buf = buffer + 4;
- len -= 4;
- while (len && isspace(*buf))
- buf++, len--;
- while (len && isspace(buf[len-1]))
- len--;
- buf[len] = 0;
- memcpy(ref_buffer, buf, len + 1);
- ref = ref_buffer;
+ while (isspace(*buf))
+ buf++;
+ if (check_refname_format(buf, REFNAME_ALLOW_ONELEVEL)) {
+ warning("symbolic reference in %s is formatted incorrectly",
+ path);
+ return NULL;
+ }
+ ref = strcpy(ref_buffer, buf);
if (flag)
*flag |= REF_ISSYMREF;
}
- if (len < 40 || get_sha1_hex(buffer, sha1))
+	/* Please note that FETCH_HEAD has additional data after the object name. */
+ if (get_sha1_hex(buffer, sha1) || (buffer[40] != '\0' && !isspace(buffer[40]))) {
+ warning("reference in %s is formatted incorrectly", path);
return NULL;
+ }
return ref;
}
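For illustration, the relaxed check above is needed because a file such as FETCH_HEAD stores more than a bare object name; its lines look roughly like this (hypothetical values):

	5f4d3c2b1a0f9e8d7c6b5a4f3e2d1c0b9a8f7e6d		branch 'master' of git://example.com/repo

so the character right after the 40 hex digits is a tab rather than a NUL or newline.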
#define DO_FOR_EACH_INCLUDE_BROKEN 01
static int do_one_ref(const char *base, each_ref_fn fn, int trim,
- int flags, void *cb_data, struct ref_list *entry)
+ int flags, void *cb_data, struct ref_entry *entry)
{
if (prefixcmp(entry->name, base))
return 0;
return -1;
if ((flag & REF_ISPACKED)) {
- struct ref_list *list = get_packed_refs(NULL);
+ struct ref_array *array = get_packed_refs(NULL);
+ struct ref_entry *r = search_ref_array(array, ref);
- while (list) {
- if (!strcmp(list->name, ref)) {
- if (list->flag & REF_KNOWS_PEELED) {
- hashcpy(sha1, list->peeled);
- return 0;
- }
- /* older pack-refs did not leave peeled ones */
- break;
- }
- list = list->next;
+ if (r != NULL && r->flag & REF_KNOWS_PEELED) {
+ hashcpy(sha1, r->peeled);
+ return 0;
}
}
static int do_for_each_ref(const char *submodule, const char *base, each_ref_fn fn,
int trim, int flags, void *cb_data)
{
- int retval = 0;
- struct ref_list *packed = get_packed_refs(submodule);
- struct ref_list *loose = get_loose_refs(submodule);
+ int retval = 0, i, p = 0, l = 0;
+ struct ref_array *packed = get_packed_refs(submodule);
+ struct ref_array *loose = get_loose_refs(submodule);
- struct ref_list *extra;
+ struct ref_array *extra = &extra_refs;
- for (extra = extra_refs; extra; extra = extra->next)
- retval = do_one_ref(base, fn, trim, flags, cb_data, extra);
+ for (i = 0; i < extra->nr; i++)
+ retval = do_one_ref(base, fn, trim, flags, cb_data, extra->refs[i]);
- while (packed && loose) {
- struct ref_list *entry;
- int cmp = strcmp(packed->name, loose->name);
+ while (p < packed->nr && l < loose->nr) {
+ struct ref_entry *entry;
+ int cmp = strcmp(packed->refs[p]->name, loose->refs[l]->name);
if (!cmp) {
- packed = packed->next;
+ p++;
continue;
}
if (cmp > 0) {
- entry = loose;
- loose = loose->next;
+ entry = loose->refs[l++];
} else {
- entry = packed;
- packed = packed->next;
+ entry = packed->refs[p++];
}
retval = do_one_ref(base, fn, trim, flags, cb_data, entry);
if (retval)
goto end_each;
}
- for (packed = packed ? packed : loose; packed; packed = packed->next) {
- retval = do_one_ref(base, fn, trim, flags, cb_data, packed);
+ if (l < loose->nr) {
+ p = l;
+ packed = loose;
+ }
+
+ for (; p < packed->nr; p++) {
+ retval = do_one_ref(base, fn, trim, flags, cb_data, packed->refs[p]);
if (retval)
goto end_each;
}
* - it contains a "\" (backslash)
*/
+/* Return true iff ch is not allowed in reference names. */
static inline int bad_ref_char(int ch)
{
if (((unsigned) ch) <= ' ' || ch == 0x7f ||
ch == '~' || ch == '^' || ch == ':' || ch == '\\')
return 1;
/* 2.13 Pattern Matching Notation */
- if (ch == '?' || ch == '[') /* Unsupported */
+ if (ch == '*' || ch == '?' || ch == '[') /* Unsupported */
return 1;
- if (ch == '*') /* Supported at the end */
- return 2;
return 0;
}
-int check_ref_format(const char *ref)
+/*
+ * Try to read one refname component from the front of ref. Return
+ * the length of the component found, or -1 if the component is not
+ * legal.
+ */
+static int check_refname_component(const char *ref, int flags)
{
- int ch, level, bad_type, last;
- int ret = CHECK_REF_FORMAT_OK;
- const char *cp = ref;
-
- level = 0;
- while (1) {
- while ((ch = *cp++) == '/')
- ; /* tolerate duplicated slashes */
- if (!ch)
- /* should not end with slashes */
- return CHECK_REF_FORMAT_ERROR;
-
- /* we are at the beginning of the path component */
- if (ch == '.')
- return CHECK_REF_FORMAT_ERROR;
- bad_type = bad_ref_char(ch);
- if (bad_type) {
- if (bad_type == 2 && (!*cp || *cp == '/') &&
- ret == CHECK_REF_FORMAT_OK)
- ret = CHECK_REF_FORMAT_WILDCARD;
- else
- return CHECK_REF_FORMAT_ERROR;
- }
+ const char *cp;
+ char last = '\0';
+ for (cp = ref; ; cp++) {
+ char ch = *cp;
+ if (ch == '\0' || ch == '/')
+ break;
+ if (bad_ref_char(ch))
+ return -1; /* Illegal character in refname. */
+ if (last == '.' && ch == '.')
+ return -1; /* Refname contains "..". */
+ if (last == '@' && ch == '{')
+ return -1; /* Refname contains "@{". */
last = ch;
- /* scan the rest of the path component */
- while ((ch = *cp++) != 0) {
- bad_type = bad_ref_char(ch);
- if (bad_type)
- return CHECK_REF_FORMAT_ERROR;
- if (ch == '/')
- break;
- if (last == '.' && ch == '.')
- return CHECK_REF_FORMAT_ERROR;
- if (last == '@' && ch == '{')
- return CHECK_REF_FORMAT_ERROR;
- last = ch;
- }
- level++;
- if (!ch) {
- if (ref <= cp - 2 && cp[-2] == '.')
- return CHECK_REF_FORMAT_ERROR;
- if (level < 2)
- return CHECK_REF_FORMAT_ONELEVEL;
- if (has_extension(ref, ".lock"))
- return CHECK_REF_FORMAT_ERROR;
- return ret;
+ }
+ if (cp == ref)
+ return -1; /* Component has zero length. */
+ if (ref[0] == '.') {
+ if (!(flags & REFNAME_DOT_COMPONENT))
+ return -1; /* Component starts with '.'. */
+ /*
+ * Even if leading dots are allowed, don't allow "."
+ * as a component (".." is prevented by a rule above).
+ */
+ if (ref[1] == '\0')
+ return -1; /* Component equals ".". */
+ }
+ if (cp - ref >= 5 && !memcmp(cp - 5, ".lock", 5))
+ return -1; /* Refname ends with ".lock". */
+ return cp - ref;
+}
+
+int check_refname_format(const char *ref, int flags)
+{
+ int component_len, component_count = 0;
+
+ while (1) {
+ /* We are at the start of a path component. */
+ component_len = check_refname_component(ref, flags);
+ if (component_len < 0) {
+ if ((flags & REFNAME_REFSPEC_PATTERN) &&
+ ref[0] == '*' &&
+ (ref[1] == '\0' || ref[1] == '/')) {
+ /* Accept one wildcard as a full refname component. */
+ flags &= ~REFNAME_REFSPEC_PATTERN;
+ component_len = 1;
+ } else {
+ return -1;
+ }
}
+ component_count++;
+ if (ref[component_len] == '\0')
+ break;
+ /* Skip to next component. */
+ ref += component_len + 1;
}
+
+ if (ref[component_len - 1] == '.')
+ return -1; /* Refname ends with '.'. */
+ if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
+ return -1; /* Refname has only one component. */
+ return 0;
}
const char *prettify_refname(const char *name)
}
static int is_refname_available(const char *ref, const char *oldref,
- struct ref_list *list, int quiet)
-{
- int namlen = strlen(ref); /* e.g. 'foo/bar' */
- while (list) {
- /* list->name could be 'foo' or 'foo/bar/baz' */
- if (!oldref || strcmp(oldref, list->name)) {
- int len = strlen(list->name);
+ struct ref_array *array, int quiet)
+{
+ int i, namlen = strlen(ref); /* e.g. 'foo/bar' */
+ for (i = 0; i < array->nr; i++ ) {
+ struct ref_entry *entry = array->refs[i];
+ /* entry->name could be 'foo' or 'foo/bar/baz' */
+ if (!oldref || strcmp(oldref, entry->name)) {
+ int len = strlen(entry->name);
int cmplen = (namlen < len) ? namlen : len;
- const char *lead = (namlen < len) ? list->name : ref;
- if (!strncmp(ref, list->name, cmplen) &&
+ const char *lead = (namlen < len) ? entry->name : ref;
+ if (!strncmp(ref, entry->name, cmplen) &&
lead[cmplen] == '/') {
if (!quiet)
error("'%s' exists; cannot create '%s'",
- list->name, ref);
+ entry->name, ref);
return 0;
}
}
- list = list->next;
}
return 1;
}
struct ref_lock *lock_ref_sha1(const char *ref, const unsigned char *old_sha1)
{
char refpath[PATH_MAX];
- if (check_ref_format(ref))
+ if (check_refname_format(ref, 0))
return NULL;
strcpy(refpath, mkpath("refs/%s", ref));
return lock_ref_sha1_basic(refpath, old_sha1, 0, NULL);
struct ref_lock *lock_any_ref_for_update(const char *ref, const unsigned char *old_sha1, int flags)
{
- switch (check_ref_format(ref)) {
- default:
+ if (check_refname_format(ref, REFNAME_ALLOW_ONELEVEL))
return NULL;
- case 0:
- case CHECK_REF_FORMAT_ONELEVEL:
- return lock_ref_sha1_basic(ref, old_sha1, flags, NULL);
- }
+ return lock_ref_sha1_basic(ref, old_sha1, flags, NULL);
}
static struct lock_file packlock;
static int repack_without_ref(const char *refname)
{
- struct ref_list *list, *packed_ref_list;
- int fd;
- int found = 0;
+ struct ref_array *packed;
+ struct ref_entry *ref;
+ int fd, i;
- packed_ref_list = get_packed_refs(NULL);
- for (list = packed_ref_list; list; list = list->next) {
- if (!strcmp(refname, list->name)) {
- found = 1;
- break;
- }
- }
- if (!found)
+ packed = get_packed_refs(NULL);
+ ref = search_ref_array(packed, refname);
+ if (ref == NULL)
return 0;
fd = hold_lock_file_for_update(&packlock, git_path("packed-refs"), 0);
if (fd < 0) {
return error("cannot delete '%s' from packed refs", refname);
}
- for (list = packed_ref_list; list; list = list->next) {
+ for (i = 0; i < packed->nr; i++) {
char line[PATH_MAX + 100];
int len;
- if (!strcmp(refname, list->name))
+ ref = packed->refs[i];
+
+ if (!strcmp(refname, ref->name))
continue;
len = snprintf(line, sizeof(line), "%s %s\n",
- sha1_to_hex(list->sha1), list->name);
+ sha1_to_hex(ref->sha1), ref->name);
/* this should not happen but just being defensive */
if (len > sizeof(line))
- die("too long a refname '%s'", list->name);
+ die("too long a refname '%s'", ref->name);
write_or_die(fd, line, len);
}
return commit_lock_file(&packlock);
return 0;
}
-int ref_exists(char *refname)
+int ref_exists(const char *refname)
{
unsigned char sha1[20];
return !!resolve_ref(refname, sha1, 1, NULL);
*/
extern void add_extra_ref(const char *refname, const unsigned char *sha1, int flags);
extern void clear_extra_refs(void);
-extern int ref_exists(char *);
+extern int ref_exists(const char *);
extern int peel_ref(const char *, unsigned char *);
*/
extern int for_each_reflog(each_ref_fn, void *);
-#define CHECK_REF_FORMAT_OK 0
-#define CHECK_REF_FORMAT_ERROR (-1)
-#define CHECK_REF_FORMAT_ONELEVEL (-2)
-#define CHECK_REF_FORMAT_WILDCARD (-3)
-extern int check_ref_format(const char *target);
+#define REFNAME_ALLOW_ONELEVEL 1
+#define REFNAME_REFSPEC_PATTERN 2
+#define REFNAME_DOT_COMPONENT 4
+
+/*
+ * Return 0 iff ref has the correct format for a refname according to
+ * the rules described in Documentation/git-check-ref-format.txt. If
+ * REFNAME_ALLOW_ONELEVEL is set in flags, then accept one-level
+ * reference names. If REFNAME_REFSPEC_PATTERN is set in flags, then
+ * allow a "*" wildcard character in place of one of the name
+ * components. No leading or repeated slashes are accepted. If
+ * REFNAME_DOT_COMPONENT is set in flags, then allow refname
+ * components to start with "." (but not a whole component equal to
+ * "." or "..").
+ */
+extern int check_refname_format(const char *ref, int flags);
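A rough usage sketch of the flag combinations described above (the refnames are made up; these particular calls are illustrative, not call sites from this series):

	if (check_refname_format("refs/heads/topic", 0))
		die("not a well-formed ref name");

	/*
	 * A one-level name such as "HEAD" needs REFNAME_ALLOW_ONELEVEL;
	 * a refspec side such as "refs/heads/*" needs the pattern flag.
	 */
	if (check_refname_format("refs/heads/*",
				 REFNAME_ALLOW_ONELEVEL | REFNAME_REFSPEC_PATTERN))
		die("bad refspec pattern");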
extern const char *prettify_refname(const char *refname);
extern char *shorten_unambiguous_ref(const char *ref, int strict);
close(client.in);
client.in = -1;
- strbuf_read(&rpc->result, client.out, 0);
+ if (!err) {
+ strbuf_read(&rpc->result, client.out, 0);
+ } else {
+ char buf[4096];
+ for (;;)
+ if (xread(client.out, buf, sizeof(buf)) <= 0)
+ break;
+ }
close(client.out);
client.out = -1;
alias_all_urls();
}
-/*
- * We need to make sure the remote-tracking branches are well formed, but a
- * wildcard refspec in "struct refspec" must have a trailing slash. We
- * temporarily drop the trailing '/' while calling check_ref_format(),
- * and put it back. The caller knows that a CHECK_REF_FORMAT_ONELEVEL
- * error return is Ok for a wildcard refspec.
- */
-static int verify_refname(char *name, int is_glob)
-{
- int result;
-
- result = check_ref_format(name);
- if (is_glob && result == CHECK_REF_FORMAT_WILDCARD)
- result = CHECK_REF_FORMAT_OK;
- return result;
-}
-
/*
* This function frees a refspec array.
* Warning: code paths should be checked to ensure that the src
static struct refspec *parse_refspec_internal(int nr_refspec, const char **refspec, int fetch, int verify)
{
int i;
- int st;
struct refspec *rs = xcalloc(sizeof(*rs), nr_refspec);
for (i = 0; i < nr_refspec; i++) {
size_t llen;
int is_glob;
const char *lhs, *rhs;
+ int flags;
is_glob = 0;
rs[i].pattern = is_glob;
rs[i].src = xstrndup(lhs, llen);
+ flags = REFNAME_ALLOW_ONELEVEL | (is_glob ? REFNAME_REFSPEC_PATTERN : 0);
if (fetch) {
/*
*/
if (!*rs[i].src)
; /* empty is ok */
- else {
- st = verify_refname(rs[i].src, is_glob);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL)
- goto invalid;
- }
+ else if (check_refname_format(rs[i].src, flags))
+ goto invalid;
/*
* RHS
* - missing is ok, and is same as empty.
* - empty is ok; it means not to store.
* - otherwise it must be a valid looking ref.
*/
- if (!rs[i].dst) {
+ if (!rs[i].dst)
; /* ok */
- } else if (!*rs[i].dst) {
+ else if (!*rs[i].dst)
; /* ok */
- } else {
- st = verify_refname(rs[i].dst, is_glob);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL)
- goto invalid;
- }
+ else if (check_refname_format(rs[i].dst, flags))
+ goto invalid;
} else {
/*
* LHS
if (!*rs[i].src)
; /* empty is ok */
else if (is_glob) {
- st = verify_refname(rs[i].src, is_glob);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL)
+ if (check_refname_format(rs[i].src, flags))
goto invalid;
}
else
* - otherwise it must be a valid looking ref.
*/
if (!rs[i].dst) {
- st = verify_refname(rs[i].src, is_glob);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL)
+ if (check_refname_format(rs[i].src, flags))
goto invalid;
} else if (!*rs[i].dst) {
goto invalid;
} else {
- st = verify_refname(rs[i].dst, is_glob);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL)
+ if (check_refname_format(rs[i].dst, flags))
goto invalid;
}
}
refspec->dst, &ret))
return ret;
} else if (!strcmp(refspec->src, name))
- return strdup(refspec->dst);
+ return xstrdup(refspec->dst);
}
return NULL;
}
for (rmp = &ref_map; *rmp; ) {
if ((*rmp)->peer_ref) {
- int st = check_ref_format((*rmp)->peer_ref->name + 5);
- if (st && st != CHECK_REF_FORMAT_ONELEVEL) {
+ if (check_refname_format((*rmp)->peer_ref->name + 5,
+ REFNAME_ALLOW_ONELEVEL)) {
struct ref *ignore = *rmp;
error("* Ignoring funny ref '%s' locally",
(*rmp)->peer_ref->name);
int len;
/* we already know it starts with refs/ to get here */
- if (check_ref_format(refname + 5))
+ if (check_refname_format(refname + 5, 0))
return 0;
len = strlen(refname) + 1;
return n;
}
+static int show_path_component_truncated(FILE *out, const char *name, int len)
+{
+ int cnt;
+ for (cnt = 0; cnt < len; cnt++) {
+ int ch = name[cnt];
+ if (!ch || ch == '\n')
+ return -1;
+ fputc(ch, out);
+ }
+ return len;
+}
+
+static int show_path_truncated(FILE *out, const struct name_path *path)
+{
+ int emitted, ours;
+
+ if (!path)
+ return 0;
+ emitted = show_path_truncated(out, path->up);
+ if (emitted < 0)
+ return emitted;
+ if (emitted)
+ fputc('/', out);
+ ours = show_path_component_truncated(out, path->elem, path->elem_len);
+ if (ours < 0)
+ return ours;
+ return ours || emitted;
+}
+
+void show_object_with_name(FILE *out, struct object *obj, const struct name_path *path, const char *component)
+{
+ struct name_path leaf;
+ leaf.up = (struct name_path *)path;
+ leaf.elem = component;
+ leaf.elem_len = strlen(component);
+
+ fprintf(out, "%s ", sha1_to_hex(obj->sha1));
+ show_path_truncated(out, &leaf);
+ fputc('\n', out);
+}
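For example (hypothetical object name), an object reached as component "c" under a name_path chain a -> b is printed by the function above as:

	5f4d3c2b1a0f9e8d7c6b5a4f3e2d1c0b9a8f7e6d a/b/c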
+
void add_object(struct object *obj,
struct object_array *p,
struct name_path *path,
return object;
}
+void add_pending_sha1(struct rev_info *revs, const char *name,
+ const unsigned char *sha1, unsigned int flags)
+{
+ struct object *object = get_reference(revs, name, sha1, flags);
+ add_pending_object(revs, object, name);
+}
+
static struct commit *handle_commit(struct rev_info *revs, struct object *object, const char *name)
{
unsigned long flags = object->flags;
* to filter the result of "A..B" further to the ones that can actually
* reach A.
*/
-static struct commit_list *collect_bottom_commits(struct commit_list *list)
+static struct commit_list *collect_bottom_commits(struct rev_info *revs)
{
- struct commit_list *elem, *bottom = NULL;
- for (elem = list; elem; elem = elem->next)
- if (elem->item->object.flags & UNINTERESTING)
- commit_list_insert(elem->item, &bottom);
+ struct commit_list *bottom = NULL;
+ int i;
+ for (i = 0; i < revs->cmdline.nr; i++) {
+ struct rev_cmdline_entry *elem = &revs->cmdline.rev[i];
+ if ((elem->flags & UNINTERESTING) &&
+ elem->item->type == OBJ_COMMIT)
+ commit_list_insert((struct commit *)elem->item, &bottom);
+ }
return bottom;
}
struct commit_list *bottom = NULL;
if (revs->ancestry_path) {
- bottom = collect_bottom_commits(list);
+ bottom = collect_bottom_commits(revs);
if (!bottom)
die("--ancestry-path given but there are no bottom commits");
}
return 0;
}
+static void add_rev_cmdline(struct rev_info *revs,
+ struct object *item,
+ const char *name,
+ int whence,
+ unsigned flags)
+{
+ struct rev_cmdline_info *info = &revs->cmdline;
+ int nr = info->nr;
+
+ ALLOC_GROW(info->rev, nr + 1, info->alloc);
+ info->rev[nr].item = item;
+ info->rev[nr].name = name;
+ info->rev[nr].whence = whence;
+ info->rev[nr].flags = flags;
+ info->nr++;
+}
+
struct all_refs_cb {
int all_flags;
int warned_bad_reflog;
struct all_refs_cb *cb = cb_data;
struct object *object = get_reference(cb->all_revs, path, sha1,
cb->all_flags);
- add_pending_object(cb->all_revs, object, path);
+ add_rev_cmdline(cb->all_revs, object, path, REV_CMD_REF, cb->all_flags);
+ add_pending_sha1(cb->all_revs, path, sha1, cb->all_flags);
return 0;
}
struct object *o = parse_object(sha1);
if (o) {
o->flags |= cb->all_flags;
+		/* TODO: should this entry also be recorded via add_rev_cmdline()? */
add_pending_object(cb->all_revs, o, "");
}
else if (!cb->warned_bad_reflog) {
for_each_reflog(handle_one_reflog, &cb);
}
-static int add_parents_only(struct rev_info *revs, const char *arg, int flags)
+static int add_parents_only(struct rev_info *revs, const char *arg_, int flags)
{
unsigned char sha1[20];
struct object *it;
struct commit *commit;
struct commit_list *parents;
+ const char *arg = arg_;
if (*arg == '^') {
flags ^= UNINTERESTING;
for (parents = commit->parents; parents; parents = parents->next) {
it = &parents->item->object;
it->flags |= flags;
+ add_rev_cmdline(revs, it, arg_, REV_CMD_PARENTS_ONLY, flags);
add_pending_object(revs, it, arg);
}
return 1;
const char **prune = NULL;
int i, prune_num = 1; /* counting terminating NULL */
- if (get_sha1("HEAD", sha1) || !(head = lookup_commit(sha1)))
+ if (get_sha1("HEAD", sha1))
die("--merge without HEAD?");
- if (get_sha1("MERGE_HEAD", sha1) || !(other = lookup_commit(sha1)))
+ head = lookup_commit_or_die(sha1, "HEAD");
+ if (get_sha1("MERGE_HEAD", sha1))
die("--merge without MERGE_HEAD?");
+ other = lookup_commit_or_die(sha1, "MERGE_HEAD");
add_pending_object(revs, &head->object, "HEAD");
add_pending_object(revs, &other->object, "MERGE_HEAD");
bases = get_merge_bases(head, other, 1);
revs->limited = 1;
}
-int handle_revision_arg(const char *arg, struct rev_info *revs,
+int handle_revision_arg(const char *arg_, struct rev_info *revs,
int flags,
int cant_be_filename)
{
struct object *object;
unsigned char sha1[20];
int local_flags;
+ const char *arg = arg_;
dotdot = strstr(arg, "..");
if (dotdot) {
const char *this = arg;
int symmetric = *next == '.';
unsigned int flags_exclude = flags ^ UNINTERESTING;
+ unsigned int a_flags;
*dotdot = 0;
next += symmetric;
add_pending_commit_list(revs, exclude,
flags_exclude);
free_commit_list(exclude);
- a->object.flags |= flags | SYMMETRIC_LEFT;
+ a_flags = flags | SYMMETRIC_LEFT;
} else
- a->object.flags |= flags_exclude;
+ a_flags = flags_exclude;
+ a->object.flags |= a_flags;
b->object.flags |= flags;
+ add_rev_cmdline(revs, &a->object, this,
+ REV_CMD_LEFT, a_flags);
+ add_rev_cmdline(revs, &b->object, next,
+ REV_CMD_RIGHT, flags);
add_pending_object(revs, &a->object, this);
add_pending_object(revs, &b->object, next);
return 0;
if (!cant_be_filename)
verify_non_filename(revs->prefix, arg);
object = get_reference(revs, arg, sha1, flags ^ local_flags);
+ add_rev_cmdline(revs, object, arg_, REV_CMD_REV, flags ^ local_flags);
add_pending_object_with_mode(revs, object, arg, mode);
return 0;
}
revs->tree_objects = 1;
revs->blob_objects = 1;
revs->edge_hint = 1;
+ } else if (!strcmp(arg, "--verify-objects")) {
+ revs->tag_objects = 1;
+ revs->tree_objects = 1;
+ revs->blob_objects = 1;
+ revs->verify_objects = 1;
} else if (!strcmp(arg, "--unpacked")) {
revs->unpacked = 1;
} else if (!prefixcmp(arg, "--unpacked=")) {
}
e++;
}
- free(list);
+ if (!revs->leak_pending)
+ free(list);
if (revs->no_walk)
return 0;
struct log_info;
struct string_list;
+struct rev_cmdline_info {
+ unsigned int nr;
+ unsigned int alloc;
+ struct rev_cmdline_entry {
+ struct object *item;
+ const char *name;
+ enum {
+ REV_CMD_REF,
+ REV_CMD_PARENTS_ONLY,
+ REV_CMD_LEFT,
+ REV_CMD_RIGHT,
+ REV_CMD_REV
+ } whence;
+ unsigned flags;
+ } *rev;
+};
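As a rough illustration of what this bookkeeping captures (sketch only, not code from this series): after parsing a range such as "A..B", handle_revision_arg() records two entries that a consumer could inspect like this:

	struct rev_cmdline_info *info = &revs->cmdline;
	int i;

	for (i = 0; i < info->nr; i++)
		fprintf(stderr, "%s whence=%d flags=%u\n",
			info->rev[i].name, info->rev[i].whence,
			info->rev[i].flags);
	/*
	 * For "A..B", "A" is recorded with REV_CMD_LEFT and the
	 * UNINTERESTING bit set, "B" with REV_CMD_RIGHT.
	 */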
+
struct rev_info {
/* Starting list */
struct commit_list *commits;
/* Parents of shown commits */
struct object_array boundary_commits;
+ /* The end-points specified by the end user */
+ struct rev_cmdline_info cmdline;
+
/* Basic information */
const char *prefix;
const char *def;
tag_objects:1,
tree_objects:1,
blob_objects:1,
+ verify_objects:1,
edge_hint:1,
limited:1,
unpacked:1,
date_mode_explicit:1,
preserve_subject:1;
unsigned int disable_stdin:1;
+ unsigned int leak_pending:1;
enum date_mode date_mode;
char *path_name(const struct name_path *path, const char *name);
+extern void show_object_with_name(FILE *, struct object *, const struct name_path *, const char *);
+
extern void add_object(struct object *obj,
struct object_array *p,
struct name_path *path,
const char *name);
extern void add_pending_object(struct rev_info *revs, struct object *obj, const char *name);
+extern void add_pending_sha1(struct rev_info *revs, const char *name, const unsigned char *sha1, unsigned int flags);
extern void add_head_to_pending(struct rev_info *);
#include "cache.h"
#include "run-command.h"
#include "exec_cmd.h"
+#include "argv-array.h"
static inline void close_pair(int fd[2])
{
int run_hook(const char *index_file, const char *name, ...)
{
struct child_process hook;
- const char **argv = NULL, *env[2];
+ struct argv_array argv = ARGV_ARRAY_INIT;
+ const char *p, *env[2];
char index[PATH_MAX];
va_list args;
int ret;
- size_t i = 0, alloc = 0;
if (access(git_path("hooks/%s", name), X_OK) < 0)
return 0;
va_start(args, name);
- ALLOC_GROW(argv, i + 1, alloc);
- argv[i++] = git_path("hooks/%s", name);
- while (argv[i-1]) {
- ALLOC_GROW(argv, i + 1, alloc);
- argv[i++] = va_arg(args, const char *);
- }
+ argv_array_push(&argv, git_path("hooks/%s", name));
+ while ((p = va_arg(args, const char *)))
+ argv_array_push(&argv, p);
va_end(args);
memset(&hook, 0, sizeof(hook));
- hook.argv = argv;
+ hook.argv = argv.argv;
hook.no_stdin = 1;
hook.stdout_to_stderr = 1;
if (index_file) {
}
ret = run_command(&hook);
- free(argv);
+ argv_array_clear(&argv);
return ret;
}
--- /dev/null
+#include "cache.h"
+#include "sequencer.h"
+#include "strbuf.h"
+#include "dir.h"
+
+void remove_sequencer_state(int aggressive)
+{
+ struct strbuf seq_dir = STRBUF_INIT;
+ struct strbuf seq_old_dir = STRBUF_INIT;
+
+ strbuf_addf(&seq_dir, "%s", git_path(SEQ_DIR));
+ strbuf_addf(&seq_old_dir, "%s", git_path(SEQ_OLD_DIR));
+ remove_dir_recursively(&seq_old_dir, 0);
+ rename(git_path(SEQ_DIR), git_path(SEQ_OLD_DIR));
+ if (aggressive)
+ remove_dir_recursively(&seq_old_dir, 0);
+ strbuf_release(&seq_dir);
+ strbuf_release(&seq_old_dir);
+}
--- /dev/null
+#ifndef SEQUENCER_H
+#define SEQUENCER_H
+
+#define SEQ_DIR "sequencer"
+#define SEQ_OLD_DIR "sequencer-old"
+#define SEQ_HEAD_FILE "sequencer/head"
+#define SEQ_TODO_FILE "sequencer/todo"
+#define SEQ_OPTS_FILE "sequencer/opts"
+
+/*
+ * Removes SEQ_OLD_DIR and renames SEQ_DIR to SEQ_OLD_DIR, ignoring
+ * any errors. Intended to be used by 'git reset'.
+ *
+ * With the aggressive flag, it additionally removes SEQ_OLD_DIR,
+ * ignoring any errors. Intended to be used by the sequencer's
+ * '--reset' subcommand.
+ */
+void remove_sequencer_state(int aggressive);
+
+#endif
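A minimal usage sketch (illustrative only; the actual call sites live elsewhere in this series):

	#include "sequencer.h"

	/*
	 * Forget an interrupted cherry-pick/revert but keep a backup
	 * under .git/sequencer-old ...
	 */
	remove_sequencer_state(0);

	/* ... or discard the backup as well. */
	remove_sequencer_state(1);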
return pathspec;
}
-const char *pathspec_prefix(const char *prefix, const char **pathspec)
-{
- const char **p, *n, *prev;
- unsigned long max;
-
- if (!pathspec)
- return prefix ? xmemdupz(prefix, strlen(prefix)) : NULL;
-
- prev = NULL;
- max = PATH_MAX;
- for (p = pathspec; (n = *p) != NULL; p++) {
- int i, len = 0;
- for (i = 0; i < max; i++) {
- char c = n[i];
- if (prev && prev[i] != c)
- break;
- if (!c || c == '*' || c == '?')
- break;
- if (c == '/')
- len = i+1;
- }
- prev = n;
- if (len < max) {
- max = len;
- if (!max)
- break;
- }
- }
-
- return max ? xmemdupz(prev, max) : NULL;
-}
-
/*
* Test if it looks like we're at a git directory.
* We want to see:
{
return setup_git_directory_gently(NULL);
}
+
+const char *resolve_gitdir(const char *suspect)
+{
+ if (is_git_directory(suspect))
+ return suspect;
+ return read_gitfile(suspect);
+}
const char *objdir = get_object_directory();
struct alternate_object_database *ent;
struct alternate_object_database *alt;
- /* 43 = 40-byte + 2 '/' + terminating NUL */
- int pfxlen = len;
- int entlen = pfxlen + 43;
- int base_len = -1;
+ int pfxlen, entlen;
+ struct strbuf pathbuf = STRBUF_INIT;
if (!is_absolute_path(entry) && relative_base) {
- /* Relative alt-odb */
- if (base_len < 0)
- base_len = strlen(relative_base) + 1;
- entlen += base_len;
- pfxlen += base_len;
+ strbuf_addstr(&pathbuf, real_path(relative_base));
+ strbuf_addch(&pathbuf, '/');
}
- ent = xmalloc(sizeof(*ent) + entlen);
+ strbuf_add(&pathbuf, entry, len);
- if (!is_absolute_path(entry) && relative_base) {
- memcpy(ent->base, relative_base, base_len - 1);
- ent->base[base_len - 1] = '/';
- memcpy(ent->base + base_len, entry, len);
- }
- else
- memcpy(ent->base, entry, pfxlen);
+ normalize_path_copy(pathbuf.buf, pathbuf.buf);
+
+ pfxlen = strlen(pathbuf.buf);
+
+ /*
+ * The trailing slash after the directory name is given by
+ * this function at the end. Remove duplicates.
+ */
+ while (pfxlen && pathbuf.buf[pfxlen-1] == '/')
+ pfxlen -= 1;
+
+ entlen = pfxlen + 43; /* '/' + 2 hex + '/' + 38 hex + NUL */
+ ent = xmalloc(sizeof(*ent) + entlen);
+ memcpy(ent->base, pathbuf.buf, pfxlen);
+ strbuf_release(&pathbuf);
ent->name = ent->base + pfxlen + 1;
ent->base[pfxlen + 3] = '/';
{
if (name && !namelen)
namelen = strlen(name);
- if (!o) {
- unsigned char sha1[20];
- if (get_sha1_1(name, namelen, sha1))
- return NULL;
- o = parse_object(sha1);
- }
while (1) {
if (!o || (!o->parsed && !parse_object(o->sha1)))
return NULL;
{
strbuf_branchname(sb, name);
if (name[0] == '-')
- return CHECK_REF_FORMAT_ERROR;
+ return -1;
strbuf_splice(sb, 0, 0, "refs/heads/", 11);
- return check_ref_format(sb->buf);
+ return check_refname_format(sb->buf, 0);
}
/*
unsigned char sha1[20];
uint32_t crc;
uint32_t off;
- } *entries = malloc(nr * sizeof(entries[0]));
+ } *entries = xmalloc(nr * sizeof(entries[0]));
for (i = 0; i < nr; i++)
if (fread(entries[i].sha1, 20, 1, stdin) != 1)
die("unable to read sha1 %u/%u", i, nr);
#include "diffcore.h"
#include "refs.h"
#include "string-list.h"
+#include "sha1-array.h"
+#include "argv-array.h"
static struct string_list config_name_for_path;
static struct string_list config_fetch_recurse_submodules_for_name;
static struct string_list config_ignore_for_name;
static int config_fetch_recurse_submodules = RECURSE_SUBMODULES_ON_DEMAND;
static struct string_list changed_submodule_paths;
+static int initialized_fetch_ref_tips;
+static struct sha1_array ref_tips_before_fetch;
+static struct sha1_array ref_tips_after_fetch;
+
/*
* The following flag is set if the .gitmodules file is unmerged. We then
* disable recursion for all submodules where .git/config doesn't have a
}
}
+static int add_sha1_to_array(const char *ref, const unsigned char *sha1,
+ int flags, void *data)
+{
+ sha1_array_append(data, sha1);
+ return 0;
+}
+
void check_for_new_submodule_commits(unsigned char new_sha1[20])
+{
+ if (!initialized_fetch_ref_tips) {
+ for_each_ref(add_sha1_to_array, &ref_tips_before_fetch);
+ initialized_fetch_ref_tips = 1;
+ }
+
+ sha1_array_append(&ref_tips_after_fetch, new_sha1);
+}
+
+static void add_sha1_to_argv(const unsigned char sha1[20], void *data)
+{
+ argv_array_push(data, sha1_to_hex(sha1));
+}
+
+static void calculate_changed_submodule_paths(void)
{
struct rev_info rev;
struct commit *commit;
- const char *argv[] = {NULL, NULL, "--not", "--all", NULL};
- int argc = ARRAY_SIZE(argv) - 1;
+ struct argv_array argv = ARGV_ARRAY_INIT;
/* No need to check if there are no submodules configured */
if (!config_name_for_path.nr)
return;
init_revisions(&rev, NULL);
- argv[1] = xstrdup(sha1_to_hex(new_sha1));
- setup_revisions(argc, argv, &rev, NULL);
+ argv_array_push(&argv, "--"); /* argv[0] program name */
+ sha1_array_for_each_unique(&ref_tips_after_fetch,
+ add_sha1_to_argv, &argv);
+ argv_array_push(&argv, "--not");
+ sha1_array_for_each_unique(&ref_tips_before_fetch,
+ add_sha1_to_argv, &argv);
+ setup_revisions(argv.argc, argv.argv, &rev, NULL);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
parent = parent->next;
}
}
- free((char *)argv[1]);
+
+ argv_array_clear(&argv);
+ sha1_array_clear(&ref_tips_before_fetch);
+ sha1_array_clear(&ref_tips_after_fetch);
+ initialized_fetch_ref_tips = 0;
}
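Roughly speaking, the revision walk prepared above is equivalent to running

	git rev-list <tips after the fetch> --not <tips before the fetch>

so that only commits which became reachable through this fetch are examined for new submodule commits.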
int fetch_populated_submodules(int num_options, const char **options,
cp.git_cmd = 1;
cp.no_stdin = 1;
+ calculate_changed_submodule_paths();
+
for (i = 0; i < active_nr; i++) {
struct strbuf submodule_path = STRBUF_INIT;
struct strbuf submodule_git_dir = STRBUF_INIT;
--- /dev/null
+#!/bin/sh
+
+gpg_version=`gpg --version 2>&1`
+if test $? = 127; then
+ say "You do not seem to have gpg installed"
+else
+ # As said here: http://www.gnupg.org/documentation/faqs.html#q6.19
+ # the gpg version 1.0.6 didn't parse trust packets correctly, so for
+ # that version, creation of signed tags using the generated key fails.
+ case "$gpg_version" in
+ 'gpg (GnuPG) 1.0.6'*)
+ say "Your version of gpg (1.0.6) is too buggy for testing"
+ ;;
+ *)
+ # key generation info: gpg --homedir t/lib-gpg --gen-key
+ # Type DSA and Elgamal, size 2048 bits, no expiration date.
+ # Name and email: C O Mitter <committer@example.com>
+ # No password given, to enable non-interactive operation.
+ cp -R "$TEST_DIRECTORY"/lib-gpg ./gpghome
+ chmod 0700 gpghome
+ GNUPGHOME="$(pwd)/gpghome"
+ export GNUPGHOME
+ test_set_prereq GPG
+ ;;
+ esac
+fi
+
+sanitize_pgp() {
+ perl -ne '
+ /^-----END PGP/ and $in_pgp = 0;
+ print unless $in_pgp;
+ /^-----BEGIN PGP/ and $in_pgp = 1;
+ '
+}
. ./test-lib.sh
attr_check () {
+ path="$1" expect="$2"
- path="$1"
- expect="$2"
-
- git check-attr test -- "$path" >actual 2>err &&
+ git $3 check-attr test -- "$path" >actual 2>err &&
echo "$path: test: $2" >expect &&
test_cmp expect actual &&
test_line_count = 0 err
-
}
test_expect_success 'setup' '
-
mkdir -p a/b/d a/c b &&
(
echo "[attr]notest !test"
echo "onoff test -test"
echo "offon -test test"
echo "no notest"
+ echo "A/e/F test=A/e/F"
) >.gitattributes &&
(
echo "g test=a/g" &&
(
echo "global test=global"
) >"$HOME"/global-gitattributes &&
- cat <<EOF >expect-all
-f: test: f
-a/f: test: f
-a/c/f: test: f
-a/g: test: a/g
-a/b/g: test: a/b/g
-b/g: test: unspecified
-a/b/h: test: a/b/h
-a/b/d/g: test: a/b/d/*
-onoff: test: unset
-offon: test: set
-no: notest: set
-no: test: unspecified
-a/b/d/no: notest: set
-a/b/d/no: test: a/b/d/*
-a/b/d/yes: notest: set
-a/b/d/yes: test: unspecified
-EOF
-
+ cat <<-EOF >expect-all
+ f: test: f
+ a/f: test: f
+ a/c/f: test: f
+ a/g: test: a/g
+ a/b/g: test: a/b/g
+ b/g: test: unspecified
+ a/b/h: test: a/b/h
+ a/b/d/g: test: a/b/d/*
+ onoff: test: unset
+ offon: test: set
+ no: notest: set
+ no: test: unspecified
+ a/b/d/no: notest: set
+ a/b/d/no: test: a/b/d/*
+ a/b/d/yes: notest: set
+ a/b/d/yes: test: unspecified
+ EOF
'
test_expect_success 'command line checks' '
-
test_must_fail git check-attr &&
test_must_fail git check-attr -- &&
test_must_fail git check-attr test &&
echo "f" | test_must_fail git check-attr --stdin -- f &&
echo "f" | test_must_fail git check-attr --stdin test -- f &&
test_must_fail git check-attr "" -- f
-
'
test_expect_success 'attribute test' '
-
attr_check f f &&
attr_check a/f f &&
attr_check a/c/f f &&
attr_check no unspecified &&
attr_check a/b/d/no "a/b/d/*" &&
attr_check a/b/d/yes unspecified
+'
+
+test_expect_success 'attribute matching is case sensitive when core.ignorecase=0' '
+
+ test_must_fail attr_check F f "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/F f "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/c/F f "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/G a/g "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/B/g a/b/g "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/b/G a/b/g "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/b/H a/b/h "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/b/D/g "a/b/d/*" "-c core.ignorecase=0" &&
+ test_must_fail attr_check oNoFf unset "-c core.ignorecase=0" &&
+ test_must_fail attr_check oFfOn set "-c core.ignorecase=0" &&
+ attr_check NO unspecified "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/b/D/NO "a/b/d/*" "-c core.ignorecase=0" &&
+	attr_check a/b/d/YES "a/b/d/*" "-c core.ignorecase=0" &&
+ test_must_fail attr_check a/E/f "A/e/F" "-c core.ignorecase=0"
'
-test_expect_success 'unnormalized paths' '
+test_expect_success 'attribute matching is case insensitive when core.ignorecase=1' '
+
+ attr_check F f "-c core.ignorecase=1" &&
+ attr_check a/F f "-c core.ignorecase=1" &&
+ attr_check a/c/F f "-c core.ignorecase=1" &&
+ attr_check a/G a/g "-c core.ignorecase=1" &&
+ attr_check a/B/g a/b/g "-c core.ignorecase=1" &&
+ attr_check a/b/G a/b/g "-c core.ignorecase=1" &&
+ attr_check a/b/H a/b/h "-c core.ignorecase=1" &&
+ attr_check a/b/D/g "a/b/d/*" "-c core.ignorecase=1" &&
+ attr_check oNoFf unset "-c core.ignorecase=1" &&
+ attr_check oFfOn set "-c core.ignorecase=1" &&
+ attr_check NO unspecified "-c core.ignorecase=1" &&
+ attr_check a/b/D/NO "a/b/d/*" "-c core.ignorecase=1" &&
+ attr_check a/b/d/YES unspecified "-c core.ignorecase=1" &&
+ attr_check a/E/f "A/e/F" "-c core.ignorecase=1"
+
+'
+
+test_expect_success 'check whether FS is case-insensitive' '
+ mkdir junk &&
+ echo good >junk/CamelCase &&
+ echo bad >junk/camelcase &&
+ if test "$(cat junk/CamelCase)" != good
+ then
+ test_set_prereq CASE_INSENSITIVE_FS
+ fi
+'
+
+test_expect_success CASE_INSENSITIVE_FS 'additional case insensitivity tests' '
+ test_must_fail attr_check a/B/D/g "a/b/d/*" "-c core.ignorecase=0" &&
+ test_must_fail attr_check A/B/D/NO "a/b/d/*" "-c core.ignorecase=0" &&
+ attr_check A/b/h a/b/h "-c core.ignorecase=1" &&
+ attr_check a/B/D/g "a/b/d/*" "-c core.ignorecase=1" &&
+ attr_check A/B/D/NO "a/b/d/*" "-c core.ignorecase=1"
+'
+test_expect_success 'unnormalized paths' '
attr_check ./f f &&
attr_check ./a/g a/g &&
attr_check a/./g a/g &&
attr_check a/c/../b/g a/b/g
-
'
test_expect_success 'relative paths' '
-
(cd a && attr_check ../f f) &&
(cd a && attr_check f f) &&
(cd a && attr_check i a/i) &&
(cd b && attr_check ../a/f f) &&
(cd b && attr_check ../a/g a/g) &&
(cd b && attr_check ../a/b/g a/b/g)
-
'
test_expect_success 'core.attributesfile' '
attr_check global global &&
git config core.attributesfile "~/global-gitattributes" &&
attr_check global global &&
- echo "global test=precedence" >> .gitattributes &&
+ echo "global test=precedence" >>.gitattributes &&
attr_check global precedence
'
test_expect_success 'attribute test: read paths from stdin' '
-
- grep -v notest < expect-all > expect &&
- sed -e "s/:.*//" < expect | git check-attr --stdin test > actual &&
+ grep -v notest <expect-all >expect &&
+ sed -e "s/:.*//" <expect | git check-attr --stdin test >actual &&
test_cmp expect actual
'
test_expect_success 'attribute test: --all option' '
+ grep -v unspecified <expect-all | sort >specified-all &&
+ sed -e "s/:.*//" <expect-all | uniq >stdin-all &&
+ git check-attr --stdin --all <stdin-all | sort >actual &&
+ test_cmp specified-all actual
+'
- grep -v unspecified < expect-all | sort > expect &&
- sed -e "s/:.*//" < expect-all | uniq |
- git check-attr --stdin --all | sort > actual &&
- test_cmp expect actual
+test_expect_success 'attribute test: --cached option' '
+ : >empty &&
+ git check-attr --cached --stdin --all <stdin-all | sort >actual &&
+ test_cmp empty actual &&
+ git add .gitattributes a/.gitattributes a/b/.gitattributes &&
+ git check-attr --cached --stdin --all <stdin-all | sort >actual &&
+ test_cmp specified-all actual
'
test_expect_success 'root subdir attribute test' '
-
attr_check a/i a/i &&
attr_check subdir/a/i unspecified
-
'
test_expect_success 'setup bare' '
-
git clone --bare . bare.git &&
cd bare.git
-
'
test_expect_success 'bare repository: check that .gitattribute is ignored' '
-
(
echo "f test=f"
echo "a/i test=a/i"
attr_check a/c/f unspecified &&
attr_check a/i unspecified &&
attr_check subdir/a/i unspecified
+'
+test_expect_success 'bare repository: check that --cached honors index' '
+ GIT_INDEX_FILE=../.git/index \
+ git check-attr --cached --stdin --all <../stdin-all |
+ sort >actual &&
+ test_cmp ../specified-all actual
'
test_expect_success 'bare repository: test info/attributes' '
-
(
echo "f test=f"
echo "a/i test=a/i"
attr_check a/c/f f &&
attr_check a/i a/i &&
attr_check subdir/a/i unspecified
-
'
test_done
check_parse 2008-02-14 bad
check_parse '2008-02-14 20:30:45' '2008-02-14 20:30:45 +0000'
check_parse '2008-02-14 20:30:45 -0500' '2008-02-14 20:30:45 -0500'
+check_parse '2008-02-14 20:30:45 -0015' '2008-02-14 20:30:45 -0015'
+check_parse '2008-02-14 20:30:45 -5' '2008-02-14 20:30:45 +0000'
+check_parse '2008-02-14 20:30:45 -5:' '2008-02-14 20:30:45 +0000'
+check_parse '2008-02-14 20:30:45 -05' '2008-02-14 20:30:45 -0500'
+check_parse '2008-02-14 20:30:45 -:30' '2008-02-14 20:30:45 +0000'
+check_parse '2008-02-14 20:30:45 -05:00' '2008-02-14 20:30:45 -0500'
check_parse '2008-02-14 20:30:45' '2008-02-14 20:30:45 -0500' EST5
check_approxidate() {
test_expect_success 'long options' '
test-parse-options --boolean --integer 1729 --boolean --string2=321 \
--verbose --verbose --no-dry-run --abbrev=10 --file fi.le\
- > output 2> output.err &&
+ --obsolete > output 2> output.err &&
test ! -s output.err &&
test_cmp expect output
'
test_cmp empty result
'
+test_expect_success 'print errors when failed to update worktree' '
+ echo sub >.git/info/sparse-checkout &&
+ git checkout -f init &&
+ mkdir sub &&
+ touch sub/added sub/addedtoo &&
+ test_must_fail git checkout top 2>actual &&
+ cat >expected <<\EOF &&
+error: The following untracked working tree files would be overwritten by checkout:
+ sub/added
+ sub/addedtoo
+Please move or remove them before you can switch branches.
+Aborting
+EOF
+ test_cmp expected actual
+'
+
test_done
'git config --get-regexp novalue > output &&
cmp output expect'
+echo 'novalue.variable true' > expect
+
+test_expect_success 'get-regexp --bool variable with no value' \
+ 'git config --bool --get-regexp novalue > output &&
+ cmp output expect'
+
echo 'emptyvalue.variable ' > expect
test_expect_success 'get-regexp variable with empty value' \
test_set_prereq SETFACL
fi
+if test -z "$LOGNAME"
+then
+ LOGNAME=$USER
+fi
+
check_perms_and_acl () {
test -r "$1" &&
getfacl "$1" > actual &&
. ./test-lib.sh
valid_ref() {
- test_expect_success "ref name '$1' is valid" \
- "git check-ref-format '$1'"
+ if test "$#" = 1
+ then
+ test_expect_success "ref name '$1' is valid" \
+ "git check-ref-format '$1'"
+ else
+ test_expect_success "ref name '$1' is valid with options $2" \
+ "git check-ref-format $2 '$1'"
+ fi
}
invalid_ref() {
- test_expect_success "ref name '$1' is not valid" \
- "test_must_fail git check-ref-format '$1'"
+ if test "$#" = 1
+ then
+ test_expect_success "ref name '$1' is invalid" \
+ "test_must_fail git check-ref-format '$1'"
+ else
+ test_expect_success "ref name '$1' is invalid with options $2" \
+ "test_must_fail git check-ref-format $2 '$1'"
+ fi
}
-valid_ref 'heads/foo'
-invalid_ref 'foo'
+invalid_ref ''
+invalid_ref '/'
+invalid_ref '/' --allow-onelevel
+invalid_ref '/' --normalize
+invalid_ref '/' '--allow-onelevel --normalize'
valid_ref 'foo/bar/baz'
-valid_ref 'refs///heads/foo'
+valid_ref 'foo/bar/baz' --normalize
+invalid_ref 'refs///heads/foo'
+valid_ref 'refs///heads/foo' --normalize
invalid_ref 'heads/foo/'
-valid_ref '/heads/foo'
-valid_ref '///heads/foo'
-invalid_ref '/foo'
+invalid_ref '/heads/foo'
+valid_ref '/heads/foo' --normalize
+invalid_ref '///heads/foo'
+valid_ref '///heads/foo' --normalize
invalid_ref './foo'
+invalid_ref './foo/bar'
+invalid_ref 'foo/./bar'
+invalid_ref 'foo/bar/.'
invalid_ref '.refs/foo'
invalid_ref 'heads/foo..bar'
invalid_ref 'heads/foo?bar'
valid_ref 'foo./bar'
invalid_ref 'heads/foo.lock'
+invalid_ref 'heads///foo.lock'
+invalid_ref 'foo.lock/bar'
+invalid_ref 'foo.lock///bar'
valid_ref 'heads/foo@bar'
invalid_ref 'heads/v@{ation'
invalid_ref 'heads/foo\bar'
invalid_ref "$(printf 'heads/foo\t')"
invalid_ref "$(printf 'heads/foo\177')"
valid_ref "$(printf 'heads/fu\303\237')"
+invalid_ref 'heads/*foo/bar' --refspec-pattern
+invalid_ref 'heads/foo*/bar' --refspec-pattern
+invalid_ref 'heads/f*o/bar' --refspec-pattern
+
+ref='foo'
+invalid_ref "$ref"
+valid_ref "$ref" --allow-onelevel
+invalid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+invalid_ref "$ref" --normalize
+valid_ref "$ref" '--allow-onelevel --normalize'
+
+ref='foo/bar'
+valid_ref "$ref"
+valid_ref "$ref" --allow-onelevel
+valid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+valid_ref "$ref" --normalize
+
+ref='foo/*'
+invalid_ref "$ref"
+invalid_ref "$ref" --allow-onelevel
+valid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='*/foo'
+invalid_ref "$ref"
+invalid_ref "$ref" --allow-onelevel
+valid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+invalid_ref "$ref" --normalize
+valid_ref "$ref" '--refspec-pattern --normalize'
+
+ref='foo/*/bar'
+invalid_ref "$ref"
+invalid_ref "$ref" --allow-onelevel
+valid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='*'
+invalid_ref "$ref"
+invalid_ref "$ref" --allow-onelevel
+invalid_ref "$ref" --refspec-pattern
+valid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='foo/*/*'
+invalid_ref "$ref" --refspec-pattern
+invalid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='*/foo/*'
+invalid_ref "$ref" --refspec-pattern
+invalid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='*/*/foo'
+invalid_ref "$ref" --refspec-pattern
+invalid_ref "$ref" '--refspec-pattern --allow-onelevel'
+
+ref='/foo'
+invalid_ref "$ref"
+invalid_ref "$ref" --allow-onelevel
+invalid_ref "$ref" --refspec-pattern
+invalid_ref "$ref" '--refspec-pattern --allow-onelevel'
+invalid_ref "$ref" --normalize
+valid_ref "$ref" '--allow-onelevel --normalize'
+invalid_ref "$ref" '--refspec-pattern --normalize'
+valid_ref "$ref" '--refspec-pattern --allow-onelevel --normalize'
test_expect_success "check-ref-format --branch @{-1}" '
T=$(git write-tree) &&
valid_ref_normalized() {
test_expect_success "ref name '$1' simplifies to '$2'" "
- refname=\$(git check-ref-format --print '$1') &&
+ refname=\$(git check-ref-format --normalize '$1') &&
test \"\$refname\" = '$2'"
}
invalid_ref_normalized() {
- test_expect_success "check-ref-format --print rejects '$1'" "
- test_must_fail git check-ref-format --print '$1'"
+ test_expect_success "check-ref-format --normalize rejects '$1'" "
+ test_must_fail git check-ref-format --normalize '$1'"
}
valid_ref_normalized 'heads/foo' 'heads/foo'
invalid_ref_normalized 'heads/foo/../bar'
invalid_ref_normalized 'heads/./foo'
invalid_ref_normalized 'heads\foo'
+invalid_ref_normalized 'heads/foo.lock'
+invalid_ref_normalized 'heads///foo.lock'
+invalid_ref_normalized 'foo.lock/bar'
+invalid_ref_normalized 'foo.lock///bar'
test_done
}
ORPHAN_WARNING='you are leaving .* commit.*behind'
+PREV_HEAD_DESC='Previous HEAD position was'
check_orphan_warning() {
- test_i18ngrep "$ORPHAN_WARNING" "$1"
+ test_i18ngrep "$ORPHAN_WARNING" "$1" &&
+ test_i18ngrep ! "$PREV_HEAD_DESC" "$1"
}
check_no_orphan_warning() {
- test_i18ngrep ! "$ORPHAN_WARNING" "$1"
+ test_i18ngrep ! "$ORPHAN_WARNING" "$1" &&
+ test_i18ngrep "$PREV_HEAD_DESC" "$1"
}
reset () {
--- /dev/null
+#!/bin/sh
+
+test_description='checkout $tree -- $paths'
+. ./test-lib.sh
+
+test_expect_success setup '
+ mkdir dir &&
+ >dir/master &&
+ echo common >dir/common &&
+ git add dir/master dir/common &&
+ test_tick && git commit -m "master has dir/master" &&
+ git checkout -b next &&
+ git mv dir/master dir/next0 &&
+ echo next >dir/next1 &&
+ git add dir &&
+ test_tick && git commit -m "next has dir/next but not dir/master"
+'
+
+test_expect_success 'checking out paths out of a tree does not clobber unrelated paths' '
+ git checkout next &&
+ git reset --hard &&
+ rm dir/next0 &&
+ cat dir/common >expect.common &&
+ echo modified >expect.next1 &&
+ cat expect.next1 >dir/next1 &&
+ echo untracked >expect.next2 &&
+ cat expect.next2 >dir/next2 &&
+
+ git checkout master dir &&
+
+ test_cmp expect.common dir/common &&
+ test_path_is_file dir/master &&
+ git diff --exit-code master dir/master &&
+
+ test_path_is_missing dir/next0 &&
+ test_cmp expect.next1 dir/next1 &&
+ test_path_is_file dir/next2 &&
+ test_must_fail git ls-files --error-unmatch dir/next2 &&
+ test_cmp expect.next2 dir/next2
+'
+
+test_done
test_cmp expected3 output
'
+test_expect_success SYMLINKS 'ls-files --others with symlinked submodule' '
+ git init super &&
+ git init sub &&
+ (
+ cd sub &&
+ >a &&
+ git add a &&
+ git commit -m sub &&
+ git pack-refs --all
+ ) &&
+ (
+ cd super &&
+		"$TEST_DIRECTORY/../contrib/workdir/git-new-workdir" ../sub sub &&
+ git ls-files --others --exclude-standard >../actual
+ ) &&
+ echo sub/ >expect &&
+ test_cmp expect actual
+'
+
test_done
test_expect_success \
'git branch --help should not have created a bogus branch' '
git branch --help </dev/null >/dev/null 2>/dev/null;
- ! test -f .git/refs/heads/--help
+ test_path_is_missing .git/refs/heads/--help
'
test_expect_success 'branch -h in broken repository' '
test_expect_success \
'git branch abc should create a branch' \
- 'git branch abc && test -f .git/refs/heads/abc'
+ 'git branch abc && test_path_is_file .git/refs/heads/abc'
test_expect_success \
'git branch a/b/c should create a branch' \
- 'git branch a/b/c && test -f .git/refs/heads/a/b/c'
+ 'git branch a/b/c && test_path_is_file .git/refs/heads/a/b/c'
cat >expect <<EOF
$_z40 $HEAD $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117150200 +0000 branch: Created from master
'git branch -l d/e/f should create a branch and a log' \
'GIT_COMMITTER_DATE="2005-05-26 23:30" \
git branch -l d/e/f &&
- test -f .git/refs/heads/d/e/f &&
- test -f .git/logs/refs/heads/d/e/f &&
+ test_path_is_file .git/refs/heads/d/e/f &&
+ test_path_is_file .git/logs/refs/heads/d/e/f &&
test_cmp expect .git/logs/refs/heads/d/e/f'
test_expect_success \
'git branch -d d/e/f should delete a branch and a log' \
'git branch -d d/e/f &&
- test ! -f .git/refs/heads/d/e/f &&
- test ! -f .git/logs/refs/heads/d/e/f'
+ test_path_is_missing .git/refs/heads/d/e/f &&
+ test_path_is_missing .git/logs/refs/heads/d/e/f'
test_expect_success \
'git branch j/k should work after branch j has been deleted' \
'git branch -m m m/m should work' \
'git branch -l m &&
git branch -m m m/m &&
- test -f .git/logs/refs/heads/m/m'
+ test_path_is_file .git/logs/refs/heads/m/m'
test_expect_success \
'git branch -m n/n n should work' \
'git branch -l n/n &&
 	git branch -m n/n n &&
- test -f .git/logs/refs/heads/n'
+ test_path_is_file .git/logs/refs/heads/n'
test_expect_success 'git branch -m o/o o should fail when o/p exists' '
git branch o/o &&
git branch -M baz bam
'
+test_expect_success 'git branch -v -d t should work' '
+ git branch t &&
+ test_path_is_file .git/refs/heads/t &&
+ git branch -v -d t &&
+ test_path_is_missing .git/refs/heads/t
+'
+
+test_expect_success 'git branch -v -m t s should work' '
+ git branch t &&
+ test_path_is_file .git/refs/heads/t &&
+ git branch -v -m t s &&
+ test_path_is_missing .git/refs/heads/t &&
+ test_path_is_file .git/refs/heads/s &&
+ git branch -d s
+'
+
+test_expect_success 'git branch -m -d t s should fail' '
+ git branch t &&
+ test_path_is_file .git/refs/heads/t &&
+ test_must_fail git branch -m -d t s &&
+ git branch -d t &&
+ test_path_is_missing .git/refs/heads/t
+'
+
+test_expect_success 'git branch --list -d t should fail' '
+ git branch t &&
+ test_path_is_file .git/refs/heads/t &&
+ test_must_fail git branch --list -d t &&
+ git branch -d t &&
+ test_path_is_missing .git/refs/heads/t
+'
+
mv .git/config .git/config-saved
test_expect_success 'git branch -m q q2 without config should succeed' '
test_expect_success \
'git branch -m s/s s should work when s/t is deleted' \
'git branch -l s/s &&
- test -f .git/logs/refs/heads/s/s &&
+ test_path_is_file .git/logs/refs/heads/s/s &&
git branch -l s/t &&
- test -f .git/logs/refs/heads/s/t &&
+ test_path_is_file .git/logs/refs/heads/s/t &&
git branch -d s/t &&
git branch -m s/s s &&
- test -f .git/logs/refs/heads/s'
+ test_path_is_file .git/logs/refs/heads/s'
test_expect_success 'config information was renamed, too' \
"test $(git config branch.s.dummy) = Hello &&
git symbolic-ref refs/heads/master2 refs/heads/master &&
test_must_fail git branch -m master2 master3 &&
git symbolic-ref refs/heads/master2 &&
- test -f .git/refs/heads/master &&
- ! test -f .git/refs/heads/master3
+ test_path_is_file .git/refs/heads/master &&
+ test_path_is_missing .git/refs/heads/master3
'
test_expect_success SYMLINKS \
'git checkout -b g/h/i -l should create a branch and a log' \
'GIT_COMMITTER_DATE="2005-05-26 23:30" \
git checkout -b g/h/i -l master &&
- test -f .git/refs/heads/g/h/i &&
- test -f .git/logs/refs/heads/g/h/i &&
+ test_path_is_file .git/refs/heads/g/h/i &&
+ test_path_is_file .git/logs/refs/heads/g/h/i &&
test_cmp expect .git/logs/refs/heads/g/h/i'
test_expect_success 'checkout -b makes reflog by default' '
test_cmp expect actual
'
+test_expect_success 'git branch --list shows local branches' '
+ git branch --list >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+ branch-one
+ branch-two
+EOF
+test_expect_success 'git branch --list pattern shows matching local branches' '
+ git branch --list branch* >actual &&
+ test_cmp expect actual
+'
+
cat >expect <<'EOF'
origin/HEAD -> origin/branch-one
origin/branch-one
test_cmp expect actual
'
+cat >expect <<'EOF'
+two
+one
+EOF
+test_expect_success 'git branch --list -v pattern shows branch summaries' '
+ git branch --list -v branch* >tmp &&
+ awk "{print \$NF}" <tmp >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'git branch -v pattern does not show branch summaries' '
+ test_must_fail git branch -v branch*
+'
+
cat >expect <<'EOF'
* (no branch)
branch-one
git rebase --abort
'
+test_expect_success 'clean error after failed "exec"' '
+ test_tick &&
+ test_when_finished "git rebase --abort || :" &&
+ (
+ FAKE_LINES="1 exec_false" &&
+ export FAKE_LINES &&
+ test_must_fail git rebase -i HEAD^
+ ) &&
+ echo "edited again" > file7 &&
+ git add file7 &&
+ test_must_fail git rebase --continue 2>error &&
+ grep "You have staged changes in your working tree." error
+'
+
test_expect_success 'rebase a detached HEAD' '
grandparent=$(git rev-parse HEAD~2) &&
git checkout $(git rev-parse HEAD) &&
--- /dev/null
+#!/bin/sh
+
+test_description='Test cherry-pick continuation features
+
+ + anotherpick: rewrites foo to d
+ + picked: rewrites foo to c
+ + unrelatedpick: rewrites unrelated to reallyunrelated
+ + base: rewrites foo to b
+ + initial: writes foo as a, unrelated as unrelated
+
+'
+
+. ./test-lib.sh
+
+pristine_detach () {
+ git cherry-pick --reset &&
+ git checkout -f "$1^0" &&
+ git read-tree -u --reset HEAD &&
+ git clean -d -f -f -q -x
+}
+
+test_expect_success setup '
+ echo unrelated >unrelated &&
+ git add unrelated &&
+ test_commit initial foo a &&
+ test_commit base foo b &&
+ test_commit unrelatedpick unrelated reallyunrelated &&
+ test_commit picked foo c &&
+ test_commit anotherpick foo d &&
+ git config advice.detachedhead false
+
+'
+
+test_expect_success 'cherry-pick persists data on failure' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick -s base..anotherpick &&
+ test_path_is_dir .git/sequencer &&
+ test_path_is_file .git/sequencer/head &&
+ test_path_is_file .git/sequencer/todo &&
+ test_path_is_file .git/sequencer/opts
+'
+
+test_expect_success 'cherry-pick persists opts correctly' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick -s -m 1 --strategy=recursive -X patience -X ours base..anotherpick &&
+ test_path_is_dir .git/sequencer &&
+ test_path_is_file .git/sequencer/head &&
+ test_path_is_file .git/sequencer/todo &&
+ test_path_is_file .git/sequencer/opts &&
+ echo "true" >expect &&
+ git config --file=.git/sequencer/opts --get-all options.signoff >actual &&
+ test_cmp expect actual &&
+ echo "1" >expect &&
+ git config --file=.git/sequencer/opts --get-all options.mainline >actual &&
+ test_cmp expect actual &&
+ echo "recursive" >expect &&
+ git config --file=.git/sequencer/opts --get-all options.strategy >actual &&
+ test_cmp expect actual &&
+ cat >expect <<-\EOF &&
+ patience
+ ours
+ EOF
+ git config --file=.git/sequencer/opts --get-all options.strategy-option >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'cherry-pick cleans up sequencer state upon success' '
+ pristine_detach initial &&
+ git cherry-pick initial..picked &&
+ test_path_is_missing .git/sequencer
+'
+
+test_expect_success '--reset does not complain when no cherry-pick is in progress' '
+ pristine_detach initial &&
+ git cherry-pick --reset
+'
+
+test_expect_success '--reset cleans up sequencer state' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..picked &&
+ git cherry-pick --reset &&
+ test_path_is_missing .git/sequencer
+'
+
+test_expect_success 'cherry-pick cleans up sequencer state when one commit is left' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..picked &&
+ test_path_is_missing .git/sequencer &&
+ echo "resolved" >foo &&
+ git add foo &&
+ git commit &&
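+	# normalize object names to OBJID so the history can be compared literally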
+ {
+ git rev-list HEAD |
+ git diff-tree --root --stdin |
+ sed "s/$_x40/OBJID/g"
+ } >actual &&
+ cat >expect <<-\EOF &&
+ OBJID
+ :100644 100644 OBJID OBJID M foo
+ OBJID
+ :100644 100644 OBJID OBJID M unrelated
+ OBJID
+ :000000 100644 OBJID OBJID A foo
+ :000000 100644 OBJID OBJID A unrelated
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success 'cherry-pick does not implicitly stomp an existing operation' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ test-chmtime -v +0 .git/sequencer >expect &&
+ test_must_fail git cherry-pick unrelatedpick &&
+ test-chmtime -v +0 .git/sequencer >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success '--continue complains when no cherry-pick is in progress' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick --continue
+'
+
+test_expect_success '--continue complains when there are unresolved conflicts' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ test_must_fail git cherry-pick --continue
+'
+
+test_expect_success '--continue continues after conflicts are resolved' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ echo "c" >foo &&
+ git add foo &&
+ git commit &&
+ git cherry-pick --continue &&
+ test_path_is_missing .git/sequencer &&
+ {
+ git rev-list HEAD |
+ git diff-tree --root --stdin |
+ sed "s/$_x40/OBJID/g"
+ } >actual &&
+ cat >expect <<-\EOF &&
+ OBJID
+ :100644 100644 OBJID OBJID M foo
+ OBJID
+ :100644 100644 OBJID OBJID M foo
+ OBJID
+ :100644 100644 OBJID OBJID M unrelated
+ OBJID
+ :000000 100644 OBJID OBJID A foo
+ :000000 100644 OBJID OBJID A unrelated
+ EOF
+ test_cmp expect actual
+'
+
+test_expect_success '--continue respects opts' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick -x base..anotherpick &&
+ echo "c" >foo &&
+ git add foo &&
+ git commit &&
+ git cherry-pick --continue &&
+ test_path_is_missing .git/sequencer &&
+ git cat-file commit HEAD >anotherpick_msg &&
+ git cat-file commit HEAD~1 >picked_msg &&
+ git cat-file commit HEAD~2 >unrelatedpick_msg &&
+ git cat-file commit HEAD~3 >initial_msg &&
+ test_must_fail grep "cherry picked from" initial_msg &&
+ grep "cherry picked from" unrelatedpick_msg &&
+ grep "cherry picked from" picked_msg &&
+ grep "cherry picked from" anotherpick_msg
+'
+
+test_expect_success '--signoff is not automatically propagated to resolved conflict' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick --signoff base..anotherpick &&
+ echo "c" >foo &&
+ git add foo &&
+ git commit &&
+ git cherry-pick --continue &&
+ test_path_is_missing .git/sequencer &&
+ git cat-file commit HEAD >anotherpick_msg &&
+ git cat-file commit HEAD~1 >picked_msg &&
+ git cat-file commit HEAD~2 >unrelatedpick_msg &&
+ git cat-file commit HEAD~3 >initial_msg &&
+ test_must_fail grep "Signed-off-by:" initial_msg &&
+ grep "Signed-off-by:" unrelatedpick_msg &&
+ test_must_fail grep "Signed-off-by:" picked_msg &&
+ grep "Signed-off-by:" anotherpick_msg
+'
+
+test_expect_success 'malformed instruction sheet 1' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ echo "resolved" >foo &&
+ git add foo &&
+ git commit &&
+ sed "s/pick /pick/" .git/sequencer/todo >new_sheet &&
+ cp new_sheet .git/sequencer/todo &&
+ test_must_fail git cherry-pick --continue
+'
+
+test_expect_success 'malformed instruction sheet 2' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ echo "resolved" >foo &&
+ git add foo &&
+ git commit &&
+ sed "s/pick/revert/" .git/sequencer/todo >new_sheet &&
+ cp new_sheet .git/sequencer/todo &&
+ test_must_fail git cherry-pick --continue
+'
+
+test_done
echo 3 > file &&
test_tick &&
echo 1 > file2 &&
+ mkdir untracked &&
+ echo untracked >untracked/untracked &&
git stash --include-untracked &&
git diff-files --quiet &&
git diff-index --cached --quiet HEAD
'
cat > expect <<EOF
+?? actual
?? expect
-?? output
EOF
test_expect_success 'stash save --include-untracked cleaned the untracked files' '
- git status --porcelain > output
- test_cmp output expect
+ git status --porcelain >actual &&
+ test_cmp expect actual
'
cat > expect.diff <<EOF
+++ b/file2
@@ -0,0 +1 @@
+1
+diff --git a/untracked/untracked b/untracked/untracked
+new file mode 100644
+index 0000000..5a72eb2
+--- /dev/null
++++ b/untracked/untracked
+@@ -0,0 +1 @@
++untracked
EOF
cat > expect.lstree <<EOF
file2
+untracked
EOF
test_expect_success 'stash save --include-untracked stashed the untracked files' '
test "!" -f file2 &&
- git diff HEAD..stash^3 -- file2 > output &&
- test_cmp output expect.diff &&
- git ls-tree --name-only stash^3: > output &&
- test_cmp output expect.lstree
+ test ! -e untracked &&
+ git diff HEAD stash^3 -- file2 untracked >actual &&
+ test_cmp expect.diff actual &&
+ git ls-tree --name-only stash^3: >actual &&
+ test_cmp expect.lstree actual
'
test_expect_success 'stash save --patch --include-untracked fails' '
test_must_fail git stash --patch --include-untracked
cat > expect <<EOF
M file
+?? actual
?? expect
?? file2
-?? output
+?? untracked/
EOF
test_expect_success 'stash pop after save --include-untracked leaves files untracked again' '
git stash pop &&
- git status --porcelain > output
- test_cmp output expect
+ git status --porcelain >actual &&
+ test_cmp expect actual &&
+ test "1" = "`cat file2`" &&
+ test untracked = "`cat untracked/untracked`"
'
-git clean --force --quiet
+git clean --force --quiet -d
test_expect_success 'stash save -u dirty index' '
echo 4 > file3 &&
test_expect_success 'stash save --include-untracked dirty index got stashed' '
git stash pop --index &&
- git diff --cached > output &&
- test_cmp output expect
+ git diff --cached >actual &&
+ test_cmp expect actual
'
git reset > /dev/null
cat > .gitignore <<EOF
.gitignore
ignored
+ignored.d/
EOF
test_expect_success 'stash save --include-untracked respects .gitignore' '
echo ignored > ignored &&
+ mkdir ignored.d &&
+ echo ignored >ignored.d/untracked &&
git stash -u &&
test -s ignored &&
+ test -s ignored.d/untracked &&
test -s .gitignore
'
test_expect_success 'stash save -u can stash with only untracked files different' '
echo 4 > file4 &&
- git stash -u
+ git stash -u &&
test "!" -f file4
'
test_expect_success 'stash save --all does not respect .gitignore' '
git stash -a &&
test "!" -f ignored &&
+ test "!" -e ignored.d &&
test "!" -f .gitignore
'
test_expect_success 'stash save --all is stash poppable' '
git stash pop &&
test -s ignored &&
+ test -s ignored.d/untracked &&
test -s .gitignore
'
test_expect_exists worktree/ignored-by-tree
test_expect_missing worktree/ignored-by-worktree
+test_expect_success 'git archive --worktree-attributes option' '
+ git archive --worktree-attributes --worktree-attributes HEAD >worktree.tar &&
+ (mkdir worktree2 && cd worktree2 && "$TAR" xf -) <worktree.tar
+'
+
+test_expect_missing worktree2/ignored
+test_expect_exists worktree2/ignored-by-tree
+test_expect_missing worktree2/ignored-by-worktree
+
test_expect_success 'git archive vs. bare' '
(cd bare && git archive HEAD) >bare-archive.tar &&
test_cmp archive.tar bare-archive.tar
done
test_expect_success 'post-checkout runs as expected ' '
- GIT_DIR=clone1/.git git checkout master &&
- test -e clone1/.git/post-checkout.args
+ GIT_DIR=clone1/.git git checkout master &&
+ test -e clone1/.git/post-checkout.args
'
test_expect_success 'post-checkout receives the right arguments with HEAD unchanged ' '
- old=$(awk "{print \$1}" clone1/.git/post-checkout.args) &&
- new=$(awk "{print \$2}" clone1/.git/post-checkout.args) &&
- flag=$(awk "{print \$3}" clone1/.git/post-checkout.args) &&
- test $old = $new -a $flag = 1
+ old=$(awk "{print \$1}" clone1/.git/post-checkout.args) &&
+ new=$(awk "{print \$2}" clone1/.git/post-checkout.args) &&
+ flag=$(awk "{print \$3}" clone1/.git/post-checkout.args) &&
+ test $old = $new -a $flag = 1
'
test_expect_success 'post-checkout runs as expected ' '
- GIT_DIR=clone1/.git git checkout master &&
- test -e clone1/.git/post-checkout.args
+ GIT_DIR=clone1/.git git checkout master &&
+ test -e clone1/.git/post-checkout.args
'
test_expect_success 'post-checkout args are correct with git checkout -b ' '
- GIT_DIR=clone1/.git git checkout -b new1 &&
- old=$(awk "{print \$1}" clone1/.git/post-checkout.args) &&
- new=$(awk "{print \$2}" clone1/.git/post-checkout.args) &&
- flag=$(awk "{print \$3}" clone1/.git/post-checkout.args) &&
- test $old = $new -a $flag = 1
+ GIT_DIR=clone1/.git git checkout -b new1 &&
+ old=$(awk "{print \$1}" clone1/.git/post-checkout.args) &&
+ new=$(awk "{print \$2}" clone1/.git/post-checkout.args) &&
+ flag=$(awk "{print \$3}" clone1/.git/post-checkout.args) &&
+ test $old = $new -a $flag = 1
'
test_expect_success 'post-checkout receives the right args with HEAD changed ' '
- GIT_DIR=clone2/.git git checkout new2 &&
- old=$(awk "{print \$1}" clone2/.git/post-checkout.args) &&
- new=$(awk "{print \$2}" clone2/.git/post-checkout.args) &&
- flag=$(awk "{print \$3}" clone2/.git/post-checkout.args) &&
- test $old != $new -a $flag = 1
+ GIT_DIR=clone2/.git git checkout new2 &&
+ old=$(awk "{print \$1}" clone2/.git/post-checkout.args) &&
+ new=$(awk "{print \$2}" clone2/.git/post-checkout.args) &&
+ flag=$(awk "{print \$3}" clone2/.git/post-checkout.args) &&
+ test $old != $new -a $flag = 1
'
test_expect_success 'post-checkout receives the right args when not switching branches ' '
- GIT_DIR=clone2/.git git checkout master b &&
- old=$(awk "{print \$1}" clone2/.git/post-checkout.args) &&
- new=$(awk "{print \$2}" clone2/.git/post-checkout.args) &&
- flag=$(awk "{print \$3}" clone2/.git/post-checkout.args) &&
- test $old = $new -a $flag = 0
+ GIT_DIR=clone2/.git git checkout master b &&
+ old=$(awk "{print \$1}" clone2/.git/post-checkout.args) &&
+ new=$(awk "{print \$2}" clone2/.git/post-checkout.args) &&
+ flag=$(awk "{print \$3}" clone2/.git/post-checkout.args) &&
+ test $old = $new -a $flag = 0
'
if test "$(git config --bool core.filemode)" = true; then
--- /dev/null
+#!/bin/sh
+
+test_description='fetch/receive strict mode'
+. ./test-lib.sh
+
+test_expect_success setup '
+ echo hello >greetings &&
+ git add greetings &&
+ git commit -m greetings &&
+
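+	# Corrupt the repository: overwrite the loose object file of the
+	# "greetings" blob with the unrelated "bye" blob, so the object no
+	# longer matches its name.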
+ S=$(git rev-parse :greetings | sed -e "s|^..|&/|") &&
+ X=$(echo bye | git hash-object -w --stdin | sed -e "s|^..|&/|") &&
+ mv -f .git/objects/$X .git/objects/$S &&
+
+ test_must_fail git fsck
+'
+
+test_expect_success 'fetch without strict' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config fetch.fsckobjects false &&
+ git config transfer.fsckobjects false &&
+ test_must_fail git fetch ../.git master
+ )
+'
+
+test_expect_success 'fetch with !fetch.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config fetch.fsckobjects false &&
+ git config transfer.fsckobjects true &&
+ test_must_fail git fetch ../.git master
+ )
+'
+
+test_expect_success 'fetch with fetch.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config fetch.fsckobjects true &&
+ git config transfer.fsckobjects false &&
+ test_must_fail git fetch ../.git master
+ )
+'
+
+test_expect_success 'fetch with transfer.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config transfer.fsckobjects true &&
+ test_must_fail git fetch ../.git master
+ )
+'
+
+test_expect_success 'push without strict' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config fetch.fsckobjects false &&
+ git config transfer.fsckobjects false
+ ) &&
+ git push dst master:refs/heads/test
+'
+
+test_expect_success 'push with !receive.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config receive.fsckobjects false &&
+ git config transfer.fsckobjects true
+ ) &&
+ git push dst master:refs/heads/test
+'
+
+test_expect_success 'push with receive.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config receive.fsckobjects true &&
+ git config transfer.fsckobjects false
+ ) &&
+ test_must_fail git push dst master:refs/heads/test
+'
+	#should not need these lines
+test_expect_success 'push with transfer.fsckobjects' '
+ rm -rf dst &&
+ git init dst &&
+ (
+ cd dst &&
+ git config transfer.fsckobjects true
+ ) &&
+ test_must_fail git push dst master:refs/heads/test
+'
+
+test_done
'
+test_expect_success 'rename does not update a non-default fetch refspec' '
+
+ git clone one four.one &&
+ (cd four.one &&
+ git config remote.origin.fetch +refs/heads/*:refs/heads/origin/* &&
+ git remote rename origin upstream &&
+ test "$(git config remote.upstream.fetch)" = "+refs/heads/*:refs/heads/origin/*" &&
+ git rev-parse -q origin/master)
+
+'
+
+test_expect_success 'rename a remote with name part of fetch spec' '
+
+ git clone one four.two &&
+ (cd four.two &&
+ git remote rename origin remote &&
+ git remote rename remote upstream &&
+ test "$(git config remote.upstream.fetch)" = "+refs/heads/*:refs/remotes/upstream/*")
+
+'
+
+test_expect_success 'rename a remote with name prefix of other remote' '
+
+ git clone one four.three &&
+ (cd four.three &&
+ git remote add o git://example.com/repo.git &&
+ git remote rename o upstream &&
+ test "$(git rev-parse origin/master)" = "$(git rev-parse master)")
+
+'
+
cat > remotes_origin << EOF
URL: $(pwd)/one
Push: refs/heads/master:refs/heads/upstream
)
}
+mk_test_with_hooks() {
+ mk_test "$@" &&
+ (
+ cd testrepo &&
+ mkdir .git/hooks &&
+ cd .git/hooks &&
+
+ cat >pre-receive <<-'EOF' &&
+ #!/bin/sh
+ cat - >>pre-receive.actual
+ EOF
+
+ cat >update <<-'EOF' &&
+ #!/bin/sh
+ printf "%s %s %s\n" "$@" >>update.actual
+ EOF
+
+ cat >post-receive <<-'EOF' &&
+ #!/bin/sh
+ cat - >>post-receive.actual
+ EOF
+
+ cat >post-update <<-'EOF' &&
+ #!/bin/sh
+ for ref in "$@"
+ do
+ printf "%s\n" "$ref" >>post-update.actual
+ done
+ EOF
+
+ chmod +x pre-receive update post-receive post-update
+ )
+}
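+# The hooks installed by mk_test_with_hooks append whatever they receive to
+# *.actual files, which the tests below compare against generated *.expect files.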
+
mk_child() {
rm -rf "$1" &&
git clone testrepo "$1"
'
+test_expect_success 'pushing valid refs triggers post-receive and post-update hooks' '
+ mk_test_with_hooks heads/master heads/next &&
+ orgmaster=$(cd testrepo && git show-ref -s --verify refs/heads/master) &&
+ newmaster=$(git show-ref -s --verify refs/heads/master) &&
+ orgnext=$(cd testrepo && git show-ref -s --verify refs/heads/next) &&
+ newnext=$_z40 &&
+ git push testrepo refs/heads/master:refs/heads/master :refs/heads/next &&
+ (
+ cd testrepo/.git &&
+ cat >pre-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ $orgnext $newnext refs/heads/next
+ EOF
+
+ cat >update.expect <<-EOF &&
+ refs/heads/master $orgmaster $newmaster
+ refs/heads/next $orgnext $newnext
+ EOF
+
+ cat >post-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ $orgnext $newnext refs/heads/next
+ EOF
+
+ cat >post-update.expect <<-EOF &&
+ refs/heads/master
+ refs/heads/next
+ EOF
+
+ test_cmp pre-receive.expect pre-receive.actual &&
+ test_cmp update.expect update.actual &&
+ test_cmp post-receive.expect post-receive.actual &&
+ test_cmp post-update.expect post-update.actual
+ )
+'
+
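+# Deleting all loose objects leaves refs/heads/master dangling, so the hooks
+# should see the all-zero object name ($_z40) for both the old and new values.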
+test_expect_success 'deleting dangling ref triggers hooks with correct args' '
+ mk_test_with_hooks heads/master &&
+ rm -f testrepo/.git/objects/??/* &&
+ git push testrepo :refs/heads/master &&
+ (
+ cd testrepo/.git &&
+ cat >pre-receive.expect <<-EOF &&
+ $_z40 $_z40 refs/heads/master
+ EOF
+
+ cat >update.expect <<-EOF &&
+ refs/heads/master $_z40 $_z40
+ EOF
+
+ cat >post-receive.expect <<-EOF &&
+ $_z40 $_z40 refs/heads/master
+ EOF
+
+ cat >post-update.expect <<-EOF &&
+ refs/heads/master
+ EOF
+
+ test_cmp pre-receive.expect pre-receive.actual &&
+ test_cmp update.expect update.actual &&
+ test_cmp post-receive.expect post-receive.actual &&
+ test_cmp post-update.expect post-update.actual
+ )
+'
+
+test_expect_success 'deletion of a non-existent ref is not fed to post-receive and post-update hooks' '
+ mk_test_with_hooks heads/master &&
+ orgmaster=$(cd testrepo && git show-ref -s --verify refs/heads/master) &&
+ newmaster=$(git show-ref -s --verify refs/heads/master) &&
+ git push testrepo master :refs/heads/nonexistent &&
+ (
+ cd testrepo/.git &&
+ cat >pre-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ $_z40 $_z40 refs/heads/nonexistent
+ EOF
+
+ cat >update.expect <<-EOF &&
+ refs/heads/master $orgmaster $newmaster
+ refs/heads/nonexistent $_z40 $_z40
+ EOF
+
+ cat >post-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ EOF
+
+ cat >post-update.expect <<-EOF &&
+ refs/heads/master
+ EOF
+
+ test_cmp pre-receive.expect pre-receive.actual &&
+ test_cmp update.expect update.actual &&
+ test_cmp post-receive.expect post-receive.actual &&
+ test_cmp post-update.expect post-update.actual
+ )
+'
+
+test_expect_success 'deletion of a non-existent ref alone does trigger post-receive and post-update hooks' '
+ mk_test_with_hooks heads/master &&
+ git push testrepo :refs/heads/nonexistent &&
+ (
+ cd testrepo/.git &&
+ cat >pre-receive.expect <<-EOF &&
+ $_z40 $_z40 refs/heads/nonexistent
+ EOF
+
+ cat >update.expect <<-EOF &&
+ refs/heads/nonexistent $_z40 $_z40
+ EOF
+
+ test_cmp pre-receive.expect pre-receive.actual &&
+ test_cmp update.expect update.actual &&
+ test_path_is_missing post-receive.actual &&
+ test_path_is_missing post-update.actual
+ )
+'
+
+test_expect_success 'mixed ref updates, deletes, invalid deletes trigger hooks with correct input' '
+ mk_test_with_hooks heads/master heads/next heads/pu &&
+ orgmaster=$(cd testrepo && git show-ref -s --verify refs/heads/master) &&
+ newmaster=$(git show-ref -s --verify refs/heads/master) &&
+ orgnext=$(cd testrepo && git show-ref -s --verify refs/heads/next) &&
+ newnext=$_z40 &&
+ orgpu=$(cd testrepo && git show-ref -s --verify refs/heads/pu) &&
+ newpu=$(git show-ref -s --verify refs/heads/master) &&
+ git push testrepo refs/heads/master:refs/heads/master \
+ refs/heads/master:refs/heads/pu :refs/heads/next \
+ :refs/heads/nonexistent &&
+ (
+ cd testrepo/.git &&
+ cat >pre-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ $orgnext $newnext refs/heads/next
+ $orgpu $newpu refs/heads/pu
+ $_z40 $_z40 refs/heads/nonexistent
+ EOF
+
+ cat >update.expect <<-EOF &&
+ refs/heads/master $orgmaster $newmaster
+ refs/heads/next $orgnext $newnext
+ refs/heads/pu $orgpu $newpu
+ refs/heads/nonexistent $_z40 $_z40
+ EOF
+
+ cat >post-receive.expect <<-EOF &&
+ $orgmaster $newmaster refs/heads/master
+ $orgnext $newnext refs/heads/next
+ $orgpu $newpu refs/heads/pu
+ EOF
+
+ cat >post-update.expect <<-EOF &&
+ refs/heads/master
+ refs/heads/next
+ refs/heads/pu
+ EOF
+
+ test_cmp pre-receive.expect pre-receive.actual &&
+ test_cmp update.expect update.actual &&
+ test_cmp post-receive.expect post-receive.actual &&
+ test_cmp post-update.expect post-update.actual
+ )
+'
+
test_expect_success 'allow deleting a ref using --delete' '
mk_test heads/master &&
(cd testrepo && git config receive.denyDeleteCurrent warn) &&
test_cmp expect actual
'
+# b---bc
+# / \ /
+# a X
+# \ / \
+# c---cb
+#
+# All refnames prefixed with 'x' to avoid confusion with the tags
+# generated by test_commit on case-insensitive systems.
+test_expect_success 'setup criss-cross' '
+ mkdir criss-cross &&
+ (cd criss-cross &&
+ git init &&
+ test_commit A &&
+ git checkout -b xb master &&
+ test_commit B &&
+ git checkout -b xc master &&
+ test_commit C &&
+ git checkout -b xbc xb -- &&
+ git merge xc &&
+ git checkout -b xcb xc -- &&
+ git merge xb &&
+ git checkout master)
+'
+
+# no commits in bc descend from cb
+test_expect_success 'criss-cross: rev-list --ancestry-path cb..bc' '
+ (cd criss-cross &&
+ git rev-list --ancestry-path xcb..xbc > actual &&
+ test -z "$(cat actual)")
+'
+
+# no commits in repository descend from cb
+test_expect_success 'criss-cross: rev-list --ancestry-path --all ^cb' '
+ (cd criss-cross &&
+ git rev-list --ancestry-path --all ^xcb > actual &&
+ test -z "$(cat actual)")
+'
+
test_done
test_i18ncmp expect actual
'
+cat >expect <<\EOF
+b1 origin/master: ahead 1, behind 1
+b2 origin/master: ahead 1, behind 1
+b3 origin/master: behind 1
+b4 origin/master: ahead 2
+EOF
+
+test_expect_success 'branch -vv' '
+ (
+ cd test &&
+ git branch -vv
+ ) |
+ sed -n -e "$script" >actual &&
+ test_i18ncmp expect actual
+'
+
test_expect_success 'checkout' '
(
cd test && git checkout b1
test_description='for-each-ref test'
. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-gpg.sh
# Mon Jul 3 15:18:43 2006 +0000
datestamp=1151939923
case "$1" in
head) ref=refs/heads/master ;;
tag) ref=refs/tags/testtag ;;
+ *) ref=$1 ;;
esac
printf '%s\n' "$3" >expected
- test_expect_${4:-success} "basic atom: $1 $2" "
+ test_expect_${4:-success} $PREREQ "basic atom: $1 $2" "
git for-each-ref --format='%($2)' $ref >actual &&
- test_cmp expected actual
+ sanitize_pgp <actual >actual.clean &&
+ test_cmp expected actual.clean
"
}
test_atom head creator 'C O Mitter <committer@example.com> 1151939923 +0200'
test_atom head creatordate 'Mon Jul 3 17:18:43 2006 +0200'
test_atom head subject 'Initial'
+test_atom head contents:subject 'Initial'
test_atom head body ''
+test_atom head contents:body ''
+test_atom head contents:signature ''
test_atom head contents 'Initial
'
test_atom tag creator 'C O Mitter <committer@example.com> 1151939925 +0200'
test_atom tag creatordate 'Mon Jul 3 17:18:45 2006 +0200'
test_atom tag subject 'Tagging at 1151939927'
+test_atom tag contents:subject 'Tagging at 1151939927'
test_atom tag body ''
+test_atom tag contents:body ''
+test_atom tag contents:signature ''
test_atom tag contents 'Tagging at 1151939927
'
'
+test_expect_success 'create tag with subject and body content' '
+ cat >>msg <<-\EOF &&
+ the subject line
+
+ first body line
+ second body line
+ EOF
+ git tag -F msg subject-body
+'
+test_atom refs/tags/subject-body subject 'the subject line'
+test_atom refs/tags/subject-body body 'first body line
+second body line
+'
+test_atom refs/tags/subject-body contents 'the subject line
+
+first body line
+second body line
+'
+
+test_expect_success 'create tag with multiline subject' '
+ cat >msg <<-\EOF &&
+ first subject line
+ second subject line
+
+ first body line
+ second body line
+ EOF
+ git tag -F msg multiline
+'
+test_atom refs/tags/multiline subject 'first subject line second subject line'
+test_atom refs/tags/multiline contents:subject 'first subject line second subject line'
+test_atom refs/tags/multiline body 'first body line
+second body line
+'
+test_atom refs/tags/multiline contents:body 'first body line
+second body line
+'
+test_atom refs/tags/multiline contents:signature ''
+test_atom refs/tags/multiline contents 'first subject line
+second subject line
+
+first body line
+second body line
+'
+
+test_expect_success GPG 'create signed tags' '
+ git tag -s -m "" signed-empty &&
+ git tag -s -m "subject line" signed-short &&
+ cat >msg <<-\EOF &&
+ subject line
+
+ body contents
+ EOF
+ git tag -s -F msg signed-long
+'
+
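+# sanitize_pgp keeps only the BEGIN/END armor lines of a signature, so this is
+# all that remains of a signature in the sanitized output.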
+sig='-----BEGIN PGP SIGNATURE-----
+-----END PGP SIGNATURE-----
+'
+
+PREREQ=GPG
+test_atom refs/tags/signed-empty subject ''
+test_atom refs/tags/signed-empty contents:subject ''
+test_atom refs/tags/signed-empty body "$sig"
+test_atom refs/tags/signed-empty contents:body ''
+test_atom refs/tags/signed-empty contents:signature "$sig"
+test_atom refs/tags/signed-empty contents "$sig"
+
+test_atom refs/tags/signed-short subject 'subject line'
+test_atom refs/tags/signed-short contents:subject 'subject line'
+test_atom refs/tags/signed-short body "$sig"
+test_atom refs/tags/signed-short contents:body ''
+test_atom refs/tags/signed-short contents:signature "$sig"
+test_atom refs/tags/signed-short contents "subject line
+$sig"
+
+test_atom refs/tags/signed-long subject 'subject line'
+test_atom refs/tags/signed-long contents:subject 'subject line'
+test_atom refs/tags/signed-long body "body contents
+$sig"
+test_atom refs/tags/signed-long contents:body 'body contents
+'
+test_atom refs/tags/signed-long contents:signature "$sig"
+test_atom refs/tags/signed-long contents "subject line
+
+body contents
+$sig"
+
test_done
Tests for operations with tags.'
. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-gpg.sh
# creating and listing lightweight tags:
test_cmp expect actual
'
-# subsequent tests require gpg; check if it is available
-gpg --version >/dev/null 2>/dev/null
-if [ $? -eq 127 ]; then
- say "# gpg not found - skipping tag signing and verification tests"
-else
- # As said here: http://www.gnupg.org/documentation/faqs.html#q6.19
- # the gpg version 1.0.6 didn't parse trust packets correctly, so for
- # that version, creation of signed tags using the generated key fails.
- case "$(gpg --version)" in
- 'gpg (GnuPG) 1.0.6'*)
- say "Skipping signed tag tests, because a bug in 1.0.6 version"
- ;;
- *)
- test_set_prereq GPG
- ;;
- esac
-fi
-
# trying to verify annotated non-signed tags:
test_expect_success GPG \
# creating and verifying signed tags:
-# key generation info: gpg --homedir t/t7004 --gen-key
-# Type DSA and Elgamal, size 2048 bits, no expiration date.
-# Name and email: C O Mitter <committer@example.com>
-# No password given, to enable non-interactive operation.
-
-cp -R "$TEST_DIRECTORY"/t7004 ./gpghome
-chmod 0700 gpghome
-GNUPGHOME="$(pwd)/gpghome"
-export GNUPGHOME
-
get_tag_header signed-tag $commit commit $time >expect
echo 'A signed tag message' >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
--- /dev/null
+#!/bin/sh
+
+test_description='Test interaction of reset --hard with sequencer
+
+ + anotherpick: rewrites foo to d
+ + picked: rewrites foo to c
+ + unrelatedpick: rewrites unrelated to reallyunrelated
+ + base: rewrites foo to b
+ + initial: writes foo as a, unrelated as unrelated
+'
+
+. ./test-lib.sh
+
+pristine_detach () {
+ git cherry-pick --reset &&
+ git checkout -f "$1^0" &&
+ git read-tree -u --reset HEAD &&
+ git clean -d -f -f -q -x
+}
+
+test_expect_success setup '
+ echo unrelated >unrelated &&
+ git add unrelated &&
+ test_commit initial foo a &&
+ test_commit base foo b &&
+ test_commit unrelatedpick unrelated reallyunrelated &&
+ test_commit picked foo c &&
+ test_commit anotherpick foo d &&
+ git config advice.detachedhead false
+
+'
+
+test_expect_success 'reset --hard cleans up sequencer state, providing one-level undo' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick base..anotherpick &&
+ test_path_is_dir .git/sequencer &&
+ git reset --hard &&
+ test_path_is_missing .git/sequencer &&
+ test_path_is_dir .git/sequencer-old &&
+ git reset --hard &&
+ test_path_is_missing .git/sequencer-old
+'
+
+test_done
git submodule update init > update.out &&
cat update.out &&
test_i18ngrep "not initialized" update.out &&
- ! test -d init/.git &&
+ test_must_fail git rev-parse --resolve-git-dir init/.git &&
git submodule update --init init &&
- test -d init/.git
+ git rev-parse --resolve-git-dir init/.git
'
test_expect_success 'do not add files from a submodule' '
git pull --no-recurse-submodules &&
git submodule sync
) &&
- test -d "$(git config -f super-clone/submodule/.git/config \
- remote.origin.url)" &&
+ test -d "$(cd super-clone/submodule &&
+ git config remote.origin.url
+ )" &&
(cd super-clone/submodule &&
git checkout master &&
git pull
git clone super submodule &&
git clone super rebasing &&
git clone super merging &&
+ git clone super none &&
(cd super &&
git submodule add ../submodule submodule &&
test_tick &&
test_tick &&
git commit -m "rebasing"
)
+ (cd super &&
+ git submodule add ../none none &&
+ test_tick &&
+ git commit -m "none"
+ )
'
test_expect_success 'submodule update detaching the HEAD ' '
)
'
+test_expect_success 'submodule init picks up update=none' '
+ (cd super &&
+ git config -f .gitmodules submodule.none.update none &&
+ git submodule init none &&
+ test "none" = "$(git config submodule.none.update)"
+ )
+'
+
+test_expect_success 'submodule update - update=none in .git/config' '
+ (cd super &&
+ git config submodule.submodule.update none &&
+ (cd submodule &&
+ git checkout master &&
+ compare_head
+ ) &&
+ git diff --raw | grep " submodule" &&
+ git submodule update &&
+ git diff --raw | grep " submodule" &&
+ (cd submodule &&
+ compare_head
+ ) &&
+ git config --unset submodule.submodule.update &&
+ git submodule update submodule
+ )
+'
+
+test_expect_success 'submodule update - update=none in .git/config but --checkout given' '
+ (cd super &&
+ git config submodule.submodule.update none &&
+ (cd submodule &&
+ git checkout master &&
+ compare_head
+ ) &&
+ git diff --raw | grep " submodule" &&
+ git submodule update --checkout &&
+		! (git diff --raw | grep " submodule") &&
+ (cd submodule &&
+ test_must_fail compare_head
+ ) &&
+ git config --unset submodule.submodule.update
+ )
+'
+
+test_expect_success 'submodule update --init skips submodule with update=none' '
+ (cd super &&
+ git add .gitmodules &&
+ git commit -m ".gitmodules"
+ ) &&
+ git clone super cloned &&
+ (cd cloned &&
+ git submodule update --init &&
+ test -e submodule/.git &&
+ test_must_fail test -e none/.git
+ )
+'
+
test_expect_success 'submodule update continues after checkout error' '
(cd super &&
git reset --hard HEAD &&
test_cmp expect actual
)
'
+
test_expect_success 'submodule update exit immediately after recursive rebase error' '
(cd super &&
git checkout master &&
test_cmp expect actual
)
'
+
+test_expect_success 'add different submodules to the same path' '
+ (cd super &&
+ git submodule add ../submodule s1 &&
+ test_must_fail git submodule add ../merging s1
+ )
+'
+
+test_expect_success 'submodule add places git-dir in superprojects git-dir' '
+ (cd super &&
+ mkdir deeper &&
+ git submodule add ../submodule deeper/submodule &&
+ (cd deeper/submodule &&
+ git log > ../../expected
+ ) &&
+ (cd .git/modules/deeper/submodule &&
+ git log > ../../../../actual
+ ) &&
+ test_cmp actual expected
+ )
+'
+
+test_expect_success 'submodule update places git-dir in superprojects git-dir' '
+ (cd super &&
+ git commit -m "added submodule"
+ ) &&
+ git clone super super2 &&
+ (cd super2 &&
+ git submodule init deeper/submodule &&
+ git submodule update &&
+ (cd deeper/submodule &&
+ git log > ../../expected
+ ) &&
+ (cd .git/modules/deeper/submodule &&
+ git log > ../../../../actual
+ ) &&
+ test_cmp actual expected
+ )
+'
+
+test_expect_success 'submodule add places git-dir in superprojects git-dir recursive' '
+ (cd super2 &&
+ (cd deeper/submodule &&
+ git submodule add ../submodule subsubmodule &&
+ (cd subsubmodule &&
+ git log > ../../../expected
+ ) &&
+ git commit -m "added subsubmodule" &&
+ git push
+ ) &&
+ (cd .git/modules/deeper/submodule/modules/subsubmodule &&
+ git log > ../../../../../actual
+ ) &&
+ git add deeper/submodule &&
+ git commit -m "update submodule" &&
+ git push &&
+ test_cmp actual expected
+ )
+'
+
+test_expect_success 'submodule update places git-dir in superprojects git-dir recursive' '
+ mkdir super_update_r &&
+ (cd super_update_r &&
+ git init --bare
+ ) &&
+ mkdir subsuper_update_r &&
+ (cd subsuper_update_r &&
+ git init --bare
+ ) &&
+ mkdir subsubsuper_update_r &&
+ (cd subsubsuper_update_r &&
+ git init --bare
+ ) &&
+ git clone subsubsuper_update_r subsubsuper_update_r2 &&
+ (cd subsubsuper_update_r2 &&
+ test_commit "update_subsubsuper" file &&
+ git push origin master
+ ) &&
+ git clone subsuper_update_r subsuper_update_r2 &&
+ (cd subsuper_update_r2 &&
+ test_commit "update_subsuper" file &&
+ git submodule add ../subsubsuper_update_r subsubmodule &&
+ git commit -am "subsubmodule" &&
+ git push origin master
+ ) &&
+ git clone super_update_r super_update_r2 &&
+ (cd super_update_r2 &&
+ test_commit "update_super" file &&
+ git submodule add ../subsuper_update_r submodule &&
+ git commit -am "submodule" &&
+ git push origin master
+ ) &&
+ rm -rf super_update_r2 &&
+ git clone super_update_r super_update_r2 &&
+ (cd super_update_r2 &&
+ git submodule update --init --recursive &&
+ (cd submodule/subsubmodule &&
+ git log > ../../expected
+ ) &&
+	(cd .git/modules/submodule/modules/subsubmodule &&
+		git log > ../../../../../actual
+	) &&
+ test_cmp actual expected
+ )
+'
+
test_done
git clone super clone2 &&
(
cd clone2 &&
- test ! -d sub1/.git &&
- test ! -d sub2/.git &&
- test ! -d sub3/.git &&
- test ! -d nested1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub2/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub3/.git &&
+ test_must_fail git rev-parse --resolve-git-dir nested1/.git &&
git submodule update --init &&
- test -d sub1/.git &&
- test -d sub2/.git &&
- test -d sub3/.git &&
- test -d nested1/.git &&
- test ! -d nested1/nested2/.git &&
+ git rev-parse --resolve-git-dir sub1/.git &&
+ git rev-parse --resolve-git-dir sub2/.git &&
+ git rev-parse --resolve-git-dir sub3/.git &&
+ git rev-parse --resolve-git-dir nested1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir nested1/nested2/.git &&
git submodule foreach "git submodule update --init" &&
- test -d nested1/nested2/.git &&
- test ! -d nested1/nested2/nested3/.git
+	git rev-parse --resolve-git-dir nested1/nested2/.git &&
+ test_must_fail git rev-parse --resolve-git-dir nested1/nested2/nested3/.git
)
'
(
cd clone2 &&
git submodule foreach --recursive "git submodule update --init" &&
- test -d nested1/nested2/nested3/.git &&
- test -d nested1/nested2/nested3/submodule/.git
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/submodule/.git
)
'
git clone super clone3 &&
(
cd clone3 &&
- test ! -d sub1/.git &&
- test ! -d sub2/.git &&
- test ! -d sub3/.git &&
- test ! -d nested1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub2/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub3/.git &&
+ test_must_fail git rev-parse --resolve-git-dir nested1/.git &&
git submodule update --init --recursive &&
- test -d sub1/.git &&
- test -d sub2/.git &&
- test -d sub3/.git &&
- test -d nested1/.git &&
- test -d nested1/nested2/.git &&
- test -d nested1/nested2/nested3/.git &&
- test -d nested1/nested2/nested3/submodule/.git
+ git rev-parse --resolve-git-dir sub1/.git &&
+ git rev-parse --resolve-git-dir sub2/.git &&
+ git rev-parse --resolve-git-dir sub3/.git &&
+ git rev-parse --resolve-git-dir nested1/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/submodule/.git
)
'
test_expect_success 'use "git clone --recursive" to checkout all submodules' '
git clone --recursive super clone4 &&
- test -d clone4/.git &&
- test -d clone4/sub1/.git &&
- test -d clone4/sub2/.git &&
- test -d clone4/sub3/.git &&
- test -d clone4/nested1/.git &&
- test -d clone4/nested1/nested2/.git &&
- test -d clone4/nested1/nested2/nested3/.git &&
- test -d clone4/nested1/nested2/nested3/submodule/.git
+ (
+ cd clone4 &&
+ git rev-parse --resolve-git-dir .git &&
+ git rev-parse --resolve-git-dir sub1/.git &&
+ git rev-parse --resolve-git-dir sub2/.git &&
+ git rev-parse --resolve-git-dir sub3/.git &&
+ git rev-parse --resolve-git-dir nested1/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/submodule/.git
+ )
'
test_expect_success 'test "update --recursive" with a flag with spaces' '
git clone super clone5 &&
(
cd clone5 &&
- test ! -d nested1/.git &&
+	test_must_fail git rev-parse --resolve-git-dir nested1/.git &&
git submodule update --init --recursive --reference="$(dirname "$PWD")/common objects" &&
- test -d nested1/.git &&
- test -d nested1/nested2/.git &&
- test -d nested1/nested2/nested3/.git &&
- test -f nested1/.git/objects/info/alternates &&
- test -f nested1/nested2/.git/objects/info/alternates &&
- test -f nested1/nested2/nested3/.git/objects/info/alternates
+ git rev-parse --resolve-git-dir nested1/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/.git &&
+ test -f .git/modules/nested1/objects/info/alternates &&
+ test -f .git/modules/nested1/modules/nested2/objects/info/alternates &&
+ test -f .git/modules/nested1/modules/nested2/modules/nested3/objects/info/alternates
)
'
git clone super clone6 &&
(
cd clone6 &&
- test ! -d sub1/.git &&
- test ! -d sub2/.git &&
- test ! -d sub3/.git &&
- test ! -d nested1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub2/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub3/.git &&
+ test_must_fail git rev-parse --resolve-git-dir nested1/.git &&
git submodule update --init --recursive -- nested1 &&
- test ! -d sub1/.git &&
- test ! -d sub2/.git &&
- test ! -d sub3/.git &&
- test -d nested1/.git &&
- test -d nested1/nested2/.git &&
- test -d nested1/nested2/nested3/.git &&
- test -d nested1/nested2/nested3/submodule/.git
+ test_must_fail git rev-parse --resolve-git-dir sub1/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub2/.git &&
+ test_must_fail git rev-parse --resolve-git-dir sub3/.git &&
+ git rev-parse --resolve-git-dir nested1/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/.git &&
+ git rev-parse --resolve-git-dir nested1/nested2/nested3/submodule/.git
)
'
cd "$base_dir"
test_expect_success 'after add: existence of info/alternates' \
-'test `wc -l <super/sub/.git/objects/info/alternates` = 1'
+'test `wc -l <super/.git/modules/sub/objects/info/alternates` = 1'
cd "$base_dir"
cd "$base_dir"
test_expect_success 'after update: existence of info/alternates' \
-'test `wc -l <super-clone/sub/.git/objects/info/alternates` = 1'
+'test `wc -l <super-clone/.git/modules/sub/objects/info/alternates` = 1'
cd "$base_dir"
sub
sub2
Please move or remove them before you can merge.
+Aborting
EOF
test_expect_success 'will not overwrite untracked file in leading path' '
three
two
Please move or remove them before you can merge.
+Aborting
EOF
test_expect_success 'untracked files overwritten by merge (fast and non-fast forward)' '
error: The following untracked working tree files would be overwritten by merge:
five
Please move or remove them before you can merge.
+Aborting
EOF
test_expect_success 'untracked files or local changes overwritten by merge' '
rep/one
rep/two
Please, commit your changes or stash them before you can switch branches.
+Aborting
EOF
test_expect_success 'cannot switch branches because of local changes' '
rep/one
rep/two
Please, commit your changes or stash them before you can switch branches.
+Aborting
EOF
test_expect_success 'not uptodate file porcelain checkout error' '
rep
rep2
+Aborting
EOF
test_expect_success 'not_uptodate_dir porcelain checkout error' '
test_expect_success 'setup' '
git config rerere.enabled true &&
echo master >file1 &&
+ echo master spaced >"spaced name" &&
echo master file11 >file11 &&
echo master file12 >file12 &&
echo master file13 >file13 &&
git commit -m "Add foo"
) &&
git submodule add git://example.com/submod submod &&
- git add file1 file1[1-4] subdir/file3 .gitmodules submod &&
+ git add file1 "spaced name" file1[1-4] subdir/file3 .gitmodules submod &&
git commit -m "add initial versions" &&
git checkout -b branch1 master &&
git submodule update -N &&
echo branch1 change >file1 &&
echo branch1 newfile >file2 &&
+ echo branch1 spaced >"spaced name" &&
echo branch1 change file11 >file11 &&
echo branch1 change file13 >file13 &&
echo branch1 sub >subdir/file3 &&
git commit -m "Add bar on branch1" &&
git checkout -b submod-branch1
) &&
- git add file1 file11 file13 file2 subdir/file3 submod &&
+ git add file1 "spaced name" file11 file13 file2 subdir/file3 submod &&
git rm file12 &&
git commit -m "branch1 changes" &&
git submodule update -N &&
echo master updated >file1 &&
echo master new >file2 &&
+ echo master updated spaced >"spaced name" &&
echo master updated file12 >file12 &&
echo master updated file14 >file14 &&
echo master new sub >subdir/file3 &&
git commit -m "Add bar on master" &&
git checkout -b submod-master
) &&
- git add file1 file12 file14 file2 subdir/file3 submod &&
+ git add file1 "spaced name" file12 file14 file2 subdir/file3 submod &&
git rm file11 &&
git commit -m "master updates" &&
git checkout -b test1 branch1 &&
git submodule update -N &&
test_must_fail git merge master >/dev/null 2>&1 &&
- ( yes "" | git mergetool file1 >/dev/null 2>&1 ) &&
- ( yes "" | git mergetool file2 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file1 ) &&
+ ( yes "" | git mergetool file2 "spaced name" >/dev/null 2>&1 ) &&
( yes "" | git mergetool subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file12 >/dev/null 2>&1 ) &&
test_must_fail git merge master >/dev/null 2>&1 &&
( yes "" | git mergetool file1 >/dev/null 2>&1 ) &&
( yes "" | git mergetool file2 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool "spaced name" >/dev/null 2>&1 ) &&
( yes "" | git mergetool subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file12 >/dev/null 2>&1 ) &&
(
cd subdir &&
( yes "" | git mergetool ../file1 >/dev/null 2>&1 ) &&
- ( yes "" | git mergetool ../file2 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool ../file2 ../spaced\ name >/dev/null 2>&1 ) &&
( yes "d" | git mergetool ../file11 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool ../file12 >/dev/null 2>&1 ) &&
( yes "l" | git mergetool ../submod >/dev/null 2>&1 ) &&
git reset --hard
'
+test_expect_success 'mergetool takes partial path' '
+ git config rerere.enabled false &&
+ git checkout -b test12 branch1 &&
+ git submodule update -N &&
+ test_must_fail git merge master &&
+
+ #shouldnt need these lines
+ #( yes "d" | git mergetool file11 >/dev/null 2>&1 ) &&
+ #( yes "d" | git mergetool file12 >/dev/null 2>&1 ) &&
+ #( yes "l" | git mergetool submod >/dev/null 2>&1 ) &&
+ #( yes "" | git mergetool file1 file2 >/dev/null 2>&1 ) &&
+
+ ( yes "" | git mergetool subdir ) &&
+
+ test "$(cat subdir/file3)" = "master new sub" &&
+ git reset --hard
+'
+
test_expect_success 'deleted vs modified submodule' '
git checkout -b test6 branch1 &&
git submodule update -N &&
git checkout -b test6.a test6 &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "r" | git mergetool submod ) &&
rmdir submod && mv submod-movedaside submod &&
git submodule update -N &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "l" | git mergetool submod ) &&
test ! -e submod &&
git submodule update -N &&
test_must_fail git merge test6 &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "r" | git mergetool submod ) &&
test ! -e submod &&
git submodule update -N &&
test_must_fail git merge test6 &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "l" | git mergetool submod ) &&
test "$(cat submod/bar)" = "master submodule" &&
git checkout -b test7.a branch1 &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "r" | git mergetool submod ) &&
rmdir submod && mv submod-movedaside submod &&
git checkout -b test7.b test7 &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "l" | git mergetool submod ) &&
git submodule update -N &&
git submodule update -N &&
test_must_fail git merge test7 &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "r" | git mergetool submod ) &&
test -d submod.orig &&
git submodule update -N &&
test_must_fail git merge test7 &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 subdir/file3 >/dev/null 2>&1 ) &&
+ ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 >/dev/null 2>&1 ) &&
( yes "d" | git mergetool file11 file12 >/dev/null 2>&1 ) &&
( yes "l" | git mergetool submod ) &&
test "$(cat submod/bar)" = "master submodule" &&
test "$(cat submod/file16)" = "not a submodule" &&
rm -rf submod.orig &&
- git reset --hard &&
+ git reset --hard >/dev/null 2>&1 &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
test ! -e submod.orig &&
( cd submod && git clean -f && git reset --hard ) &&
git submodule update -N &&
test "$(cat submod/bar)" = "master submodule" &&
- git reset --hard && rm -rf submod-movedaside &&
+ git reset --hard >/dev/null 2>&1 && rm -rf submod-movedaside &&
git checkout -b test11.c master &&
git submodule update -N &&
git submodule update -N &&
test "$(cat submod/bar)" = "master submodule" &&
- git reset --hard &&
+ git reset --hard >/dev/null 2>&1 &&
git submodule update -N &&
test_must_fail git merge test11 &&
test -n "$(git ls-files -u)" &&
( yes "r" | git mergetool submod ) &&
test "$(cat submod/file16)" = "not a submodule" &&
- git reset --hard master &&
+ git reset --hard master >/dev/null 2>&1 &&
( cd submod && git clean -f && git reset --hard ) &&
git submodule update -N
'
mkdir -p non/git/sub &&
echo hello >non/git/file1 &&
echo world >non/git/sub/file2 &&
- echo ".*o*" >non/git/.gitignore &&
{
echo file1:hello &&
echo sub/file2:world
test_must_fail git grep o &&
git grep --no-index o >../../actual.sub &&
test_cmp ../../expect.sub ../../actual.sub
+ ) &&
+
+ echo ".*o*" >non/git/.gitignore &&
+ (
+ GIT_CEILING_DIRECTORIES="$(pwd)/non/git" &&
+ export GIT_CEILING_DIRECTORIES &&
+ cd non/git &&
+ test_must_fail git grep o &&
+ git grep --no-index --exclude-standard o >../actual.full &&
+ test_cmp ../expect.full ../actual.full &&
+
+ {
+ echo ".gitignore:.*o*"
+ cat ../expect.full
+ } >../expect.with.ignored &&
+ git grep --no-index --no-exclude o >../actual.full &&
+ test_cmp ../expect.with.ignored ../actual.full
)
'
{
echo file1:hello &&
echo sub/file2:world
+ } >is/expect.unignored &&
+ {
+ echo ".gitignore:.*o*" &&
+ cat is/expect.unignored
} >is/expect.full &&
: >is/expect.empty &&
echo file2:world >is/expect.sub &&
git init &&
test_must_fail git grep o >../actual.full &&
test_cmp ../expect.empty ../actual.full &&
+
+ git grep --untracked o >../actual.unignored &&
+ test_cmp ../expect.unignored ../actual.unignored &&
+
git grep --no-index o >../actual.full &&
test_cmp ../expect.full ../actual.full &&
+
+ git grep --no-index --exclude-standard o >../actual.unignored &&
+ test_cmp ../expect.unignored ../actual.unignored &&
+
cd sub &&
test_must_fail git grep o >../../actual.sub &&
test_cmp ../../expect.empty ../../actual.sub &&
+
git grep --no-index o >../../actual.sub &&
+ test_cmp ../../expect.sub ../../actual.sub &&
+
+ git grep --untracked o >../../actual.sub &&
test_cmp ../../expect.sub ../../actual.sub
)
'
test -n "$(ls msgtxt*)"
'
+test_expect_success $PREREQ 'sendemail.aliasfiletype=mailrc' '
+ clean_fake_sendmail &&
+ echo "alias sbd somebody@example.org" >.mailrc &&
+ git config --replace-all sendemail.aliasesfile "$(pwd)/.mailrc" &&
+ git config sendemail.aliasfiletype mailrc &&
+ git send-email \
+ --from="Example <nobody@example.com>" \
+ --to=sbd \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ outdir/0001-*.patch \
+ 2>errors >out &&
+ grep "^!somebody@example\.org!$" commandline1
+'
+
+test_expect_success $PREREQ 'sendemail.aliasfile=~/.mailrc' '
+ clean_fake_sendmail &&
+ echo "alias sbd someone@example.org" >~/.mailrc &&
+ git config --replace-all sendemail.aliasesfile "~/.mailrc" &&
+ git config sendemail.aliasfiletype mailrc &&
+ git send-email \
+ --from="Example <nobody@example.com>" \
+ --to=sbd \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ outdir/0001-*.patch \
+ 2>errors >out &&
+ grep "^!someone@example\.org!$" commandline1
+'
+
test_done
test_description='git svn handling of root commits in merge ranges'
. ./lib-git-svn.sh
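+# Handling of merge ranges relies on svn:mergeinfo, which needs Subversion 1.5
+# or newer, so skip the test with older versions.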
+svn_ver="$(svn --version --quiet)"
+case $svn_ver in
+0.* | 1.[0-4].*)
+ skip_all="skipping git-svn test - SVN too old ($svn_ver)"
+ test_done
+ ;;
+esac
+
test_expect_success 'test handling of root commits in merge ranges' '
mkdir -p init/trunk init/branches init/tags &&
echo "r1" > init/trunk/file.txt &&
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2011 Frédéric Heitzmann
+
+test_description='git svn dcommit --interactive series'
+. ./lib-git-svn.sh
+
+test_expect_success 'initialize repo' '
+ svn_cmd mkdir -m"mkdir test-interactive" "$svnrepo/test-interactive" &&
+ git svn clone "$svnrepo/test-interactive" test-interactive &&
+ cd test-interactive &&
+ touch foo && git add foo && git commit -m"foo: first commit" &&
+ git svn dcommit
+ '
+
+test_expect_success 'answers: y [\n] yes' '
+ (
+ echo "change #1" >> foo && git commit -a -m"change #1" &&
+ echo "change #2" >> foo && git commit -a -m"change #2" &&
+ echo "change #3" >> foo && git commit -a -m"change #3" &&
+ ( echo "y
+
+y" | GIT_SVN_NOTTY=1 git svn dcommit --interactive ) &&
+ test $(git rev-parse HEAD) = $(git rev-parse remotes/git-svn)
+ )
+ '
+
+test_expect_success 'answers: yes yes no' '
+ (
+ echo "change #1" >> foo && git commit -a -m"change #1" &&
+ echo "change #2" >> foo && git commit -a -m"change #2" &&
+ echo "change #3" >> foo && git commit -a -m"change #3" &&
+ ( echo "yes
+yes
+no" | GIT_SVN_NOTTY=1 git svn dcommit --interactive ) &&
+ test $(git rev-parse HEAD^^^) = $(git rev-parse remotes/git-svn) &&
+ git reset --hard remotes/git-svn
+ )
+ '
+
+test_expect_success 'answers: yes quit' '
+ (
+ echo "change #1" >> foo && git commit -a -m"change #1" &&
+ echo "change #2" >> foo && git commit -a -m"change #2" &&
+ echo "change #3" >> foo && git commit -a -m"change #3" &&
+ ( echo "yes
+quit" | GIT_SVN_NOTTY=1 git svn dcommit --interactive ) &&
+ test $(git rev-parse HEAD^^^) = $(git rev-parse remotes/git-svn) &&
+ git reset --hard remotes/git-svn
+ )
+ '
+
+test_expect_success 'answers: all' '
+ (
+ echo "change #1" >> foo && git commit -a -m"change #1" &&
+ echo "change #2" >> foo && git commit -a -m"change #2" &&
+ echo "change #3" >> foo && git commit -a -m"change #3" &&
+ ( echo "all" | GIT_SVN_NOTTY=1 git svn dcommit --interactive ) &&
+ test $(git rev-parse HEAD) = $(git rev-parse remotes/git-svn) &&
+ git reset --hard remotes/git-svn
+ )
+ '
+
+test_done
'test 1 = `git rev-list J | wc -l` &&
test 0 = `git ls-tree J | wc -l`'
+cat >input <<INPUT_END
+reset refs/heads/J2
+
+tag wrong_tag
+from refs/heads/J2
+data <<EOF
+Tag branch that was reset.
+EOF
+INPUT_END
+test_expect_success \
+ 'J: tag must fail on empty branch' \
+ 'test_must_fail git fast-import <input'
###
### series K
###
'Q: verify second note for second commit' \
'git cat-file blob refs/notes/foobar:$commit2 >actual && test_cmp expect actual'
+cat >input <<EOF
+reset refs/heads/Q0
+
+commit refs/heads/note-Q0
+committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+data <<COMMIT
+Note for an empty branch.
+COMMIT
+
+N inline refs/heads/Q0
+data <<NOTE
+some note
+NOTE
+EOF
+test_expect_success \
+ 'Q: deny note on empty branch' \
+ 'test_must_fail git fast-import <input'
###
### series R (feature and option)
###
--- /dev/null
+#!/bin/sh
+#
+
+test_description='git web--browse basic tests
+
+This test checks that git web--browse can handle various valid URLs.'
+
+. ./test-lib.sh
+
+test_expect_success \
+ 'URL with an ampersand in it' '
+ echo http://example.com/foo\&bar >expect &&
+ git config browser.custom.cmd echo &&
+ git web--browse --browser=custom \
+ http://example.com/foo\&bar >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success \
+ 'URL with a semi-colon in it' '
+ echo http://example.com/foo\;bar >expect &&
+ git config browser.custom.cmd echo &&
+ git web--browse --browser=custom \
+ http://example.com/foo\;bar >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success \
+ 'URL with a hash in it' '
+ echo http://example.com/foo#bar >expect &&
+ git config browser.custom.cmd echo &&
+ git web--browse --browser=custom \
+ http://example.com/foo#bar >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success \
+ 'browser paths are properly quoted' '
+ echo fake: http://example.com/foo >expect &&
+ cat >"fake browser" <<-\EOF &&
+ #!/bin/sh
+ echo fake: "$@"
+ EOF
+ chmod +x "fake browser" &&
+ git config browser.w3m.path "`pwd`/fake browser" &&
+ git web--browse --browser=w3m \
+ http://example.com/foo >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success \
+ 'browser command allows arbitrary shell code' '
+ echo "arg: http://example.com/foo" >expect &&
+ git config browser.custom.cmd "
+ f() {
+ for i in \"\$@\"; do
+ echo arg: \$i
+ done
+ }
+ f" &&
+ git web--browse --browser=custom \
+ http://example.com/foo >actual &&
+ test_cmp expect actual
+'
+
+test_done
do
make_valgrind_symlink $file
done
+ # special-case the mergetools loadables
+ make_symlink "$GIT_BUILD_DIR"/mergetools "$GIT_VALGRIND/bin/mergetools"
OLDIFS=$IFS
IFS=:
for path in $PATH
#include "cache.h"
+static int rc;
-static int test_isdigit(int c)
+static void report_error(const char *class, int ch)
{
- return isdigit(c);
+ printf("%s classifies char %d (0x%02x) wrongly\n", class, ch, ch);
+ rc = 1;
}
-static int test_isspace(int c)
+static int is_in(const char *s, int ch)
{
- return isspace(c);
+ /* We can't find NUL using strchr. It's classless anyway. */
+ if (ch == '\0')
+ return 0;
+ return !!strchr(s, ch);
}
-static int test_isalpha(int c)
-{
- return isalpha(c);
-}
-
-static int test_isalnum(int c)
-{
- return isalnum(c);
-}
-
-static int test_is_glob_special(int c)
-{
- return is_glob_special(c);
-}
-
-static int test_is_regex_special(int c)
-{
- return is_regex_special(c);
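+/*
+ * Run the classifier t on every byte value 0..255 and complain whenever it
+ * disagrees with membership in the class string s.
+ */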
+#define TEST_CLASS(t,s) { \
+ int i; \
+ for (i = 0; i < 256; i++) { \
+ if (is_in(s, i) != t(i)) \
+ report_error(#t, i); \
+ } \
}
#define DIGIT "0123456789"
#define LOWER "abcdefghijklmnopqrstuvwxyz"
#define UPPER "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-static const struct ctype_class {
- const char *name;
- int (*test_fn)(int);
- const char *members;
-} classes[] = {
- { "isdigit", test_isdigit, DIGIT },
- { "isspace", test_isspace, " \n\r\t" },
- { "isalpha", test_isalpha, LOWER UPPER },
- { "isalnum", test_isalnum, LOWER UPPER DIGIT },
- { "is_glob_special", test_is_glob_special, "*?[\\" },
- { "is_regex_special", test_is_regex_special, "$()*+.?[\\^{|" },
- { NULL }
-};
-
-static int test_class(const struct ctype_class *test)
-{
- int i, rc = 0;
-
- for (i = 0; i < 256; i++) {
- int expected = i ? !!strchr(test->members, i) : 0;
- int actual = test->test_fn(i);
-
- if (actual != expected) {
- rc = 1;
- printf("%s classifies char %d (0x%02x) wrongly\n",
- test->name, i, i);
- }
- }
- return rc;
-}
-
int main(int argc, char **argv)
{
- const struct ctype_class *test;
- int rc = 0;
-
- for (test = classes; test->name; test++)
- rc |= test_class(test);
+ TEST_CLASS(isdigit, DIGIT);
+ TEST_CLASS(isspace, " \n\r\t");
+ TEST_CLASS(isalpha, LOWER UPPER);
+ TEST_CLASS(isalnum, LOWER UPPER DIGIT);
+ TEST_CLASS(is_glob_special, "*?[\\");
+ TEST_CLASS(is_regex_special, "$()*+.?[\\^{|");
+ TEST_CLASS(is_pathspec_magic, "!\"#%&',-/:;<=>@_`~");
return rc;
}
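
The test-ctype rewrite above folds the old table of wrapper functions into a
single TEST_CLASS macro plus an is_in() helper. As a rough, self-contained
illustration of the same pattern (a sketch only, built on the standard
<ctype.h> classifiers because git's is_glob_special() and friends are not
available outside the git tree):

/*
 * Illustration only, not part of the patch: the same macro-driven pattern
 * using standard <ctype.h> classifiers.  Results are normalized with !!
 * because, unlike git's classifiers, the standard ones may return any
 * non-zero value.
 */
#include <ctype.h>
#include <stdio.h>
#include <string.h>

static int rc;

static void report_error(const char *class, int ch)
{
	printf("%s classifies char %d (0x%02x) wrongly\n", class, ch, ch);
	rc = 1;
}

static int is_in(const char *s, int ch)
{
	/* NUL cannot be found with strchr(); treat it as classless. */
	if (ch == '\0')
		return 0;
	return !!strchr(s, ch);
}

/* Compare one classifier against its expected members for all 256 bytes. */
#define TEST_CLASS(t, s) do { \
	int i; \
	for (i = 0; i < 256; i++) \
		if (is_in(s, i) != !!t(i)) \
			report_error(#t, i); \
} while (0)

int main(void)
{
	TEST_CLASS(isdigit, "0123456789");
	TEST_CLASS(isupper, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
	return rc;
}
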
OPT_STRING(0, "string2", &string, "str", "get another string"),
OPT_STRING(0, "st", &string, "st", "get another string (pervert ordering)"),
OPT_STRING('o', NULL, &string, "str", "get another string"),
+ OPT_NOOP_NOARG(0, "obsolete"),
OPT_SET_PTR(0, "default-string", &string,
"set string to default", (unsigned long)"default"),
OPT_STRING_LIST(0, "list", &list, "str", "add str to list"),
ALLOC_GROW(refspecs,
refspec_nr + 1,
refspec_alloc);
- refspecs[refspec_nr++] = strdup(capname + strlen("refspec "));
+ refspecs[refspec_nr++] = xstrdup(capname + strlen("refspec "));
} else if (!strcmp(capname, "connect")) {
data->connect = 1;
} else if (!prefixcmp(capname, "export-marks ")) {
if (data->refspecs)
private = apply_refspecs(data->refspecs, data->refspec_nr, posn->name);
else
- private = strdup(posn->name);
- read_ref(private, posn->old_sha1);
- free(private);
+ private = xstrdup(posn->name);
+ if (private) {
+ read_ref(private, posn->old_sha1);
+ free(private);
+ }
}
strbuf_release(&buf);
return 0;
int nr_heads, struct ref **to_fetch)
{
struct bundle_transport_data *data = transport->data;
- return unbundle(&data->header, data->fd);
+ return unbundle(&data->header, data->fd,
+ transport->progress ? BUNDLE_VERBOSE : 0);
}
static int close_bundle(struct transport *transport)
continue;
remote = remote ? (remote + 1) : local;
- switch (check_ref_format(remote)) {
- case 0: /* ok */
- case CHECK_REF_FORMAT_ONELEVEL:
- /* ok but a single level -- that is fine for
- * a match pattern.
- */
- case CHECK_REF_FORMAT_WILDCARD:
- /* ok but ends with a pattern-match character */
- continue;
- }
- die("remote part of refspec is not a valid name in %s",
- heads[i]);
+ if (check_refname_format(remote,
+ REFNAME_ALLOW_ONELEVEL|REFNAME_REFSPEC_PATTERN))
+ die("remote part of refspec is not a valid name in %s",
+ heads[i]);
}
}
}
}
+static inline int prune_traversal(struct name_entry *e,
+ struct traverse_info *info,
+ struct strbuf *base,
+ int still_interesting)
+{
+ if (!info->pathspec || still_interesting == 2)
+ return 2;
+ if (still_interesting < 0)
+ return still_interesting;
+ return tree_entry_interesting(e, base, 0, info->pathspec);
+}
+
int traverse_trees(int n, struct tree_desc *t, struct traverse_info *info)
{
int ret = 0;
struct name_entry *entry = xmalloc(n*sizeof(*entry));
int i;
struct tree_desc_x *tx = xcalloc(n, sizeof(*tx));
+ struct strbuf base = STRBUF_INIT;
+ int interesting = 1;
for (i = 0; i < n; i++)
tx[i].d = t[i];
+ if (info->prev) {
+ strbuf_grow(&base, info->pathlen);
+ make_traverse_path(base.buf, info->prev, &info->name);
+ base.buf[info->pathlen-1] = '/';
+ strbuf_setlen(&base, info->pathlen);
+ }
for (;;) {
unsigned long mask, dirmask;
const char *first = NULL;
int first_len = 0;
- struct name_entry *e;
+ struct name_entry *e = NULL;
int len;
for (i = 0; i < n; i++) {
mask |= 1ul << i;
if (S_ISDIR(entry[i].mode))
dirmask |= 1ul << i;
+ e = &entry[i];
}
if (!mask)
break;
- ret = info->fn(n, mask, dirmask, entry, info);
- if (ret < 0) {
- error = ret;
- if (!info->show_all_errors)
- break;
+ interesting = prune_traversal(e, info, &base, interesting);
+ if (interesting < 0)
+ break;
+ if (interesting) {
+ ret = info->fn(n, mask, dirmask, entry, info);
+ if (ret < 0) {
+ error = ret;
+ if (!info->show_all_errors)
+ break;
+ }
+ mask &= ret;
}
- mask &= ret;
ret = 0;
for (i = 0; i < n; i++)
if (mask & (1ul << i))
for (i = 0; i < n; i++)
free_extended_entry(tx + i);
free(tx);
+ strbuf_release(&base);
return error;
}
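
prune_traversal() above lets traverse_trees() skip pathspec matching once an
entire subtree is known to be interesting (return value 2) and stop early
once nothing further can match (a negative value); it also returns 2
immediately when there is no pathspec at all. A hypothetical, standalone
sketch of that convention, with a toy matcher standing in for
tree_entry_interesting():

/*
 * Hypothetical sketch (toy types, not git code) of the pruning convention:
 * the matcher returns 2 for "everything from here on is interesting",
 * a negative value for "nothing later can match", 0/1 otherwise.  Sticky
 * results avoid re-running the matcher for every entry.
 */
#include <stdio.h>

/* toy stand-in for tree_entry_interesting(): only entries 2..4 match */
static int match_entry(int entry)
{
	if (entry < 2)
		return 0;   /* not interesting (yet) */
	if (entry <= 4)
		return 1;   /* interesting */
	return -1;          /* past the range; nothing later can match */
}

static int prune(int entry, int still_interesting)
{
	if (still_interesting == 2)
		return 2;                   /* sticky: skip further matching */
	if (still_interesting < 0)
		return still_interesting;   /* sticky: give up */
	return match_entry(entry);
}

int main(void)
{
	int interesting = 1, entry;

	for (entry = 0; entry < 10; entry++) {
		interesting = prune(entry, interesting);
		if (interesting < 0)
			break;              /* stop the walk early */
		if (interesting)
			printf("visit entry %d\n", entry);
	}
	return 0;
}
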
return 0;
}
-static int match_dir_prefix(const char *base, int baselen,
+static int match_dir_prefix(const char *base,
const char *match, int matchlen)
{
if (strncmp(base, match, matchlen))
if (baselen >= matchlen) {
/* If it doesn't match, move along... */
- if (!match_dir_prefix(base_str, baselen, match, matchlen))
+ if (!match_dir_prefix(base_str, match, matchlen))
goto match_wildcards;
if (!ps->recursive || ps->max_depth == -1)
ps->max_depth);
}
- /* Does the base match? */
- if (!strncmp(base_str, match, baselen)) {
+ /* Either there must be no base, or the base must match. */
+ if (baselen == 0 || !strncmp(base_str, match, baselen)) {
if (match_entry(entry, pathlen,
match + baselen, matchlen - baselen,
&never_interesting))
struct traverse_info *prev;
struct name_entry name;
int pathlen;
+ struct pathspec *pathspec;
unsigned long conflicts;
traverse_callback_t fn;
string_list_clear(rejects, 0);
}
if (something_displayed)
- printf("Aborting\n");
+ fprintf(stderr, "Aborting\n");
}
/*
newinfo = *info;
newinfo.prev = info;
+ newinfo.pathspec = info->pathspec;
newinfo.name = *p;
newinfo.pathlen += tree_entry_len(p->path, p->sha1) + 1;
newinfo.conflicts |= df_conflicts;
info.fn = unpack_callback;
info.data = o;
info.show_all_errors = o->show_all_errors;
+ info.pathspec = o->pathspec;
if (o->prefix) {
/*
*/
mark_new_skip_worktree(o->el, &o->result, CE_ADDED, CE_SKIP_WORKTREE | CE_NEW_SKIP_WORKTREE);
+ ret = 0;
for (i = 0; i < o->result.cache_nr; i++) {
struct cache_entry *ce = o->result.cache[i];
* correct CE_NEW_SKIP_WORKTREE
*/
if (ce->ce_flags & CE_ADDED &&
- verify_absent(ce, ERROR_WOULD_LOSE_UNTRACKED_OVERWRITTEN, o))
- return -1;
+ verify_absent(ce, ERROR_WOULD_LOSE_UNTRACKED_OVERWRITTEN, o)) {
+ if (!o->show_all_errors)
+ goto return_failed;
+ ret = -1;
+ }
if (apply_sparse_checkout(ce, o)) {
+ if (!o->show_all_errors)
+ goto return_failed;
ret = -1;
- goto done;
}
if (!ce_skip_worktree(ce))
empty_worktree = 0;
}
+ if (ret < 0)
+ goto return_failed;
+ /*
+ * Sparse checkout is meant to narrow down the checkout area,
+ * but it does not make sense to narrow it down to an empty
+ * working tree. That is usually a mistake in the sparse
+ * checkout rules. Do not allow users to do that.
+ */
if (o->result.cache_nr && empty_worktree) {
- /* dubious---why should this fail??? */
ret = unpack_failed(o, "Sparse checkout leaves no entry on working directory");
goto done;
}
const char *prefix;
int cache_bottom;
struct dir_struct *dir;
+ struct pathspec *pathspec;
merge_fn_t fn;
const char *msgs[NB_UNPACK_TREES_ERROR_TYPES];
/*
commit->buffer = NULL;
}
-static void show_object(struct object *obj, const struct name_path *path, const char *component)
+static void show_object(struct object *obj,
+ const struct name_path *path, const char *component,
+ void *cb_data)
{
- /* An object with name "foo\n0000000..." can be used to
- * confuse downstream git-pack-objects very badly.
- */
- const char *name = path_name(path, component);
- const char *ep = strchr(name, '\n');
- if (ep) {
- fprintf(pack_pipe, "%s %.*s\n", sha1_to_hex(obj->sha1),
- (int) (ep - name),
- name);
- }
- else
- fprintf(pack_pipe, "%s %s\n",
- sha1_to_hex(obj->sha1), name);
- free((char *)name);
+ show_object_with_name(pack_pipe, obj, path, component);
}
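
The open-coded printing removed above is now delegated to
show_object_with_name(). Judging from the removed lines, the essential
behaviour is to cut the object name at its first newline so that a name like
"foo\n0000000..." cannot inject a bogus record into the stream consumed by
git-pack-objects. A minimal standalone sketch of that truncation (not the
actual show_object_with_name(), which takes git-specific types):

/*
 * Standalone sketch of the truncation the removed code performed.
 */
#include <stdio.h>
#include <string.h>

static void print_object_line(FILE *out, const char *hex, const char *name)
{
	const char *ep = strchr(name, '\n');

	if (ep)
		/* cut at the newline so the name cannot fake a second record */
		fprintf(out, "%s %.*s\n", hex, (int)(ep - name), name);
	else
		fprintf(out, "%s %s\n", hex, name);
}

int main(void)
{
	print_object_line(stdout,
			  "1234567890abcdef1234567890abcdef12345678",
			  "foo\nnot-a-real-second-line");
	return 0;
}
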
static void show_edge(struct commit *commit)
int is_url(const char *url)
{
- const char *url2, *first_slash;
-
- if (!url)
- return 0;
- url2 = url;
- first_slash = strchr(url, '/');
-
- /* Input with no slash at all or slash first can't be URL. */
- if (!first_slash || first_slash == url)
- return 0;
- /* Character before must be : and next must be /. */
- if (first_slash[-1] != ':' || first_slash[1] != '/')
+ /* Is "scheme" part reasonable? */
+ if (!url || !is_urlschemechar(1, *url++))
return 0;
- /* There must be something before the :// */
- if (first_slash == url + 1)
- return 0;
- /*
- * Check all characters up to first slash - 1. Only alphanum
- * is allowed.
- */
- url2 = url;
- while (url2 < first_slash - 1) {
- if (!is_urlschemechar(url2 == url, (unsigned char)*url2))
+ while (*url && *url != ':') {
+ if (!is_urlschemechar(0, *url++))
return 0;
- url2++;
}
-
- /* Valid enough. */
- return 1;
+ /* We've seen "scheme"; we want colon-slash-slash */
+ return (url[0] == ':' && url[1] == '/' && url[2] == '/');
}
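
The streamlined is_url() above checks one leading scheme character, then
scheme characters up to the first ':', and finally requires "://". A
standalone sketch of the same check, with a simplified stand-in for
is_urlschemechar() (letter first, then letters, digits, '+', '-' or '.';
not git's actual macro):

#include <ctype.h>
#include <stdio.h>

static int is_urlschemechar(int first, int ch)
{
	if (first)
		return isalpha((unsigned char)ch);
	return isalnum((unsigned char)ch) ||
		ch == '+' || ch == '-' || ch == '.';
}

static int is_url(const char *url)
{
	/* Is the "scheme" part reasonable? */
	if (!url || !is_urlschemechar(1, *url++))
		return 0;
	while (*url && *url != ':')
		if (!is_urlschemechar(0, *url++))
			return 0;
	/* We've seen "scheme"; now we want colon-slash-slash. */
	return url[0] == ':' && url[1] == '/' && url[2] == '/';
}

int main(void)
{
	printf("%d\n", is_url("https://example.com"));  /* 1 */
	printf("%d\n", is_url("no-scheme/path"));       /* 0 */
	printf("%d\n", is_url("http:/one-slash"));      /* 0 */
	return 0;
}
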
static int url_decode_char(const char *q)
{
if (!get_sha1_hex(target, sha1))
return 0;
- if (!check_ref_format(target)) {
+ if (!check_refname_format(target, 0)) {
struct ref *ref = alloc_ref(target);
if (!walker->fetch_ref(walker, ref)) {
hashcpy(sha1, ref->old_sha1);
 * might be discarded if they appear in a run of discardable lines.
*/
static int xdl_cleanup_records(xdlclassifier_t *cf, xdfile_t *xdf1, xdfile_t *xdf2) {
- long i, nm, nreff;
+ long i, nm, nreff, mlim;
xrecord_t **recs;
xdlclass_t *rcrec;
char *dis, *dis1, *dis2;
dis1 = dis;
dis2 = dis1 + xdf1->nrec + 1;
+ if ((mlim = xdl_bogosqrt(xdf1->nrec)) > XDL_MAX_EQLIMIT)
+ mlim = XDL_MAX_EQLIMIT;
for (i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart]; i <= xdf1->dend; i++, recs++) {
rcrec = cf->rcrecs[(*recs)->ha];
nm = rcrec ? rcrec->len2 : 0;
- dis1[i] = (nm == 0) ? 0: 1;
+ dis1[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
}
+ if ((mlim = xdl_bogosqrt(xdf2->nrec)) > XDL_MAX_EQLIMIT)
+ mlim = XDL_MAX_EQLIMIT;
for (i = xdf2->dstart, recs = &xdf2->recs[xdf2->dstart]; i <= xdf2->dend; i++, recs++) {
rcrec = cf->rcrecs[(*recs)->ha];
nm = rcrec ? rcrec->len1 : 0;
- dis2[i] = (nm == 0) ? 0: 1;
+ dis2[i] = (nm == 0) ? 0: (nm >= mlim) ? 2: 1;
}
for (nreff = 0, i = xdf1->dstart, recs = &xdf1->recs[xdf1->dstart];