/git-commit-tree
/git-config
/git-count-objects
+/git-credential-cache
+/git-credential-cache--daemon
+/git-credential-store
/git-cvsexportcommit
/git-cvsimport
/git-cvsserver
/gitweb/static/gitweb.js
/gitweb/static/gitweb.min.*
/test-chmtime
+/test-credential
/test-ctype
/test-date
/test-delta
/test-dump-cache-tree
+/test-scrap-cache-tree
/test-genrandom
/test-index-version
/test-line-buffer
MAN7_TXT=gitcli.txt gittutorial.txt gittutorial-2.txt \
gitcvs-migration.txt gitcore-tutorial.txt gitglossary.txt \
gitdiffcore.txt gitnamespaces.txt gitrevisions.txt gitworkflows.txt
+MAN7_TXT += gitcredentials.txt
MAN_TXT = $(MAN1_TXT) $(MAN5_TXT) $(MAN7_TXT)
MAN_XML=$(patsubst %.txt,%.xml,$(MAN_TXT))
--- /dev/null
+Git v1.7.6.5 Release Notes
+==========================
+
+Fixes since v1.7.6.4
+--------------------
+
+ * The date parser did not accept timezone designators that lack the
+   minutes part, or that use a colon between "hh" and "mm".
+
+ * After fetching from a remote that has a very long refname, the
+   reporting output could have been corrupted by overrunning a static
+   buffer.
+
+ * "git mergetool" did not use its arguments as pathspec, but as a path to
+ the file that may not even have any conflict.
+
+ * "git name-rev --all" tried to name all _objects_, naturally failing to
+ describe many blobs and trees, instead of showing only commits as
+ advertised in its documentation.
+
+ * "git remote rename $a $b" were not careful to match the remote name
+ against $a (i.e. source side of the remote nickname).
+
+ * "gitweb" used to produce a non-working link while showing the contents
+ of a blob, when JavaScript actions are enabled.
+
+Also contains minor fixes and documentation updates.
--- /dev/null
+Git v1.7.7.5 Release Notes
+==========================
+
+Fixes since v1.7.7.4
+--------------------
+
+ * After fetching from a remote that has a very long refname, the
+   reporting output could have been corrupted by overrunning a static
+   buffer.
+
+ * "git checkout" and "git merge" treated in-tree .gitignore and exclude
+ file in $GIT_DIR/info/ directory inconsistently when deciding which
+ untracked files are ignored and expendable.
+
+Also contains minor fixes and documentation updates.
--- /dev/null
+Git v1.7.8.1 Release Notes
+==========================
+
+Fixes since v1.7.8
+------------------
+
+ * In some codepaths (notably, checkout and merge), the ignore patterns
+ recorded in $GIT_DIR/info/exclude were not honored. They now are.
+
+ * After fetching from a remote that has a very long refname, the
+   reporting output could have been corrupted by overrunning a static
+   buffer.
+
+ * "git checkout" and "git merge" treated in-tree .gitignore and exclude
+ file in $GIT_DIR/info/ directory inconsistently when deciding which
+ untracked files are ignored and expendable.
+
+Also contains minor fixes and documentation updates.
Updates since v1.7.8
--------------------
+ * gitk updates accumulated since early 2011.
+
+ * git-gui updated to 0.16.0.
+
+ * git-p4 (in contrib/) updates.
+
* Porcelain commands like "git reset" did not distinguish deletions
and type-changes from ordinary modification, and reported them with
the same 'M' moniker. They now use 'D' (for deletion) and 'T' (for
type-change) to match "git status -s" and "git diff --name-status".
+ * "git add" learned to stream large files directly into a packfile
+ instead of writing them into individual loose object files.
+
+ * "git branch -m <current branch> HEAD" is an obvious no-op and is
+ now allowed.
+
+ * "git checkout -B <current branch> <elsewhere>" is a more intuitive
+ way to spell "git reset --keep <elsewhere>".
+
+ * "git checkout" and "git merge" learned "--no-overwrite-ignore" option
+ to tell Git that untracked and ignored files are not expendable.
+
+ * "git commit --amend" learned "--no-edit" option to say that the
+ user is amending the tree being recorded, without updating the
+ commit log message.
+
* fsck and prune are relatively lengthy operations that still go
silent while making the end-user wait. They learned to give progress
output like other slow operations.
* The set of built-in function-header patterns for various languages
knows MATLAB.
-May also contain documentation updates and code clean-ups.
+ * "git pull" can be used to fetch and merge an annotated/signed tag,
+ instead of the tip of a topic branch. The GPG signature from the
+ signed tag is recorded in the resulting merge commit for later
+ auditing.
+
+ * "git branch --edit-description" can be used to add descriptive text
+ to explain what a topic branch is about.
+
+ * "git fmt-merge-msg" learned to take the branch description into
+ account when preparing a merge summary that "git merge" records
+ when merging a local branch.
+
+ * "git request-pull" has been updated to convey more information
+ useful for integrators to decide if a topic is worth merging and
+ what is pulled is indeed what the requestor asked to pull,
+ including:
+
+ - the tip of the branch being requested to be merged;
+ - the branch description describing what the topic is about;
+ - the contents of the annotated tag, when requesting to pull a tag.
+
+ * "git pull" learned to notice 'pull.rebase' configuration variable,
+ which serves as a global fallback for setting 'branch.<name>.rebase'
+ configuration variable per branch.
+
+ * "git tag" learned "--cleanup" option to control how the whitespaces
+ and empty lines in tag message are cleaned up.
+
+ * "gitweb" learned to show side-by-side diff.
+
+Also contains minor documentation updates and code clean-ups.
Fixes since v1.7.8
------------------
- * In some codepaths (notably, checkout and merge), the ignore patterns
- recorded in $GIT_DIR/info/exclude were not honored. They now are.
- (merge fc001b5 nd/maint-ignore-exclude later to maint).
+ * The function header pattern for files with the "diff=cpp" attribute did
+ not consider "type *funcname(type param1,..." as the beginning of a
+ function.
+ (merge 37e7793 tr/userdiff-c-returns-pointer later to maint).
+
+ * The LF-to-CRLF streaming filter used when checking out a large-ish blob
+ fell into an infinite loop with a rare input.
+ (merge 284e3d2 cn/maint-lf-to-crlf-filter later to maint).
+
+ * "git archive" mistakenly allowed remote clients to ask for commits
+ that are not at the tip of any ref.
+ (merge 7b51c33 jk/maint-upload-archive later to maint).
+
+ * "git apply --check" did not error out when given an empty input
+ without any patch.
+ (merge cc64b31 bc/maint-apply-check-no-patch later to maint).
+
+ * The error message from "git diff" and "git status" when they fail
+ to inspect changes in submodules did not report which submodule they
+ had trouble with.
+ (merge 6a5ceda jl/submodule-status-failure-report later to maint).
+
+ * "fast-import" did not correctly update an existing notes tree,
+ possibly corrupting the fan-out.
+
+ * When a "reword" action in "git rebase -i" failed to run "commit --amend",
+ we did not give the control back to the user to resolve the situation, and
+ instead kept the original commit log message.
+ (merge 0becb3e aw/rebase-i-stop-on-failure-to-amend later to maint).
+
+--
+exec >/var/tmp/1
+O=v1.7.8-282-ga2add85
+echo O=$(git describe master)
+git log --first-parent --oneline --reverse ^$O master
+echo
+git shortlog --no-merges ^$O ^maint master
branch.<name>.rebase::
When true, rebase the branch <name> on top of the fetched branch,
instead of merging the default branch from the default remote when
- "git pull" is run.
- *NOTE*: this is a possibly dangerous operation; do *not* use
- it unless you understand the implications (see linkgit:git-rebase[1]
- for details).
+	"git pull" is run. See "pull.rebase" for doing this in a
+	non-branch-specific manner.
++
+*NOTE*: this is a possibly dangerous operation; do *not* use
+it unless you understand the implications (see linkgit:git-rebase[1]
+for details).
browser.<tool>.cmd::
Specify the command to invoke the specified browser. The
"{tilde}/" is expanded to the value of `$HOME` and "{tilde}user/" to the
specified user's home directory.
+credential.helper::
+ Specify an external helper to be called when a username or
+ password credential is needed; the helper may consult external
+ storage to avoid prompting the user for the credentials. See
+ linkgit:gitcredentials[7] for details.
+
+credential.useHttpPath::
+ When acquiring credentials, consider the "path" component of an http
+ or https URL to be important. Defaults to false. See
+ linkgit:gitcredentials[7] for more information.
+
+credential.username::
+	If no username is set for a network authentication, use this username
+	by default. See credential.<url>.* below, and
+	linkgit:gitcredentials[7].
+
+credential.<url>.*::
+ Any of the credential.* options above can be applied selectively to
+ some credentials. For example "credential.https://example.com.username"
+ would set the default username only for https connections to
+ example.com. See linkgit:gitcredentials[7] for details on how URLs are
+ matched.
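++
+For example, a hypothetical setup that enables the cache helper globally
+and sets a default username only for example.com could be configured as:
++
+-------------------------------------------------------
+$ git config --global credential.helper cache
+$ git config --global credential.https://example.com.username myusername
+-------------------------------------------------------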
+
include::diff-config.txt[]
difftool.<tool>.path::
Note that an alias with the same name as a built-in format
will be silently ignored.
+pull.rebase::
+ When true, rebase branches on top of the fetched branch, instead
+ of merging the default branch from the default remote when "git
+ pull" is run. See "branch.<name>.rebase" for setting this on a
+ per-branch basis.
++
+*NOTE*: this is a possibly dangerous operation; do *not* use
+it unless you understand the implications (see linkgit:git-rebase[1]
+for details).
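++
+As a minimal sketch (the branch name "maint" is only an example), one
+might enable rebasing pulls globally and turn it back off for a single
+branch:
++
+-------------------------------------------------------
+$ git config --global pull.rebase true
+$ git config branch.maint.rebase false
+-------------------------------------------------------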
+
pull.octopus::
The default merge strategy to use when pulling multiple branches
at once.
'git branch' [--set-upstream | --track | --no-track] [-l] [-f] <branchname> [<start-point>]
'git branch' (-m | -M) [<oldbranch>] <newbranch>
'git branch' (-d | -D) [-r] <branchname>...
+'git branch' --edit-description [<branchname>]
DESCRIPTION
-----------
like '--track' would when creating the branch, except that where
branch points to is not changed.
+--edit-description::
+ Open an editor and edit the text to explain what the branch is
+ for, to be used by various other commands (e.g. `request-pull`).
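++
+For example (the branch name is hypothetical), the following opens an
+editor on the description of the "topic" branch; the resulting text is
+stored in the `branch.topic.description` configuration variable:
++
+------------
+$ git branch --edit-description topic
+------------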
+
--contains <commit>::
Only list branches which contain the specified commit.
SYNOPSIS
--------
[verse]
-'git commit-tree' <tree> [(-p <parent commit>)...] < changelog
+'git commit-tree' <tree> [(-p <parent>)...] < changelog
+'git commit-tree' [(-p <parent>)...] [(-m <message>)...] [(-F <file>)...] <tree>
DESCRIPTION
-----------
linkgit:git-commit[1] instead.
Creates a new commit object based on the provided tree object and
-emits the new commit object id on stdout.
+emits the new commit object id on stdout. The log message is read
+from the standard input, unless `-m` or `-F` options are given.
A commit object may have any number of parents. With exactly one
parent, it is an ordinary commit. Having more than one parent makes
<tree>::
An existing tree object
--p <parent commit>::
+-p <parent>::
Each '-p' indicates the id of a parent commit object.
+-m <message>::
+	A paragraph in the commit log message. This can be given more than
+	once and each <message> becomes its own paragraph.
+
+-F <file>::
+ Read the commit log message from the given file. Use `-` to read
+ from the standard input.
+
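+For example, a hypothetical invocation that reuses the tree of HEAD,
+records HEAD as the sole parent, and supplies the log message with `-m`
+(printing the new commit object name on standard output) could be:
+
+------------
+$ git commit-tree -p HEAD -m "Reuse the tree of HEAD" HEAD^{tree}
+------------
+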
Commit Information
------------------
--- /dev/null
+git-credential-cache--daemon(1)
+===============================
+
+NAME
+----
+git-credential-cache--daemon - temporarily store user credentials in memory
+
+SYNOPSIS
+--------
+[verse]
+git credential-cache--daemon <socket>
+
+DESCRIPTION
+-----------
+
+NOTE: You probably don't want to invoke this command yourself; it is
+started automatically when you use linkgit:git-credential-cache[1].
+
+This command listens on the Unix domain socket specified by `<socket>`
+for `git-credential-cache` clients. Clients may store and retrieve
+credentials. Each credential is held for a timeout specified by the
+client; once no credentials are held, the daemon exits.
+
+GIT
+---
+Part of the linkgit:git[1] suite
--- /dev/null
+git-credential-cache(1)
+=======================
+
+NAME
+----
+git-credential-cache - helper to temporarily store passwords in memory
+
+SYNOPSIS
+--------
+-----------------------------
+git config credential.helper 'cache [options]'
+-----------------------------
+
+DESCRIPTION
+-----------
+
+This command caches credentials in memory for use by future git
+programs. The stored credentials never touch the disk, and are forgotten
+after a configurable timeout. The cache is accessible over a Unix
+domain socket, restricted to the current user by filesystem permissions.
+
+You probably don't want to invoke this command directly; it is meant to
+be used as a credential helper by other parts of git. See
+linkgit:gitcredentials[7] or `EXAMPLES` below.
+
+OPTIONS
+-------
+
+--timeout <seconds>::
+
+ Number of seconds to cache credentials (default: 900).
+
+--socket <path>::
+
+	Use `<path>` to contact a running cache daemon (or to start a
+	new cache daemon if one is not already running). Defaults to
+ `~/.git-credential-cache/socket`. If your home directory is on a
+ network-mounted filesystem, you may need to change this to a
+ local filesystem.
+
+CONTROLLING THE DAEMON
+----------------------
+
+If you would like the daemon to exit early, forgetting all cached
+credentials before their timeout, you can issue an `exit` action:
+
+--------------------------------------
+git credential-cache exit
+--------------------------------------
+
+EXAMPLES
+--------
+
+The point of this helper is to reduce the number of times you must type
+your username or password. For example:
+
+------------------------------------
+$ git config credential.helper cache
+$ git push http://example.com/repo.git
+Username: <type your username>
+Password: <type your password>
+
+[work for 5 more minutes]
+$ git push http://example.com/repo.git
+[your credentials are used automatically]
+------------------------------------
+
+You can provide options via the credential.helper configuration
+variable (this example drops the cache time to 5 minutes):
+
+-------------------------------------------------------
+$ git config credential.helper 'cache --timeout=300'
+-------------------------------------------------------
+
+GIT
+---
+Part of the linkgit:git[1] suite
--- /dev/null
+git-credential-store(1)
+=======================
+
+NAME
+----
+git-credential-store - helper to store credentials on disk
+
+SYNOPSIS
+--------
+-------------------
+git config credential.helper 'store [options]'
+-------------------
+
+DESCRIPTION
+-----------
+
+NOTE: Using this helper will store your passwords unencrypted on disk,
+protected only by filesystem permissions. If this is not an acceptable
+security tradeoff, try linkgit:git-credential-cache[1], or find a helper
+that integrates with secure storage provided by your operating system.
+
+This command stores credentials indefinitely on disk for use by future
+git programs.
+
+You probably don't want to invoke this command directly; it is meant to
+be used as a credential helper by other parts of git. See
+linkgit:gitcredentials[7] or `EXAMPLES` below.
+
+OPTIONS
+-------
+
+--store=<path>::
+
+ Use `<path>` to store credentials. The file will have its
+ filesystem permissions set to prevent other users on the system
+ from reading it, but will not be encrypted or otherwise
+ protected. Defaults to `~/.git-credentials`.
+
+EXAMPLES
+--------
+
+The point of this helper is to reduce the number of times you must type
+your username or password. For example:
+
+------------------------------------------
+$ git config credential.helper store
+$ git push http://example.com/repo.git
+Username: <type your username>
+Password: <type your password>
+
+[several days later]
+$ git push http://example.com/repo.git
+[your credentials are used automatically]
+------------------------------------------
+
+STORAGE FORMAT
+--------------
+
+The `.git-credentials` file is stored in plaintext. Each credential is
+stored on its own line as a URL like:
+
+------------------------------
+https://user:pass@example.com
+------------------------------
+
+When git needs authentication for a particular URL context,
+credential-store will consider that context a pattern to match against
+each entry in the credentials file. If the protocol, hostname, and
+username (if we already have one) match, then the password is returned
+to git. See the discussion of configuration in linkgit:gitcredentials[7]
+for more information.
+
+GIT
+---
+Part of the linkgit:git[1] suite
fetched, the rebase uses that information to avoid rebasing
non-local changes.
+
-See `branch.<name>.rebase` and `branch.autosetuprebase` in
+See `pull.rebase`, `branch.<name>.rebase` and `branch.autosetuprebase` in
linkgit:git-config[1] if you want to make `git pull` always use
`{litdd}rebase` instead of merging.
+
`refs/heads/master`. When we wanted to switch to another branch,
we did `ln -sf refs/heads/newbranch .git/HEAD`, and when we wanted
to find out which branch we are on, we did `readlink .git/HEAD`.
-This was fine, and internally that is what still happens by
-default, but on platforms that do not have working symlinks,
-or that do not have the `readlink(1)` command, this was a bit
-cumbersome. On some platforms, `ln -sf` does not even work as
-advertised (horrors). Therefore symbolic links are now deprecated
-and symbolic refs are used by default.
+But symbolic links are not entirely portable, so they are now
+deprecated and symbolic refs (as described above) are used by
+default.
'git symbolic-ref' will exit with status 0 if the contents of the
symbolic ref were printed correctly, with status 1 if the requested
Implies `-a` if none of `-a`, `-s`, or `-u <key-id>`
is given.
+--cleanup=<mode>::
+	This option sets how the tag message is cleaned up.
+	The '<mode>' can be one of 'verbatim', 'whitespace' and 'strip'. The
+	'strip' mode is the default. The 'verbatim' mode does not change the
+	message at all, 'whitespace' removes just leading/trailing whitespace
+	lines, and 'strip' removes both whitespace and commentary.
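++
+A minimal sketch (the tag name is hypothetical): keep the message exactly
+as typed, without stripping comment lines or whitespace:
++
+------------
+$ git tag -a --cleanup=verbatim -m "  keep this spacing  " v1.0-rc1
+------------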
+
<tagname>::
The name of the tag to create, delete, or describe.
The new tag name must pass all checks defined by
* release notes for
link:RelNotes/1.7.8.txt[1.7.8].
-* link:v1.7.7.1/git.html[documentation for release 1.7.7.1]
+* link:v1.7.7.5/git.html[documentation for release 1.7.7.5]
* release notes for
+ link:RelNotes/1.7.7.5.txt[1.7.7.5],
+ link:RelNotes/1.7.7.4.txt[1.7.7.4],
+ link:RelNotes/1.7.7.3.txt[1.7.7.3],
+ link:RelNotes/1.7.7.2.txt[1.7.7.2],
link:RelNotes/1.7.7.1.txt[1.7.7.1],
link:RelNotes/1.7.7.txt[1.7.7].
-* link:v1.7.6.4/git.html[documentation for release 1.7.6.4]
+* link:v1.7.6.5/git.html[documentation for release 1.7.6.5]
* release notes for
+ link:RelNotes/1.7.6.5.txt[1.7.6.5],
link:RelNotes/1.7.6.4.txt[1.7.6.4],
link:RelNotes/1.7.6.3.txt[1.7.6.3],
link:RelNotes/1.7.6.2.txt[1.7.6.2],
--- /dev/null
+gitcredentials(7)
+=================
+
+NAME
+----
+gitcredentials - providing usernames and passwords to git
+
+SYNOPSIS
+--------
+------------------
+git config credential.https://example.com.username myusername
+git config credential.helper "$helper $options"
+------------------
+
+DESCRIPTION
+-----------
+
+Git will sometimes need credentials from the user in order to perform
+operations; for example, it may need to ask for a username and password
+in order to access a remote repository over HTTP. This manual describes
+the mechanisms git uses to request these credentials, as well as some
+features to avoid inputting these credentials repeatedly.
+
+REQUESTING CREDENTIALS
+----------------------
+
+Without any credential helpers defined, git will try the following
+strategies to ask the user for usernames and passwords:
+
+1. If the `GIT_ASKPASS` environment variable is set, the program
+ specified by the variable is invoked. A suitable prompt is provided
+ to the program on the command line, and the user's input is read
+ from its standard output.
+
+2. Otherwise, if the `core.askpass` configuration variable is set, its
+ value is used as above.
+
+3. Otherwise, if the `SSH_ASKPASS` environment variable is set, its
+ value is used as above.
+
+4. Otherwise, the user is prompted on the terminal.
+
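+For example, pointing `GIT_ASKPASS` at a prompting program for a single
+push might look like this (the program path is hypothetical):
+
+-------------------------------------------
+$ GIT_ASKPASS=/usr/local/bin/my-askpass git push https://example.com/repo.git
+-------------------------------------------
+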
+AVOIDING REPETITION
+-------------------
+
+It can be cumbersome to input the same credentials over and over. Git
+provides two methods to reduce this annoyance:
+
+1. Static configuration of usernames for a given authentication context.
+
+2. Credential helpers to cache or store passwords, or to interact with
+ a system password wallet or keychain.
+
+The first is simple and appropriate if you do not have secure storage available
+for a password. It is generally configured by adding this to your config:
+
+---------------------------------------
+[credential "https://example.com"]
+ username = me
+---------------------------------------
+
+Credential helpers, on the other hand, are external programs from which git can
+request both usernames and passwords; they typically interface with secure
+storage provided by the OS or other programs.
+
+To use a helper, you must first select one to use. Git currently
+includes the following helpers:
+
+cache::
+
+ Cache credentials in memory for a short period of time. See
+ linkgit:git-credential-cache[1] for details.
+
+store::
+
+ Store credentials indefinitely on disk. See
+ linkgit:git-credential-store[1] for details.
+
+You may also have third-party helpers installed; search for
+`credential-*` in the output of `git help -a`, and consult the
+documentation of individual helpers. Once you have selected a helper,
+you can tell git to use it by putting its name into the
+credential.helper variable.
+
+1. Find a helper.
++
+-------------------------------------------
+$ git help -a | grep credential-
+credential-foo
+-------------------------------------------
+
+2. Read its description.
++
+-------------------------------------------
+$ git help credential-foo
+-------------------------------------------
+
+3. Tell git to use it.
++
+-------------------------------------------
+$ git config --global credential.helper foo
+-------------------------------------------
+
+If there are multiple instances of the `credential.helper` configuration
+variable, each helper will be tried in turn, and may provide a username,
+password, or nothing. Once git has acquired both a username and a
+password, no more helpers will be tried.
+
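+For example, to consult a hypothetical helper named "foo" first and fall
+back to the built-in cache helper, one possible configuration is:
+
+-------------------------------------------
+$ git config --global credential.helper foo
+$ git config --global --add credential.helper cache
+-------------------------------------------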
+
+CREDENTIAL CONTEXTS
+-------------------
+
+Git considers each credential to have a context defined by a URL. This context
+is used to look up context-specific configuration, and is passed to any
+helpers, which may use it as an index into secure storage.
+
+For instance, imagine we are accessing `https://example.com/foo.git`. When git
+looks into a config file to see if a section matches this context, it will
+consider the two a match if the context is a more-specific subset of the
+pattern in the config file. For example, if you have this in your config file:
+
+--------------------------------------
+[credential "https://example.com"]
+ username = foo
+--------------------------------------
+
+then we will match: both protocols are the same, both hosts are the same, and
+the "pattern" URL does not care about the path component at all. However, this
+context would not match:
+
+--------------------------------------
+[credential "https://kernel.org"]
+ username = foo
+--------------------------------------
+
+because the hostnames differ. Nor would it match `foo.example.com`; git
+compares hostnames exactly, without considering whether two hosts are part of
+the same domain. Likewise, a config entry for `http://example.com` would not
+match: git compares the protocols exactly.
+
+
+CONFIGURATION OPTIONS
+---------------------
+
+Options for a credential context can be configured either in
+`credential.\*` (which applies to all credentials), or
+`credential.<url>.\*`, where <url> matches the context as described
+above.
+
+The following options are available in either location:
+
+helper::
+
+ The name of an external credential helper, and any associated options.
+ If the helper name is not an absolute path, then the string `git
+ credential-` is prepended. The resulting string is executed by the
+ shell (so, for example, setting this to `foo --option=bar` will execute
+	`git credential-foo --option=bar` via the shell). See the manual of
+ specific helpers for examples of their use.
+
+username::
+
+ A default username, if one is not provided in the URL.
+
+useHttpPath::
+
+ By default, git does not consider the "path" component of an http URL
+ to be worth matching via external helpers. This means that a credential
+ stored for `https://example.com/foo.git` will also be used for
+ `https://example.com/bar.git`. If you do want to distinguish these
+ cases, set this option to `true`.
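++
+For example, to make git keep separate credentials for different
+repositories on the same host (a hypothetical setup):
++
+-------------------------------------------
+$ git config --global credential.useHttpPath true
+-------------------------------------------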
+
+
+CUSTOM HELPERS
+--------------
+
+You can write your own custom helpers to interface with any system in
+which you keep credentials. See the documentation for git's
+link:technical/api-credentials.html[credentials API] for details.
+
+GIT
+---
+Part of the linkgit:git[1] suite
--- /dev/null
+credentials API
+===============
+
+The credentials API provides an abstracted way of gathering username and
+password credentials from the user (even though credentials in the wider
+world can take many forms, in this document the word "credential" always
+refers to a username and password pair).
+
+Data Structures
+---------------
+
+`struct credential`::
+
+ This struct represents a single username/password combination
+ along with any associated context. All string fields should be
+ heap-allocated (or NULL if they are not known or not applicable).
+ The meaning of the individual context fields is the same as
+ their counterparts in the helper protocol; see the section below
+ for a description of each field.
++
+The `helpers` member of the struct is a `string_list` of helpers. Each
+string specifies an external helper which will be run, in order, to
+either acquire or store credentials. See the section on credential
+helpers below.
++
+This struct should always be initialized with `CREDENTIAL_INIT` or
+`credential_init`.
+
+
+Functions
+---------
+
+`credential_init`::
+
+ Initialize a credential structure, setting all fields to empty.
+
+`credential_clear`::
+
+ Free any resources associated with the credential structure,
+ returning it to a pristine initialized state.
+
+`credential_fill`::
+
+ Instruct the credential subsystem to fill the username and
+ password fields of the passed credential struct by first
+ consulting helpers, then asking the user. After this function
+ returns, the username and password fields of the credential are
+ guaranteed to be non-NULL. If an error occurs, the function will
+ die().
+
+`credential_reject`::
+
+ Inform the credential subsystem that the provided credentials
+ have been rejected. This will cause the credential subsystem to
+ notify any helpers of the rejection (which allows them, for
+ example, to purge the invalid credentials from storage). It
+ will also free() the username and password fields of the
+ credential and set them to NULL (readying the credential for
+ another call to `credential_fill`). Any errors from helpers are
+ ignored.
+
+`credential_approve`::
+
+ Inform the credential subsystem that the provided credentials
+ were successfully used for authentication. This will cause the
+ credential subsystem to notify any helpers of the approval, so
+ that they may store the result to be used again. Any errors
+ from helpers are ignored.
+
+`credential_from_url`::
+
+ Parse a URL into broken-down credential fields.
+
+Example
+-------
+
+The example below shows how the functions of the credential API could be
+used to log in to a fictitious "foo" service on a remote host:
+
+-----------------------------------------------------------------------
+int foo_login(struct foo_connection *f)
+{
+ int status;
+ /*
+ * Create a credential with some context; we don't yet know the
+ * username or password.
+ */
+
+ struct credential c = CREDENTIAL_INIT;
+ c.protocol = xstrdup("foo");
+ c.host = xstrdup(f->hostname);
+
+ /*
+ * Fill in the username and password fields by contacting
+ * helpers and/or asking the user. The function will die if it
+ * fails.
+ */
+ credential_fill(&c);
+
+ /*
+ * Otherwise, we have a username and password. Try to use it.
+ */
+ status = send_foo_login(f, c.username, c.password);
+ switch (status) {
+ case FOO_OK:
+ /* It worked. Store the credential for later use. */
+		credential_approve(&c);
+ break;
+ case FOO_BAD_LOGIN:
+ /* Erase the credential from storage so we don't try it
+ * again. */
+ credential_reject(&c);
+ break;
+ default:
+ /*
+		 * Some other error occurred. We don't know if the
+ * credential is good or bad, so report nothing to the
+ * credential subsystem.
+ */
+ }
+
+ /* Free any associated resources. */
+ credential_clear(&c);
+
+ return status;
+}
+-----------------------------------------------------------------------
+
+
+Credential Helpers
+------------------
+
+Credential helpers are programs executed by git to fetch or save
+credentials from and to long-term storage (where "long-term" is simply
+longer than a single git process; e.g., credentials may be stored
+in-memory for a few minutes, or indefinitely on disk).
+
+Each helper is specified by a single string. The string is transformed
+by git into a command to be executed using these rules:
+
+ 1. If the helper string begins with "!", it is considered a shell
+ snippet, and everything after the "!" becomes the command.
+
+ 2. Otherwise, if the helper string begins with an absolute path, the
+ verbatim helper string becomes the command.
+
+ 3. Otherwise, the string "git credential-" is prepended to the helper
+ string, and the result becomes the command.
+
+The resulting command then has an "operation" argument appended to it
+(see below for details), and the result is executed by the shell.
+
+Here are some example specifications:
+
+----------------------------------------------------
+# run "git credential-foo"
+foo
+
+# same as above, but pass an argument to the helper
+foo --bar=baz
+
+# the arguments are parsed by the shell, so use shell
+# quoting if necessary
+foo --bar="whitespace arg"
+
+# you can also use an absolute path, which will not use the git wrapper
+/path/to/my/helper --with-arguments
+
+# or you can specify your own shell snippet
+!f() { echo "password=`cat $HOME/.secret`"; }; f
+----------------------------------------------------
+
+Generally speaking, rule (3) above is the simplest for users to specify.
+Authors of credential helpers should make an effort to assist their
+users by naming their program "git-credential-$NAME", and putting it in
+the $PATH or $GIT_EXEC_PATH during installation, which will allow a user
+to enable it with `git config credential.helper $NAME`.
+
+When a helper is executed, it will have one "operation" argument
+appended to its command line, which is one of:
+
+`get`::
+
+ Return a matching credential, if any exists.
+
+`store`::
+
+ Store the credential, if applicable to the helper.
+
+`erase`::
+
+ Remove a matching credential, if any, from the helper's storage.
+
+The details of the credential will be provided on the helper's stdin
+stream. The credential is split into a set of named attributes.
+Attributes are provided to the helper, one per line. Each attribute is
+specified by a key-value pair, separated by an `=` (equals) sign,
+followed by a newline. The key may contain any bytes except `=`,
+newline, or NUL. The value may contain any bytes except newline or NUL.
+In both cases, all bytes are treated as-is (i.e., there is no quoting,
+and one cannot transmit a value with newline or NUL in it). The list of
+attributes is terminated by a blank line or end-of-file.
+
+Git will send the following attributes (but may not send all of
+them for a given credential; for example, a `host` attribute makes no
+sense when dealing with a non-network protocol):
+
+`protocol`::
+
+ The protocol over which the credential will be used (e.g.,
+ `https`).
+
+`host`::
+
+ The remote hostname for a network credential.
+
+`path`::
+
+ The path with which the credential will be used. E.g., for
+ accessing a remote https repository, this will be the
+ repository's path on the server.
+
+`username`::
+
+ The credential's username, if we already have one (e.g., from a
+ URL, from the user, or from a previously run helper).
+
+`password`::
+
+ The credential's password, if we are asking it to be stored.
+
+For a `get` operation, the helper should produce a list of attributes
+on stdout in the same format. A helper is free to produce a subset, or
+even no values at all if it has nothing useful to provide. Any provided
+attributes will overwrite those already known about by git.
+
+For a `store` or `erase` operation, the helper's output is ignored.
+If it fails to perform the requested operation, it may complain to
+stderr to inform the user. If it does not support the requested
+operation (e.g., a read-only store), it should silently ignore the
+request.
+
+If a helper receives any other operation, it should silently ignore the
+request. This leaves room for future operations to be added (older
+helpers will just ignore the new requests).
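+
+As an illustration (the file path is hypothetical), the protocol can be
+exercised by hand against the "store" helper; given a credentials file
+containing `https://user:pass@example.com`, a `get` request produces the
+matching attributes on stdout:
+
+----------------------------------------------------
+$ printf 'protocol=https\nhost=example.com\n\n' |
+  git credential-store --store=/tmp/test-credentials get
+username=user
+password=pass
+----------------------------------------------------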
#
# Define NO_IPV6 if you lack IPv6 support and getaddrinfo().
#
+# Define NO_UNIX_SOCKETS if your system does not offer unix sockets.
+#
# Define NO_SOCKADDR_STORAGE if your platform does not have struct
# sockaddr_storage.
#
PROGRAM_OBJS += upload-pack.o
PROGRAM_OBJS += http-backend.o
PROGRAM_OBJS += sh-i18n--envsubst.o
+PROGRAM_OBJS += credential-store.o
PROGRAMS += $(patsubst %.o,git-%$X,$(PROGRAM_OBJS))
TEST_PROGRAMS_NEED_X += test-chmtime
+TEST_PROGRAMS_NEED_X += test-credential
TEST_PROGRAMS_NEED_X += test-ctype
TEST_PROGRAMS_NEED_X += test-date
TEST_PROGRAMS_NEED_X += test-delta
TEST_PROGRAMS_NEED_X += test-dump-cache-tree
+TEST_PROGRAMS_NEED_X += test-scrap-cache-tree
TEST_PROGRAMS_NEED_X += test-genrandom
TEST_PROGRAMS_NEED_X += test-index-version
TEST_PROGRAMS_NEED_X += test-line-buffer
LIB_H += attr.h
LIB_H += blob.h
LIB_H += builtin.h
+LIB_H += bulk-checkin.h
LIB_H += cache.h
LIB_H += cache-tree.h
LIB_H += color.h
LIB_H += compat/win32/dirent.h
LIB_H += connected.h
LIB_H += convert.h
+LIB_H += credential.h
LIB_H += csum-file.h
LIB_H += decorate.h
LIB_H += delta.h
LIB_H += diff.h
LIB_H += dir.h
LIB_H += exec_cmd.h
+LIB_H += fmt-merge-msg.h
LIB_H += fsck.h
LIB_H += gettext.h
LIB_H += git-compat-util.h
+LIB_H += gpg-interface.h
LIB_H += graph.h
LIB_H += grep.h
LIB_H += hash.h
LIB_OBJS += bisect.o
LIB_OBJS += blob.o
LIB_OBJS += branch.o
+LIB_OBJS += bulk-checkin.o
LIB_OBJS += bundle.o
LIB_OBJS += cache-tree.o
LIB_OBJS += color.o
LIB_OBJS += connected.o
LIB_OBJS += convert.o
LIB_OBJS += copy.o
+LIB_OBJS += credential.o
LIB_OBJS += csum-file.o
LIB_OBJS += ctype.o
LIB_OBJS += date.o
LIB_OBJS += environment.o
LIB_OBJS += exec_cmd.o
LIB_OBJS += fsck.o
+LIB_OBJS += gpg-interface.o
LIB_OBJS += graph.o
LIB_OBJS += grep.o
LIB_OBJS += hash.o
NO_SYS_POLL_H = YesPlease
NO_SYMLINK_HEAD = YesPlease
NO_IPV6 = YesPlease
+ NO_UNIX_SOCKETS = YesPlease
NO_SETENV = YesPlease
NO_UNSETENV = YesPlease
NO_STRCASESTR = YesPlease
NO_LIBGEN_H = YesPlease
NO_SYS_POLL_H = YesPlease
NO_SYMLINK_HEAD = YesPlease
+ NO_UNIX_SOCKETS = YesPlease
NO_SETENV = YesPlease
NO_UNSETENV = YesPlease
NO_STRCASESTR = YesPlease
LIB_OBJS += compat/inet_pton.o
BASIC_CFLAGS += -DNO_INET_PTON
endif
+ifndef NO_UNIX_SOCKETS
+ LIB_OBJS += unix-socket.o
+ LIB_H += unix-socket.h
+ PROGRAM_OBJS += credential-cache.o
+ PROGRAM_OBJS += credential-cache--daemon.o
+endif
ifdef NO_ICONV
BASIC_CFLAGS += -DNO_ICONV
@echo GIT_TEST_CMP_USE_COPIED_CONTEXT=YesPlease >>$@
endif
@echo GETTEXT_POISON=\''$(subst ','\'',$(subst ','\'',$(GETTEXT_POISON)))'\' >>$@
+ @echo NO_UNIX_SOCKETS=\''$(subst ','\'',$(subst ','\'',$(NO_UNIX_SOCKETS)))'\' >>$@
### Detect Tck/Tk interpreter path changes
ifndef NO_TCLTK
}
static void parse_treeish_arg(const char **argv,
- struct archiver_args *ar_args, const char *prefix)
+ struct archiver_args *ar_args, const char *prefix,
+ int remote)
{
const char *name = argv[0];
const unsigned char *commit_sha1;
const struct commit *commit;
unsigned char sha1[20];
- if (get_sha1(name, sha1))
- die("Not a valid object name");
+ /* Remotes are only allowed to fetch actual refs */
+ if (remote) {
+ char *ref = NULL;
+ if (!dwim_ref(name, strlen(name), sha1, &ref))
+ die("no such ref: %s", name);
+ free(ref);
+ }
+ else {
+ if (get_sha1(name, sha1))
+ die("Not a valid object name");
+ }
commit = lookup_commit_reference_gently(sha1, 1);
if (commit) {
setup_git_directory();
}
- parse_treeish_arg(argv, &args, prefix);
+ parse_treeish_arg(argv, &args, prefix, remote);
parse_pathspec_arg(argv + 1, &args);
return ar->write_archive(ar, &args);
return 0;
}
+struct branch_desc_cb {
+ const char *config_name;
+ const char *value;
+};
+
+static int read_branch_desc_cb(const char *var, const char *value, void *cb)
+{
+ struct branch_desc_cb *desc = cb;
+ if (strcmp(desc->config_name, var))
+ return 0;
+ free((char *)desc->value);
+ return git_config_string(&desc->value, var, value);
+}
+
+int read_branch_desc(struct strbuf *buf, const char *branch_name)
+{
+ struct branch_desc_cb cb;
+ struct strbuf name = STRBUF_INIT;
+ strbuf_addf(&name, "branch.%s.description", branch_name);
+ cb.config_name = name.buf;
+ cb.value = NULL;
+ if (git_config(read_branch_desc_cb, &cb) < 0) {
+ strbuf_release(&name);
+ return -1;
+ }
+ if (cb.value)
+ strbuf_addstr(buf, cb.value);
+ strbuf_release(&name);
+ return 0;
+}
+
int validate_new_branchname(const char *name, struct strbuf *ref,
int force, int attr_only)
{
void create_branch(const char *head,
const char *name, const char *start_name,
- int force, int reflog, enum branch_track track)
+ int force, int reflog, int clobber_head,
+ enum branch_track track)
{
struct ref_lock *lock = NULL;
struct commit *commit;
explicit_tracking = 1;
if (validate_new_branchname(name, &ref, force,
- track == BRANCH_TRACK_OVERRIDE)) {
+ track == BRANCH_TRACK_OVERRIDE ||
+ clobber_head)) {
if (!force)
dont_change_ref = 1;
else
* branch for (if any).
*/
void create_branch(const char *head, const char *name, const char *start_name,
- int force, int reflog, enum branch_track track);
+ int force, int reflog,
+ int clobber_head, enum branch_track track);
/*
* Validates that the requested branch may be created, returning the
#define BRANCH_CONFIG_VERBOSE 01
extern void install_branch_config(int flag, const char *local, const char *origin, const char *remote);
+/*
+ * Read branch description
+ */
+extern int read_branch_desc(struct strbuf *, const char *branch_name);
+
#endif
extern const char git_more_info_string[];
extern void prune_packed_objects(int);
+
+struct fmt_merge_msg_opts {
+ unsigned add_title:1;
+ int shortlog_len;
+};
+
extern int fmt_merge_msg(struct strbuf *in, struct strbuf *out,
- int merge_title, int shortlog_len);
+ struct fmt_merge_msg_opts *);
extern void commit_notes(struct notes_tree *t, const char *msg);
struct notes_rewrite_cfg {
extern int cmd_update_ref(int argc, const char **argv, const char *prefix);
extern int cmd_update_server_info(int argc, const char **argv, const char *prefix);
extern int cmd_upload_archive(int argc, const char **argv, const char *prefix);
+extern int cmd_upload_archive_writer(int argc, const char **argv, const char *prefix);
extern int cmd_upload_tar(int argc, const char **argv, const char *prefix);
extern int cmd_var(int argc, const char **argv, const char *prefix);
extern int cmd_verify_tag(int argc, const char **argv, const char *prefix);
#include "diff.h"
#include "diffcore.h"
#include "revision.h"
+#include "bulk-checkin.h"
static const char * const builtin_add_usage[] = {
"git add [options] [--] <filepattern>...",
free(seen);
}
+ plug_bulk_checkin();
+
exit_status |= add_files_to_cache(prefix, pathspec, flags);
if (add_new_files)
exit_status |= add_files(&dir, flags);
+ unplug_bulk_checkin();
+
finish:
if (active_cache_changed) {
if (write_cache(newfd, active_cache, active_nr) ||
return -1;
}
-static int write_out_results(struct patch *list, int skipped_patch)
+static int write_out_results(struct patch *list)
{
int phase;
int errs = 0;
struct patch *l;
- if (!list && !skipped_patch)
- return error("No changes");
-
for (phase = 0; phase < 2; phase++) {
l = list;
while (l) {
offset += nr;
}
+ if (!list && !skipped_patch)
+ die("unrecognized input");
+
if (whitespace_error && (ws_error_action == die_on_ws_error))
apply = 0;
!apply_with_reject)
exit(1);
- if (apply && write_out_results(list, skipped_patch))
+ if (apply && write_out_results(list))
exit(1);
if (fake_ancestor)
int tz;
if (show_raw_time) {
- sprintf(time_buf, "%lu %s", time, tz_str);
+ snprintf(time_buf, sizeof(time_buf), "%lu %s", time, tz_str);
}
else {
tz = atoi(tz_str);
branch->merge[0] &&
branch->merge[0]->dst &&
(reference_name =
- resolve_ref(branch->merge[0]->dst, sha1, 1, NULL)) != NULL)
+ resolve_ref(branch->merge[0]->dst, sha1, 1, NULL)) != NULL) {
+ reference_name = xstrdup(reference_name);
reference_rev = lookup_commit_reference(sha1);
+ }
}
if (!reference_rev)
reference_rev = head_rev;
" '%s', even though it is merged to HEAD."),
name, reference_name);
}
+ free((char *)reference_name);
return merged;
}
free(name);
name = xstrdup(mkpath(fmt, bname.buf));
- if (!resolve_ref(name, sha1, 1, NULL)) {
+ if (read_ref(name, sha1)) {
error(_("%sbranch '%s' not found."),
remote, bname.buf);
ret = 1;
static void rename_branch(const char *oldname, const char *newname, int force)
{
struct strbuf oldref = STRBUF_INIT, newref = STRBUF_INIT, logmsg = STRBUF_INIT;
- unsigned char sha1[20];
struct strbuf oldsection = STRBUF_INIT, newsection = STRBUF_INIT;
int recovery = 0;
+ int clobber_head_ok;
if (!oldname)
die(_("cannot rename the current branch while not on any."));
* Bad name --- this could be an attempt to rename a
* ref that we used to allow to be created by accident.
*/
- if (resolve_ref(oldref.buf, sha1, 1, NULL))
+ if (ref_exists(oldref.buf))
recovery = 1;
else
die(_("Invalid branch name: '%s'"), oldname);
}
- validate_new_branchname(newname, &newref, force, 0);
+ /*
+ * A command like "git branch -M currentbranch currentbranch" cannot
+ * cause the worktree to become inconsistent with HEAD, so allow it.
+ */
+ clobber_head_ok = !strcmp(oldname, newname);
+
+ validate_new_branchname(newname, &newref, force, clobber_head_ok);
strbuf_addf(&logmsg, "Branch: renamed %s to %s",
oldref.buf, newref.buf);
return 0;
}
+static const char edit_description[] = "BRANCH_DESCRIPTION";
+
+static int edit_branch_description(const char *branch_name)
+{
+ FILE *fp;
+ int status;
+ struct strbuf buf = STRBUF_INIT;
+ struct strbuf name = STRBUF_INIT;
+
+ read_branch_desc(&buf, branch_name);
+ if (!buf.len || buf.buf[buf.len-1] != '\n')
+ strbuf_addch(&buf, '\n');
+ strbuf_addf(&buf,
+ "# Please edit the description for the branch\n"
+ "# %s\n"
+ "# Lines starting with '#' will be stripped.\n",
+ branch_name);
+	fp = fopen(git_path(edit_description), "w");
+	if (!fp ||
+	    fwrite(buf.buf, 1, buf.len, fp) < buf.len || fclose(fp)) {
+		strbuf_release(&buf);
+		return error(_("could not write branch description template: %s"),
+			     strerror(errno));
+ }
+ strbuf_reset(&buf);
+ if (launch_editor(git_path(edit_description), &buf, NULL)) {
+ strbuf_release(&buf);
+ return -1;
+ }
+ stripspace(&buf, 1);
+
+ strbuf_addf(&name, "branch.%s.description", branch_name);
+ status = git_config_set(name.buf, buf.buf);
+ strbuf_release(&name);
+ strbuf_release(&buf);
+
+ return status;
+}
+
int cmd_branch(int argc, const char **argv, const char *prefix)
{
int delete = 0, rename = 0, force_create = 0, list = 0;
int verbose = 0, abbrev = -1, detached = 0;
- int reflog = 0;
+ int reflog = 0, edit_description = 0;
enum branch_track track;
int kinds = REF_LOCAL_BRANCH;
struct commit_list *with_commit = NULL;
OPT_BIT('M', NULL, &rename, "move/rename a branch, even if target exists", 2),
OPT_BOOLEAN(0, "list", &list, "list branch names"),
OPT_BOOLEAN('l', "create-reflog", &reflog, "create the branch's reflog"),
+ OPT_BOOLEAN(0, "edit-description", &edit_description,
+ "edit the description for the branch"),
OPT__FORCE(&force_create, "force creation (when already exists)"),
{
OPTION_CALLBACK, 0, "no-merged", &merge_filter_ref,
argc = parse_options(argc, argv, prefix, options, builtin_branch_usage,
0);
- if (!delete && !rename && argc == 0)
+ if (!delete && !rename && !edit_description && argc == 0)
list = 1;
if (!!delete + !!rename + !!force_create + !!list > 1)
else if (list)
return print_ref_list(kinds, detached, verbose, abbrev,
with_commit, argv);
- else if (rename) {
+ else if (edit_description) {
+ const char *branch_name;
+ if (detached)
+ die("Cannot give description to detached HEAD");
+ if (!argc)
+ branch_name = head;
+ else if (argc == 1)
+ branch_name = argv[0];
+ else
+ usage_with_options(builtin_branch_usage, options);
+ if (edit_branch_description(branch_name))
+ return 1;
+ } else if (rename) {
if (argc == 1)
rename_branch(head, argv[0], rename > 1);
else if (argc == 2)
if (kinds != REF_LOCAL_BRANCH)
die(_("-a and -r options to 'git branch' do not make sense with a branch name"));
create_branch(head, argv[0], (argc == 2) ? argv[1] : head,
- force_create, reflog, track);
+ force_create, reflog, 0, track);
} else
usage_with_options(builtin_branch_usage, options);
int force_detach;
int writeout_stage;
int writeout_error;
+ int overwrite_ignore;
/* not set by parse_options */
int branch_exists;
commit_locked_index(lock_file))
die(_("unable to write new index file"));
- resolve_ref("HEAD", rev, 0, &flag);
+ read_ref_full("HEAD", rev, 0, &flag);
head = lookup_commit_reference_gently(rev, 1);
errs |= post_checkout_hook(head, head, 0);
topts.gently = opts->merge && old->commit;
topts.verbose_update = !opts->quiet;
topts.fn = twoway_merge;
- topts.dir = xcalloc(1, sizeof(*topts.dir));
- topts.dir->flags |= DIR_SHOW_IGNORED;
- setup_standard_excludes(topts.dir);
+ if (opts->overwrite_ignore) {
+ topts.dir = xcalloc(1, sizeof(*topts.dir));
+ topts.dir->flags |= DIR_SHOW_IGNORED;
+ setup_standard_excludes(topts.dir);
+ }
tree = parse_tree_indirect(old->commit ?
old->commit->object.sha1 :
EMPTY_TREE_SHA1_BIN);
else
create_branch(old->name, opts->new_branch, new->name,
opts->new_branch_force ? 1 : 0,
- opts->new_branch_log, opts->track);
+ opts->new_branch_log,
+ opts->new_branch_force ? 1 : 0,
+ opts->track);
new->name = opts->new_branch;
setup_branch_path(new);
}
create_symref("HEAD", new->path, msg.buf);
if (!opts->quiet) {
if (old->path && !strcmp(new->path, old->path)) {
- fprintf(stderr, _("Already on '%s'\n"),
- new->name);
+ if (opts->new_branch_force)
+ fprintf(stderr, _("Reset branch '%s'\n"),
+ new->name);
+ else
+ fprintf(stderr, _("Already on '%s'\n"),
+ new->name);
} else if (opts->new_branch) {
if (opts->branch_exists)
fprintf(stderr, _("Switched to and reset branch '%s'\n"), new->name);
unsigned char rev[20];
int flag;
memset(&old, 0, sizeof(old));
- old.path = xstrdup(resolve_ref("HEAD", rev, 0, &flag));
+ old.path = resolve_ref("HEAD", rev, 0, &flag);
+ if (old.path)
+ old.path = xstrdup(old.path);
old.commit = lookup_commit_reference_gently(rev, 1);
if (!(flag & REF_ISSYMREF)) {
free((char *)old.path);
setup_branch_path(new);
if (!check_refname_format(new->path, 0) &&
- resolve_ref(new->path, branch_rev, 1, NULL))
+ !read_ref(new->path, branch_rev))
hashcpy(rev, branch_rev);
else
new->path = NULL; /* not an existing branch */
3),
OPT__FORCE(&opts.force, "force checkout (throw away local modifications)"),
OPT_BOOLEAN('m', "merge", &opts.merge, "perform a 3-way merge with the new branch"),
+ OPT_BOOLEAN(0, "overwrite-ignore", &opts.overwrite_ignore, "update ignored files (default)"),
OPT_STRING(0, "conflict", &conflict_style, "style",
"conflict style (merge or diff3)"),
OPT_BOOLEAN('p', "patch", &patch_mode, "select hunks interactively"),
memset(&opts, 0, sizeof(opts));
memset(&new, 0, sizeof(new));
+ opts.overwrite_ignore = 1;
gitmodules_config();
git_config(git_checkout_config, &opts);
struct strbuf buf = STRBUF_INIT;
opts.branch_exists = validate_new_branchname(opts.new_branch, &buf,
- !!opts.new_branch_force, 0);
+ !!opts.new_branch_force,
+ !!opts.new_branch_force);
strbuf_release(&buf);
}
#include "builtin.h"
#include "utf8.h"
-static const char commit_tree_usage[] = "git commit-tree <sha1> [(-p <sha1>)...] < changelog";
+static const char commit_tree_usage[] = "git commit-tree [(-p <sha1>)...] [-m <message>] [-F <file>] <sha1> < changelog";
static void new_parent(struct commit *parent, struct commit_list **parents_p)
{
int cmd_commit_tree(int argc, const char **argv, const char *prefix)
{
- int i;
+ int i, got_tree = 0;
struct commit_list *parents = NULL;
unsigned char tree_sha1[20];
unsigned char commit_sha1[20];
if (argc < 2 || !strcmp(argv[1], "-h"))
usage(commit_tree_usage);
- if (get_sha1(argv[1], tree_sha1))
- die("Not a valid object name %s", argv[1]);
- for (i = 2; i < argc; i += 2) {
- unsigned char sha1[20];
- const char *a, *b;
- a = argv[i]; b = argv[i+1];
- if (!b || strcmp(a, "-p"))
- usage(commit_tree_usage);
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+ if (!strcmp(arg, "-p")) {
+ unsigned char sha1[20];
+ if (argc <= ++i)
+ usage(commit_tree_usage);
+ if (get_sha1(argv[i], sha1))
+ die("Not a valid object name %s", argv[i]);
+ assert_sha1_type(sha1, OBJ_COMMIT);
+ new_parent(lookup_commit(sha1), &parents);
+ continue;
+ }
+
+ if (!strcmp(arg, "-m")) {
+ if (argc <= ++i)
+ usage(commit_tree_usage);
+ if (buffer.len)
+ strbuf_addch(&buffer, '\n');
+ strbuf_addstr(&buffer, argv[i]);
+ strbuf_complete_line(&buffer);
+ continue;
+ }
+
+ if (!strcmp(arg, "-F")) {
+ int fd;
- if (get_sha1(b, sha1))
- die("Not a valid object name %s", b);
- assert_sha1_type(sha1, OBJ_COMMIT);
- new_parent(lookup_commit(sha1), &parents);
+ if (argc <= ++i)
+ usage(commit_tree_usage);
+ if (buffer.len)
+ strbuf_addch(&buffer, '\n');
+ if (!strcmp(argv[i], "-"))
+ fd = 0;
+ else {
+ fd = open(argv[i], O_RDONLY);
+ if (fd < 0)
+ die_errno("git commit-tree: failed to open '%s'",
+ argv[i]);
+ }
+ if (strbuf_read(&buffer, fd, 0) < 0)
+ die_errno("git commit-tree: failed to read '%s'",
+ argv[i]);
+ if (fd && close(fd))
+ die_errno("git commit-tree: failed to close '%s'",
+ argv[i]);
+ strbuf_complete_line(&buffer);
+ continue;
+ }
+
+ if (get_sha1(arg, tree_sha1))
+ die("Not a valid object name %s", arg);
+ if (got_tree)
+			die("Cannot give more than one tree");
+ got_tree = 1;
}
- if (strbuf_read(&buffer, 0, 0) < 0)
- die_errno("git commit-tree: failed to read");
+ if (!buffer.len) {
+ if (strbuf_read(&buffer, 0, 0) < 0)
+ die_errno("git commit-tree: failed to read");
+ }
if (commit_tree(buffer.buf, tree_sha1, parents, commit_sha1, NULL)) {
strbuf_release(&buffer);
static const char *author_message, *author_message_buffer;
static char *edit_message, *use_message;
static char *fixup_message, *squash_message;
-static int all, edit_flag, also, interactive, patch_interactive, only, amend, signoff;
+static int all, also, interactive, patch_interactive, only, amend, signoff;
+static int edit_flag = -1; /* unspecified */
static int quiet, verbose, no_verify, allow_empty, dry_run, renew_authorship;
static int no_post_rewrite, allow_empty_message;
static char *untracked_files_arg, *force_date, *ignore_submodule_arg;
OPT_BOOLEAN(0, "reset-author", &renew_authorship, "the commit is authored by me now (used with -C-c/--amend)"),
OPT_BOOLEAN('s', "signoff", &signoff, "add Signed-off-by:"),
OPT_FILENAME('t', "template", &template_file, "use specified template file"),
- OPT_BOOLEAN('e', "edit", &edit_flag, "force edit of commit"),
+ OPT_BOOL('e', "edit", &edit_flag, "force edit of commit"),
OPT_STRING(0, "cleanup", &cleanup_arg, "default", "how to strip spaces and #comments from message"),
OPT_BOOLEAN(0, "status", &include_status, "include status in commit message template"),
/* end commit message options */
fd = hold_locked_index(&index_lock, 1);
add_files_to_cache(also ? prefix : NULL, pathspec, 0);
refresh_cache_or_die(refresh_flags);
+ update_main_cache_tree(1);
if (write_cache(fd, active_cache, active_nr) ||
close_lock_file(&index_lock))
die(_("unable to write new_index file"));
fd = hold_locked_index(&index_lock, 1);
refresh_cache_or_die(refresh_flags);
if (active_cache_changed) {
+ update_main_cache_tree(1);
if (write_cache(fd, active_cache, active_nr) ||
commit_locked_index(&index_lock))
die(_("unable to write new_index file"));
*/
discard_cache();
read_cache_from(index_file);
- if (!active_cache_tree)
- active_cache_tree = cache_tree();
- if (cache_tree_update(active_cache_tree,
- active_cache, active_nr, 0, 0) < 0) {
+ if (update_main_cache_tree(0)) {
error(_("Error building trees"));
return 0;
}
if (logfile || message.len || use_message || fixup_message)
use_editor = 0;
- if (edit_flag)
- use_editor = 1;
+ if (0 <= edit_flag)
+ use_editor = edit_flag;
if (!use_editor)
setenv("GIT_EDITOR", ":", 1);
struct commit *commit;
struct strbuf format = STRBUF_INIT;
unsigned char junk_sha1[20];
- const char *head = resolve_ref("HEAD", junk_sha1, 0, NULL);
+ const char *head;
struct pretty_print_context pctx = {0};
struct strbuf author_ident = STRBUF_INIT;
struct strbuf committer_ident = STRBUF_INIT;
rev.diffopt.break_opt = 0;
diff_setup_done(&rev.diffopt);
+ head = resolve_ref("HEAD", junk_sha1, 0, NULL);
printf("[%s%s ",
!prefixcmp(head, "refs/heads/") ?
head + 11 :
int allow_fast_forward = 1;
struct wt_status s;
struct commit *current_head = NULL;
+ struct commit_extra_header *extra = NULL;
if (argc == 2 && !strcmp(argv[1], "-h"))
usage_with_options(builtin_commit_usage, builtin_commit_options);
pptr = &commit_list_insert(c->item, pptr)->next;
} else if (whence == FROM_MERGE) {
struct strbuf m = STRBUF_INIT;
- struct commit *commit;
FILE *fp;
if (!reflog_msg)
die_errno(_("could not open '%s' for reading"),
git_path("MERGE_HEAD"));
while (strbuf_getline(&m, fp, '\n') != EOF) {
- unsigned char sha1[20];
- if (get_sha1_hex(m.buf, sha1) < 0)
+ struct commit *parent;
+
+ parent = get_merge_parent(m.buf);
+ if (!parent)
die(_("Corrupt MERGE_HEAD file (%s)"), m.buf);
- commit = lookup_commit_or_die(sha1, "MERGE_HEAD");
- pptr = &commit_list_insert(commit, pptr)->next;
+ pptr = &commit_list_insert(parent, pptr)->next;
}
fclose(fp);
strbuf_release(&m);
exit(1);
}
- if (commit_tree(sb.buf, active_cache_tree->sha1, parents, sha1,
- author_ident.buf)) {
+ if (amend)
+ extra = read_commit_extra_headers(current_head);
+
+ if (commit_tree_extended(sb.buf, active_cache_tree->sha1, parents, sha1,
+ author_ident.buf, extra)) {
rollback_index_files();
die(_("failed to write commit object"));
}
strbuf_release(&author_ident);
+ free_commit_extra_headers(extra);
ref_lock = lock_any_ref_for_update("HEAD",
!current_head
static int update_local_ref(struct ref *ref,
const char *remote,
- char *display)
+ struct strbuf *display)
{
struct commit *current = NULL, *updated;
enum object_type type;
struct branch *current_branch = branch_get(NULL);
const char *pretty_ref = prettify_refname(ref->name);
- *display = 0;
type = sha1_object_info(ref->new_sha1, NULL);
if (type < 0)
die(_("object %s not found"), sha1_to_hex(ref->new_sha1));
if (!hashcmp(ref->old_sha1, ref->new_sha1)) {
if (verbosity > 0)
- sprintf(display, "= %-*s %-*s -> %s", TRANSPORT_SUMMARY_WIDTH,
- _("[up to date]"), REFCOL_WIDTH, remote,
- pretty_ref);
+ strbuf_addf(display, "= %-*s %-*s -> %s",
+ TRANSPORT_SUMMARY_WIDTH,
+ _("[up to date]"), REFCOL_WIDTH,
+ remote, pretty_ref);
return 0;
}
* If this is the head, and it's not okay to update
* the head, and the old value of the head isn't empty...
*/
- sprintf(display, _("! %-*s %-*s -> %s (can't fetch in current branch)"),
- TRANSPORT_SUMMARY_WIDTH, _("[rejected]"), REFCOL_WIDTH, remote,
- pretty_ref);
+ strbuf_addf(display,
+ _("! %-*s %-*s -> %s (can't fetch in current branch)"),
+ TRANSPORT_SUMMARY_WIDTH, _("[rejected]"),
+ REFCOL_WIDTH, remote, pretty_ref);
return 1;
}
!prefixcmp(ref->name, "refs/tags/")) {
int r;
r = s_update_ref("updating tag", ref, 0);
- sprintf(display, "%c %-*s %-*s -> %s%s", r ? '!' : '-',
- TRANSPORT_SUMMARY_WIDTH, _("[tag update]"), REFCOL_WIDTH, remote,
- pretty_ref, r ? _(" (unable to update local ref)") : "");
+ strbuf_addf(display, "%c %-*s %-*s -> %s%s",
+ r ? '!' : '-',
+ TRANSPORT_SUMMARY_WIDTH, _("[tag update]"),
+ REFCOL_WIDTH, remote, pretty_ref,
+ r ? _(" (unable to update local ref)") : "");
return r;
}
}
r = s_update_ref(msg, ref, 0);
- sprintf(display, "%c %-*s %-*s -> %s%s", r ? '!' : '*',
- TRANSPORT_SUMMARY_WIDTH, what, REFCOL_WIDTH, remote, pretty_ref,
- r ? _(" (unable to update local ref)") : "");
+ strbuf_addf(display, "%c %-*s %-*s -> %s%s",
+ r ? '!' : '*',
+ TRANSPORT_SUMMARY_WIDTH, what,
+ REFCOL_WIDTH, remote, pretty_ref,
+ r ? _(" (unable to update local ref)") : "");
return r;
}
(recurse_submodules != RECURSE_SUBMODULES_ON))
check_for_new_submodule_commits(ref->new_sha1);
r = s_update_ref("fast-forward", ref, 1);
- sprintf(display, "%c %-*s %-*s -> %s%s", r ? '!' : ' ',
- TRANSPORT_SUMMARY_WIDTH, quickref, REFCOL_WIDTH, remote,
- pretty_ref, r ? _(" (unable to update local ref)") : "");
+ strbuf_addf(display, "%c %-*s %-*s -> %s%s",
+ r ? '!' : ' ',
+ TRANSPORT_SUMMARY_WIDTH, quickref,
+ REFCOL_WIDTH, remote, pretty_ref,
+ r ? _(" (unable to update local ref)") : "");
return r;
} else if (force || ref->force) {
char quickref[84];
(recurse_submodules != RECURSE_SUBMODULES_ON))
check_for_new_submodule_commits(ref->new_sha1);
r = s_update_ref("forced-update", ref, 1);
- sprintf(display, "%c %-*s %-*s -> %s (%s)", r ? '!' : '+',
- TRANSPORT_SUMMARY_WIDTH, quickref, REFCOL_WIDTH, remote,
- pretty_ref,
- r ? _("unable to update local ref") : _("forced update"));
+ strbuf_addf(display, "%c %-*s %-*s -> %s (%s)",
+ r ? '!' : '+',
+ TRANSPORT_SUMMARY_WIDTH, quickref,
+ REFCOL_WIDTH, remote, pretty_ref,
+ r ? _("unable to update local ref") : _("forced update"));
return r;
} else {
- sprintf(display, "! %-*s %-*s -> %s %s",
- TRANSPORT_SUMMARY_WIDTH, _("[rejected]"), REFCOL_WIDTH, remote,
- pretty_ref, _("(non-fast-forward)"));
+ strbuf_addf(display, "! %-*s %-*s -> %s %s",
+ TRANSPORT_SUMMARY_WIDTH, _("[rejected]"),
+ REFCOL_WIDTH, remote, pretty_ref,
+ _("(non-fast-forward)"));
return 1;
}
}
{
FILE *fp;
struct commit *commit;
- int url_len, i, note_len, shown_url = 0, rc = 0;
- char note[1024];
+ int url_len, i, shown_url = 0, rc = 0;
+ struct strbuf note = STRBUF_INIT;
const char *what, *kind;
struct ref *rm;
char *url, *filename = dry_run ? "/dev/null" : git_path("FETCH_HEAD");
if (4 < i && !strncmp(".git", url + i - 3, 4))
url_len = i - 3;
- note_len = 0;
+ strbuf_reset(&note);
if (*what) {
if (*kind)
- note_len += sprintf(note + note_len, "%s ",
- kind);
- note_len += sprintf(note + note_len, "'%s' of ", what);
+ strbuf_addf(&note, "%s ", kind);
+ strbuf_addf(&note, "'%s' of ", what);
}
- note[note_len] = '\0';
fprintf(fp, "%s\t%s\t%s",
- sha1_to_hex(commit ? commit->object.sha1 :
- rm->old_sha1),
+ sha1_to_hex(rm->old_sha1),
rm->merge ? "" : "not-for-merge",
- note);
+ note.buf);
for (i = 0; i < url_len; ++i)
if ('\n' == url[i])
fputs("\\n", fp);
fputc(url[i], fp);
fputc('\n', fp);
+ strbuf_reset(&note);
if (ref) {
- rc |= update_local_ref(ref, what, note);
+ rc |= update_local_ref(ref, what, &note);
free(ref);
} else
- sprintf(note, "* %-*s %-*s -> FETCH_HEAD",
- TRANSPORT_SUMMARY_WIDTH, *kind ? kind : "branch",
- REFCOL_WIDTH, *what ? what : "HEAD");
- if (*note) {
+ strbuf_addf(&note, "* %-*s %-*s -> FETCH_HEAD",
+ TRANSPORT_SUMMARY_WIDTH,
+ *kind ? kind : "branch",
+ REFCOL_WIDTH,
+ *what ? what : "HEAD");
+ if (note.len) {
if (verbosity >= 0 && !shown_url) {
fprintf(stderr, _("From %.*s\n"),
url_len, url);
shown_url = 1;
}
if (verbosity >= 0)
- fprintf(stderr, " %s\n", note);
+ fprintf(stderr, " %s\n", note.buf);
}
}
"branches"), remote_name);
abort:
+ strbuf_release(&note);
free(url);
fclose(fp);
return rc;
#include "revision.h"
#include "tag.h"
#include "string-list.h"
+#include "branch.h"
+#include "fmt-merge-msg.h"
+#include "gpg-interface.h"
static const char * const fmt_merge_msg_usage[] = {
"git fmt-merge-msg [-m <message>] [--log[=<n>]|--no-log] [--file <file>]",
NULL
};
-static int shortlog_len;
+static int use_branch_desc;
-static int fmt_merge_msg_config(const char *key, const char *value, void *cb)
+int fmt_merge_msg_config(const char *key, const char *value, void *cb)
{
if (!strcmp(key, "merge.log") || !strcmp(key, "merge.summary")) {
int is_bool;
- shortlog_len = git_config_bool_or_int(key, value, &is_bool);
- if (!is_bool && shortlog_len < 0)
+ merge_log_config = git_config_bool_or_int(key, value, &is_bool);
+ if (!is_bool && merge_log_config < 0)
return error("%s: negative length %s", key, value);
- if (is_bool && shortlog_len)
- shortlog_len = DEFAULT_MERGE_LOG_LEN;
+ if (is_bool && merge_log_config)
+ merge_log_config = DEFAULT_MERGE_LOG_LEN;
+ } else if (!strcmp(key, "merge.branchdesc")) {
+ use_branch_desc = git_config_bool(key, value);
}
return 0;
}
+/* merge data per repository where the merged tips came from */
struct src_data {
struct string_list branch, tag, r_branch, generic;
int head_status;
};
+struct origin_data {
+ unsigned char sha1[20];
+ unsigned is_local_branch:1;
+};
+
static void init_src_data(struct src_data *data)
{
data->branch.strdup_strings = 1;
static int handle_line(char *line)
{
int i, len = strlen(line);
- unsigned char *sha1;
+ struct origin_data *origin_data;
char *src, *origin;
struct src_data *src_data;
struct string_list_item *item;
return 2;
line[40] = 0;
- sha1 = xmalloc(20);
- i = get_sha1(line, sha1);
+ origin_data = xcalloc(1, sizeof(struct origin_data));
+ i = get_sha1(line, origin_data->sha1);
line[40] = '\t';
- if (i)
+ if (i) {
+ free(origin_data);
return 3;
+ }
if (line[len - 1] == '\n')
line[len - 1] = 0;
line += 42;
+ /*
+ * At this point, line points at the beginning of the comment,
+ * e.g. "branch 'frotz' of git://that/repository.git".
+ * Find the repository name and point src at it.
+ */
src = strstr(line, " of ");
if (src) {
*src = 0;
origin = src;
src_data->head_status |= 1;
} else if (!prefixcmp(line, "branch ")) {
+ origin_data->is_local_branch = 1;
origin = line + 7;
string_list_append(&src_data->branch, origin);
src_data->head_status |= 2;
sprintf(new_origin, "%s of %s", origin, src);
origin = new_origin;
}
- string_list_append(&origins, origin)->util = sha1;
+ if (strcmp(".", src))
+ origin_data->is_local_branch = 0;
+ string_list_append(&origins, origin)->util = origin_data;
return 0;
}
}
}
-static void shortlog(const char *name, unsigned char *sha1,
- struct commit *head, struct rev_info *rev, int limit,
- struct strbuf *out)
+static void add_branch_desc(struct strbuf *out, const char *name)
+{
+ struct strbuf desc = STRBUF_INIT;
+
+ if (!read_branch_desc(&desc, name)) {
+ const char *bp = desc.buf;
+ while (*bp) {
+ const char *ep = strchrnul(bp, '\n');
+ if (*ep)
+ ep++;
+ strbuf_addf(out, " : %.*s", (int)(ep - bp), bp);
+ bp = ep;
+ }
+ if (out->buf[out->len - 1] != '\n')
+ strbuf_addch(out, '\n');
+ }
+ strbuf_release(&desc);
+}
+
+static void shortlog(const char *name,
+ struct origin_data *origin_data,
+ struct commit *head,
+ struct rev_info *rev, int limit,
+ struct strbuf *out)
{
int i, count = 0;
struct commit *commit;
struct string_list subjects = STRING_LIST_INIT_DUP;
int flags = UNINTERESTING | TREESAME | SEEN | SHOWN | ADDED;
struct strbuf sb = STRBUF_INIT;
+ const unsigned char *sha1 = origin_data->sha1;
branch = deref_tag(parse_object(sha1), sha1_to_hex(sha1), 40);
if (!branch || branch->type != OBJ_COMMIT)
else
strbuf_addf(out, "\n* %s:\n", name);
+ if (origin_data->is_local_branch && use_branch_desc)
+ add_branch_desc(out, name);
+
for (i = 0; i < subjects.nr; i++)
if (i >= limit)
strbuf_addf(out, " ...\n");
string_list_clear(&subjects, 0);
}
-static void do_fmt_merge_msg_title(struct strbuf *out,
+static void fmt_merge_msg_title(struct strbuf *out,
const char *current_branch) {
int i = 0;
char *sep = "";
strbuf_addf(out, " into %s\n", current_branch);
}
-static int do_fmt_merge_msg(int merge_title, struct strbuf *in,
- struct strbuf *out, int shortlog_len) {
+static void fmt_tag_signature(struct strbuf *tagbuf,
+ struct strbuf *sig,
+ const char *buf,
+ unsigned long len)
+{
+ const char *tag_body = strstr(buf, "\n\n");
+ if (tag_body) {
+ tag_body += 2;
+ strbuf_add(tagbuf, tag_body, buf + len - tag_body);
+ }
+ strbuf_complete_line(tagbuf);
+ strbuf_add_lines(tagbuf, "# ", sig->buf, sig->len);
+}
+
+static void fmt_merge_msg_sigs(struct strbuf *out)
+{
+ int i, tag_number = 0, first_tag = 0;
+ struct strbuf tagbuf = STRBUF_INIT;
+
+ for (i = 0; i < origins.nr; i++) {
+ unsigned char *sha1 = origins.items[i].util;
+ enum object_type type;
+ unsigned long size, len;
+ char *buf = read_sha1_file(sha1, &type, &size);
+ struct strbuf sig = STRBUF_INIT;
+
+ if (!buf || type != OBJ_TAG)
+ goto next;
+ len = parse_signature(buf, size);
+
+ if (size == len)
+ ; /* merely annotated */
+ else if (verify_signed_buffer(buf, len, buf + len, size - len, &sig)) {
+ if (!sig.len)
+ strbuf_addstr(&sig, "gpg verification failed.\n");
+ }
+
+ if (!tag_number++) {
+ fmt_tag_signature(&tagbuf, &sig, buf, len);
+ first_tag = i;
+ } else {
+ if (tag_number == 2) {
+ struct strbuf tagline = STRBUF_INIT;
+ strbuf_addf(&tagline, "\n# %s\n",
+ origins.items[first_tag].string);
+ strbuf_insert(&tagbuf, 0, tagline.buf,
+ tagline.len);
+ strbuf_release(&tagline);
+ }
+ strbuf_addf(&tagbuf, "\n# %s\n",
+ origins.items[i].string);
+ fmt_tag_signature(&tagbuf, &sig, buf, len);
+ }
+ strbuf_release(&sig);
+ next:
+ free(buf);
+ }
+ if (tagbuf.len) {
+ strbuf_addch(out, '\n');
+ strbuf_addbuf(out, &tagbuf);
+ }
+ strbuf_release(&tagbuf);
+}
+
+int fmt_merge_msg(struct strbuf *in, struct strbuf *out,
+ struct fmt_merge_msg_opts *opts)
+{
int i = 0, pos = 0;
unsigned char head_sha1[20];
const char *current_branch;
die("No current branch");
if (!prefixcmp(current_branch, "refs/heads/"))
current_branch += 11;
+ current_branch = xstrdup(current_branch);
/* get a line */
while (pos < in->len) {
die ("Error in line %d: %.*s", i, len, p);
}
- if (!srcs.nr)
- return 0;
+ if (opts->add_title && srcs.nr)
+ fmt_merge_msg_title(out, current_branch);
- if (merge_title)
- do_fmt_merge_msg_title(out, current_branch);
+ if (origins.nr)
+ fmt_merge_msg_sigs(out);
- if (shortlog_len) {
+ if (opts->shortlog_len) {
struct commit *head;
struct rev_info rev;
strbuf_addch(out, '\n');
for (i = 0; i < origins.nr; i++)
- shortlog(origins.items[i].string, origins.items[i].util,
- head, &rev, shortlog_len, out);
+ shortlog(origins.items[i].string,
+ origins.items[i].util,
+ head, &rev, opts->shortlog_len, out);
}
- return 0;
-}
-int fmt_merge_msg(struct strbuf *in, struct strbuf *out,
- int merge_title, int shortlog_len) {
- return do_fmt_merge_msg(merge_title, in, out, shortlog_len);
+ strbuf_complete_line(out);
+ free((char *)current_branch);
+ return 0;
}
int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix)
{
const char *inpath = NULL;
const char *message = NULL;
+ int shortlog_len = -1;
struct option options[] = {
{ OPTION_INTEGER, 0, "log", &shortlog_len, "n",
"populate log with at most <n> entries from shortlog",
FILE *in = stdin;
struct strbuf input = STRBUF_INIT, output = STRBUF_INIT;
int ret;
+ struct fmt_merge_msg_opts opts;
git_config(fmt_merge_msg_config, NULL);
argc = parse_options(argc, argv, prefix, options, fmt_merge_msg_usage,
0);
if (argc > 0)
usage_with_options(fmt_merge_msg_usage, options);
- if (message && !shortlog_len) {
- char nl = '\n';
- write_in_full(STDOUT_FILENO, message, strlen(message));
- write_in_full(STDOUT_FILENO, &nl, 1);
- return 0;
- }
if (shortlog_len < 0)
- die("Negative --log=%d", shortlog_len);
+ shortlog_len = (merge_log_config > 0) ? merge_log_config : 0;
if (inpath && strcmp(inpath, "-")) {
in = fopen(inpath, "r");
if (message)
strbuf_addstr(&output, message);
- ret = fmt_merge_msg(&input, &output,
- message ? 0 : 1,
- shortlog_len);
+ memset(&opts, 0, sizeof(opts));
+ opts.add_title = !message;
+ opts.shortlog_len = shortlog_len;
+
+ ret = fmt_merge_msg(&input, &output, &opts);
if (ret)
return ret;
write_in_full(STDOUT_FILENO, output.buf, output.len);
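The reworked fmt_merge_msg() entry point now takes its options through a struct rather than positional flags. A minimal, illustrative caller might look like the sketch below; the input strbuf is assumed to hold FETCH_HEAD-style lines, and the option values shown are examples only.

#include "cache.h"
#include "fmt-merge-msg.h"

/* Illustrative only: format a default merge message from FETCH_HEAD data. */
static void format_default_merge_message(struct strbuf *heads, struct strbuf *out)
{
	struct fmt_merge_msg_opts opts;

	memset(&opts, 0, sizeof(opts));
	opts.add_title = 1;                        /* behave as if no -m was given */
	opts.shortlog_len = DEFAULT_MERGE_LOG_LEN; /* as if merge.log were set to true */

	if (fmt_merge_msg(heads, out, &opts))
		die("unable to format merge message");
}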
#include "remote.h"
#include "string-list.h"
#include "parse-options.h"
+#include "branch.h"
/* Set a default date-time format for git log ("log.date" config variable) */
static const char *default_date_mode = NULL;
printf("-- \n%s\n\n", signature);
}
+static void add_branch_description(struct strbuf *buf, const char *branch_name)
+{
+ struct strbuf desc = STRBUF_INIT;
+ if (!branch_name || !*branch_name)
+ return;
+ read_branch_desc(&desc, branch_name);
+ if (desc.len) {
+ strbuf_addch(buf, '\n');
+ strbuf_add(buf, desc.buf, desc.len);
+ strbuf_addch(buf, '\n');
+ }
+}
+
static void make_cover_letter(struct rev_info *rev, int use_stdout,
int numbered, int numbered_files,
struct commit *origin,
int nr, struct commit **list, struct commit *head,
+ const char *branch_name,
int quiet)
{
const char *committer;
pp_user_info(&pp, NULL, &sb, committer, encoding);
pp_title_line(&pp, &msg, &sb, encoding, need_8bit_cte);
pp_remainder(&pp, &msg, &sb, 0);
+ add_branch_description(&sb, branch_name);
printf("%s\n", sb.buf);
strbuf_release(&sb);
return 0;
}
+static char *find_branch_name(struct rev_info *rev)
+{
+ int i, positive = -1;
+ unsigned char branch_sha1[20];
+ struct strbuf buf = STRBUF_INIT;
+ const char *branch;
+
+ for (i = 0; i < rev->cmdline.nr; i++) {
+ if (rev->cmdline.rev[i].flags & UNINTERESTING)
+ continue;
+ if (positive < 0)
+ positive = i;
+ else
+ return NULL;
+ }
+ if (positive < 0)
+ return NULL;
+ strbuf_addf(&buf, "refs/heads/%s", rev->cmdline.rev[positive].name);
+ branch = resolve_ref(buf.buf, branch_sha1, 1, NULL);
+ if (!branch ||
+ prefixcmp(branch, "refs/heads/") ||
+ hashcmp(rev->cmdline.rev[positive].item->sha1, branch_sha1))
+ branch = NULL;
+ strbuf_release(&buf);
+ if (branch)
+ return xstrdup(rev->cmdline.rev[positive].name);
+ return NULL;
+}
+
int cmd_format_patch(int argc, const char **argv, const char *prefix)
{
struct commit *commit;
struct strbuf buf = STRBUF_INIT;
int use_patch_format = 0;
int quiet = 0;
+ char *branch_name = NULL;
const struct option builtin_format_patch_options[] = {
{ OPTION_CALLBACK, 'n', "numbered", &numbered, NULL,
"use [PATCH n/m] even with a single patch",
* origin" that prepares what the origin side still
* does not have.
*/
+ unsigned char sha1[20];
+ const char *ref;
+
rev.pending.objects[0].item->flags |= UNINTERESTING;
add_head_to_pending(&rev);
+ ref = resolve_ref("HEAD", sha1, 1, NULL);
+ if (ref && !prefixcmp(ref, "refs/heads/"))
+ branch_name = xstrdup(ref + strlen("refs/heads/"));
+ else
+ branch_name = xstrdup(""); /* no branch */
}
/*
* Otherwise, it is "format-patch -22 HEAD", and/or
rev.show_root_diff = 1;
if (cover_letter) {
- /* remember the range */
+ /*
+ * NEEDSWORK: randomly pick one positive commit to show
+ * diffstat; this is often the tip and the command
+ * happens to do the right thing in most cases, but a
+ * complex command like "--cover-letter a b c ^bottom"
+ * picks "c" and shows a diffstat between bottom..c,
+ * which may not match what the series represents at
+ * all and can be totally broken.
+ */
int i;
for (i = 0; i < rev.pending.nr; i++) {
struct object *o = rev.pending.objects[i].item;
if (!(o->flags & UNINTERESTING))
head = (struct commit *)o;
}
- /* We can't generate a cover letter without any patches */
+ /* There is nothing to show; it is not an error, though. */
if (!head)
return 0;
+ if (!branch_name)
+ branch_name = find_branch_name(&rev);
}
if (ignore_if_in_upstream) {
if (thread)
gen_message_id(&rev, "cover");
make_cover_letter(&rev, use_stdout, numbered, numbered_files,
- origin, nr, list, head, quiet);
+ origin, nr, list, head, branch_name, quiet);
total++;
start_number--;
}
fclose(stdout);
}
free(list);
+ free(branch_name);
string_list_clear(&extra_to, 0);
string_list_clear(&extra_cc, 0);
string_list_clear(&extra_hdr, 0);
#include "merge-recursive.h"
#include "resolve-undo.h"
#include "remote.h"
+#include "fmt-merge-msg.h"
#define DEFAULT_TWOHEAD (1<<0)
#define DEFAULT_OCTOPUS (1<<1)
NULL
};
-static int show_diffstat = 1, shortlog_len, squash;
+static int show_diffstat = 1, shortlog_len = -1, squash;
static int option_commit = 1, allow_fast_forward = 1;
static int fast_forward_only, option_edit;
static int allow_trivial = 1, have_message;
+static int overwrite_ignore = 1;
static struct strbuf merge_msg;
static struct commit_list *remoteheads;
static struct strategy **use_strategies;
OPT_BOOLEAN(0, "abort", &abort_current_merge,
"abort the current in-progress merge"),
OPT_SET_INT(0, "progress", &show_progress, "force progress reporting", 1),
+ OPT_BOOLEAN(0, "overwrite-ignore", &overwrite_ignore, "update ignored files (default)"),
OPT_END()
};
strbuf_release(&reflog_message);
}
-static struct object *want_commit(const char *name)
-{
- struct object *obj;
- unsigned char sha1[20];
- if (get_sha1(name, sha1))
- return NULL;
- obj = parse_object(sha1);
- return peel_to_type(name, 0, obj, OBJ_COMMIT);
-}
-
/* Get the name for the merge commit's message. */
static void merge_name(const char *remote, struct strbuf *msg)
{
- struct object *remote_head;
- unsigned char branch_head[20], buf_sha[20];
+ struct commit *remote_head;
+ unsigned char branch_head[20];
struct strbuf buf = STRBUF_INIT;
struct strbuf bname = STRBUF_INIT;
const char *ptr;
remote = bname.buf;
memset(branch_head, 0, sizeof(branch_head));
- remote_head = want_commit(remote);
+ remote_head = get_merge_parent(remote);
if (!remote_head)
die(_("'%s' does not point to a commit"), remote);
sha1_to_hex(branch_head), remote);
goto cleanup;
}
+ if (!prefixcmp(found_ref, "refs/tags/")) {
+ strbuf_addf(msg, "%s\t\ttag '%s' of .\n",
+ sha1_to_hex(branch_head), remote);
+ goto cleanup;
+ }
if (!prefixcmp(found_ref, "refs/remotes/")) {
strbuf_addf(msg, "%s\t\tremote-tracking branch '%s' of .\n",
sha1_to_hex(branch_head), remote);
strbuf_addstr(&truname, "refs/heads/");
strbuf_addstr(&truname, remote);
strbuf_setlen(&truname, truname.len - len);
- if (resolve_ref(truname.buf, buf_sha, 1, NULL)) {
+ if (ref_exists(truname.buf)) {
strbuf_addf(msg,
"%s\t\tbranch '%s'%s of .\n",
- sha1_to_hex(remote_head->sha1),
+ sha1_to_hex(remote_head->object.sha1),
truname.buf + 11,
(early ? " (early part)" : ""));
strbuf_release(&truname);
goto cleanup;
}
strbuf_addf(msg, "%s\t\tcommit '%s'\n",
- sha1_to_hex(remote_head->sha1), remote);
+ sha1_to_hex(remote_head->object.sha1), remote);
cleanup:
strbuf_release(&buf);
strbuf_release(&bname);
static int git_merge_config(const char *k, const char *v, void *cb)
{
+ int status;
+
if (branch && !prefixcmp(k, "branch.") &&
!prefixcmp(k + 7, branch) &&
!strcmp(k + 7 + strlen(branch), ".mergeoptions")) {
return git_config_string(&pull_octopus, k, v);
else if (!strcmp(k, "merge.renormalize"))
option_renormalize = git_config_bool(k, v);
- else if (!strcmp(k, "merge.log") || !strcmp(k, "merge.summary")) {
- int is_bool;
- shortlog_len = git_config_bool_or_int(k, v, &is_bool);
- if (!is_bool && shortlog_len < 0)
- return error(_("%s: negative length %s"), k, v);
- if (is_bool && shortlog_len)
- shortlog_len = DEFAULT_MERGE_LOG_LEN;
- return 0;
- } else if (!strcmp(k, "merge.ff")) {
+ else if (!strcmp(k, "merge.ff")) {
int boolval = git_config_maybe_bool(k, v);
if (0 <= boolval) {
allow_fast_forward = boolval;
default_to_upstream = git_config_bool(k, v);
return 0;
}
+ status = fmt_merge_msg_config(k, v, cb);
+ if (status)
+ return status;
return git_diff_ui_config(k, v, cb);
}
die(_("Unknown option for merge-recursive: -X%s"), xopts[x]);
o.branch1 = head_arg;
- o.branch2 = remoteheads->item->util;
+ o.branch2 = merge_remote_util(remoteheads->item)->name;
for (j = common; j; j = j->next)
commit_list_insert(j->item, &reversed);
memset(&trees, 0, sizeof(trees));
memset(&opts, 0, sizeof(opts));
memset(&t, 0, sizeof(t));
- memset(&dir, 0, sizeof(dir));
- dir.flags |= DIR_SHOW_IGNORED;
- setup_standard_excludes(&dir);
- opts.dir = &dir;
+ if (overwrite_ignore) {
+ memset(&dir, 0, sizeof(dir));
+ dir.flags |= DIR_SHOW_IGNORED;
+ setup_standard_excludes(&dir);
+ opts.dir = &dir;
+ }
opts.head_idx = 1;
opts.src_index = &the_index;
struct commit_list *j;
struct strbuf buf = STRBUF_INIT;
- for (j = remoteheads; j; j = j->next)
- strbuf_addf(&buf, "%s\n",
- sha1_to_hex(j->item->object.sha1));
+ for (j = remoteheads; j; j = j->next) {
+ unsigned const char *sha1;
+ struct commit *c = j->item;
+ if (c->util && merge_remote_util(c)->obj) {
+ sha1 = merge_remote_util(c)->obj->sha1;
+ } else {
+ sha1 = c->object.sha1;
+ }
+ strbuf_addf(&buf, "%s\n", sha1_to_hex(sha1));
+ }
filename = git_path("MERGE_HEAD");
fd = open(filename, O_WRONLY | O_CREAT, 0666);
if (fd < 0)
struct commit *head_commit;
struct strbuf buf = STRBUF_INIT;
const char *head_arg;
- int flag, i;
+ int flag, i, ret = 0;
int best_cnt = -1, merge_was_ok = 0, automerge_was_ok = 0;
struct commit_list *common = NULL;
const char *best_strategy = NULL, *wt_strategy = NULL;
* current branch.
*/
branch = resolve_ref("HEAD", head_sha1, 0, &flag);
- if (branch && !prefixcmp(branch, "refs/heads/"))
- branch += 11;
+ if (branch) {
+ if (!prefixcmp(branch, "refs/heads/"))
+ branch += 11;
+ branch = xstrdup(branch);
+ }
if (!branch || is_null_sha1(head_sha1))
head_commit = NULL;
else
parse_branch_merge_options(branch_mergeoptions);
argc = parse_options(argc, argv, prefix, builtin_merge_options,
builtin_merge_usage, 0);
+ if (shortlog_len < 0)
+ shortlog_len = (merge_log_config > 0) ? merge_log_config : 0;
if (verbosity < 0 && show_progress == -1)
show_progress = 0;
die(_("There is no merge to abort (MERGE_HEAD missing)."));
/* Invoke 'git reset --merge' */
- return cmd_reset(nargc, nargv, prefix);
+ ret = cmd_reset(nargc, nargv, prefix);
+ goto done;
}
if (read_cache_unmerged())
argv += 2;
argc -= 2;
} else if (!head_commit) {
- struct object *remote_head;
+ struct commit *remote_head;
/*
* If the merged head is a valid one there is no reason
* to forbid "git merge" into a branch yet to be born.
if (!allow_fast_forward)
die(_("Non-fast-forward commit does not make sense into "
"an empty head"));
- remote_head = want_commit(argv[0]);
+ remote_head = get_merge_parent(argv[0]);
if (!remote_head)
die(_("%s - not something we can merge"), argv[0]);
- read_empty(remote_head->sha1, 0);
- update_ref("initial pull", "HEAD", remote_head->sha1, NULL, 0,
- DIE_ON_ERR);
- return 0;
+ read_empty(remote_head->object.sha1, 0);
+ update_ref("initial pull", "HEAD", remote_head->object.sha1,
+ NULL, 0, DIE_ON_ERR);
+ goto done;
} else {
struct strbuf merge_names = STRBUF_INIT;
head_arg = "HEAD";
/*
- * All the rest are the commits being merged;
- * prepare the standard merge summary message to
- * be appended to the given message. If remote
- * is invalid we will die later in the common
- * codepath so we discard the error in this
- * loop.
+ * All the rest are the commits being merged; prepare
+ * the standard merge summary message to be appended
+ * to the given message.
*/
for (i = 0; i < argc; i++)
merge_name(argv[i], &merge_names);
if (!have_message || shortlog_len) {
- fmt_merge_msg(&merge_names, &merge_msg, !have_message,
- shortlog_len);
+ struct fmt_merge_msg_opts opts;
+ memset(&opts, 0, sizeof(opts));
+ opts.add_title = !have_message;
+ opts.shortlog_len = shortlog_len;
+
+ fmt_merge_msg(&merge_names, &merge_msg, &opts);
if (merge_msg.len)
strbuf_setlen(&merge_msg, merge_msg.len - 1);
}
strbuf_reset(&buf);
for (i = 0; i < argc; i++) {
- struct object *o;
- struct commit *commit;
-
- o = want_commit(argv[i]);
- if (!o)
+ struct commit *commit = get_merge_parent(argv[i]);
+ if (!commit)
die(_("%s - not something we can merge"), argv[i]);
- commit = lookup_commit(o->sha1);
- commit->util = (void *)argv[i];
remotes = &commit_list_insert(commit, remotes)->next;
-
- strbuf_addf(&buf, "GITHEAD_%s", sha1_to_hex(o->sha1));
+ strbuf_addf(&buf, "GITHEAD_%s",
+ sha1_to_hex(commit->object.sha1));
setenv(buf.buf, argv[i], 1);
strbuf_reset(&buf);
+ if (merge_remote_util(commit) &&
+ merge_remote_util(commit)->obj &&
+ merge_remote_util(commit)->obj->type == OBJ_TAG) {
+ option_edit = 1;
+ allow_fast_forward = 0;
+ }
}
if (!use_strategies) {
* but first the most common case of merging one remote.
*/
finish_up_to_date("Already up-to-date.");
- return 0;
+ goto done;
} else if (allow_fast_forward && !remoteheads->next &&
!common->next &&
!hashcmp(common->item->object.sha1, head_commit->object.sha1)) {
/* Again the most common case of merging one remote. */
struct strbuf msg = STRBUF_INIT;
- struct object *o;
+ struct commit *commit;
char hex[41];
strcpy(hex, find_unique_abbrev(head_commit->object.sha1, DEFAULT_ABBREV));
if (have_message)
strbuf_addstr(&msg,
" (no commit created; -m option ignored)");
- o = want_commit(sha1_to_hex(remoteheads->item->object.sha1));
- if (!o)
- return 1;
+ commit = remoteheads->item;
+ if (!commit) {
+ ret = 1;
+ goto done;
+ }
- if (checkout_fast_forward(head_commit->object.sha1, remoteheads->item->object.sha1))
- return 1;
+ if (checkout_fast_forward(head_commit->object.sha1,
+ commit->object.sha1)) {
+ ret = 1;
+ goto done;
+ }
- finish(head_commit, o->sha1, msg.buf);
+ finish(head_commit, commit->object.sha1, msg.buf);
drop_save();
- return 0;
+ goto done;
} else if (!remoteheads->next && common->next)
;
/*
git_committer_info(IDENT_ERROR_ON_NO_NAME);
printf(_("Trying really trivial in-index merge...\n"));
if (!read_tree_trivial(common->item->object.sha1,
- head_commit->object.sha1, remoteheads->item->object.sha1))
- return merge_trivial(head_commit);
+ head_commit->object.sha1,
+ remoteheads->item->object.sha1)) {
+ ret = merge_trivial(head_commit);
+ goto done;
+ }
printf(_("Nope.\n"));
}
} else {
}
if (up_to_date) {
finish_up_to_date("Already up-to-date. Yeeah!");
- return 0;
+ goto done;
}
}
* If we have a resulting tree, that means the strategy module
* auto resolved the merge cleanly.
*/
- if (automerge_was_ok)
- return finish_automerge(head_commit, common, result_tree,
- wt_strategy);
+ if (automerge_was_ok) {
+ ret = finish_automerge(head_commit, common, result_tree,
+ wt_strategy);
+ goto done;
+ }
/*
* Pick the result from the best strategy and have the user fix
else
fprintf(stderr, _("Merge with strategy %s failed.\n"),
use_strategies[0]->name);
- return 2;
+ ret = 2;
+ goto done;
} else if (best_strategy == wt_strategy)
; /* We already have its result in the working tree. */
else {
else
write_merge_state();
- if (merge_was_ok) {
+ if (merge_was_ok)
fprintf(stderr, _("Automatic merge went well; "
"stopped before committing as requested\n"));
- return 0;
- } else
- return suggest_conflicts(option_renormalize);
+ else
+ ret = suggest_conflicts(option_renormalize);
+
+done:
+ free((char *)branch);
+ return ret;
}
struct notes_tree *t;
struct commit *partial;
struct pretty_print_context pretty_ctx;
+ int ret;
/*
* Read partial merge result from .git/NOTES_MERGE_PARTIAL,
o->local_ref = resolve_ref("NOTES_MERGE_REF", sha1, 0, NULL);
if (!o->local_ref)
die("Failed to resolve NOTES_MERGE_REF");
+ o->local_ref = xstrdup(o->local_ref);
if (notes_merge_commit(o, t, partial, sha1))
die("Failed to finalize notes merge");
free_notes(t);
strbuf_release(&msg);
- return merge_abort(o);
+ ret = merge_abort(o);
+ free((char *)o->local_ref);
+ return ret;
}
static int merge(int argc, const char **argv, const char *prefix)
static const char *base_name;
static int progress = 1;
static int window = 10;
-static unsigned long pack_size_limit, pack_size_limit_cfg;
+static unsigned long pack_size_limit;
static int depth = 50;
static int delta_search_threads;
static int pack_to_stdout;
uint32_t i = 0, j;
struct sha1file *f;
off_t offset;
- struct pack_header hdr;
uint32_t nr_remaining = nr_result;
time_t last_mtime = 0;
struct object_entry **write_order;
unsigned char sha1[20];
char *pack_tmp_name = NULL;
- if (pack_to_stdout) {
+ if (pack_to_stdout)
f = sha1fd_throughput(1, "<stdout>", progress_state);
- } else {
- char tmpname[PATH_MAX];
- int fd;
- fd = odb_mkstemp(tmpname, sizeof(tmpname),
- "pack/tmp_pack_XXXXXX");
- pack_tmp_name = xstrdup(tmpname);
- f = sha1fd(fd, pack_tmp_name);
- }
-
- hdr.hdr_signature = htonl(PACK_SIGNATURE);
- hdr.hdr_version = htonl(PACK_VERSION);
- hdr.hdr_entries = htonl(nr_remaining);
- sha1write(f, &hdr, sizeof(hdr));
- offset = sizeof(hdr);
+ else
+ f = create_tmp_packfile(&pack_tmp_name);
+
+ offset = write_pack_header(f, nr_remaining);
+ if (!offset)
+ die_errno("unable to write pack header");
nr_written = 0;
for (; i < nr_objects; i++) {
struct object_entry *e = write_order[i];
if (!pack_to_stdout) {
struct stat st;
- const char *idx_tmp_name;
char tmpname[PATH_MAX];
- idx_tmp_name = write_idx_file(NULL, written_list, nr_written,
- &pack_idx_opts, sha1);
-
- snprintf(tmpname, sizeof(tmpname), "%s-%s.pack",
- base_name, sha1_to_hex(sha1));
- free_pack_by_name(tmpname);
- if (adjust_shared_perm(pack_tmp_name))
- die_errno("unable to make temporary pack file readable");
- if (rename(pack_tmp_name, tmpname))
- die_errno("unable to rename temporary pack file");
-
/*
* Packs are runtime accessed in their mtime
* order since newer packs are more likely to contain
* packs then we should modify the mtime of later ones
* to preserve this property.
*/
- if (stat(tmpname, &st) < 0) {
+ if (stat(pack_tmp_name, &st) < 0) {
warning("failed to stat %s: %s",
- tmpname, strerror(errno));
+ pack_tmp_name, strerror(errno));
} else if (!last_mtime) {
last_mtime = st.st_mtime;
} else {
struct utimbuf utb;
utb.actime = st.st_atime;
utb.modtime = --last_mtime;
- if (utime(tmpname, &utb) < 0)
+ if (utime(pack_tmp_name, &utb) < 0)
warning("failed utime() on %s: %s",
- tmpname, strerror(errno));
+ pack_tmp_name, strerror(errno));
}
- snprintf(tmpname, sizeof(tmpname), "%s-%s.idx",
- base_name, sha1_to_hex(sha1));
- if (adjust_shared_perm(idx_tmp_name))
- die_errno("unable to make temporary index file readable");
- if (rename(idx_tmp_name, tmpname))
- die_errno("unable to rename temporary index file");
-
- free((void *) idx_tmp_name);
+ /* Enough space for "-<sha-1>.pack"? */
+ if (sizeof(tmpname) <= strlen(base_name) + 50)
+ die("pack base name '%s' too long", base_name);
+ snprintf(tmpname, sizeof(tmpname), "%s-", base_name);
+ finish_tmp_packfile(tmpname, pack_tmp_name,
+ written_list, nr_written,
+ &pack_idx_opts, sha1);
free(pack_tmp_name);
puts(sha1_to_hex(sha1));
}
pack_idx_opts.version);
return 0;
}
- if (!strcmp(k, "pack.packsizelimit")) {
- pack_size_limit_cfg = git_config_ulong(k, v);
- return 0;
- }
return git_default_config(k, v, cb);
}
check_aliased_updates(commands);
+ free((char *)head_name);
head_name = resolve_ref("HEAD", sha1, 0, NULL);
+ if (head_name)
+ head_name = xstrdup(head_name);
for (cmd = commands; cmd; cmd = cmd->next)
if (!cmd->skip_update)
states->tracked.strdup_strings = 1;
states->stale.strdup_strings = 1;
for (ref = fetch_map; ref; ref = ref->next) {
- unsigned char sha1[20];
- if (!ref->peer_ref || read_ref(ref->peer_ref->name, sha1))
+ if (!ref->peer_ref || !ref_exists(ref->peer_ref->name))
string_list_append(&states->new, abbrev_branch(ref->name));
else
string_list_append(&states->tracked, abbrev_branch(ref->name));
int flag = 0;
unsigned char sha1[20];
- resolve_ref(item->string, sha1, 1, &flag);
+ read_ref_full(item->string, sha1, 1, &flag);
if (!(flag & REF_ISSYMREF))
continue;
if (delete_ref(item->string, NULL, REF_NODEREF))
usage_with_options(builtin_remote_sethead_usage, options);
if (head_name) {
- unsigned char sha1[20];
strbuf_addf(&buf2, "refs/remotes/%s/%s", argv[0], head_name);
/* make sure it's valid */
- if (!resolve_ref(buf2.buf, sha1, 1, NULL))
+ if (!ref_exists(buf2.buf))
result |= error("Not a valid ref: %s", buf2.buf);
else if (create_symref(buf.buf, buf2.buf, "remote set-head"))
result |= error("Could not setup %s", buf.buf);
had_error = 1;
continue;
}
- if (!resolve_ref(ref, sha1, 1, NULL)) {
+ if (read_ref(ref, sha1)) {
error("replace ref '%s' not found.", *p);
had_error = 1;
continue;
if (check_refname_format(ref, 0))
die("'%s' is not a valid ref name.", ref);
- if (!resolve_ref(ref, prev, 1, NULL))
+ if (read_ref(ref, prev))
hashclr(prev);
else if (!force)
die("replace ref '%s' already exists", ref);
int nr = 1;
int newfd;
struct tree_desc desc[2];
+ struct tree *tree;
struct unpack_trees_options opts;
struct lock_file *lock = xcalloc(1, sizeof(struct lock_file));
return error(_("Failed to find tree of %s."), sha1_to_hex(sha1));
if (unpack_trees(nr, desc, &opts))
return -1;
+
+ if (reset_type == MIXED || reset_type == HARD) {
+ tree = parse_tree_indirect(sha1);
+ prime_cache_tree(&active_cache_tree, tree);
+ }
+
if (write_cache(newfd, active_cache, active_nr) ||
commit_locked_index(lock))
return error(_("Could not write new index file."));
unsigned char sha1[20];
if (!prefixcmp(*pattern, "refs/") &&
- resolve_ref(*pattern, sha1, 1, NULL)) {
+ !read_ref(*pattern, sha1)) {
if (!quiet)
show_one(*pattern, sha1);
}
#include "parse-options.h"
#include "diff.h"
#include "revision.h"
+#include "gpg-interface.h"
static const char * const git_tag_usage[] = {
"git tag [-a|-s|-u <key-id>] [-f] [-m <msg>|-F <file>] <tagname> [<head>]",
NULL
};
-static char signingkey[1000];
-
struct tag_filter {
const char **patterns;
int lines;
had_error = 1;
continue;
}
- if (!resolve_ref(ref, sha1, 1, NULL)) {
+ if (read_ref(ref, sha1)) {
error(_("tag '%s' not found."), *p);
had_error = 1;
continue;
static int do_sign(struct strbuf *buffer)
{
- struct child_process gpg;
- const char *args[4];
- char *bracket;
- int len;
- int i, j;
-
- if (!*signingkey) {
- if (strlcpy(signingkey, git_committer_info(IDENT_ERROR_ON_NO_NAME),
- sizeof(signingkey)) > sizeof(signingkey) - 1)
- return error(_("committer info too long."));
- bracket = strchr(signingkey, '>');
- if (bracket)
- bracket[1] = '\0';
- }
-
- /* When the username signingkey is bad, program could be terminated
- * because gpg exits without reading and then write gets SIGPIPE. */
- signal(SIGPIPE, SIG_IGN);
-
- memset(&gpg, 0, sizeof(gpg));
- gpg.argv = args;
- gpg.in = -1;
- gpg.out = -1;
- args[0] = "gpg";
- args[1] = "-bsau";
- args[2] = signingkey;
- args[3] = NULL;
-
- if (start_command(&gpg))
- return error(_("could not run gpg."));
-
- if (write_in_full(gpg.in, buffer->buf, buffer->len) != buffer->len) {
- close(gpg.in);
- close(gpg.out);
- finish_command(&gpg);
- return error(_("gpg did not accept the tag data"));
- }
- close(gpg.in);
- len = strbuf_read(buffer, gpg.out, 1024);
- close(gpg.out);
-
- if (finish_command(&gpg) || !len || len < 0)
- return error(_("gpg failed to sign the tag"));
-
- /* Strip CR from the line endings, in case we are on Windows. */
- for (i = j = 0; i < buffer->len; i++)
- if (buffer->buf[i] != '\r') {
- if (i != j)
- buffer->buf[j] = buffer->buf[i];
- j++;
- }
- strbuf_setlen(buffer, j);
-
- return 0;
+ return sign_buffer(buffer, buffer, get_signing_key());
}
static const char tag_template[] =
N_("\n"
"#\n"
"# Write a tag message\n"
+ "# Lines starting with '#' will be ignored.\n"
"#\n");
-static void set_signingkey(const char *value)
-{
- if (strlcpy(signingkey, value, sizeof(signingkey)) >= sizeof(signingkey))
- die(_("signing key value too long (%.10s...)"), value);
-}
+static const char tag_template_nocleanup[] =
+ N_("\n"
+ "#\n"
+ "# Write a tag message\n"
+ "# Lines starting with '#' will be kept; you may remove them"
+ " yourself if you want to.\n"
+ "#\n");
static int git_tag_config(const char *var, const char *value, void *cb)
{
- if (!strcmp(var, "user.signingkey")) {
- if (!value)
- return config_error_nonbool(var);
- set_signingkey(value);
- return 0;
- }
-
+ int status = git_gpg_config(var, value, cb);
+ if (status)
+ return status;
return git_default_config(var, value, cb);
}
return 0;
}
+struct create_tag_options {
+ unsigned int message_given:1;
+ unsigned int sign;
+ enum {
+ CLEANUP_NONE,
+ CLEANUP_SPACE,
+ CLEANUP_ALL
+ } cleanup_mode;
+};
+
static void create_tag(const unsigned char *object, const char *tag,
- struct strbuf *buf, int message, int sign,
+ struct strbuf *buf, struct create_tag_options *opt,
unsigned char *prev, unsigned char *result)
{
enum object_type type;
if (header_len > sizeof(header_buf) - 1)
die(_("tag header too big."));
- if (!message) {
+ if (!opt->message_given) {
int fd;
/* write the template message before editing: */
if (!is_null_sha1(prev))
write_tag_body(fd, prev);
+ else if (opt->cleanup_mode == CLEANUP_ALL)
+ write_or_die(fd, _(tag_template),
+ strlen(_(tag_template)));
else
- write_or_die(fd, _(tag_template), strlen(_(tag_template)));
+ write_or_die(fd, _(tag_template_nocleanup),
+ strlen(_(tag_template_nocleanup)));
close(fd);
if (launch_editor(path, buf, NULL)) {
}
}
- stripspace(buf, 1);
+ if (opt->cleanup_mode != CLEANUP_NONE)
+ stripspace(buf, opt->cleanup_mode == CLEANUP_ALL);
- if (!message && !buf->len)
+ if (!opt->message_given && !buf->len)
die(_("no tag message?"));
strbuf_insert(buf, 0, header_buf, header_len);
- if (build_tag_object(buf, sign, result) < 0) {
+ if (build_tag_object(buf, opt->sign, result) < 0) {
if (path)
fprintf(stderr, _("The tag message has been left in %s\n"),
path);
unsigned char object[20], prev[20];
const char *object_ref, *tag;
struct ref_lock *lock;
-
- int annotate = 0, sign = 0, force = 0, lines = -1,
- list = 0, delete = 0, verify = 0;
+ struct create_tag_options opt;
+ char *cleanup_arg = NULL;
+ int annotate = 0, force = 0, lines = -1, list = 0,
+ delete = 0, verify = 0;
const char *msgfile = NULL, *keyid = NULL;
struct msg_arg msg = { 0, STRBUF_INIT };
struct commit_list *with_commit = NULL;
OPT_CALLBACK('m', "message", &msg, "message",
"tag message", parse_msg_arg),
OPT_FILENAME('F', "file", &msgfile, "read message from file"),
- OPT_BOOLEAN('s', "sign", &sign, "annotated and GPG-signed tag"),
+ OPT_BOOLEAN('s', "sign", &opt.sign, "annotated and GPG-signed tag"),
+ OPT_STRING(0, "cleanup", &cleanup_arg, "mode",
+ "how to strip spaces and #comments from message"),
OPT_STRING('u', "local-user", &keyid, "key-id",
"use another key to sign the tag"),
OPT__FORCE(&force, "replace the tag if exists"),
git_config(git_tag_config, NULL);
+ memset(&opt, 0, sizeof(opt));
+
argc = parse_options(argc, argv, prefix, options, git_tag_usage, 0);
if (keyid) {
- sign = 1;
- set_signingkey(keyid);
+ opt.sign = 1;
+ set_signing_key(keyid);
}
- if (sign)
+ if (opt.sign)
annotate = 1;
if (argc == 0 && !(delete || verify))
list = 1;
if (strbuf_check_tag_ref(&ref, tag))
die(_("'%s' is not a valid tag name."), tag);
- if (!resolve_ref(ref.buf, prev, 1, NULL))
+ if (read_ref(ref.buf, prev))
hashclr(prev);
else if (!force)
die(_("tag '%s' already exists"), tag);
+ opt.message_given = msg.given || msgfile;
+
+ if (!cleanup_arg || !strcmp(cleanup_arg, "strip"))
+ opt.cleanup_mode = CLEANUP_ALL;
+ else if (!strcmp(cleanup_arg, "verbatim"))
+ opt.cleanup_mode = CLEANUP_NONE;
+ else if (!strcmp(cleanup_arg, "whitespace"))
+ opt.cleanup_mode = CLEANUP_SPACE;
+ else
+ die(_("Invalid cleanup mode %s"), cleanup_arg);
+
if (annotate)
- create_tag(object, tag, &buf, msg.given || msgfile,
- sign, prev, object);
+ create_tag(object, tag, &buf, &opt, prev, object);
lock = lock_any_ref_for_update(ref.buf, prev, 0);
if (!lock)
#include "archive.h"
#include "pkt-line.h"
#include "sideband.h"
+#include "run-command.h"
static const char upload_archive_usage[] =
"git upload-archive <repo>";
static const char deadchild[] =
"git upload-archive: archiver died with error";
-static const char lostchild[] =
-"git upload-archive: archiver process was lost";
-
#define MAX_ARGS (64)
-static int run_upload_archive(int argc, const char **argv, const char *prefix)
+int cmd_upload_archive_writer(int argc, const char **argv, const char *prefix)
{
const char *sent_argv[MAX_ARGS];
const char *arg_cmd = "argument ";
int cmd_upload_archive(int argc, const char **argv, const char *prefix)
{
- pid_t writer;
- int fd1[2], fd2[2];
+ struct child_process writer = { argv };
+
/*
* Set up sideband subprocess.
*
* multiplexed out to our fd#1. If the child dies, we tell the other
* end over channel #3.
*/
- if (pipe(fd1) < 0 || pipe(fd2) < 0) {
- int err = errno;
- packet_write(1, "NACK pipe failed on the remote side\n");
- die("upload-archive: %s", strerror(err));
- }
- writer = fork();
- if (writer < 0) {
+ argv[0] = "upload-archive--writer";
+ writer.out = writer.err = -1;
+ writer.git_cmd = 1;
+ if (start_command(&writer)) {
int err = errno;
- packet_write(1, "NACK fork failed on the remote side\n");
+ packet_write(1, "NACK unable to spawn subprocess\n");
die("upload-archive: %s", strerror(err));
}
- if (!writer) {
- /* child - connect fd#1 and fd#2 to the pipe */
- dup2(fd1[1], 1);
- dup2(fd2[1], 2);
- close(fd1[1]); close(fd2[1]);
- close(fd1[0]); close(fd2[0]); /* we do not read from pipe */
-
- exit(run_upload_archive(argc, argv, prefix));
- }
- /* parent - read from child, multiplex and send out to fd#1 */
- close(fd1[1]); close(fd2[1]); /* we do not write to pipe */
packet_write(1, "ACK\n");
packet_flush(1);
while (1) {
struct pollfd pfd[2];
- int status;
- pfd[0].fd = fd1[0];
+ pfd[0].fd = writer.out;
pfd[0].events = POLLIN;
- pfd[1].fd = fd2[0];
+ pfd[1].fd = writer.err;
pfd[1].events = POLLIN;
if (poll(pfd, 2, -1) < 0) {
if (errno != EINTR) {
if (process_input(pfd[0].fd, 1))
continue;
- if (waitpid(writer, &status, 0) < 0)
- error_clnt("%s", lostchild);
- else if (!WIFEXITED(status) || WEXITSTATUS(status) > 0)
+ if (finish_command(&writer))
error_clnt("%s", deadchild);
packet_flush(1);
break;
#include "run-command.h"
#include <signal.h>
#include "parse-options.h"
+#include "gpg-interface.h"
static const char * const verify_tag_usage[] = {
"git verify-tag [-v|--verbose] <tag>...",
static int run_gpg_verify(const char *buf, unsigned long size, int verbose)
{
- struct child_process gpg;
- const char *args_gpg[] = {"gpg", "--verify", "FILE", "-", NULL};
- char path[PATH_MAX];
- size_t len;
- int fd, ret;
+ int len;
- fd = git_mkstemp(path, PATH_MAX, ".git_vtag_tmpXXXXXX");
- if (fd < 0)
- return error("could not create temporary file '%s': %s",
- path, strerror(errno));
- if (write_in_full(fd, buf, size) < 0)
- return error("failed writing temporary file '%s': %s",
- path, strerror(errno));
- close(fd);
-
- /* find the length without signature */
len = parse_signature(buf, size);
if (verbose)
write_in_full(1, buf, len);
- memset(&gpg, 0, sizeof(gpg));
- gpg.argv = args_gpg;
- gpg.in = -1;
- args_gpg[2] = path;
- if (start_command(&gpg)) {
- unlink(path);
- return error("could not run gpg.");
- }
-
- write_in_full(gpg.in, buf, len);
- close(gpg.in);
- ret = finish_command(&gpg);
+ if (size == len)
+ return error("no signature found");
- unlink_or_warn(path);
-
- return ret;
+ return verify_signed_buffer(buf, len, buf + len, size - len, NULL);
}
static int verify_tag(const char *name, int verbose)
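Both builtin/tag.c and builtin/verify-tag.c now delegate to the shared gpg-interface helpers (sign_buffer, verify_signed_buffer, get_signing_key). As an illustration of how those helpers compose, with a placeholder payload:

#include "cache.h"
#include "gpg-interface.h"

/* Illustration only: sign an arbitrary payload, then verify the detached signature. */
static int sign_and_check(struct strbuf *payload)
{
	struct strbuf sig = STRBUF_INIT;
	int err;

	if (sign_buffer(payload, &sig, get_signing_key()))
		return error("could not sign payload");
	err = verify_signed_buffer(payload->buf, payload->len,
				   sig.buf, sig.len, NULL);
	strbuf_release(&sig);
	return err;	/* 0 if gpg reports a good signature */
}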
--- /dev/null
+/*
+ * Copyright (c) 2011, Google Inc.
+ */
+#include "bulk-checkin.h"
+#include "csum-file.h"
+#include "pack.h"
+
+static int pack_compression_level = Z_DEFAULT_COMPRESSION;
+
+static struct bulk_checkin_state {
+ unsigned plugged:1;
+
+ char *pack_tmp_name;
+ struct sha1file *f;
+ off_t offset;
+ struct pack_idx_option pack_idx_opts;
+
+ struct pack_idx_entry **written;
+ uint32_t alloc_written;
+ uint32_t nr_written;
+} state;
+
+static void finish_bulk_checkin(struct bulk_checkin_state *state)
+{
+ unsigned char sha1[20];
+ char packname[PATH_MAX];
+ int i;
+
+ if (!state->f)
+ return;
+
+ if (state->nr_written == 0) {
+ close(state->f->fd);
+ unlink(state->pack_tmp_name);
+ goto clear_exit;
+ } else if (state->nr_written == 1) {
+ sha1close(state->f, sha1, CSUM_FSYNC);
+ } else {
+ int fd = sha1close(state->f, sha1, 0);
+ fixup_pack_header_footer(fd, sha1, state->pack_tmp_name,
+ state->nr_written, sha1,
+ state->offset);
+ close(fd);
+ }
+
+ sprintf(packname, "%s/pack/pack-", get_object_directory());
+ finish_tmp_packfile(packname, state->pack_tmp_name,
+ state->written, state->nr_written,
+ &state->pack_idx_opts, sha1);
+ for (i = 0; i < state->nr_written; i++)
+ free(state->written[i]);
+
+clear_exit:
+ free(state->written);
+ memset(state, 0, sizeof(*state));
+
+ /* Make objects we just wrote available to ourselves */
+ reprepare_packed_git();
+}
+
+static int already_written(struct bulk_checkin_state *state, unsigned char sha1[])
+{
+ int i;
+
+ /* The object may already exist in the repository */
+ if (has_sha1_file(sha1))
+ return 1;
+
+ /* Might want to keep the list sorted */
+ for (i = 0; i < state->nr_written; i++)
+ if (!hashcmp(state->written[i]->sha1, sha1))
+ return 1;
+
+ /* This is a new object we need to keep */
+ return 0;
+}
+
+/*
+ * Read the contents from fd for size bytes, streaming it to the
+ * packfile in state while updating the hash in ctx. Signal a failure
+ * by returning a negative value when the resulting pack would exceed
+ * the pack size limit and this is not the first object in the pack,
+ * so that the caller can discard what we wrote from the current pack
+ * by truncating it and opening a new one. The caller will then call
+ * us again after rewinding the input fd.
+ *
+ * The already_hashed_to pointer is kept untouched by the caller to
+ * make sure we do not hash the same byte when we are called
+ * again. This way, the caller does not have to checkpoint its hash
+ * status before calling us just in case we ask it to call us again
+ * with a new pack.
+ */
+static int stream_to_pack(struct bulk_checkin_state *state,
+ git_SHA_CTX *ctx, off_t *already_hashed_to,
+ int fd, size_t size, enum object_type type,
+ const char *path, unsigned flags)
+{
+ git_zstream s;
+ unsigned char obuf[16384];
+ unsigned hdrlen;
+ int status = Z_OK;
+ int write_object = (flags & HASH_WRITE_OBJECT);
+ off_t offset = 0;
+
+ memset(&s, 0, sizeof(s));
+ git_deflate_init(&s, pack_compression_level);
+
+ hdrlen = encode_in_pack_object_header(type, size, obuf);
+ s.next_out = obuf + hdrlen;
+ s.avail_out = sizeof(obuf) - hdrlen;
+
+ while (status != Z_STREAM_END) {
+ unsigned char ibuf[16384];
+
+ if (size && !s.avail_in) {
+ ssize_t rsize = size < sizeof(ibuf) ? size : sizeof(ibuf);
+ if (xread(fd, ibuf, rsize) != rsize)
+ die("failed to read %d bytes from '%s'",
+ (int)rsize, path);
+ offset += rsize;
+ if (*already_hashed_to < offset) {
+ size_t hsize = offset - *already_hashed_to;
+ if (rsize < hsize)
+ hsize = rsize;
+ if (hsize)
+ git_SHA1_Update(ctx, ibuf, hsize);
+ *already_hashed_to = offset;
+ }
+ s.next_in = ibuf;
+ s.avail_in = rsize;
+ size -= rsize;
+ }
+
+ status = git_deflate(&s, size ? 0 : Z_FINISH);
+
+ if (!s.avail_out || status == Z_STREAM_END) {
+ if (write_object) {
+ size_t written = s.next_out - obuf;
+
+ /* would we bust the size limit? */
+ if (state->nr_written &&
+ pack_size_limit_cfg &&
+ pack_size_limit_cfg < state->offset + written) {
+ git_deflate_abort(&s);
+ return -1;
+ }
+
+ sha1write(state->f, obuf, written);
+ state->offset += written;
+ }
+ s.next_out = obuf;
+ s.avail_out = sizeof(obuf);
+ }
+
+ switch (status) {
+ case Z_OK:
+ case Z_BUF_ERROR:
+ case Z_STREAM_END:
+ continue;
+ default:
+ die("unexpected deflate failure: %d", status);
+ }
+ }
+ git_deflate_end(&s);
+ return 0;
+}
+
+/* Lazily create backing packfile for the state */
+static void prepare_to_stream(struct bulk_checkin_state *state,
+ unsigned flags)
+{
+ if (!(flags & HASH_WRITE_OBJECT) || state->f)
+ return;
+
+ state->f = create_tmp_packfile(&state->pack_tmp_name);
+ reset_pack_idx_option(&state->pack_idx_opts);
+
+ /* Pretend we are going to write only one object */
+ state->offset = write_pack_header(state->f, 1);
+ if (!state->offset)
+ die_errno("unable to write pack header");
+}
+
+static int deflate_to_pack(struct bulk_checkin_state *state,
+ unsigned char result_sha1[],
+ int fd, size_t size,
+ enum object_type type, const char *path,
+ unsigned flags)
+{
+ off_t seekback, already_hashed_to;
+ git_SHA_CTX ctx;
+ unsigned char obuf[16384];
+ unsigned header_len;
+ struct sha1file_checkpoint checkpoint;
+ struct pack_idx_entry *idx = NULL;
+
+ seekback = lseek(fd, 0, SEEK_CUR);
+ if (seekback == (off_t) -1)
+ return error("cannot find the current offset");
+
+ header_len = sprintf((char *)obuf, "%s %" PRIuMAX,
+ typename(type), (uintmax_t)size) + 1;
+ git_SHA1_Init(&ctx);
+ git_SHA1_Update(&ctx, obuf, header_len);
+
+ /* Note: idx is non-NULL when we are writing */
+ if ((flags & HASH_WRITE_OBJECT) != 0)
+ idx = xcalloc(1, sizeof(*idx));
+
+ already_hashed_to = 0;
+
+ while (1) {
+ prepare_to_stream(state, flags);
+ if (idx) {
+ sha1file_checkpoint(state->f, &checkpoint);
+ idx->offset = state->offset;
+ crc32_begin(state->f);
+ }
+ if (!stream_to_pack(state, &ctx, &already_hashed_to,
+ fd, size, type, path, flags))
+ break;
+ /*
+ * Writing this object to the current pack will make
+ * it too big; we need to truncate it, start a new
+ * pack, and write into it.
+ */
+ if (!idx)
+ die("BUG: should not happen");
+ sha1file_truncate(state->f, &checkpoint);
+ state->offset = checkpoint.offset;
+ finish_bulk_checkin(state);
+ if (lseek(fd, seekback, SEEK_SET) == (off_t) -1)
+ return error("cannot seek back");
+ }
+ git_SHA1_Final(result_sha1, &ctx);
+ if (!idx)
+ return 0;
+
+ idx->crc32 = crc32_end(state->f);
+ if (already_written(state, result_sha1)) {
+ sha1file_truncate(state->f, &checkpoint);
+ state->offset = checkpoint.offset;
+ free(idx);
+ } else {
+ hashcpy(idx->sha1, result_sha1);
+ ALLOC_GROW(state->written,
+ state->nr_written + 1,
+ state->alloc_written);
+ state->written[state->nr_written++] = idx;
+ }
+ return 0;
+}
+
+int index_bulk_checkin(unsigned char *sha1,
+ int fd, size_t size, enum object_type type,
+ const char *path, unsigned flags)
+{
+ int status = deflate_to_pack(&state, sha1, fd, size, type,
+ path, flags);
+ if (!state.plugged)
+ finish_bulk_checkin(&state);
+ return status;
+}
+
+void plug_bulk_checkin(void)
+{
+ state.plugged = 1;
+}
+
+void unplug_bulk_checkin(void)
+{
+ state.plugged = 0;
+ if (state.f)
+ finish_bulk_checkin(&state);
+}
--- /dev/null
+/*
+ * Copyright (c) 2011, Google Inc.
+ */
+#ifndef BULK_CHECKIN_H
+#define BULK_CHECKIN_H
+
+#include "cache.h"
+
+extern int index_bulk_checkin(unsigned char sha1[],
+ int fd, size_t size, enum object_type type,
+ const char *path, unsigned flags);
+
+extern void plug_bulk_checkin(void);
+extern void unplug_bulk_checkin(void);
+
+#endif
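A minimal sketch of a caller driving this API; the descriptor, size, and path arrays are illustrative placeholders. plug_bulk_checkin() lets several index_bulk_checkin() calls share one temporary packfile, and unplug_bulk_checkin() finalizes it.

#include "bulk-checkin.h"

/* Illustrative only: stream several large blobs into a single packfile. */
static void store_blobs(int *fds, size_t *sizes, const char **paths, int nr)
{
	unsigned char sha1[20];
	int i;

	plug_bulk_checkin();            /* keep one pack open across calls */
	for (i = 0; i < nr; i++)
		if (index_bulk_checkin(sha1, fds[i], sizes[i], OBJ_BLOB,
				       paths[i], HASH_WRITE_OBJECT))
			die("could not store %s", paths[i]);
	unplug_bulk_checkin();          /* write out the pack and its index */
}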
continue;
if (dwim_ref(e->name, strlen(e->name), sha1, &ref) != 1)
continue;
- if (!resolve_ref(e->name, sha1, 1, &flag))
+ if (read_ref_full(e->name, sha1, 1, &flag))
flag = 0;
display_ref = (flag & REF_ISSYMREF) ? e->name : ref;
}
static int verify_cache(struct cache_entry **cache,
- int entries)
+ int entries, int silent)
{
int i, funny;
for (i = 0; i < entries; i++) {
struct cache_entry *ce = cache[i];
if (ce_stage(ce) || (ce->ce_flags & CE_INTENT_TO_ADD)) {
+ if (silent)
+ return -1;
if (10 < ++funny) {
fprintf(stderr, "...\n");
break;
struct cache_entry **cache,
int entries,
int missing_ok,
- int dryrun)
+ int dryrun,
+ int silent)
{
int i;
- i = verify_cache(cache, entries);
+ i = verify_cache(cache, entries, silent);
if (i)
return i;
i = update_one(it, cache, entries, "", 0, missing_ok, dryrun);
if (cache_tree_update(active_cache_tree,
active_cache, active_nr,
- missing_ok, 0) < 0)
+ missing_ok, 0, 0) < 0)
return WRITE_TREE_UNMERGED_INDEX;
if (0 <= newfd) {
if (!write_cache(newfd, active_cache, active_nr) &&
return it->entry_count;
return 0;
}
+
+int update_main_cache_tree(int silent)
+{
+ if (!the_index.cache_tree)
+ the_index.cache_tree = cache_tree();
+ return cache_tree_update(the_index.cache_tree,
+ the_index.cache, the_index.cache_nr, 0, 0, silent);
+}
struct cache_tree *cache_tree_read(const char *buffer, unsigned long size);
int cache_tree_fully_valid(struct cache_tree *);
-int cache_tree_update(struct cache_tree *, struct cache_entry **, int, int, int);
+int cache_tree_update(struct cache_tree *, struct cache_entry **, int, int, int, int);
+
+int update_main_cache_tree(int);
/* bitmasks to write_cache_as_tree flags */
#define WRITE_TREE_MISSING_OK 1
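update_main_cache_tree() is a thin wrapper over cache_tree_update() for the default index; its new silent argument suppresses the per-entry complaints from verify_cache(). An illustrative caller that probes quietly first and only then reports:

#include "cache.h"
#include "cache-tree.h"

/* Illustrative only: quiet first pass, noisy second pass for diagnostics. */
static int update_cache_tree_reporting(void)
{
	if (!update_main_cache_tree(1))
		return 0;                 /* silent attempt succeeded */
	return update_main_cache_tree(0); /* retry, letting errors print */
}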
void git_deflate_init(git_zstream *, int level);
void git_deflate_init_gzip(git_zstream *, int level);
void git_deflate_end(git_zstream *);
+int git_deflate_abort(git_zstream *);
int git_deflate_end_gently(git_zstream *);
int git_deflate(git_zstream *, int flush);
unsigned long git_deflate_bound(git_zstream *, unsigned long);
}
#define flexible_size(STRUCT,len) ((offsetof(struct STRUCT,name) + (len) + 8) & ~7)
-#define cache_entry_size(len) flexible_size(cache_entry,len)
+#define cache_entry_size(len) (offsetof(struct cache_entry,name) + (len) + 1)
#define ondisk_cache_entry_size(len) flexible_size(ondisk_cache_entry,len)
#define ondisk_cache_entry_extended_size(len) flexible_size(ondisk_cache_entry_extended,len)
struct string_list *resolve_undo;
struct cache_tree *cache_tree;
struct cache_time timestamp;
- void *alloc;
unsigned name_hash_initialized : 1,
initialized : 1;
struct hash_table name_hash;
extern size_t packed_git_limit;
extern size_t delta_base_cache_limit;
extern unsigned long big_file_threshold;
+extern unsigned long pack_size_limit_cfg;
extern int read_replace_refs;
extern int fsync_object_files;
extern int core_preload_index;
extern int get_sha1_hex(const char *hex, unsigned char *sha1);
extern char *sha1_to_hex(const unsigned char *sha1); /* static buffer result! */
+extern int read_ref_full(const char *filename, unsigned char *sha1,
+ int reading, int *flags);
extern int read_ref(const char *filename, unsigned char *sha1);
/*
extern int refname_match(const char *abbrev_name, const char *full_name, const char **rules);
extern const char *ref_rev_parse_rules[];
-extern const char *ref_fetch_rules[];
+#define ref_fetch_rules ref_rev_parse_rules
extern int create_symref(const char *ref, const char *refs_heads_master, const char *logmsg);
extern int validate_headref(const char *ref);
return result;
}
+static void handle_signed_tag(struct commit *parent, struct commit_extra_header ***tail)
+{
+ struct merge_remote_desc *desc;
+ struct commit_extra_header *mergetag;
+ char *buf;
+ unsigned long size, len;
+ enum object_type type;
+
+ desc = merge_remote_util(parent);
+ if (!desc || !desc->obj)
+ return;
+ buf = read_sha1_file(desc->obj->sha1, &type, &size);
+ if (!buf || type != OBJ_TAG)
+ goto free_return;
+ len = parse_signature(buf, size);
+ if (size == len)
+ goto free_return;
+ /*
+ * We could verify this signature and omit the tag when it does
+ * not validate, but the integrator may not have the public key
+ * of the signer of the tag being merged, while a later auditor
+ * may have it when auditing, so let's not run
+ * verify-signed-buffer here for now...
+ *
+ * if (verify_signed_buffer(buf, len, buf + len, size - len, ...))
+ * warn("warning: signed tag unverified.");
+ */
+ mergetag = xcalloc(1, sizeof(*mergetag));
+ mergetag->key = xstrdup("mergetag");
+ mergetag->value = buf;
+ mergetag->len = size;
+
+ **tail = mergetag;
+ *tail = &mergetag->next;
+ return;
+
+free_return:
+ free(buf);
+}
+
+void append_merge_tag_headers(struct commit_list *parents,
+ struct commit_extra_header ***tail)
+{
+ while (parents) {
+ struct commit *parent = parents->item;
+ handle_signed_tag(parent, tail);
+ parents = parents->next;
+ }
+}
+
+static void add_extra_header(struct strbuf *buffer,
+ struct commit_extra_header *extra)
+{
+ strbuf_addstr(buffer, extra->key);
+ if (extra->len)
+ strbuf_add_lines(buffer, " ", extra->value, extra->len);
+ else
+ strbuf_addch(buffer, '\n');
+}
+
+struct commit_extra_header *read_commit_extra_headers(struct commit *commit)
+{
+ struct commit_extra_header *extra = NULL;
+ unsigned long size;
+ enum object_type type;
+ char *buffer = read_sha1_file(commit->object.sha1, &type, &size);
+ if (buffer && type == OBJ_COMMIT)
+ extra = read_commit_extra_header_lines(buffer, size);
+ free(buffer);
+ return extra;
+}
+
+static inline int standard_header_field(const char *field, size_t len)
+{
+ return ((len == 4 && !memcmp(field, "tree ", 5)) ||
+ (len == 6 && !memcmp(field, "parent ", 7)) ||
+ (len == 6 && !memcmp(field, "author ", 7)) ||
+ (len == 9 && !memcmp(field, "committer ", 10)) ||
+ (len == 8 && !memcmp(field, "encoding ", 9)));
+}
+
+struct commit_extra_header *read_commit_extra_header_lines(const char *buffer, size_t size)
+{
+ struct commit_extra_header *extra = NULL, **tail = &extra, *it = NULL;
+ const char *line, *next, *eof, *eob;
+ struct strbuf buf = STRBUF_INIT;
+
+ for (line = buffer, eob = line + size;
+ line < eob && *line != '\n';
+ line = next) {
+ next = memchr(line, '\n', eob - line);
+ next = next ? next + 1 : eob;
+ if (*line == ' ') {
+ /* continuation */
+ if (it)
+ strbuf_add(&buf, line + 1, next - (line + 1));
+ continue;
+ }
+ if (it)
+ it->value = strbuf_detach(&buf, &it->len);
+ strbuf_reset(&buf);
+ it = NULL;
+
+ eof = strchr(line, ' ');
+ if (next <= eof)
+ eof = next;
+
+ if (standard_header_field(line, eof - line))
+ continue;
+
+ it = xcalloc(1, sizeof(*it));
+ it->key = xmemdupz(line, eof-line);
+ *tail = it;
+ tail = &it->next;
+ if (eof + 1 < next)
+ strbuf_add(&buf, eof + 1, next - (eof + 1));
+ }
+ if (it)
+ it->value = strbuf_detach(&buf, &it->len);
+ return extra;
+}
+
+void free_commit_extra_headers(struct commit_extra_header *extra)
+{
+ while (extra) {
+ struct commit_extra_header *next = extra->next;
+ free(extra->key);
+ free(extra->value);
+ free(extra);
+ extra = next;
+ }
+}
+
+int commit_tree(const char *msg, unsigned char *tree,
+ struct commit_list *parents, unsigned char *ret,
+ const char *author)
+{
+ struct commit_extra_header *extra = NULL, **tail = &extra;
+ int result;
+
+ append_merge_tag_headers(parents, &tail);
+ result = commit_tree_extended(msg, tree, parents, ret, author, extra);
+ free_commit_extra_headers(extra);
+ return result;
+}
+
static const char commit_utf8_warn[] =
"Warning: commit message does not conform to UTF-8.\n"
"You may want to amend it after fixing the message, or set the config\n"
"variable i18n.commitencoding to the encoding your project uses.\n";
-int commit_tree(const char *msg, unsigned char *tree,
- struct commit_list *parents, unsigned char *ret,
- const char *author)
+int commit_tree_extended(const char *msg, unsigned char *tree,
+ struct commit_list *parents, unsigned char *ret,
+ const char *author, struct commit_extra_header *extra)
{
int result;
int encoding_is_utf8;
*/
while (parents) {
struct commit_list *next = parents->next;
+ struct commit *parent = parents->item;
+
strbuf_addf(&buffer, "parent %s\n",
- sha1_to_hex(parents->item->object.sha1));
+ sha1_to_hex(parent->object.sha1));
free(parents);
parents = next;
}
strbuf_addf(&buffer, "committer %s\n", git_committer_info(IDENT_ERROR_ON_NO_NAME));
if (!encoding_is_utf8)
strbuf_addf(&buffer, "encoding %s\n", git_commit_encoding);
+
+ while (extra) {
+ add_extra_header(&buffer, extra);
+ extra = extra->next;
+ }
strbuf_addch(&buffer, '\n');
/* And add the comment */
strbuf_release(&buffer);
return result;
}
+
+struct commit *get_merge_parent(const char *name)
+{
+ struct object *obj;
+ struct commit *commit;
+ unsigned char sha1[20];
+ if (get_sha1(name, sha1))
+ return NULL;
+ obj = parse_object(sha1);
+ commit = (struct commit *)peel_to_type(name, 0, obj, OBJ_COMMIT);
+ if (commit && !commit->util) {
+ struct merge_remote_desc *desc;
+ desc = xmalloc(sizeof(*desc));
+ desc->obj = obj;
+ desc->name = strdup(name);
+ commit->util = desc;
+ }
+ return commit;
+}
struct commit_list *reduce_heads(struct commit_list *heads);
+struct commit_extra_header {
+ struct commit_extra_header *next;
+ char *key;
+ char *value;
+ size_t len;
+};
+
+extern void append_merge_tag_headers(struct commit_list *parents,
+ struct commit_extra_header ***tail);
+
extern int commit_tree(const char *msg, unsigned char *tree,
- struct commit_list *parents, unsigned char *ret,
- const char *author);
+ struct commit_list *parents, unsigned char *ret,
+ const char *author);
+
+extern int commit_tree_extended(const char *msg, unsigned char *tree,
+ struct commit_list *parents, unsigned char *ret,
+ const char *author,
+ struct commit_extra_header *);
+
+extern struct commit_extra_header *read_commit_extra_headers(struct commit *);
+extern struct commit_extra_header *read_commit_extra_header_lines(const char *buf, size_t len);
+
+extern void free_commit_extra_headers(struct commit_extra_header *extra);
+
+struct merge_remote_desc {
+ struct object *obj; /* the named object, could be a tag */
+ const char *name;
+};
+#define merge_remote_util(commit) ((struct merge_remote_desc *)((commit)->util))
+
+/*
+ * Given "name" from the command line to merge, find the commit object
+ * and return it, while storing merge_remote_desc in its ->util field,
+ * to allow callers to tell if we are told to merge a tag.
+ */
+struct commit *get_merge_parent(const char *name);
#endif /* COMMIT_H */
return strbuf_detach(&buf, NULL);
}
-pid_t waitpid(pid_t pid, int *status, unsigned options)
+pid_t waitpid(pid_t pid, int *status, int options)
{
HANDLE h = OpenProcess(SYNCHRONIZE | PROCESS_QUERY_INFORMATION,
FALSE, pid);
#define mkdir mingw_mkdir
#define WNOHANG 1
-pid_t waitpid(pid_t pid, int *status, unsigned options);
+pid_t waitpid(pid_t pid, int *status, int options);
#define kill mingw_kill
int mingw_kill(pid_t pid, int sig);
return 0;
}
+ if (!strcmp(var, "pack.packsizelimit")) {
+ pack_size_limit_cfg = git_config_ulong(var, value);
+ return 0;
+ }
/* Add other config variables here and to Documentation/config.txt. */
return 0;
}
fi
fi
-# __gitcomp accepts 1, 2, 3, or 4 arguments
-# generates completion reply with compgen
+# Generates completion reply with compgen, appending a space to possible
+# completion words, if necessary.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word (optional).
__gitcomp ()
{
local cur_="$cur"
esac
}
-# __git_heads accepts 0 or 1 arguments (to pass to __gitdir)
+# Generates completion reply with compgen from newline-separated possible
+# completion words by appending a space to all of them.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words, separated by a single newline.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word instead of
+# the default space (optional). If specified but empty, nothing is
+# appended.
+__gitcomp_nl ()
+{
+ local s=$'\n' IFS=' '$'\t'$'\n'
+ local cur_="$cur" suffix=" "
+
+ if [ $# -gt 2 ]; then
+ cur_="$3"
+ if [ $# -gt 3 ]; then
+ suffix="$4"
+ fi
+ fi
+
+ IFS=$s
+ COMPREPLY=($(compgen -P "${2-}" -S "$suffix" -W "$1" -- "$cur_"))
+}
+
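+# Editorial usage sketch (not part of this patch): the ref completers below
+# call this as, for example,
+#
+#   __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
+#
+# so that the newline-separated ref names are offered as completion words
+# with the prefix prepended and a trailing space appended to each one.
+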
__git_heads ()
{
- local cmd i is_hash=y dir="$(__gitdir "${1-}")"
+ local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/heads
return
fi
- for i in $(git ls-remote "${1-}" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
}
-# __git_tags accepts 0 or 1 arguments (to pass to __gitdir)
__git_tags ()
{
- local cmd i is_hash=y dir="$(__gitdir "${1-}")"
+ local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/tags
return
fi
- for i in $(git ls-remote "${1-}" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
}
# __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments
# by checkout for tracking branches
__git_refs ()
{
- local i is_hash=y dir="$(__gitdir "${1-}")" track="${2-}"
+ local i hash dir="$(__gitdir "${1-}")" track="${2-}"
local format refs
if [ -d "$dir" ]; then
case "$cur" in
fi
return
fi
- for i in $(git ls-remote "$dir" 2>/dev/null); do
- case "$is_hash,$i" in
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y; echo "${i#refs/tags/}" ;;
- n,refs/heads/*) is_hash=y; echo "${i#refs/heads/}" ;;
- n,refs/remotes/*) is_hash=y; echo "${i#refs/remotes/}" ;;
- n,*) is_hash=y; echo "$i" ;;
- esac
- done
+ case "$cur" in
+ refs|refs/*)
+ git ls-remote "$dir" "$cur*" 2>/dev/null | \
+ while read hash i; do
+ case "$i" in
+ *^{}) ;;
+ *) echo "$i" ;;
+ esac
+ done
+ ;;
+ *)
+ git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \
+ while read hash i; do
+ case "$i" in
+ *^{}) ;;
+ refs/*) echo "${i#refs/*/}" ;;
+ *) echo "$i" ;;
+ esac
+ done
+ ;;
+ esac
}
# __git_refs2 requires 1 argument (to pass to __git_refs)
# __git_refs_remotes requires 1 argument (to pass to ls-remote)
__git_refs_remotes ()
{
- local cmd i is_hash=y
- for i in $(git ls-remote "$1" 2>/dev/null); do
- case "$is_hash,$i" in
- n,refs/heads/*)
- is_hash=y
- echo "$i:refs/remotes/$1/${i#refs/heads/}"
- ;;
- y,*) is_hash=n ;;
- n,*^{}) is_hash=y ;;
- n,refs/tags/*) is_hash=y;;
- n,*) is_hash=y; ;;
- esac
+ local i hash
+ git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \
+ while read hash i; do
+ echo "$i:refs/remotes/$1/${i#refs/heads/}"
done
}
*...*)
pfx="${cur_%...*}..."
cur_="${cur_#*...}"
- __gitcomp "$(__git_refs)" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
;;
*..*)
pfx="${cur_%..*}.."
cur_="${cur_#*..}"
- __gitcomp "$(__git_refs)" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
}
c=$((++c))
done
if [ -z "$remote" ]; then
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
return
fi
if [ $no_complete_refspec = 1 ]; then
case "$cmd" in
fetch)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs2 "$remote")" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs)" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
pull)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs)" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
push)
if [ $lhs = 1 ]; then
- __gitcomp "$(__git_refs)" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
else
- __gitcomp "$(__git_refs "$remote")" "$pfx" "$cur_"
+ __gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
fi
;;
esac
return
;;
--remote=*)
- __gitcomp "$(__git_remotes)" "" "${cur##--remote=}"
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}"
return
;;
--*)
case "$subcommand" in
bad|good|reset|skip|start)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
*)
COMPREPLY=()
;;
*)
if [ $only_local_ref = "y" -a $has_r = "n" ]; then
- __gitcomp "$(__git_heads)"
+ __gitcomp_nl "$(__git_heads)"
else
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
fi
;;
esac
if [ -n "$(__git_find_on_cmdline "$flags")" ]; then
track=''
fi
- __gitcomp "$(__git_refs '' $track)"
+ __gitcomp_nl "$(__git_refs '' $track)"
;;
esac
}
__gitcomp "--edit --no-commit"
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
}
;;
--reuse-message=*|--reedit-message=*|\
--fixup=*|--squash=*)
- __gitcomp "$(__git_refs)" "" "${cur#*=}"
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
return
;;
--untracked-files=*)
"
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
__git_diff_common_options="--stat --numstat --shortstat --summary
case "$cword,$prev" in
2,*|*,-*)
if test -r tags; then
- __gitcomp "$(__git_match_ctag "$cur" tags)"
+ __gitcomp_nl "$(__git_match_ctag "$cur" tags)"
return
fi
;;
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_help ()
_git_ls_remote ()
{
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
}
_git_ls_tree ()
__gitcomp "$__git_merge_options"
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_mergetool ()
_git_merge_base ()
{
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_mv ()
,*)
case "${words[cword-1]}" in
--ref)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
*)
__gitcomp "$subcommands --ref"
;;
add,--reuse-message=*|append,--reuse-message=*|\
add,--reedit-message=*|append,--reedit-message=*)
- __gitcomp "$(__git_refs)" "" "${cur#*=}"
+ __gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
;;
add,--*|append,--*)
__gitcomp '--file= --message= --reedit-message=
-m|-F)
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
;;
{
case "$prev" in
--repo)
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
return
esac
case "$cur" in
--repo=*)
- __gitcomp "$(__git_remotes)" "" "${cur##--repo=}"
+ __gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}"
return
;;
--*)
return
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_reflog ()
if [ -z "$subcommand" ]; then
__gitcomp "$subcommands"
else
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
fi
}
{
case "$prev" in
branch.*.remote)
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
return
;;
branch.*.merge)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
return
;;
remote.*.fetch)
local remote="${prev#remote.}"
remote="${remote%.fetch}"
- __gitcomp "$(__git_refs_remotes "$remote")"
+ if [ -z "$cur" ]; then
+ COMPREPLY=("refs/heads/")
+ return
+ fi
+ __gitcomp_nl "$(__git_refs_remotes "$remote")"
return
;;
remote.*.push)
local remote="${prev#remote.}"
remote="${remote%.push}"
- __gitcomp "$(git --git-dir="$(__gitdir)" \
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" \
for-each-ref --format='%(refname):%(refname)' \
refs/heads)"
return
return
;;
--get|--get-all|--unset|--unset-all)
- __gitcomp "$(__git_config_get_set_variables)"
+ __gitcomp_nl "$(__git_config_get_set_variables)"
return
;;
*.*)
;;
branch.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
- __gitcomp "$(__git_heads)" "$pfx" "$cur_" "."
+ __gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "."
return
;;
guitool.*.*)
pager.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
__git_compute_all_commands
- __gitcomp "$__git_all_commands" "$pfx" "$cur_"
+ __gitcomp_nl "$__git_all_commands" "$pfx" "$cur_"
return
;;
remote.*.*)
;;
remote.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
- __gitcomp "$(__git_remotes)" "$pfx" "$cur_" "."
+ __gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
return
;;
url.*.*)
case "$subcommand" in
rename|rm|show|prune)
- __gitcomp "$(__git_remotes)"
+ __gitcomp_nl "$(__git_remotes)"
;;
update)
local i c='' IFS=$'\n'
_git_replace ()
{
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_reset ()
return
;;
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_revert ()
return
;;
esac
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
}
_git_rm ()
COMPREPLY=()
;;
show,*|apply,*|drop,*|pop,*|branch,*)
- __gitcomp "$(git --git-dir="$(__gitdir)" stash list \
+ __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \
| sed -n -e 's/:.*//p')"
;;
*)
i="${words[c]}"
case "$i" in
-d|-v)
- __gitcomp "$(__git_tags)"
+ __gitcomp_nl "$(__git_tags)"
return
;;
-f)
;;
-*|tag)
if [ $f = 1 ]; then
- __gitcomp "$(__git_tags)"
+ __gitcomp_nl "$(__git_tags)"
else
COMPREPLY=()
fi
;;
*)
- __gitcomp "$(__git_refs)"
+ __gitcomp_nl "$(__git_refs)"
;;
esac
}
def chdir(dir):
# P4 uses the PWD environment variable rather than getcwd(). Since we're
- # not using the shell, we have to set it ourselves.
- os.environ['PWD']=dir
+ # not using the shell, we have to set it ourselves. This path could
+ # be relative, so go there first, then figure out where we ended up.
os.chdir(dir)
+ os.environ['PWD'] = os.getcwd()
def die(msg):
if verbose:
if gitConfig("git-p4.skipSubmitEditCheck") == "true":
return True
- if os.stat(template_file).st_mtime <= mtime:
- while True:
- response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
- if response == 'y':
- return True
- if response == 'n':
- return False
+ # an updated modification time means the user saved the file
+ if os.stat(template_file).st_mtime > mtime:
+ return True
+
+ while True:
+ response = raw_input("Submit template unchanged. Submit anyway? [y]es, [n]o (skip this patch) ")
+ if response == 'y':
+ return True
+ if response == 'n':
+ return False
def applyCommit(self, id):
print "Applying %s" % (read_pipe("git log --max-count=1 --pretty=oneline %s" % id))
self.modifyChangelistUser(changelist, p4User)
else:
# skip this patch
+ print "Submission cancelled, undoing p4 changes."
for f in editedFiles:
p4_revert(f)
for f in filesToAdd:
print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
self.oldWorkingDirectory = os.getcwd()
+ # ensure the clientPath exists
+ if not os.path.exists(self.clientPath):
+ os.makedirs(self.clientPath)
+
chdir(self.clientPath)
print "Synchronizing p4 checkout..."
p4_sync("...")
/*
* LF-to-CRLF filter
*/
+
+struct lf_to_crlf_filter {
+ struct stream_filter filter;
+ int want_lf;
+};
+
static int lf_to_crlf_filter_fn(struct stream_filter *filter,
const char *input, size_t *isize_p,
char *output, size_t *osize_p)
{
- size_t count;
+ size_t count, o = 0;
+ struct lf_to_crlf_filter *lf_to_crlf = (struct lf_to_crlf_filter *)filter;
+
+ /* Output a pending LF if we need to */
+ if (lf_to_crlf->want_lf) {
+ output[o++] = '\n';
+ lf_to_crlf->want_lf = 0;
+ }
if (!input)
- return 0; /* we do not keep any states */
+ return 0; /* We've already dealt with the state */
+
count = *isize_p;
if (count) {
- size_t i, o;
- for (i = o = 0; o < *osize_p && i < count; i++) {
+ size_t i;
+ for (i = 0; o < *osize_p && i < count; i++) {
char ch = input[i];
if (ch == '\n') {
- if (o + 1 < *osize_p)
- output[o++] = '\r';
- else
- break;
+ output[o++] = '\r';
+ if (o >= *osize_p) {
+ lf_to_crlf->want_lf = 1;
+ continue; /* We need to increase i */
+ }
}
output[o++] = ch;
}
return 0;
}
+static void lf_to_crlf_free_fn(struct stream_filter *filter)
+{
+ free(filter);
+}
+
static struct stream_filter_vtbl lf_to_crlf_vtbl = {
lf_to_crlf_filter_fn,
- null_free_fn,
+ lf_to_crlf_free_fn,
};
-static struct stream_filter lf_to_crlf_filter_singleton = {
- &lf_to_crlf_vtbl,
-};
+static struct stream_filter *lf_to_crlf_filter(void)
+{
+ struct lf_to_crlf_filter *lf_to_crlf = xmalloc(sizeof(*lf_to_crlf));
+ lf_to_crlf->filter.vtbl = &lf_to_crlf_vtbl;
+ lf_to_crlf->want_lf = 0;
+ return (struct stream_filter *)lf_to_crlf;
+}
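+
+/*
+ * Editorial note (not part of this patch): want_lf covers the case where
+ * the output buffer fills up right after the '\r' emitted for an input
+ * '\n'; the matching '\n' is then written at the start of the next call,
+ * keeping the CRLF pair intact across buffer boundaries.
+ */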
/*
* Cascade filter
else if (output_eol(crlf_action) == EOL_CRLF &&
!(crlf_action == CRLF_AUTO || crlf_action == CRLF_GUESS))
- filter = cascade_filter(filter, &lf_to_crlf_filter_singleton);
+ filter = cascade_filter(filter, lf_to_crlf_filter());
return filter;
}
--- /dev/null
+#include "cache.h"
+#include "credential.h"
+#include "unix-socket.h"
+#include "sigchain.h"
+
+static const char *socket_path;
+
+static void cleanup_socket(void)
+{
+ if (socket_path)
+ unlink(socket_path);
+}
+
+static void cleanup_socket_on_signal(int sig)
+{
+ cleanup_socket();
+ sigchain_pop(sig);
+ raise(sig);
+}
+
+struct credential_cache_entry {
+ struct credential item;
+ unsigned long expiration;
+};
+static struct credential_cache_entry *entries;
+static int entries_nr;
+static int entries_alloc;
+
+static void cache_credential(struct credential *c, int timeout)
+{
+ struct credential_cache_entry *e;
+
+ ALLOC_GROW(entries, entries_nr + 1, entries_alloc);
+ e = &entries[entries_nr++];
+
+ /* take ownership of pointers */
+ memcpy(&e->item, c, sizeof(*c));
+ memset(c, 0, sizeof(*c));
+ e->expiration = time(NULL) + timeout;
+}
+
+static struct credential_cache_entry *lookup_credential(const struct credential *c)
+{
+ int i;
+ for (i = 0; i < entries_nr; i++) {
+ struct credential *e = &entries[i].item;
+ if (credential_match(c, e))
+ return &entries[i];
+ }
+ return NULL;
+}
+
+static void remove_credential(const struct credential *c)
+{
+ struct credential_cache_entry *e;
+
+ e = lookup_credential(c);
+ if (e)
+ e->expiration = 0;
+}
+
+static int check_expirations(void)
+{
+ static unsigned long wait_for_entry_until;
+ int i = 0;
+ unsigned long now = time(NULL);
+ unsigned long next = (unsigned long)-1;
+
+ /*
+ * Initially give the client 30 seconds to actually contact us
+ * and store a credential before we decide there's no point in
+ * keeping the daemon around.
+ */
+ if (!wait_for_entry_until)
+ wait_for_entry_until = now + 30;
+
+ while (i < entries_nr) {
+ if (entries[i].expiration <= now) {
+ entries_nr--;
+ credential_clear(&entries[i].item);
+ if (i != entries_nr)
+ memcpy(&entries[i], &entries[entries_nr], sizeof(*entries));
+ /*
+ * Stick around 30 seconds in case a new credential
+ * shows up (e.g., because we just removed a failed
+ * one, and we will soon get the correct one).
+ */
+ wait_for_entry_until = now + 30;
+ }
+ else {
+ if (entries[i].expiration < next)
+ next = entries[i].expiration;
+ i++;
+ }
+ }
+
+ if (!entries_nr) {
+ if (wait_for_entry_until <= now)
+ return 0;
+ next = wait_for_entry_until;
+ }
+
+ return next - now;
+}
+
+static int read_request(FILE *fh, struct credential *c,
+ struct strbuf *action, int *timeout) {
+ static struct strbuf item = STRBUF_INIT;
+ const char *p;
+
+ strbuf_getline(&item, fh, '\n');
+ p = skip_prefix(item.buf, "action=");
+ if (!p)
+ return error("client sent bogus action line: %s", item.buf);
+ strbuf_addstr(action, p);
+
+ strbuf_getline(&item, fh, '\n');
+ p = skip_prefix(item.buf, "timeout=");
+ if (!p)
+ return error("client sent bogus timeout line: %s", item.buf);
+ *timeout = atoi(p);
+
+ if (credential_read(c, fh) < 0)
+ return -1;
+ return 0;
+}
+
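+/*
+ * Illustrative request (editorial, not part of this patch): a client such
+ * as "git credential-cache store" sends over the socket
+ *
+ *     action=store
+ *     timeout=900
+ *     protocol=https
+ *     host=example.com
+ *     username=me
+ *     password=secret
+ *
+ * and read_request() splits off the action and timeout lines before handing
+ * the remainder to credential_read(). The host and user values above are
+ * made up for illustration.
+ */
+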
+static void serve_one_client(FILE *in, FILE *out)
+{
+ struct credential c = CREDENTIAL_INIT;
+ struct strbuf action = STRBUF_INIT;
+ int timeout = -1;
+
+ if (read_request(in, &c, &action, &timeout) < 0)
+ /* ignore error */ ;
+ else if (!strcmp(action.buf, "get")) {
+ struct credential_cache_entry *e = lookup_credential(&c);
+ if (e) {
+ fprintf(out, "username=%s\n", e->item.username);
+ fprintf(out, "password=%s\n", e->item.password);
+ }
+ }
+ else if (!strcmp(action.buf, "exit"))
+ exit(0);
+ else if (!strcmp(action.buf, "erase"))
+ remove_credential(&c);
+ else if (!strcmp(action.buf, "store")) {
+ if (timeout < 0)
+ warning("cache client didn't specify a timeout");
+ else if (!c.username || !c.password)
+ warning("cache client gave us a partial credential");
+ else {
+ remove_credential(&c);
+ cache_credential(&c, timeout);
+ }
+ }
+ else
+ warning("cache client sent unknown action: %s", action.buf);
+
+ credential_clear(&c);
+ strbuf_release(&action);
+}
+
+static int serve_cache_loop(int fd)
+{
+ struct pollfd pfd;
+ unsigned long wakeup;
+
+ wakeup = check_expirations();
+ if (!wakeup)
+ return 0;
+
+ pfd.fd = fd;
+ pfd.events = POLLIN;
+ if (poll(&pfd, 1, 1000 * wakeup) < 0) {
+ if (errno != EINTR)
+ die_errno("poll failed");
+ return 1;
+ }
+
+ if (pfd.revents & POLLIN) {
+ int client, client2;
+ FILE *in, *out;
+
+ client = accept(fd, NULL, NULL);
+ if (client < 0) {
+ warning("accept failed: %s", strerror(errno));
+ return 1;
+ }
+ client2 = dup(client);
+ if (client2 < 0) {
+ warning("dup failed: %s", strerror(errno));
+ close(client);
+ return 1;
+ }
+
+ in = xfdopen(client, "r");
+ out = xfdopen(client2, "w");
+ serve_one_client(in, out);
+ fclose(in);
+ fclose(out);
+ }
+ return 1;
+}
+
+static void serve_cache(const char *socket_path)
+{
+ int fd;
+
+ fd = unix_stream_listen(socket_path);
+ if (fd < 0)
+ die_errno("unable to bind to '%s'", socket_path);
+
+ printf("ok\n");
+ fclose(stdout);
+
+ while (serve_cache_loop(fd))
+ ; /* nothing */
+
+ close(fd);
+ unlink(socket_path);
+}
+
+static const char permissions_advice[] =
+"The permissions on your socket directory are too loose; other\n"
+"users may be able to read your cached credentials. Consider running:\n"
+"\n"
+" chmod 0700 %s";
+static void check_socket_directory(const char *path)
+{
+ struct stat st;
+ char *path_copy = xstrdup(path);
+ char *dir = dirname(path_copy);
+
+ if (!stat(dir, &st)) {
+ if (st.st_mode & 077)
+ die(permissions_advice, dir);
+ free(path_copy);
+ return;
+ }
+
+ /*
+ * We must be sure to create the directory with the correct mode,
+ * not just chmod it after the fact; otherwise, there is a race
+ * condition in which somebody can chdir to it, sleep, then try to open
+ * our protected socket.
+ */
+ if (safe_create_leading_directories_const(dir) < 0)
+ die_errno("unable to create directories for '%s'", dir);
+ if (mkdir(dir, 0700) < 0)
+ die_errno("unable to mkdir '%s'", dir);
+ free(path_copy);
+}
+
+int main(int argc, const char **argv)
+{
+ socket_path = argv[1];
+
+ if (!socket_path)
+ die("usage: git-credential-cache--daemon <socket_path>");
+ check_socket_directory(socket_path);
+
+ atexit(cleanup_socket);
+ sigchain_push_common(cleanup_socket_on_signal);
+
+ serve_cache(socket_path);
+
+ return 0;
+}
--- /dev/null
+#include "cache.h"
+#include "credential.h"
+#include "string-list.h"
+#include "parse-options.h"
+#include "unix-socket.h"
+#include "run-command.h"
+
+#define FLAG_SPAWN 0x1
+#define FLAG_RELAY 0x2
+
+static int send_request(const char *socket, const struct strbuf *out)
+{
+ int got_data = 0;
+ int fd = unix_stream_connect(socket);
+
+ if (fd < 0)
+ return -1;
+
+ if (write_in_full(fd, out->buf, out->len) < 0)
+ die_errno("unable to write to cache daemon");
+ shutdown(fd, SHUT_WR);
+
+ while (1) {
+ char in[1024];
+ int r;
+
+ r = read_in_full(fd, in, sizeof(in));
+ if (r == 0)
+ break;
+ if (r < 0)
+ die_errno("read error from cache daemon");
+ write_or_die(1, in, r);
+ got_data = 1;
+ }
+ return got_data;
+}
+
+static void spawn_daemon(const char *socket)
+{
+ struct child_process daemon;
+ const char *argv[] = { NULL, NULL, NULL };
+ char buf[128];
+ int r;
+
+ memset(&daemon, 0, sizeof(daemon));
+ argv[0] = "git-credential-cache--daemon";
+ argv[1] = socket;
+ daemon.argv = argv;
+ daemon.no_stdin = 1;
+ daemon.out = -1;
+
+ if (start_command(&daemon))
+ die_errno("unable to start cache daemon");
+ r = read_in_full(daemon.out, buf, sizeof(buf));
+ if (r < 0)
+ die_errno("unable to read result code from cache daemon");
+ if (r != 3 || memcmp(buf, "ok\n", 3))
+ die("cache daemon did not start: %.*s", r, buf);
+ close(daemon.out);
+}
+
+static void do_cache(const char *socket, const char *action, int timeout,
+ int flags)
+{
+ struct strbuf buf = STRBUF_INIT;
+
+ strbuf_addf(&buf, "action=%s\n", action);
+ strbuf_addf(&buf, "timeout=%d\n", timeout);
+ if (flags & FLAG_RELAY) {
+ if (strbuf_read(&buf, 0, 0) < 0)
+ die_errno("unable to relay credential");
+ }
+
+ if (!send_request(socket, &buf))
+ return;
+ if (flags & FLAG_SPAWN) {
+ spawn_daemon(socket);
+ send_request(socket, &buf);
+ }
+ strbuf_release(&buf);
+}
+
+int main(int argc, const char **argv)
+{
+ char *socket_path = NULL;
+ int timeout = 900;
+ const char *op;
+ const char * const usage[] = {
+ "git credential-cache [options] <action>",
+ NULL
+ };
+ struct option options[] = {
+ OPT_INTEGER(0, "timeout", &timeout,
+ "number of seconds to cache credentials"),
+ OPT_STRING(0, "socket", &socket_path, "path",
+ "path of cache-daemon socket"),
+ OPT_END()
+ };
+
+ argc = parse_options(argc, argv, NULL, options, usage, 0);
+ if (!argc)
+ usage_with_options(usage, options);
+ op = argv[0];
+
+ if (!socket_path)
+ socket_path = expand_user_path("~/.git-credential-cache/socket");
+ if (!socket_path)
+ die("unable to find a suitable socket path; use --socket");
+
+ if (!strcmp(op, "exit"))
+ do_cache(socket_path, op, timeout, 0);
+ else if (!strcmp(op, "get") || !strcmp(op, "erase"))
+ do_cache(socket_path, op, timeout, FLAG_RELAY);
+ else if (!strcmp(op, "store"))
+ do_cache(socket_path, op, timeout, FLAG_RELAY|FLAG_SPAWN);
+ else
+ ; /* ignore unknown operation */
+
+ return 0;
+}
--- /dev/null
+#include "cache.h"
+#include "credential.h"
+#include "string-list.h"
+#include "parse-options.h"
+
+static struct lock_file credential_lock;
+
+static void parse_credential_file(const char *fn,
+ struct credential *c,
+ void (*match_cb)(struct credential *),
+ void (*other_cb)(struct strbuf *))
+{
+ FILE *fh;
+ struct strbuf line = STRBUF_INIT;
+ struct credential entry = CREDENTIAL_INIT;
+
+ fh = fopen(fn, "r");
+ if (!fh) {
+ if (errno != ENOENT)
+ die_errno("unable to open %s", fn);
+ return;
+ }
+
+ while (strbuf_getline(&line, fh, '\n') != EOF) {
+ credential_from_url(&entry, line.buf);
+ if (entry.username && entry.password &&
+ credential_match(c, &entry)) {
+ if (match_cb) {
+ match_cb(&entry);
+ break;
+ }
+ }
+ else if (other_cb)
+ other_cb(&line);
+ }
+
+ credential_clear(&entry);
+ strbuf_release(&line);
+ fclose(fh);
+}
+
+static void print_entry(struct credential *c)
+{
+ printf("username=%s\n", c->username);
+ printf("password=%s\n", c->password);
+}
+
+static void print_line(struct strbuf *buf)
+{
+ strbuf_addch(buf, '\n');
+ write_or_die(credential_lock.fd, buf->buf, buf->len);
+}
+
+static void rewrite_credential_file(const char *fn, struct credential *c,
+ struct strbuf *extra)
+{
+ if (hold_lock_file_for_update(&credential_lock, fn, 0) < 0)
+ die_errno("unable to get credential storage lock");
+ if (extra)
+ print_line(extra);
+ parse_credential_file(fn, c, NULL, print_line);
+ if (commit_lock_file(&credential_lock) < 0)
+ die_errno("unable to commit credential store");
+}
+
+static void store_credential(const char *fn, struct credential *c)
+{
+ struct strbuf buf = STRBUF_INIT;
+
+ /*
+ * Sanity check that what we are storing is actually sensible.
+ * In particular, we can't make a URL without a protocol field.
+ * Without either a host or pathname (depending on the scheme),
+ * we have no primary key. And without a username and password,
+ * we are not actually storing a credential.
+ */
+ if (!c->protocol || !(c->host || c->path) ||
+ !c->username || !c->password)
+ return;
+
+ strbuf_addf(&buf, "%s://", c->protocol);
+ strbuf_addstr_urlencode(&buf, c->username, 1);
+ strbuf_addch(&buf, ':');
+ strbuf_addstr_urlencode(&buf, c->password, 1);
+ strbuf_addch(&buf, '@');
+ if (c->host)
+ strbuf_addstr_urlencode(&buf, c->host, 1);
+ if (c->path) {
+ strbuf_addch(&buf, '/');
+ strbuf_addstr_urlencode(&buf, c->path, 0);
+ }
+
+ rewrite_credential_file(fn, c, &buf);
+ strbuf_release(&buf);
+}
+
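+/*
+ * Editorial note (not part of this patch): the line appended to the store
+ * file has the shape
+ *
+ *     https://me:secret@example.com
+ *
+ * i.e. a URL with the username and password embedded, which is exactly what
+ * parse_credential_file() feeds back through credential_from_url() when
+ * looking a credential up. The values are made up for illustration.
+ */
+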
+static void remove_credential(const char *fn, struct credential *c)
+{
+ /*
+ * Sanity check that we actually have something to match
+ * against. The input we get is a restrictive pattern,
+ * so technically a blank credential means "erase everything".
+ * But it is too easy to accidentally send this, since it is equivalent
+ * to empty input. So explicitly disallow it, and require that the
+ * pattern have some actual content to match.
+ */
+ if (c->protocol || c->host || c->path || c->username)
+ rewrite_credential_file(fn, c, NULL);
+}
+
+static int lookup_credential(const char *fn, struct credential *c)
+{
+ parse_credential_file(fn, c, print_entry, NULL);
+ return c->username && c->password;
+}
+
+int main(int argc, const char **argv)
+{
+ const char * const usage[] = {
+ "git credential-store [options] <action>",
+ NULL
+ };
+ const char *op;
+ struct credential c = CREDENTIAL_INIT;
+ char *file = NULL;
+ struct option options[] = {
+ OPT_STRING(0, "file", &file, "path",
+ "fetch and store credentials in <path>"),
+ OPT_END()
+ };
+
+ umask(077);
+
+ argc = parse_options(argc, argv, NULL, options, usage, 0);
+ if (argc != 1)
+ usage_with_options(usage, options);
+ op = argv[0];
+
+ if (!file)
+ file = expand_user_path("~/.git-credentials");
+ if (!file)
+ die("unable to set up default path; use --file");
+
+ if (credential_read(&c, stdin) < 0)
+ die("unable to read credential");
+
+ if (!strcmp(op, "get"))
+ lookup_credential(file, &c);
+ else if (!strcmp(op, "erase"))
+ remove_credential(file, &c);
+ else if (!strcmp(op, "store"))
+ store_credential(file, &c);
+ else
+ ; /* Ignore unknown operation. */
+
+ return 0;
+}
--- /dev/null
+#include "cache.h"
+#include "credential.h"
+#include "string-list.h"
+#include "run-command.h"
+#include "url.h"
+
+void credential_init(struct credential *c)
+{
+ memset(c, 0, sizeof(*c));
+ c->helpers.strdup_strings = 1;
+}
+
+void credential_clear(struct credential *c)
+{
+ free(c->protocol);
+ free(c->host);
+ free(c->path);
+ free(c->username);
+ free(c->password);
+ string_list_clear(&c->helpers, 0);
+
+ credential_init(c);
+}
+
+int credential_match(const struct credential *want,
+ const struct credential *have)
+{
+#define CHECK(x) (!want->x || (have->x && !strcmp(want->x, have->x)))
+ return CHECK(protocol) &&
+ CHECK(host) &&
+ CHECK(path) &&
+ CHECK(username);
+#undef CHECK
+}
+
+static int credential_config_callback(const char *var, const char *value,
+ void *data)
+{
+ struct credential *c = data;
+ const char *key, *dot;
+
+ key = skip_prefix(var, "credential.");
+ if (!key)
+ return 0;
+
+ if (!value)
+ return config_error_nonbool(var);
+
+ dot = strrchr(key, '.');
+ if (dot) {
+ struct credential want = CREDENTIAL_INIT;
+ char *url = xmemdupz(key, dot - key);
+ int matched;
+
+ credential_from_url(&want, url);
+ matched = credential_match(&want, c);
+
+ credential_clear(&want);
+ free(url);
+
+ if (!matched)
+ return 0;
+ key = dot + 1;
+ }
+
+ if (!strcmp(key, "helper"))
+ string_list_append(&c->helpers, value);
+ else if (!strcmp(key, "username")) {
+ if (!c->username)
+ c->username = xstrdup(value);
+ }
+ else if (!strcmp(key, "usehttppath"))
+ c->use_http_path = git_config_bool(var, value);
+
+ return 0;
+}
+
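+/*
+ * Editorial example (not part of this patch): with a configuration such as
+ *
+ *     [credential "https://example.com"]
+ *             helper = cache
+ *             username = me
+ *
+ * the callback above matches the URL part against the credential being
+ * filled and, on a match, appends "cache" to c->helpers and takes "me" as
+ * the default username. Host and name are made up for illustration.
+ */
+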
+static int proto_is_http(const char *s)
+{
+ if (!s)
+ return 0;
+ return !strcmp(s, "https") || !strcmp(s, "http");
+}
+
+static void credential_apply_config(struct credential *c)
+{
+ if (c->configured)
+ return;
+ git_config(credential_config_callback, c);
+ c->configured = 1;
+
+ if (!c->use_http_path && proto_is_http(c->protocol)) {
+ free(c->path);
+ c->path = NULL;
+ }
+}
+
+static void credential_describe(struct credential *c, struct strbuf *out)
+{
+ if (!c->protocol)
+ return;
+ strbuf_addf(out, "%s://", c->protocol);
+ if (c->username && *c->username)
+ strbuf_addf(out, "%s@", c->username);
+ if (c->host)
+ strbuf_addstr(out, c->host);
+ if (c->path)
+ strbuf_addf(out, "/%s", c->path);
+}
+
+static char *credential_ask_one(const char *what, struct credential *c)
+{
+ struct strbuf desc = STRBUF_INIT;
+ struct strbuf prompt = STRBUF_INIT;
+ char *r;
+
+ credential_describe(c, &desc);
+ if (desc.len)
+ strbuf_addf(&prompt, "%s for '%s': ", what, desc.buf);
+ else
+ strbuf_addf(&prompt, "%s: ", what);
+
+ /* FIXME: for usernames, we should do something less magical that
+ * actually echoes the characters. However, we need to read from
+ * /dev/tty and not stdio, which is not portable (but getpass will do
+ * it for us). http.c uses the same workaround. */
+ r = git_getpass(prompt.buf);
+
+ strbuf_release(&desc);
+ strbuf_release(&prompt);
+ return xstrdup(r);
+}
+
+static void credential_getpass(struct credential *c)
+{
+ if (!c->username)
+ c->username = credential_ask_one("Username", c);
+ if (!c->password)
+ c->password = credential_ask_one("Password", c);
+}
+
+int credential_read(struct credential *c, FILE *fp)
+{
+ struct strbuf line = STRBUF_INIT;
+
+ while (strbuf_getline(&line, fp, '\n') != EOF) {
+ char *key = line.buf;
+ char *value = strchr(key, '=');
+
+ if (!line.len)
+ break;
+
+ if (!value) {
+ warning("invalid credential line: %s", key);
+ strbuf_release(&line);
+ return -1;
+ }
+ *value++ = '\0';
+
+ if (!strcmp(key, "username")) {
+ free(c->username);
+ c->username = xstrdup(value);
+ } else if (!strcmp(key, "password")) {
+ free(c->password);
+ c->password = xstrdup(value);
+ } else if (!strcmp(key, "protocol")) {
+ free(c->protocol);
+ c->protocol = xstrdup(value);
+ } else if (!strcmp(key, "host")) {
+ free(c->host);
+ c->host = xstrdup(value);
+ } else if (!strcmp(key, "path")) {
+ free(c->path);
+ c->path = xstrdup(value);
+ }
+ /*
+ * Ignore other lines; we don't know what they mean, but
+ * this future-proofs us when later versions of git do
+ * learn new lines, and the helpers are updated to match.
+ */
+ }
+
+ strbuf_release(&line);
+ return 0;
+}
+
+static void credential_write_item(FILE *fp, const char *key, const char *value)
+{
+ if (!value)
+ return;
+ fprintf(fp, "%s=%s\n", key, value);
+}
+
+static void credential_write(const struct credential *c, FILE *fp)
+{
+ credential_write_item(fp, "protocol", c->protocol);
+ credential_write_item(fp, "host", c->host);
+ credential_write_item(fp, "path", c->path);
+ credential_write_item(fp, "username", c->username);
+ credential_write_item(fp, "password", c->password);
+}
+
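+/*
+ * Editorial example (not part of this patch): a helper run for "get"
+ * receives the description written by credential_write() on its stdin,
+ *
+ *     protocol=https
+ *     host=example.com
+ *     username=me
+ *
+ * and answers in the same key=value form, which credential_read() above
+ * parses back into the struct; a blank line or end of input ends the
+ * list. The values shown are made up for illustration.
+ */
+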
+static int run_credential_helper(struct credential *c,
+ const char *cmd,
+ int want_output)
+{
+ struct child_process helper;
+ const char *argv[] = { NULL, NULL };
+ FILE *fp;
+
+ memset(&helper, 0, sizeof(helper));
+ argv[0] = cmd;
+ helper.argv = argv;
+ helper.use_shell = 1;
+ helper.in = -1;
+ if (want_output)
+ helper.out = -1;
+ else
+ helper.no_stdout = 1;
+
+ if (start_command(&helper) < 0)
+ return -1;
+
+ fp = xfdopen(helper.in, "w");
+ credential_write(c, fp);
+ fclose(fp);
+
+ if (want_output) {
+ int r;
+ fp = xfdopen(helper.out, "r");
+ r = credential_read(c, fp);
+ fclose(fp);
+ if (r < 0) {
+ finish_command(&helper);
+ return -1;
+ }
+ }
+
+ if (finish_command(&helper))
+ return -1;
+ return 0;
+}
+
+static int credential_do(struct credential *c, const char *helper,
+ const char *operation)
+{
+ struct strbuf cmd = STRBUF_INIT;
+ int r;
+
+ if (helper[0] == '!')
+ strbuf_addstr(&cmd, helper + 1);
+ else if (is_absolute_path(helper))
+ strbuf_addstr(&cmd, helper);
+ else
+ strbuf_addf(&cmd, "git credential-%s", helper);
+
+ strbuf_addf(&cmd, " %s", operation);
+ r = run_credential_helper(c, cmd.buf, !strcmp(operation, "get"));
+
+ strbuf_release(&cmd);
+ return r;
+}
+
+void credential_fill(struct credential *c)
+{
+ int i;
+
+ if (c->username && c->password)
+ return;
+
+ credential_apply_config(c);
+
+ for (i = 0; i < c->helpers.nr; i++) {
+ credential_do(c, c->helpers.items[i].string, "get");
+ if (c->username && c->password)
+ return;
+ }
+
+ credential_getpass(c);
+ if (!c->username && !c->password)
+ die("unable to get password from user");
+}
+
+void credential_approve(struct credential *c)
+{
+ int i;
+
+ if (c->approved)
+ return;
+ if (!c->username || !c->password)
+ return;
+
+ credential_apply_config(c);
+
+ for (i = 0; i < c->helpers.nr; i++)
+ credential_do(c, c->helpers.items[i].string, "store");
+ c->approved = 1;
+}
+
+void credential_reject(struct credential *c)
+{
+ int i;
+
+ credential_apply_config(c);
+
+ for (i = 0; i < c->helpers.nr; i++)
+ credential_do(c, c->helpers.items[i].string, "erase");
+
+ free(c->username);
+ c->username = NULL;
+ free(c->password);
+ c->password = NULL;
+ c->approved = 0;
+}
+
+void credential_from_url(struct credential *c, const char *url)
+{
+ const char *at, *colon, *cp, *slash, *host, *proto_end;
+
+ credential_clear(c);
+
+ /*
+ * Match one of:
+ * (1) proto://<host>/...
+ * (2) proto://<user>@<host>/...
+ * (3) proto://<user>:<pass>@<host>/...
+ */
+ proto_end = strstr(url, "://");
+ if (!proto_end)
+ return;
+ cp = proto_end + 3;
+ at = strchr(cp, '@');
+ colon = strchr(cp, ':');
+ slash = strchrnul(cp, '/');
+
+ if (!at || slash <= at) {
+ /* Case (1) */
+ host = cp;
+ }
+ else if (!colon || at <= colon) {
+ /* Case (2) */
+ c->username = url_decode_mem(cp, at - cp);
+ host = at + 1;
+ } else {
+ /* Case (3) */
+ c->username = url_decode_mem(cp, colon - cp);
+ c->password = url_decode_mem(colon + 1, at - (colon + 1));
+ host = at + 1;
+ }
+
+ if (proto_end - url > 0)
+ c->protocol = xmemdupz(url, proto_end - url);
+ if (slash - host > 0)
+ c->host = url_decode_mem(host, slash - host);
+ /* Trim leading and trailing slashes from path */
+ while (*slash == '/')
+ slash++;
+ if (*slash) {
+ char *p;
+ c->path = url_decode(slash);
+ p = c->path + strlen(c->path) - 1;
+ while (p > c->path && *p == '/')
+ *p-- = '\0';
+ }
+}
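+
+/*
+ * Editorial example (not part of this patch): given the made-up URL
+ *
+ *     https://user:secret@example.com/repo.git
+ *
+ * the parser above yields protocol="https", username="user",
+ * password="secret", host="example.com" and path="repo.git", with
+ * percent-escapes decoded and leading/trailing slashes trimmed from the
+ * path.
+ */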
--- /dev/null
+#ifndef CREDENTIAL_H
+#define CREDENTIAL_H
+
+#include "string-list.h"
+
+struct credential {
+ struct string_list helpers;
+ unsigned approved:1,
+ configured:1,
+ use_http_path:1;
+
+ char *username;
+ char *password;
+ char *protocol;
+ char *host;
+ char *path;
+};
+
+#define CREDENTIAL_INIT { STRING_LIST_INIT_DUP }
+
+void credential_init(struct credential *);
+void credential_clear(struct credential *);
+
+void credential_fill(struct credential *);
+void credential_approve(struct credential *);
+void credential_reject(struct credential *);
+
+int credential_read(struct credential *, FILE *);
+void credential_from_url(struct credential *, const char *url);
+int credential_match(const struct credential *want,
+ const struct credential *have);
+
+#endif /* CREDENTIAL_H */
return f;
}
+void sha1file_checkpoint(struct sha1file *f, struct sha1file_checkpoint *checkpoint)
+{
+ sha1flush(f);
+ checkpoint->offset = f->total;
+ checkpoint->ctx = f->ctx;
+}
+
+int sha1file_truncate(struct sha1file *f, struct sha1file_checkpoint *checkpoint)
+{
+ off_t offset = checkpoint->offset;
+
+ if (ftruncate(f->fd, offset) ||
+ lseek(f->fd, offset, SEEK_SET) != offset)
+ return -1;
+ f->total = offset;
+ f->ctx = checkpoint->ctx;
+ f->offset = 0; /* sha1flush() was called in checkpoint */
+ return 0;
+}
+
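+/*
+ * Editorial usage sketch (not part of this patch), mirroring how
+ * fast-import's stream_blob() uses this pair further down:
+ *
+ *     struct sha1file_checkpoint checkpoint;
+ *     sha1file_checkpoint(f, &checkpoint);
+ *     ... stream the object data into f ...
+ *     if (the object turned out to be a duplicate)
+ *             sha1file_truncate(f, &checkpoint);
+ *
+ * which rewinds both the file contents and the running SHA-1 state.
+ */
+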
void crc32_begin(struct sha1file *f)
{
f->crc32 = crc32(0, NULL, 0);
unsigned char buffer[8192];
};
+/* Checkpoint */
+struct sha1file_checkpoint {
+ off_t offset;
+ git_SHA_CTX ctx;
+};
+
+extern void sha1file_checkpoint(struct sha1file *, struct sha1file_checkpoint *);
+extern int sha1file_truncate(struct sha1file *, struct sha1file_checkpoint *);
+
/* sha1close flags */
#define CSUM_CLOSE 1
#define CSUM_FSYNC 2
*/
#include "cache.h"
#include "refs.h"
+#include "fmt-merge-msg.h"
char git_default_email[MAX_GITNAME];
char git_default_name[MAX_GITNAME];
char *notes_ref_name;
int grafts_replace_parents = 1;
int core_apply_sparse_checkout;
+int merge_log_config = -1;
struct startup_info *startup_info;
+unsigned long pack_size_limit_cfg;
/* Parallel index stat data preload? */
int core_preload_index = 0;
return 0;
}
-static void truncate_pack(off_t to, git_SHA_CTX *ctx)
+static void truncate_pack(struct sha1file_checkpoint *checkpoint)
{
- if (ftruncate(pack_data->pack_fd, to)
- || lseek(pack_data->pack_fd, to, SEEK_SET) != to)
+ if (sha1file_truncate(pack_file, checkpoint))
die_errno("cannot truncate pack to skip duplicate");
- pack_size = to;
-
- /* yes this is a layering violation */
- pack_file->total = to;
- pack_file->offset = 0;
- pack_file->ctx = *ctx;
+ pack_size = checkpoint->offset;
}
static void stream_blob(uintmax_t len, unsigned char *sha1out, uintmax_t mark)
unsigned long hdrlen;
off_t offset;
git_SHA_CTX c;
- git_SHA_CTX pack_file_ctx;
git_zstream s;
+ struct sha1file_checkpoint checkpoint;
int status = Z_OK;
/* Determine if we should auto-checkpoint. */
|| (pack_size + 60 + len) < pack_size)
cycle_packfile();
- offset = pack_size;
-
- /* preserve the pack_file SHA1 ctx in case we have to truncate later */
- sha1flush(pack_file);
- pack_file_ctx = pack_file->ctx;
+ sha1file_checkpoint(pack_file, &checkpoint);
+ offset = checkpoint.offset;
hdrlen = snprintf((char *)out_buf, out_sz, "blob %" PRIuMAX, len) + 1;
if (out_sz <= hdrlen)
if (e->idx.offset) {
duplicate_count_by_type[OBJ_BLOB]++;
- truncate_pack(offset, &pack_file_ctx);
+ truncate_pack(&checkpoint);
} else if (find_sha1_pack(sha1, packed_git)) {
e->type = OBJ_BLOB;
e->pack_id = MAX_PACK_ID;
e->idx.offset = 1; /* just not zero! */
duplicate_count_by_type[OBJ_BLOB]++;
- truncate_pack(offset, &pack_file_ctx);
+ truncate_pack(&checkpoint);
} else {
e->depth = 0;
if (tmp_hex_sha1_len == 40 && !get_sha1_hex(hex_sha1, sha1)) {
/* This is a note entry */
+ if (fanout == 0xff) {
+ /* Counting mode, no rename */
+ num_notes++;
+ continue;
+ }
construct_path_with_fanout(hex_sha1, fanout, realpath);
if (!strcmp(fullpath, realpath)) {
/* Note entry is in correct location */
leaf.tree);
}
-static void note_change_n(struct branch *b, unsigned char old_fanout)
+static void note_change_n(struct branch *b, unsigned char *old_fanout)
{
const char *p = command_buf.buf + 2;
static struct strbuf uq = STRBUF_INIT;
uint16_t inline_data = 0;
unsigned char new_fanout;
+ /*
+ * When loading a branch, we don't traverse its tree to count the real
+ * number of notes (too expensive to do this for all non-note refs).
+ * This means that recently loaded notes refs might incorrectly have
+ * b->num_notes == 0, and consequently, old_fanout might be wrong.
+ *
+ * Fix this by traversing the tree and counting the number of notes
+ * when b->num_notes == 0. If the notes tree is truly empty, the
+ * calculation should not take long.
+ */
+ if (b->num_notes == 0 && *old_fanout == 0) {
+ /* Invoke change_note_fanout() in "counting mode". */
+ b->num_notes = change_note_fanout(&b->branch_tree, 0xff);
+ *old_fanout = convert_num_notes_to_fanout(b->num_notes);
+ }
+
+ /* Now parse the notemodify command. */
/* <dataref> or 'inline' */
if (*p == ':') {
char *x;
typename(type), command_buf.buf);
}
- construct_path_with_fanout(sha1_to_hex(commit_sha1), old_fanout, path);
+ construct_path_with_fanout(sha1_to_hex(commit_sha1), *old_fanout, path);
if (tree_content_remove(&b->branch_tree, path, NULL))
b->num_notes--;
else if (!prefixcmp(command_buf.buf, "C "))
file_change_cr(b, 0);
else if (!prefixcmp(command_buf.buf, "N "))
- note_change_n(b, prev_fanout);
+ note_change_n(b, &prev_fanout);
else if (!strcmp("deleteall", command_buf.buf))
file_change_deleteall(b);
else if (!prefixcmp(command_buf.buf, "ls "))
--- /dev/null
+#ifndef FMT_MERGE_MSG_H
+#define FMT_MERGE_MSG_H
+
+extern int merge_log_config;
+extern int fmt_merge_msg_config(const char *key, const char *value, void *cb);
+
+#endif /* FMT_MERGE_MSG_H */
echo "$sign" >"$dotest/sign"
echo "$utf8" >"$dotest/utf8"
echo "$keep" >"$dotest/keep"
- echo "$keepcr" >"$dotest/keepcr"
echo "$scissors" >"$dotest/scissors"
echo "$no_inbody_headers" >"$dotest/no_inbody_headers"
echo "$GIT_QUIET" >"$dotest/quiet"
then
keep=-k
fi
-case "$(cat "$dotest/keepcr")" in
-t)
- keepcr=--keep-cr ;;
-f)
- keepcr=--no-keep-cr ;;
-esac
case "$(cat "$dotest/scissors")" in
t)
scissors=--scissors ;;
#include <arpa/inet.h>
#include <netdb.h>
#include <pwd.h>
+#include <sys/un.h>
#ifndef NO_INTTYPES_H
#include <inttypes.h>
#else
+* whitespace=indent-with-non-tab,trailing-space,space-before-tab,tabwidth=4
* encoding=US-ASCII
git-gui.sh encoding=UTF-8
/po/*.po encoding=UTF-8
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=0.13.GITGUI
+DEF_VER=0.16.GITGUI
LF='
'
global repo_config
if {[catch {set v $repo_config($name)}]} {
return 0
- } elseif {$v eq {true} || $v eq {1} || $v eq {yes}} {
+ }
+ set v [string tolower $v]
+ if {$v eq {} || $v eq {true} || $v eq {1} || $v eq {yes} || $v eq {on}} {
return 1
} else {
return 0
global repo_config
if {[catch {set v $repo_config($name)}]} {
return 0
- } elseif {$v eq {false} || $v eq {0} || $v eq {no}} {
+ }
+ set v [string tolower $v]
+ if {$v eq {false} || $v eq {0} || $v eq {no} || $v eq {off}} {
return 1
} else {
return 0
return {}
}
+# Test a file for a hashbang to identify executable scripts on Windows.
+proc is_shellscript {filename} {
+ if {![file exists $filename]} {return 0}
+ set f [open $filename r]
+ fconfigure $f -encoding binary
+ set magic [read $f 2]
+ close $f
+ return [expr {$magic eq "#!"}]
+}
+
+# Run a command connected via pipes on stdout.
+# This is for use with textconv filters and uses sh -c "..." to allow it to
+# contain a command with arguments. On Windows we must check for shell
+# scripts specifically; otherwise we just call the filter command directly.
+proc open_cmd_pipe {cmd path} {
+ global env
+ if {![file executable [shellpath]]} {
+ set exe [auto_execok [lindex $cmd 0]]
+ if {[is_shellscript [lindex $exe 0]]} {
+ set run [linsert [auto_execok sh] end -c "$cmd \"\$0\"" $path]
+ } else {
+ set run [concat $exe [lrange $cmd 1 end] $path]
+ }
+ } else {
+ set run [list [shellpath] -c "$cmd \"\$0\"" $path]
+ }
+ return [open |$run r]
+}
+
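+# Editorial note (not part of this patch): the blame viewer below uses this
+# as
+#
+#   set fd [open_cmd_pipe $textconv $path]
+#
+# so on Windows the command is resolved via auto_execok and only genuine
+# shell scripts are routed through "sh -c", while other platforms keep
+# running the filter through the configured shell as before.
+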
proc _lappend_nice {cmd_var} {
global _nice
upvar $cmd_var cmd
gitlogo put gray26 -to 5 15 11 16
gitlogo redither
- wm iconphoto . -default gitlogo
+ image create photo gitlogo32 -width 32 -height 32
+ gitlogo32 copy gitlogo -zoom 2 2
+
+ wm iconphoto . -default gitlogo gitlogo32
}
}
set default_config(gui.copyblamethreshold) 40
set default_config(gui.blamehistoryctx) 7
set default_config(gui.diffcontext) 5
+set default_config(gui.diffopts) {}
set default_config(gui.commitmsgwidth) 75
set default_config(gui.newbranchtemplate) {}
set default_config(gui.spellingdictionary) {}
{fontui font_ui {mc "Main Font"}}
{fontdiff font_diff {mc "Diff/Console Font"}}
}
+set default_config(gui.stageuntracked) ask
######################################################################
##
} else {
set arr($name) $value
}
+ } elseif {[regexp {^([^\n]+)$} $line line name]} {
+ # no value given; such entries are treated
+ # as boolean true
+ set arr($name) {}
}
}
}
} else {
set arr($name) $value
}
+ } elseif {[regexp {^([^=]+)$} $line line name]} {
+ # no value given; such entries are treated
+ # as boolean true
+ set arr($name) {}
}
}
close $fd_rc
[concat $after [list ui_ready]]
}
} else {
+ set selected_paths($path) 1
show_diff $path $w $lno
}
}
$ui_diff tag configure clri3$n -background $c
}
$ui_diff tag configure clr1 -font font_diffbold
+$ui_diff tag configure clr4 -underline 1
$ui_diff tag conf d_info -foreground blue -font font_diffbold
$ui_comm configure -state disabled -background gray
}
}
-if {[is_enabled multicommit]} {
+if {[is_enabled multicommit] && ![is_config_false gui.gcwarning]} {
after 1000 hint_gc
}
if {[is_enabled retcode]} {
eval grid $w_columns $w.file_pane.out.sby -sticky nsew
grid conf \
$w.file_pane.out.sbx \
- -column [expr {[llength $w_columns] - 1}] \
+ -column 0 \
+ -columnspan [expr {[llength $w_columns] + 1}] \
-sticky we
grid columnconfigure \
$w.file_pane.out \
set finder [::searchbar::new \
$w.file_pane.out.ff $w_file \
- -column [expr {[llength $w_columns] - 1}] \
+ -column 0 \
+ -columnspan [expr {[llength $w_columns] + 1}] \
]
set gotoline [::linebar::new \
$w.file_pane.out.lf $w_file \
- -column [expr {[llength $w_columns] - 1}] \
+ -column 0 \
+ -columnspan [expr {[llength $w_columns] + 1}] \
]
set w_cviewer $w.file_pane.cm.t
}
if {$commit eq {}} {
if {$do_textconv ne 0} {
- # Run textconv with sh -c "..." to allow it to
- # contain command + arguments. On windows, just
- # call the filter command.
- if {![file executable [shellpath]]} {
- set fd [open |[linsert $textconv end $path] r]
- } else {
- set fd [open |[list [shellpath] -c "$textconv \"\$0\"" $path] r]
- }
+ set fd [open_cmd_pipe $textconv $path]
} else {
set fd [open $path r]
}
foreach i $w_columns {$i conf -state disabled}
if {[eof $fd]} {
- close $fd
+ fconfigure $fd -blocking 1; # enable error reporting on close
+ if {[catch {close $fd} err]} {
+ tk_messageBox -icon error -title [mc Error] \
+ -message $err
+ }
# If we don't force Tk to update the widgets *right now*
# none of our jump commands will cause a change in the UI.
set radius [get_config gui.blamehistoryctx]
set cmdline [list --select-commit=$cmit]
- if {$radius > 0} {
+ if {$radius > 0} {
set author_time {}
set committer_time {}
}
if {[eof $fd]} {
- close $fd;
+ close $fd
set current_fd {}
_load_new_commit $this \
_hide_tooltip $this
set tooltip_wm [toplevel $cur_w.tooltip -borderwidth 1]
+ catch {wm attributes $tooltip_wm -type tooltip}
wm overrideredirect $tooltip_wm 1
wm transient $tooltip_wm [winfo toplevel $cur_w]
set tooltip_t $tooltip_wm.label
wm withdraw $top
wm title $top [append "[appname] ([reponame]): " [mc "File Browser"]]
+ if {$path ne {}} {
+ if {[string index $path end] ne {/}} {
+ append path /
+ }
+ }
+
set browser_commit $commit
- set browser_path $browser_commit:$path
+ set browser_path "$browser_commit:[escape_path $path]"
${NS}::label $w.path \
-textvariable @browser_path \
if {$tooltip_wm eq {}} {
set tooltip_wm [toplevel $w_list.tooltip -borderwidth 1]
+ catch {wm attributes $tooltip_wm -type tooltip}
wm overrideredirect $tooltip_wm 1
wm transient $tooltip_wm [winfo toplevel $w_list]
set tooltip_t $tooltip_wm.label
upvar $t top $w pfx this this
global use_ttk
uplevel [linsert $args 0 make_toplevel $t $w]
+ catch {wm attributes $top -type dialog}
pave_toplevel $pfx
}
global is_detached repo_config
global pch_error
- if {$is_detached && $repo_config(gui.warndetachedcommit)} {
+ if {$is_detached
+ && ![file exists [gitdir rebase-merge head-name]]
+ && [is_config_true gui.warndetachedcommit]} {
set msg [mc "You are about to commit on a detached head.\
This is a potentially dangerous thing to do because if you switch\
to another branch you will lose your changes and it can be difficult
lappend cmd -p
lappend cmd --color
+ set cmd [concat $cmd $repo_config(gui.diffopts)]
if {$repo_config(gui.diffcontext) >= 1} {
lappend cmd "-U$repo_config(gui.diffcontext)"
}
foreach {posbegin colbegin posend colend} $markup {
set prefix clr
- foreach style [split $colbegin ";"] {
+ foreach style [lsort -integer [split $colbegin ";"]] {
if {$style eq "7"} {append prefix i; continue}
- if {$style < 30 || $style > 47} {continue}
+ if {$style != 4 && ($style < 30 || $style > 47)} {continue}
set a "$mark linestart + $posbegin chars"
set b "$mark linestart + $posend chars"
catch {$ui_diff tag add $prefix$style $a $b}
global file_states
set paths [list]
- set unknown_paths [list]
+ set untracked_paths [list]
foreach path [array names file_states] {
switch -glob -- [lindex $file_states($path) 0] {
U? {continue}
?M -
?T -
?D {lappend paths $path}
- ?O {lappend unknown_paths $path}
+ ?O {lappend untracked_paths $path}
}
}
- if {[llength $unknown_paths]} {
- set reply [ask_popup [mc "There are unknown files do you also want
-to stage those?"]]
+ if {[llength $untracked_paths]} {
+ set reply 0
+ switch -- [get_config gui.stageuntracked] {
+ no {
+ set reply 0
+ }
+ yes {
+ set reply 1
+ }
+ ask -
+ default {
+ set reply [ask_popup [mc "Stage %d untracked files?" \
+ [llength $untracked_paths]]]
+ }
+ }
if {$reply} {
- set paths [concat $paths $unknown_paths]
+ set paths [concat $paths $untracked_paths]
}
}
add_helper {Adding all changed files} $paths
${NS}::frame $w
${NS}::label $w.l -text [mc "Goto Line:"]
- entry $w.ent \
+ tentry $w.ent \
-textvariable ${__this}::linenum \
-background lightgreen \
-validate key \
{i-20..200 gui.copyblamethreshold {mc "Minimum Letters To Blame Copy On"}}
{i-0..300 gui.blamehistoryctx {mc "Blame History Context Radius (days)"}}
{i-1..99 gui.diffcontext {mc "Number of Diff Context Lines"}}
+ {t gui.diffopts {mc "Additional Diff Parameters"}}
{i-0..99 gui.commitmsgwidth {mc "Commit Message Text Width"}}
{t gui.newbranchtemplate {mc "New Branch Name Template"}}
{c gui.encoding {mc "Default File Contents Encoding"}}
+ {b gui.warndetachedcommit {mc "Warn before committing to a detached head"}}
+ {s gui.stageuntracked {mc "Staging of untracked files"} {list "yes" "no" "ask"}}
} {
set type [lindex $option 0]
set name [lindex $option 1]
}
pack $w.$f.$optid -side top -anchor w -fill x
}
+ s {
+ set opts [eval [lindex $option 3]]
+ ${NS}::frame $w.$f.$optid
+ ${NS}::label $w.$f.$optid.l -text "$text:"
+ if {$use_ttk} {
+ ttk::combobox $w.$f.$optid.v \
+ -textvariable ${f}_config_new($name) \
+ -values $opts -state readonly
+ } else {
+ eval tk_optionMenu $w.$f.$optid.v \
+ ${f}_config_new($name) \
+ $opts
+ }
+ pack $w.$f.$optid.l -side left -anchor w -fill x
+ pack $w.$f.$optid.v -side right -anchor e -padx 5
+ pack $w.$f.$optid -side top -anchor w -fill x
+ }
}
}
}
field ctext
field searchstring {}
-field casesensitive 1
+field regexpsearch
+field default_regexpsearch
+field casesensitive
+field default_casesensitive
+field smartcase
field searchdirn -forwards
+field history
+field history_index
+
field smarktop
field smarkbot
set w $i_w
set ctext $i_text
+ set default_regexpsearch [is_config_true gui.search.regexp]
+ switch -- [get_config gui.search.case] {
+ no {
+ set default_casesensitive 0
+ set smartcase 0
+ }
+ smart {
+ set default_casesensitive 0
+ set smartcase 1
+ }
+ yes -
+ default {
+ set default_casesensitive 1
+ set smartcase 0
+ }
+ }
+
+ set history [list]
+
${NS}::frame $w
${NS}::label $w.l -text [mc Find:]
- entry $w.ent -textvariable ${__this}::searchstring -background lightgreen
+ tentry $w.ent -textvariable ${__this}::searchstring -background lightgreen
${NS}::button $w.bn -text [mc Next] -command [cb find_next]
${NS}::button $w.bp -text [mc Prev] -command [cb find_prev]
- ${NS}::checkbutton $w.cs -text [mc Case-Sensitive] \
+ ${NS}::checkbutton $w.re -text [mc RegExp] \
+ -variable ${__this}::regexpsearch -command [cb _incrsearch]
+ ${NS}::checkbutton $w.cs -text [mc Case] \
-variable ${__this}::casesensitive -command [cb _incrsearch]
pack $w.l -side left
pack $w.cs -side right
+ pack $w.re -side right
pack $w.bp -side right
pack $w.bn -side right
pack $w.ent -side left -expand 1 -fill x
trace add variable searchstring write [cb _incrsearch_cb]
bind $w.ent <Return> [cb find_next]
bind $w.ent <Shift-Return> [cb find_prev]
+ bind $w.ent <Key-Up> [cb _prev_search]
+ bind $w.ent <Key-Down> [cb _next_search]
bind $w <Destroy> [list delete_this $this]
return $this
method show {} {
if {![visible $this]} {
grid $w
+ $w.ent delete 0 end
+ set regexpsearch $default_regexpsearch
+ set casesensitive $default_casesensitive
+ set history_index [llength $history]
}
focus -force $w.ent
}
if {[visible $this]} {
focus $ctext
grid remove $w
+ _save_search $this
}
}
upvar $mlenvar mlen
lappend cmd -count mlen
}
+ if {$regexpsearch} {
+ lappend cmd -regexp
+ }
if {!$casesensitive} {
lappend cmd -nocase
}
set dir $searchdirn
}
lappend cmd $dir -- $searchstring
- if {$endbound ne {}} {
- set here [eval $cmd [list $start] [list $endbound]]
- } else {
- set here [eval $cmd [list $start]]
- if {$here eq {}} {
- set here [eval $cmd [_get_wrap_anchor $this $dir]]
+ if {[catch {
+ if {$endbound ne {}} {
+ set here [eval $cmd [list $start] [list $endbound]]
+ } else {
+ set here [eval $cmd [list $start]]
+ if {$here eq {}} {
+ set here [eval $cmd [_get_wrap_anchor $this $dir]]
+ }
}
- }
+ } err]} { set here {} }
return $here
}
$ctext mark set anchor [_get_new_anchor $this]
}
if {$searchstring ne {}} {
+ if {$smartcase && [regexp {[[:upper:]]} $searchstring]} {
+ set casesensitive 1
+ }
set here [_do_search $this anchor mlen]
if {$here ne {}} {
$ctext see $here
$ctext tag remove sel 1.0 end
$ctext tag add sel $here "$here + $mlen c"
- $w.ent configure -background lightgreen
+ #$w.ent configure -background lightgreen
+ $w.ent state !pressed
_set_marks $this 1
} else {
- $w.ent configure -background lightpink
+ #$w.ent configure -background lightpink
+ $w.ent state pressed
}
+ } elseif {$smartcase} {
+ # clearing the field resets the smart case detection
+ set casesensitive 0
+ }
+}
+
+method _save_search {} {
+ if {$searchstring eq {}} {
+ return
+ }
+ if {[llength $history] > 0} {
+ foreach {s_regexp s_case s_expr} [lindex $history end] break
+ } else {
+ set s_regexp $regexpsearch
+ set s_case $casesensitive
+ set s_expr ""
+ }
+ if {$searchstring eq $s_expr} {
+ # update modes
+ set history [lreplace $history end end \
+ [list $regexpsearch $casesensitive $searchstring]]
+ } else {
+ lappend history [list $regexpsearch $casesensitive $searchstring]
+ }
+ set history_index [llength $history]
+}
+
+method _prev_search {} {
+ if {$history_index > 0} {
+ incr history_index -1
+ foreach {s_regexp s_case s_expr} [lindex $history $history_index] break
+ $w.ent delete 0 end
+ $w.ent insert 0 $s_expr
+ set regexpsearch $s_regexp
+ set casesensitive $s_case
+ }
+}
+
+method _next_search {} {
+ if {$history_index < [llength $history]} {
+ incr history_index
+ }
+ if {$history_index < [llength $history]} {
+ foreach {s_regexp s_case s_expr} [lindex $history $history_index] break
+ } else {
+ set s_regexp $default_regexpsearch
+ set s_case $default_casesensitive
+ set s_expr ""
}
+ $w.ent delete 0 end
+ $w.ent insert 0 $s_expr
+ set regexpsearch $s_regexp
+ set casesensitive $s_case
}
method find_prev {} {
set searchdirn $dir
$ctext mark unset anchor
if {$searchstring ne {}} {
+ _save_search $this
set start [_get_new_anchor $this]
if {$dir eq "-forwards"} {
set start "$start + 1c"
} else {
set finfo [find_ssh_key]
if {$finfo eq {}} {
- set sshkey_title [mc "Generation succeded, but no keys found."]
+ set sshkey_title [mc "Generation succeeded, but no keys found."]
$w.contents insert end $sshkey_output
} else {
set sshkey_title [mc "Your key is in: %s" [lindex $finfo 0]]
ttk::style configure Gold.TFrame -background gold -relief flat
# listboxes should have a theme border so embed in ttk::frame
ttk::style layout SListbox.TFrame {
- SListbox.Frame.Entry.field -sticky news -border true -children {
- SListbox.Frame.padding -sticky news
- }
- }
+ SListbox.Frame.Entry.field -sticky news -border true -children {
+ SListbox.Frame.padding -sticky news
+ }
+ }
+
+ # Handle either current Tk or older versions of 8.5
+ if {[catch {set theme [ttk::style theme use]}]} {
+ set theme $::ttk::currentTheme
+ }
+
+ if {[lsearch -exact {default alt classic clam} $theme] != -1} {
+ # Simple override of standard ttk::entry to change the field
+		# background according to a state flag. We should use 'user1'
+		# but not all versions of 8.5 support that, so make use of 'pressed'
+ # which is not normally in use for entry widgets.
+ ttk::style layout Edged.Entry [ttk::style layout TEntry]
+ ttk::style map Edged.Entry {*}[ttk::style map TEntry]
+ ttk::style configure Edged.Entry {*}[ttk::style configure TEntry] \
+ -fieldbackground lightgreen
+ ttk::style map Edged.Entry -fieldbackground {
+ {pressed !disabled} lightpink
+ }
+ } else {
+ # For fancier themes, in particular the Windows ones, the field
+ # element may not support changing the background color. So instead
+ # override the fill using the default fill element. If we overrode
+		# the vista theme field element we would lose the themed border
+ # of the widget.
+ catch {
+ ttk::style element create color.fill from default
+ }
+
+ ttk::style layout Edged.Entry {
+ Edged.Entry.field -sticky nswe -border 0 -children {
+ Edged.Entry.border -sticky nswe -border 1 -children {
+ Edged.Entry.padding -sticky nswe -children {
+ Edged.Entry.color.fill -sticky nswe -children {
+ Edged.Entry.textarea -sticky nswe
+ }
+ }
+ }
+ }
+ }
+
+ ttk::style configure Edged.Entry {*}[ttk::style configure TEntry] \
+ -background lightgreen -padding 0 -borderwidth 0
+ ttk::style map Edged.Entry {*}[ttk::style map TEntry] \
+ -background {{pressed !disabled} lightpink}
+ }
+
+ if {[lsearch [bind . <<ThemeChanged>>] InitTheme] == -1} {
+ bind . <<ThemeChanged>> +[namespace code [list InitTheme]]
+ }
}
proc gold_frame {w args} {
# place a themed frame over the surface.
proc Dialog {w args} {
eval [linsert $args 0 toplevel $w -class Dialog]
+ catch {wm attributes $w -type dialog}
pave_toplevel $w
return $w
}
}
}
+proc tentry {w args} {
+ global use_ttk
+ if {$use_ttk} {
+ InitTheme
+ ttk::entry $w -style Edged.Entry
+ } else {
+ entry $w
+ }
+
+ rename $w _$w
+ interp alias {} $w {} tentry_widgetproc $w
+ eval [linsert $args 0 tentry_widgetproc $w configure]
+ return $w
+}
+proc tentry_widgetproc {w cmd args} {
+ global use_ttk
+ switch -- $cmd {
+ state {
+ if {$use_ttk} {
+ return [uplevel 1 [list _$w $cmd] $args]
+ } else {
+ if {[lsearch -exact $args pressed] != -1} {
+ _$w configure -background lightpink
+ } else {
+ _$w configure -background lightgreen
+ }
+ }
+ }
+ configure {
+ if {$use_ttk} {
+ if {[set n [lsearch -exact $args -background]] != -1} {
+ set args [lreplace $args $n [incr n]]
+ if {[llength $args] == 0} {return}
+ }
+ }
+ return [uplevel 1 [list _$w $cmd] $args]
+ }
+ default { return [uplevel 1 [list _$w $cmd] $args] }
+ }
+}
+
# Tk 8.6 provides a standard font selection dialog. This uses the native
# dialogs on Windows and MacOSX or a standard Tk dialog on X11.
proc tchoosefont {w title familyvar sizevar} {
return
}
} elseif {[is_config_true "guitool.$fullname.confirm"]} {
- if {[ask_popup [mc "Are you sure you want to run %s?" $fullname]] ne {yes}} {
- return
+ if {[is_config_true "guitool.$fullname.needsfile"]} {
+ if {[ask_popup [mc "Are you sure you want to run %1\$s on file \"%2\$s\"?" $fullname $current_diff_path]] ne {yes}} {
+ return
+ }
+ } else {
+ if {[ask_popup [mc "Are you sure you want to run %s?" $fullname]] ne {yes}} {
+ return
+ }
}
}
set w .push_setup
toplevel $w
+ catch {wm attributes $w -type dialog}
wm withdraw $w
wm geometry $w "+[winfo rootx .]+[winfo rooty .]"
pave_toplevel $w
curr_branch=$(git symbolic-ref -q HEAD)
curr_branch_short="${curr_branch#refs/heads/}"
rebase=$(git config --bool branch.$curr_branch_short.rebase)
+if test -z "$rebase"
+then
+ rebase=$(git config --bool pull.rebase)
+fi
dry_run=
while :
do
die "$2"
}
+exit_with_patch () {
+ echo "$1" > "$state_dir"/stopped-sha
+ make_patch $1
+ git rev-parse --verify HEAD > "$amend"
+ warn "You can amend the commit now, with"
+ warn
+ warn " git commit --amend"
+ warn
+ warn "Once you are satisfied with your changes, run"
+ warn
+ warn " git rebase --continue"
+ warn
+ exit $2
+}
+
die_abort () {
rm -rf "$state_dir"
die "$1"
mark_action_done
pick_one $sha1 ||
die_with_patch $sha1 "Could not apply $sha1... $rest"
- git commit --amend --no-post-rewrite
+ git commit --amend --no-post-rewrite || {
+ warn "Could not amend commit after successfully picking $sha1... $rest"
+ warn "This is most likely due to an empty commit message, or the pre-commit hook"
+ warn "failed. If the pre-commit hook failed, you may need to resolve the issue before"
+ warn "you are able to reword the commit."
+ exit_with_patch $sha1 1
+ }
record_in_rewritten $sha1
;;
edit|e)
mark_action_done
pick_one $sha1 ||
die_with_patch $sha1 "Could not apply $sha1... $rest"
- echo "$sha1" > "$state_dir"/stopped-sha
- make_patch $sha1
- git rev-parse --verify HEAD > "$amend"
warn "Stopped at $sha1... $rest"
- warn "You can amend the commit now, with"
- warn
- warn " git commit --amend"
- warn
- warn "Once you are satisfied with your changes, run"
- warn
- warn " git rebase --continue"
- warn
- exit 0
+ exit_with_patch $sha1 0
;;
squash|s|fixup|f)
case "$command" in
shift
done
-base=$1
-url=$2
-head=${3-HEAD}
+base=$1 url=$2 head=${3-HEAD} status=0 branch_name=
-[ "$base" ] || usage
-[ "$url" ] || usage
+headref=$(git symbolic-ref -q "$head")
+if git show-ref -q --verify "$headref"
+then
+ branch_name=${headref#refs/heads/}
+ if test "z$branch_name" = "z$headref" ||
+ ! git config "branch.$branch_name.description" >/dev/null
+ then
+ branch_name=
+ fi
+fi
+
+tag_name=$(git describe --exact "$head^0" 2>/dev/null)
-baserev=`git rev-parse --verify "$base"^0` &&
-headrev=`git rev-parse --verify "$head"^0` || exit
+test -n "$base" && test -n "$url" || usage
+baserev=$(git rev-parse --verify "$base"^0) &&
+headrev=$(git rev-parse --verify "$head"^0) || exit
-merge_base=`git merge-base $baserev $headrev` ||
+merge_base=$(git merge-base $baserev $headrev) ||
die "fatal: No commits in common between $base and $head"
-branch=$(git ls-remote "$url" \
- | sed -n -e "/^$headrev refs.heads./{
- s/^.* refs.heads.//
- p
- q
- }")
+find_matching_branch="/^$headrev "'refs\/heads\//{
+ s/^.* refs\/heads\///
+ p
+ q
+}'
+branch=$(git ls-remote "$url" | sed -n -e "$find_matching_branch")
url=$(git ls-remote --get-url "$url")
-if [ -z "$branch" ]; then
- echo "warn: No branch of $url is at:" >&2
- git log --max-count=1 --pretty='tformat:warn: %h: %s' $headrev >&2
- echo "warn: Are you sure you pushed $head there?" >&2
- echo >&2
- echo >&2
- branch=..BRANCH.NOT.VERIFIED..
- status=1
-fi
git show -s --format='The following changes since commit %H:
%s (%ci)
-are available in the git repository at:' $baserev &&
-echo " $url $branch" &&
-echo &&
+are available in the git repository at:
+' $baserev &&
+echo " $url${branch+ $branch}" &&
+git show -s --format='
+for you to fetch changes up to %H:
+
+ %s (%ci)
+
+----------------------------------------------------------------' $headrev &&
+
+if test -n "$branch_name"
+then
+	echo "(from the branch description for $branch_name local branch)"
+ echo
+ git config "branch.$branch_name.description"
+fi &&
+
+if test -n "$tag_name"
+then
+ git cat-file tag "$tag_name" |
+ sed -n -e '1,/^$/d' -e '/^-----BEGIN PGP /q' -e p
+ echo
+fi &&
+
+if test -n "$branch_name" || test -n "$tag_name"
+then
+ echo "----------------------------------------------------------------"
+fi &&
git shortlog ^$baserev $headrev &&
-git diff -M --stat --summary $patch $merge_base..$headrev || exit
+git diff -M --stat --summary $patch $merge_base..$headrev || status=1
+
+if test -z "$branch"
+then
+ echo "warn: No branch of $url is at:" >&2
+ git show -s --format='warn: %h: %s' $headrev >&2
+ echo "warn: Are you sure you pushed '$head' there?" >&2
+ status=1
+fi
exit $status
{ "update-ref", cmd_update_ref, RUN_SETUP },
{ "update-server-info", cmd_update_server_info, RUN_SETUP },
{ "upload-archive", cmd_upload_archive },
+ { "upload-archive--writer", cmd_upload_archive_writer },
{ "var", cmd_var, RUN_SETUP_GENTLY },
{ "verify-pack", cmd_verify_pack },
{ "verify-tag", cmd_verify_tag, RUN_SETUP },
# Tcl ignores the next line -*- tcl -*- \
exec wish "$0" -- "$@"
-# Copyright © 2005-2009 Paul Mackerras. All rights reserved.
+# Copyright © 2005-2011 Paul Mackerras. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
package require Tk
-proc gitdir {} {
- global env
- if {[info exists env(GIT_DIR)]} {
- return $env(GIT_DIR)
- } else {
- return [exec git rev-parse --git-dir]
- }
+proc hasworktree {} {
+ return [expr {[exec git rev-parse --is-bare-repository] == "false" &&
+ [exec git rev-parse --is-inside-git-dir] == "false"}]
}
# A simple scheduler for compute-intensive stuff.
global viewactive viewcomplete tclencoding
global startmsecs showneartags showlocalchanges
global mainheadid viewmainheadid viewmainheadid_orig pending_select
- global isworktree
+ global hasworktree
global varcid vposids vnegids vflags vrevs
global show_notes
- set isworktree [expr {[exec git rev-parse --is-inside-work-tree] == "true"}]
+ set hasworktree [hasworktree]
rereadrefs
set view $curview
if {$mainheadid ne $viewmainheadid_orig($view)} {
if {![info exists commitinfo($id)]} {
parsecommit $id $commitdata($id) 1
}
- set cdate [lindex $commitinfo($id) 4]
+ set cdate [lindex [lindex $commitinfo($id) 4] 0]
if {![string is integer -strict $cdate]} {
set cdate 0
}
}
proc parsecommit {id contents listed} {
- global commitinfo cdate
+ global commitinfo
set inhdr 1
set comment {}
set line [split $line " "]
set tag [lindex $line 0]
if {$tag == "author"} {
- set audate [lindex $line end-1]
+ set audate [lrange $line end-1 end]
set auname [join [lrange $line 1 end-2] " "]
} elseif {$tag == "committer"} {
- set comdate [lindex $line end-1]
+ set comdate [lrange $line end-1 end]
set comname [join [lrange $line 1 end-2] " "]
}
}
}
set comment $newcomment
}
- if {$comdate != {}} {
- set cdate($id) $comdate
- }
+ set hasnote [string first "\nNotes:\n" $contents]
set commitinfo($id) [list $headline $auname $audate \
- $comname $comdate $comment]
+ $comname $comdate $comment $hasnote]
}
proc getcommit {id} {
bindkey n "selnextline 1"
bindkey z "goback"
bindkey x "goforw"
- bindkey i "selnextline -1"
- bindkey k "selnextline 1"
- bindkey j "goback"
+ bindkey k "selnextline -1"
+ bindkey j "selnextline 1"
+ bindkey h "goback"
bindkey l "goforw"
bindkey b prevfile
bindkey d "$ctext yview scroll 18 units"
message $w.m -text [mc "
Gitk - a commit viewer for git
-Copyright \u00a9 2005-2010 Paul Mackerras
+Copyright \u00a9 2005-2011 Paul Mackerras
Use and redistribute under the terms of the GNU General Public License"] \
-justify center -aspect 400 -border 2 -bg white -relief groove
[mc "<%s-W> Close window" $M1T]
[mc "<Home> Move to first commit"]
[mc "<End> Move to last commit"]
-[mc "<Up>, p, i Move up one commit"]
-[mc "<Down>, n, k Move down one commit"]
-[mc "<Left>, z, j Go back in history list"]
+[mc "<Up>, p, k Move up one commit"]
+[mc "<Down>, n, j Move down one commit"]
+[mc "<Left>, z, h Go back in history list"]
[mc "<Right>, x, l Go forward in history list"]
[mc "<PageUp> Move up one page in commit list"]
[mc "<PageDown> Move down one page in commit list"]
global diffnum gitktmpdir gitdir
if {![info exists gitktmpdir]} {
- set gitktmpdir [file join [file dirname $gitdir] \
- [format ".gitk-tmp.%s" [pid]]]
+ set gitktmpdir [file join $gitdir [format ".gitk-tmp.%s" [pid]]]
if {[catch {file mkdir $gitktmpdir} err]} {
error_popup "[mc "Error creating temporary directory %s:" $gitktmpdir] $err"
unset gitktmpdir
proc external_diff_get_one_file {diffid filename diffdir} {
global nullid nullid2 nullfile
- global gitdir
+ global worktree
if {$diffid == $nullid} {
- set difffile [file join [file dirname $gitdir] $filename]
+ set difffile [file join $worktree $filename]
if {[file exists $difffile]} {
return $difffile
}
}
proc external_blame {parent_idx {line {}}} {
- global flist_menu_file gitdir
+ global flist_menu_file cdup
global nullid nullid2
global parentlist selectedline currentid
if {$line ne {} && $line > 1} {
lappend cmdline "--line=$line"
}
- set f [file join [file dirname $gitdir] $flist_menu_file]
+ set f [file join $cdup $flist_menu_file]
# Unfortunately it seems git gui blame doesn't like
# being given an absolute path...
set f [make_relative $f]
proc show_line_source {} {
global cmitmode currentid parents curview blamestuff blameinst
global diff_menu_line diff_menu_filebase flist_menu_file
- global nullid nullid2 gitdir
+ global nullid nullid2 gitdir cdup
set from_index {}
if {$cmitmode eq "tree"} {
} else {
lappend blameargs $id
}
- lappend blameargs -- [file join [file dirname $gitdir] $flist_menu_file]
+ lappend blameargs -- [file join $cdup $flist_menu_file]
if {[catch {
set f [open $blameargs r]
} err]} {
proc do_file_hl {serial} {
global highlight_files filehighlight highlight_paths gdttype fhl_list
+ global cdup findtype
if {$gdttype eq [mc "touching paths:"]} {
+ # If "exact" match then convert backslashes to forward slashes.
+ # Most useful to support Windows-flavoured file paths.
+ if {$findtype eq [mc "Exact"]} {
+ set highlight_files [string map {"\\" "/"} $highlight_files]
+ }
if {[catch {set paths [shellsplit $highlight_files]}]} return
set highlight_paths [makepatterns $paths]
highlight_filelist
- set gdtargs [concat -- $paths]
+ set relative_paths {}
+ foreach path $paths {
+ lappend relative_paths [file join $cdup $path]
+ }
+ set gdtargs [concat -- $relative_paths]
} elseif {$gdttype eq [mc "adding/removing string:"]} {
set gdtargs [list "-S$highlight_files"]
} else {
# spawn off a process to do git diff-index --cached HEAD
proc dodiffindex {} {
global lserial showlocalchanges vfilelimit curview
- global isworktree
+ global hasworktree
- if {!$showlocalchanges || !$isworktree} return
+ if {!$showlocalchanges || !$hasworktree} return
incr lserial
set cmd "|git diff-index --cached HEAD"
if {$vfilelimit($curview) ne {}} {
|| [info exists idotherrefs($id)]} {
set xt [drawtags $id $x $xt $y]
}
+ if {[lindex $commitinfo($id) 6] > 0} {
+ set xt [drawnotesign $xt $y]
+ }
set headline [lindex $commitinfo($id) 0]
set name [lindex $commitinfo($id) 1]
set date [lindex $commitinfo($id) 2]
return $xt
}
+proc drawnotesign {xt y} {
+ global linespc canv fgcolor
+
+ set orad [expr {$linespc / 3}]
+ set t [$canv create rectangle [expr {$xt - $orad}] [expr {$y - $orad}] \
+ [expr {$xt + $orad - 1}] [expr {$y + $orad - 1}] \
+ -fill yellow -outline $fgcolor -width 1 -tags circle]
+ set xt [expr {$xt + $orad * 3}]
+ return $xt
+}
+
proc xcoord {i level ln} {
global canvx0 xspc1 xspc2
proc cherrypick {} {
global rowmenuid curview
global mainhead mainheadid
+ global gitdir
set oldhead [exec git rev-parse HEAD]
set dheads [descheads $rowmenuid]
conflict.\nDo you wish to run git citool to\
resolve it?"]]} {
# Force citool to read MERGE_MSG
- file delete [file join [gitdir] "GITGUI_MSG"]
+ file delete [file join $gitdir "GITGUI_MSG"]
exec_citool {} $rowmenuid
}
} else {
proc getallcommits {} {
global allcommits nextarc seeds allccache allcwait cachedarcs allcupdate
global idheads idtags idotherrefs allparents tagobjid
+ global gitdir
if {![info exists allcommits]} {
set nextarc 0
set seeds {}
set allcwait 0
set cachedarcs 0
- set allccache [file join [gitdir] "gitk.cache"]
+ set allccache [file join $gitdir "gitk.cache"]
if {![catch {
set f [open $allccache r]
set allcwait 1
proc formatdate {d} {
global datetimeformat
if {$d ne {}} {
- set d [clock format $d -format $datetimeformat]
+ set d [clock format [lindex $d 0] -format $datetimeformat]
}
return $d
}
setoptions
# check that we can find a .git directory somewhere...
-if {[catch {set gitdir [gitdir]}]} {
+if {[catch {set gitdir [exec git rev-parse --git-dir]}]} {
show_error {} . [mc "Cannot find a git repository here."]
exit 1
}
-if {![file isdirectory $gitdir]} {
- show_error {} . [mc "Cannot find the git directory \"%s\"." $gitdir]
- exit 1
-}
set selecthead {}
set selectheadid {}
set stuffsaved 0
set patchnum 0
set lserial 0
-set isworktree [expr {[exec git rev-parse --is-inside-work-tree] == "true"}]
+set hasworktree [hasworktree]
+set cdup {}
+if {[expr {[exec git rev-parse --is-inside-work-tree] == "true"}]} {
+ set cdup [exec git rev-parse --show-cdup]
+}
+set worktree [exec git rev-parse --show-toplevel]
setcoords
makewindow
catch {
extra_options => "opt",
search_use_regexp => "sr",
ctag => "by_tag",
+ diff_style => "ds",
# this must be last entry (for manipulation from JavaScript)
javascript => "js"
);
return $result;
}
-# format patch (diff) line (not to be used for diff headers)
-sub format_diff_line {
- my $line = shift;
- my ($from, $to) = @_;
- my $diff_class = "";
-
- chomp $line;
+sub diff_line_class {
+ my ($line, $from, $to) = @_;
+ # ordinary diff
+ my $num_sign = 1;
+ # combined diff
if ($from && $to && ref($from->{'href'}) eq "ARRAY") {
- # combined diff
- my $prefix = substr($line, 0, scalar @{$from->{'href'}});
- if ($line =~ m/^\@{3}/) {
- $diff_class = " chunk_header";
- } elsif ($line =~ m/^\\/) {
- $diff_class = " incomplete";
- } elsif ($prefix =~ tr/+/+/) {
- $diff_class = " add";
- } elsif ($prefix =~ tr/-/-/) {
- $diff_class = " rem";
- }
- } else {
- # assume ordinary diff
- my $char = substr($line, 0, 1);
- if ($char eq '+') {
- $diff_class = " add";
- } elsif ($char eq '-') {
- $diff_class = " rem";
- } elsif ($char eq '@') {
- $diff_class = " chunk_header";
- } elsif ($char eq "\\") {
- $diff_class = " incomplete";
- }
+ $num_sign = scalar @{$from->{'href'}};
+ }
+
+ my @diff_line_classifier = (
+ { regexp => qr/^\@\@{$num_sign} /, class => "chunk_header"},
+ { regexp => qr/^\\/, class => "incomplete" },
+ { regexp => qr/^ {$num_sign}/, class => "ctx" },
+ # classifier for context must come before classifier add/rem,
+ # or we would have to use more complicated regexp, for example
+ # qr/(?= {0,$m}\+)[+ ]{$num_sign}/, where $m = $num_sign - 1;
+ { regexp => qr/^[+ ]{$num_sign}/, class => "add" },
+ { regexp => qr/^[- ]{$num_sign}/, class => "rem" },
+ );
+ for my $clsfy (@diff_line_classifier) {
+ return $clsfy->{'class'}
+ if ($line =~ $clsfy->{'regexp'});
}
- $line = untabify($line);
- if ($from && $to && $line =~ m/^\@{2} /) {
- my ($from_text, $from_start, $from_lines, $to_text, $to_start, $to_lines, $section) =
- $line =~ m/^\@{2} (-(\d+)(?:,(\d+))?) (\+(\d+)(?:,(\d+))?) \@{2}(.*)$/;
- $from_lines = 0 unless defined $from_lines;
- $to_lines = 0 unless defined $to_lines;
+ # fallback
+ return "";
+}
- if ($from->{'href'}) {
- $from_text = $cgi->a({-href=>"$from->{'href'}#l$from_start",
- -class=>"list"}, $from_text);
- }
- if ($to->{'href'}) {
- $to_text = $cgi->a({-href=>"$to->{'href'}#l$to_start",
- -class=>"list"}, $to_text);
- }
- $line = "<span class=\"chunk_info\">@@ $from_text $to_text @@</span>" .
- "<span class=\"section\">" . esc_html($section, -nbsp=>1) . "</span>";
- return "<div class=\"diff$diff_class\">$line</div>\n";
- } elsif ($from && $to && $line =~ m/^\@{3}/) {
- my ($prefix, $ranges, $section) = $line =~ m/^(\@+) (.*?) \@+(.*)$/;
- my (@from_text, @from_start, @from_nlines, $to_text, $to_start, $to_nlines);
+# assumes that $from and $to are defined and correctly filled,
+# and that $line holds a line of chunk header for unified diff
+sub format_unidiff_chunk_header {
+ my ($line, $from, $to) = @_;
- @from_text = split(' ', $ranges);
- for (my $i = 0; $i < @from_text; ++$i) {
- ($from_start[$i], $from_nlines[$i]) =
- (split(',', substr($from_text[$i], 1)), 0);
- }
+ my ($from_text, $from_start, $from_lines, $to_text, $to_start, $to_lines, $section) =
+ $line =~ m/^\@{2} (-(\d+)(?:,(\d+))?) (\+(\d+)(?:,(\d+))?) \@{2}(.*)$/;
- $to_text = pop @from_text;
- $to_start = pop @from_start;
- $to_nlines = pop @from_nlines;
+ $from_lines = 0 unless defined $from_lines;
+ $to_lines = 0 unless defined $to_lines;
- $line = "<span class=\"chunk_info\">$prefix ";
- for (my $i = 0; $i < @from_text; ++$i) {
- if ($from->{'href'}[$i]) {
- $line .= $cgi->a({-href=>"$from->{'href'}[$i]#l$from_start[$i]",
- -class=>"list"}, $from_text[$i]);
- } else {
- $line .= $from_text[$i];
- }
- $line .= " ";
- }
- if ($to->{'href'}) {
- $line .= $cgi->a({-href=>"$to->{'href'}#l$to_start",
- -class=>"list"}, $to_text);
+ if ($from->{'href'}) {
+ $from_text = $cgi->a({-href=>"$from->{'href'}#l$from_start",
+ -class=>"list"}, $from_text);
+ }
+ if ($to->{'href'}) {
+ $to_text = $cgi->a({-href=>"$to->{'href'}#l$to_start",
+ -class=>"list"}, $to_text);
+ }
+ $line = "<span class=\"chunk_info\">@@ $from_text $to_text @@</span>" .
+ "<span class=\"section\">" . esc_html($section, -nbsp=>1) . "</span>";
+ return $line;
+}
+
+# assumes that $from and $to are defined and correctly filled,
+# and that $line holds a line of chunk header for combined diff
+sub format_cc_diff_chunk_header {
+ my ($line, $from, $to) = @_;
+
+ my ($prefix, $ranges, $section) = $line =~ m/^(\@+) (.*?) \@+(.*)$/;
+ my (@from_text, @from_start, @from_nlines, $to_text, $to_start, $to_nlines);
+
+ @from_text = split(' ', $ranges);
+ for (my $i = 0; $i < @from_text; ++$i) {
+ ($from_start[$i], $from_nlines[$i]) =
+ (split(',', substr($from_text[$i], 1)), 0);
+ }
+
+ $to_text = pop @from_text;
+ $to_start = pop @from_start;
+ $to_nlines = pop @from_nlines;
+
+ $line = "<span class=\"chunk_info\">$prefix ";
+ for (my $i = 0; $i < @from_text; ++$i) {
+ if ($from->{'href'}[$i]) {
+ $line .= $cgi->a({-href=>"$from->{'href'}[$i]#l$from_start[$i]",
+ -class=>"list"}, $from_text[$i]);
} else {
- $line .= $to_text;
+ $line .= $from_text[$i];
}
- $line .= " $prefix</span>" .
- "<span class=\"section\">" . esc_html($section, -nbsp=>1) . "</span>";
- return "<div class=\"diff$diff_class\">$line</div>\n";
+ $line .= " ";
}
- return "<div class=\"diff$diff_class\">" . esc_html($line, -nbsp=>1) . "</div>\n";
+ if ($to->{'href'}) {
+ $line .= $cgi->a({-href=>"$to->{'href'}#l$to_start",
+ -class=>"list"}, $to_text);
+ } else {
+ $line .= $to_text;
+ }
+ $line .= " $prefix</span>" .
+ "<span class=\"section\">" . esc_html($section, -nbsp=>1) . "</span>";
+ return $line;
+}
+
+# process patch (diff) line (not to be used for diff headers),
+# returning class and HTML-formatted (but not wrapped) line
+sub process_diff_line {
+ my $line = shift;
+ my ($from, $to) = @_;
+
+ my $diff_class = diff_line_class($line, $from, $to);
+
+ chomp $line;
+ $line = untabify($line);
+
+ if ($from && $to && $line =~ m/^\@{2} /) {
+ $line = format_unidiff_chunk_header($line, $from, $to);
+ return $diff_class, $line;
+
+ } elsif ($from && $to && $line =~ m/^\@{3}/) {
+ $line = format_cc_diff_chunk_header($line, $from, $to);
+ return $diff_class, $line;
+
+ }
+ return $diff_class, esc_html($line, -nbsp=>1);
}
# Generates undef or something like "_snapshot_" or "snapshot (_tbz2_ _zip_)",
print "</table>\n";
}
+sub print_sidebyside_diff_chunk {
+ my @chunk = @_;
+ my (@ctx, @rem, @add);
+
+ return unless @chunk;
+
+ # incomplete last line might be among removed or added lines,
+ # or both, or among context lines: find which
+ for (my $i = 1; $i < @chunk; $i++) {
+ if ($chunk[$i][0] eq 'incomplete') {
+ $chunk[$i][0] = $chunk[$i-1][0];
+ }
+ }
+
+ # guardian
+ push @chunk, ["", ""];
+
+ foreach my $line_info (@chunk) {
+ my ($class, $line) = @$line_info;
+
+ # print chunk headers
+ if ($class && $class eq 'chunk_header') {
+ print $line;
+ next;
+ }
+
+		## print from the accumulators when the class of lines changes
+		# empty the context block at the start of a rem/add block, or at end of chunk
+ if (@ctx && (!$class || $class eq 'rem' || $class eq 'add')) {
+ print join '',
+ '<div class="chunk_block ctx">',
+ '<div class="old">',
+ @ctx,
+ '</div>',
+ '<div class="new">',
+ @ctx,
+ '</div>',
+ '</div>';
+ @ctx = ();
+ }
+		# empty the add/rem blocks at the start of a context block, or at end of chunk
+ if ((@rem || @add) && (!$class || $class eq 'ctx')) {
+ if (!@add) {
+ # pure removal
+ print join '',
+ '<div class="chunk_block rem">',
+ '<div class="old">',
+ @rem,
+ '</div>',
+ '</div>';
+ } elsif (!@rem) {
+ # pure addition
+ print join '',
+ '<div class="chunk_block add">',
+ '<div class="new">',
+ @add,
+ '</div>',
+ '</div>';
+ } else {
+			# assume that it is a change
+ print join '',
+ '<div class="chunk_block chg">',
+ '<div class="old">',
+ @rem,
+ '</div>',
+ '<div class="new">',
+ @add,
+ '</div>',
+ '</div>';
+ }
+ @rem = @add = ();
+ }
+
+ ## adding lines to accumulator
+ # guardian value
+ last unless $line;
+ # rem, add or change
+ if ($class eq 'rem') {
+ push @rem, $line;
+ } elsif ($class eq 'add') {
+ push @add, $line;
+ }
+ # context line
+ if ($class eq 'ctx') {
+ push @ctx, $line;
+ }
+ }
+}
+
sub git_patchset_body {
- my ($fd, $difftree, $hash, @hash_parents) = @_;
+ my ($fd, $diff_style, $difftree, $hash, @hash_parents) = @_;
my ($hash_parent) = $hash_parents[0];
my $is_combined = (@hash_parents > 1);
my $diffinfo;
my $to_name;
my (%from, %to);
+ my @chunk; # for side-by-side diff
print "<div class=\"patchset\">\n";
next PATCH if ($patch_line =~ m/^diff /);
- print format_diff_line($patch_line, \%from, \%to);
+ my ($class, $line) = process_diff_line($patch_line, \%from, \%to);
+ my $diff_classes = "diff";
+ $diff_classes .= " $class" if ($class);
+ $line = "<div class=\"$diff_classes\">$line</div>\n";
+
+ if ($diff_style eq 'sidebyside' && !$is_combined) {
+ if ($class eq 'chunk_header') {
+ print_sidebyside_diff_chunk(@chunk);
+ @chunk = ( [ $class, $line ] );
+ } else {
+ push @chunk, [ $class, $line ];
+ }
+ } else {
+ # default 'inline' style and unknown styles
+ print $line;
+ }
}
} continue {
+ if (@chunk) {
+ print_sidebyside_diff_chunk(@chunk);
+ @chunk = ();
+ }
print "</div>\n"; # class="patch"
}
sub git_blobdiff {
my $format = shift || 'html';
+ my $diff_style = $input_params{'diff_style'} || 'inline';
my $fd;
my @difftree;
my $formats_nav =
$cgi->a({-href => href(action=>"blobdiff_plain", -replay=>1)},
"raw");
+ $formats_nav .= diff_style_nav($diff_style);
git_header_html(undef, $expires);
if (defined $hash_base && (my %co = parse_commit($hash_base))) {
git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav);
if ($format eq 'html') {
print "<div class=\"page_body\">\n";
- git_patchset_body($fd, [ \%diffinfo ], $hash_base, $hash_parent_base);
+ git_patchset_body($fd, $diff_style,
+ [ \%diffinfo ], $hash_base, $hash_parent_base);
close $fd;
print "</div>\n"; # class="page_body"
git_blobdiff('plain');
}
+# assumes that it is added as a later part of an already existing navigation,
+# so it returns "| foo | bar" rather than just "foo | bar"
+sub diff_style_nav {
+ my ($diff_style, $is_combined) = @_;
+ $diff_style ||= 'inline';
+
+ return "" if ($is_combined);
+
+ my @styles = (inline => 'inline', 'sidebyside' => 'side by side');
+ my %styles = @styles;
+ @styles =
+ @styles[ map { $_ * 2 } 0..$#styles/2 ];
+
+ return join '',
+ map { " | ".$_ }
+ map {
+ $_ eq $diff_style ? $styles{$_} :
+ $cgi->a({-href => href(-replay=>1, diff_style => $_)}, $styles{$_})
+ } @styles;
+}
+
sub git_commitdiff {
my %params = @_;
my $format = $params{-format} || 'html';
+ my $diff_style = $input_params{'diff_style'} || 'inline';
my ($patch_max) = gitweb_get_feature('patches');
if ($format eq 'patch') {
$cgi->a({-href => href(action=>"patch", -replay=>1)},
"patch");
}
+ $formats_nav .= diff_style_nav($diff_style, @{$co{'parents'}} > 1);
if (defined $hash_parent &&
$hash_parent ne '-c' && $hash_parent ne '--cc') {
}
}
$formats_nav .= ': ' .
- $cgi->a({-href => href(action=>"commitdiff",
- hash=>$hash_parent)},
+ $cgi->a({-href => href(-replay=>1,
+ hash=>$hash_parent, hash_base=>undef)},
esc_html($hash_parent_short)) .
')';
} elsif (!$co{'parent'}) {
# single parent commit
$formats_nav .=
' (parent: ' .
- $cgi->a({-href => href(action=>"commitdiff",
- hash=>$co{'parent'})},
+ $cgi->a({-href => href(-replay=>1,
+ hash=>$co{'parent'}, hash_base=>undef)},
esc_html(substr($co{'parent'}, 0, 7))) .
')';
} else {
# merge commit
if ($hash_parent eq '--cc') {
$formats_nav .= ' | ' .
- $cgi->a({-href => href(action=>"commitdiff",
+ $cgi->a({-href => href(-replay=>1,
hash=>$hash, hash_parent=>'-c')},
'combined');
} else { # $hash_parent eq '-c'
$formats_nav .= ' | ' .
- $cgi->a({-href => href(action=>"commitdiff",
+ $cgi->a({-href => href(-replay=>1,
hash=>$hash, hash_parent=>'--cc')},
'compact');
}
$formats_nav .=
' (merge: ' .
join(' ', map {
- $cgi->a({-href => href(action=>"commitdiff",
- hash=>$_)},
+ $cgi->a({-href => href(-replay=>1,
+ hash=>$_, hash_base=>undef)},
esc_html(substr($_, 0, 7)));
} @{$co{'parents'}} ) .
')';
$use_parents ? @{$co{'parents'}} : $hash_parent);
print "<br/>\n";
- git_patchset_body($fd, \@difftree, $hash,
+ git_patchset_body($fd, $diff_style,
+ \@difftree, $hash,
$use_parents ? @{$co{'parents'}} : $hash_parent);
close $fd;
print "</div>\n"; # class="page_body"
color: #600000;
}
+/* side-by-side diff */
+div.chunk_block {
+ overflow: hidden;
+}
+
+div.chunk_block div.old {
+ float: left;
+ width: 50%;
+ overflow: hidden;
+}
+
+div.chunk_block div.new {
+ margin-left: 50%;
+ width: 50%;
+}
+
+div.chunk_block.rem div.old div.diff.rem {
+ background-color: #fff5f5;
+}
+div.chunk_block.add div.new div.diff.add {
+ background-color: #f8fff8;
+}
+div.chunk_block.chg div div.diff {
+ background-color: #fffff0;
+}
+div.chunk_block.ctx div div.diff.ctx {
+ color: #404040;
+}
+
+
div.index_include {
border: solid #d9d8d1;
border-width: 0px 0px 1px;
--- /dev/null
+#include "cache.h"
+#include "run-command.h"
+#include "strbuf.h"
+#include "gpg-interface.h"
+#include "sigchain.h"
+
+static char *configured_signing_key;
+
+void set_signing_key(const char *key)
+{
+ free(configured_signing_key);
+ configured_signing_key = xstrdup(key);
+}
+
+int git_gpg_config(const char *var, const char *value, void *cb)
+{
+ if (!strcmp(var, "user.signingkey")) {
+ if (!value)
+ return config_error_nonbool(var);
+ set_signing_key(value);
+ }
+ return 0;
+}
+
+const char *get_signing_key(void)
+{
+ if (configured_signing_key)
+ return configured_signing_key;
+ return git_committer_info(IDENT_ERROR_ON_NO_NAME|IDENT_NO_DATE);
+}
+
+/*
+ * Create a detached signature for the contents of "buffer" and append
+ * it after "signature"; "buffer" and "signature" can be the same
+ * strbuf instance, in which case the detached signature is appended
+ * at the end.
+ */
+int sign_buffer(struct strbuf *buffer, struct strbuf *signature, const char *signing_key)
+{
+ struct child_process gpg;
+ const char *args[4];
+ ssize_t len;
+ size_t i, j, bottom;
+
+ memset(&gpg, 0, sizeof(gpg));
+ gpg.argv = args;
+ gpg.in = -1;
+ gpg.out = -1;
+ args[0] = "gpg";
+ args[1] = "-bsau";
+ args[2] = signing_key;
+ args[3] = NULL;
+
+ if (start_command(&gpg))
+ return error(_("could not run gpg."));
+
+ /*
+	 * When the configured signing key is bad, the program could be terminated
+	 * because gpg exits without reading, and then the write gets SIGPIPE.
+ */
+ sigchain_push(SIGPIPE, SIG_IGN);
+
+ if (write_in_full(gpg.in, buffer->buf, buffer->len) != buffer->len) {
+ close(gpg.in);
+ close(gpg.out);
+ finish_command(&gpg);
+ return error(_("gpg did not accept the data"));
+ }
+ close(gpg.in);
+
+ bottom = signature->len;
+ len = strbuf_read(signature, gpg.out, 1024);
+ close(gpg.out);
+
+ sigchain_pop(SIGPIPE);
+
+ if (finish_command(&gpg) || !len || len < 0)
+ return error(_("gpg failed to sign the data"));
+
+ /* Strip CR from the line endings, in case we are on Windows. */
+ for (i = j = bottom; i < signature->len; i++)
+ if (signature->buf[i] != '\r') {
+ if (i != j)
+ signature->buf[j] = signature->buf[i];
+ j++;
+ }
+ strbuf_setlen(signature, j);
+
+ return 0;
+}
+
+/*
+ * Run "gpg" to see if the payload matches the detached signature.
+ * gpg_output, when set, receives the diagnostic output from "gpg".
+ */
+int verify_signed_buffer(const char *payload, size_t payload_size,
+ const char *signature, size_t signature_size,
+ struct strbuf *gpg_output)
+{
+ struct child_process gpg;
+ const char *args_gpg[] = {"gpg", "--verify", "FILE", "-", NULL};
+ char path[PATH_MAX];
+ int fd, ret;
+
+ fd = git_mkstemp(path, PATH_MAX, ".git_vtag_tmpXXXXXX");
+ if (fd < 0)
+ return error("could not create temporary file '%s': %s",
+ path, strerror(errno));
+ if (write_in_full(fd, signature, signature_size) < 0)
+ return error("failed writing detached signature to '%s': %s",
+ path, strerror(errno));
+ close(fd);
+
+ memset(&gpg, 0, sizeof(gpg));
+ gpg.argv = args_gpg;
+ gpg.in = -1;
+ if (gpg_output)
+ gpg.err = -1;
+ args_gpg[2] = path;
+ if (start_command(&gpg)) {
+ unlink(path);
+ return error("could not run gpg.");
+ }
+
+ write_in_full(gpg.in, payload, payload_size);
+ close(gpg.in);
+
+ if (gpg_output)
+ strbuf_read(gpg_output, gpg.err, 0);
+ ret = finish_command(&gpg);
+
+ unlink_or_warn(path);
+
+ return ret;
+}
--- /dev/null
+#ifndef GPG_INTERFACE_H
+#define GPG_INTERFACE_H
+
+extern int sign_buffer(struct strbuf *buffer, struct strbuf *signature, const char *signing_key);
+extern int verify_signed_buffer(const char *payload, size_t payload_size, const char *signature, size_t signature_size, struct strbuf *gpg_output);
+extern int git_gpg_config(const char *, const char *, void *);
+extern void set_signing_key(const char *);
+extern const char *get_signing_key(void);
+
+#endif
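
The pair of entry points declared above (sign_buffer and verify_signed_buffer, together with get_signing_key) is all a caller needs to produce and check a detached signature. The following C sketch is illustrative only and not part of the patch; the wrapper function and its error handling are assumptions, but the gpg-interface calls follow the declarations above.

/*
 * Illustrative sketch (not part of the patch): sign a payload with the
 * configured key, then feed the payload and the detached signature back
 * to gpg for verification. The wrapper function name is hypothetical.
 */
static void sign_and_verify_example(const char *payload)
{
	struct strbuf buf = STRBUF_INIT;
	struct strbuf sig = STRBUF_INIT;
	struct strbuf gpg_out = STRBUF_INIT;

	strbuf_addstr(&buf, payload);

	/* append a detached signature over "buf" to "sig" */
	if (sign_buffer(&buf, &sig, get_signing_key()))
		die("could not sign the payload");

	/* non-zero return means gpg did not accept the signature */
	if (verify_signed_buffer(buf.buf, buf.len, sig.buf, sig.len, &gpg_out))
		die("bad signature:\n%s", gpg_out.buf);

	strbuf_release(&buf);
	strbuf_release(&sig);
	strbuf_release(&gpg_out);
}
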
#include "sideband.h"
#include "run-command.h"
#include "url.h"
+#include "credential.h"
int active_requests;
int http_is_verbose;
static int curl_ftp_no_epsv;
static const char *curl_http_proxy;
static const char *curl_cookie_file;
-static char *user_name, *user_pass, *description;
+static struct credential http_auth = CREDENTIAL_INIT;
static const char *user_agent;
#if LIBCURL_VERSION_NUM >= 0x071700
#define CURLOPT_KEYPASSWD CURLOPT_SSLCERTPASSWD
#endif
-static char *ssl_cert_password;
+static struct credential cert_auth = CREDENTIAL_INIT;
static int ssl_cert_password_required;
static struct curl_slist *pragma_header;
}
#endif
-static char *git_getpass_with_description(const char *what, const char *desc)
-{
- struct strbuf prompt = STRBUF_INIT;
- char *r;
-
- if (desc)
- strbuf_addf(&prompt, "%s for '%s': ", what, desc);
- else
- strbuf_addf(&prompt, "%s: ", what);
- /*
- * NEEDSWORK: for usernames, we should do something less magical that
- * actually echoes the characters. However, we need to read from
- * /dev/tty and not stdio, which is not portable (but getpass will do
- * it for us). http.c uses the same workaround.
- */
- r = git_getpass(prompt.buf);
-
- strbuf_release(&prompt);
- return xstrdup(r);
-}
-
static int http_options(const char *var, const char *value, void *cb)
{
if (!strcmp("http.sslverify", var)) {
static void init_curl_http_auth(CURL *result)
{
- if (user_name) {
+ if (http_auth.username) {
struct strbuf up = STRBUF_INIT;
- if (!user_pass)
- user_pass = xstrdup(git_getpass_with_description("Password", description));
- strbuf_addf(&up, "%s:%s", user_name, user_pass);
+ credential_fill(&http_auth);
+ strbuf_addf(&up, "%s:%s",
+ http_auth.username, http_auth.password);
curl_easy_setopt(result, CURLOPT_USERPWD,
strbuf_detach(&up, NULL));
}
static int has_cert_password(void)
{
- if (ssl_cert_password != NULL)
- return 1;
if (ssl_cert == NULL || ssl_cert_password_required != 1)
return 0;
- /* Only prompt the user once. */
- ssl_cert_password_required = -1;
- ssl_cert_password = git_getpass_with_description("Certificate Password", description);
- if (ssl_cert_password != NULL) {
- ssl_cert_password = xstrdup(ssl_cert_password);
- return 1;
- } else
- return 0;
+ if (!cert_auth.password) {
+ cert_auth.protocol = xstrdup("cert");
+ cert_auth.path = xstrdup(ssl_cert);
+ credential_fill(&cert_auth);
+ }
+ return 1;
}
static CURL *get_curl_handle(void)
if (ssl_cert != NULL)
curl_easy_setopt(result, CURLOPT_SSLCERT, ssl_cert);
if (has_cert_password())
- curl_easy_setopt(result, CURLOPT_KEYPASSWD, ssl_cert_password);
+ curl_easy_setopt(result, CURLOPT_KEYPASSWD, cert_auth.password);
#if LIBCURL_VERSION_NUM >= 0x070903
if (ssl_key != NULL)
curl_easy_setopt(result, CURLOPT_SSLKEY, ssl_key);
return result;
}
-static void http_auth_init(const char *url)
-{
- const char *at, *colon, *cp, *slash, *host;
-
- cp = strstr(url, "://");
- if (!cp)
- return;
-
- /*
- * Ok, the URL looks like "proto://something". Which one?
- * "proto://<user>:<pass>@<host>/...",
- * "proto://<user>@<host>/...", or just
- * "proto://<host>/..."?
- */
- cp += 3;
- at = strchr(cp, '@');
- colon = strchr(cp, ':');
- slash = strchrnul(cp, '/');
- if (!at || slash <= at) {
- /* No credentials, but we may have to ask for some later */
- host = cp;
- }
- else if (!colon || at <= colon) {
- /* Only username */
- user_name = url_decode_mem(cp, at - cp);
- user_pass = NULL;
- host = at + 1;
- } else {
- user_name = url_decode_mem(cp, colon - cp);
- user_pass = url_decode_mem(colon + 1, at - (colon + 1));
- host = at + 1;
- }
-
- description = url_decode_mem(host, slash - host);
-}
-
static void set_from_env(const char **var, const char *envname)
{
const char *val = getenv(envname);
curl_ftp_no_epsv = 1;
if (url) {
- http_auth_init(url);
+ credential_from_url(&http_auth, url);
if (!ssl_cert_password_required &&
getenv("GIT_SSL_CERT_PASSWORD_PROTECTED") &&
!prefixcmp(url, "https://"))
curl_http_proxy = NULL;
}
- if (ssl_cert_password != NULL) {
- memset(ssl_cert_password, 0, strlen(ssl_cert_password));
- free(ssl_cert_password);
- ssl_cert_password = NULL;
+ if (cert_auth.password != NULL) {
+ memset(cert_auth.password, 0, strlen(cert_auth.password));
+ free(cert_auth.password);
+ cert_auth.password = NULL;
}
ssl_cert_password_required = 0;
}
else if (missing_target(&results))
ret = HTTP_MISSING_TARGET;
else if (results.http_code == 401) {
- if (user_name && user_pass) {
+ if (http_auth.username && http_auth.password) {
+ credential_reject(&http_auth);
ret = HTTP_NOAUTH;
} else {
- /*
- * git_getpass is needed here because its very likely stdin/stdout are
- * pipes to our parent process. So we instead need to use /dev/tty,
- * but that is non-portable. Using git_getpass() can at least be stubbed
- * on other platforms with a different implementation if/when necessary.
- */
- if (!user_name)
- user_name = xstrdup(git_getpass_with_description("Username", description));
+ credential_fill(&http_auth);
init_curl_http_auth(slot->curl);
ret = HTTP_REAUTH;
}
curl_slist_free_all(headers);
strbuf_release(&buf);
+ if (ret == HTTP_OK)
+ credential_approve(&http_auth);
+
return ret;
}
struct imap_store_conf {
struct store_conf gen;
struct imap_server_conf *server;
- unsigned use_namespace:1;
};
#define NIL (void *)0x1
return lookup_tree(shifted);
}
-/*
- * A virtual commit has (const char *)commit->util set to the name.
- */
-
static struct commit *make_virtual_commit(struct tree *tree, const char *comment)
{
struct commit *commit = xcalloc(1, sizeof(struct commit));
+ struct merge_remote_desc *desc = xmalloc(sizeof(*desc));
+
+ desc->name = comment;
+ desc->obj = (struct object *)commit;
commit->tree = tree;
- commit->util = (void*)comment;
- /* avoid warnings */
+ commit->util = desc;
commit->object.parsed = 1;
return commit;
}
for (i = o->call_depth; i--;)
fputs(" ", stdout);
if (commit->util)
- printf("virtual %s\n", (char *)commit->util);
+ printf("virtual %s\n", merge_remote_util(commit)->name);
else {
printf("%s ", find_unique_abbrev(commit->object.sha1, DEFAULT_ABBREV));
if (parse_commit(commit) != 0)
if (!cache_tree_fully_valid(active_cache_tree) &&
cache_tree_update(active_cache_tree,
- active_cache, active_nr, 0, 0) < 0)
+ active_cache, active_nr, 0, 0, 0) < 0)
die("error building trees");
result = lookup_tree(active_cache_tree->sha1);
o->local_ref, o->remote_ref);
/* Dereference o->local_ref into local_sha1 */
- if (!resolve_ref(o->local_ref, local_sha1, 0, NULL))
+ if (read_ref_full(o->local_ref, local_sha1, 0, NULL))
die("Failed to resolve local notes ref '%s'", o->local_ref);
else if (!check_refname_format(o->local_ref, 0) &&
is_null_sha1(local_sha1))
return index_name;
}
+off_t write_pack_header(struct sha1file *f, uint32_t nr_entries)
+{
+ struct pack_header hdr;
+
+ hdr.hdr_signature = htonl(PACK_SIGNATURE);
+ hdr.hdr_version = htonl(PACK_VERSION);
+ hdr.hdr_entries = htonl(nr_entries);
+ if (sha1write(f, &hdr, sizeof(hdr)))
+ return 0;
+ return sizeof(hdr);
+}
+
/*
* Update pack header with object_count and compute new SHA1 for pack data
* associated to pack_fd, and write that SHA1 at the end. That new SHA1
*hdr = c;
return n;
}
+
+struct sha1file *create_tmp_packfile(char **pack_tmp_name)
+{
+ char tmpname[PATH_MAX];
+ int fd;
+
+ fd = odb_mkstemp(tmpname, sizeof(tmpname), "pack/tmp_pack_XXXXXX");
+ *pack_tmp_name = xstrdup(tmpname);
+ return sha1fd(fd, *pack_tmp_name);
+}
+
+void finish_tmp_packfile(char *name_buffer,
+ const char *pack_tmp_name,
+ struct pack_idx_entry **written_list,
+ uint32_t nr_written,
+ struct pack_idx_option *pack_idx_opts,
+ unsigned char sha1[])
+{
+ const char *idx_tmp_name;
+ char *end_of_name_prefix = strrchr(name_buffer, 0);
+
+ if (adjust_shared_perm(pack_tmp_name))
+ die_errno("unable to make temporary pack file readable");
+
+ idx_tmp_name = write_idx_file(NULL, written_list, nr_written,
+ pack_idx_opts, sha1);
+ if (adjust_shared_perm(idx_tmp_name))
+ die_errno("unable to make temporary index file readable");
+
+ sprintf(end_of_name_prefix, "%s.pack", sha1_to_hex(sha1));
+ free_pack_by_name(name_buffer);
+
+ if (rename(pack_tmp_name, name_buffer))
+ die_errno("unable to rename temporary pack file");
+
+ sprintf(end_of_name_prefix, "%s.idx", sha1_to_hex(sha1));
+ if (rename(idx_tmp_name, name_buffer))
+ die_errno("unable to rename temporary index file");
+
+ free((void *)idx_tmp_name);
+}
#define PACK_H
#include "object.h"
+#include "csum-file.h"
/*
* Packed object header
extern int check_pack_crc(struct packed_git *p, struct pack_window **w_curs, off_t offset, off_t len, unsigned int nr);
extern int verify_pack_index(struct packed_git *);
extern int verify_pack(struct packed_git *, verify_fn fn, struct progress *, uint32_t);
+extern off_t write_pack_header(struct sha1file *f, uint32_t);
extern void fixup_pack_header_footer(int, unsigned char *, const char *, uint32_t, unsigned char *, off_t);
extern char *index_pack_lockfile(int fd);
extern int encode_in_pack_object_header(enum object_type, uintmax_t, unsigned char *);
#define PH_ERROR_PACK_SIGNATURE (-2)
#define PH_ERROR_PROTOCOL (-3)
extern int read_pack_header(int fd, struct pack_header *);
+
+extern struct sha1file *create_tmp_packfile(char **pack_tmp_name);
+extern void finish_tmp_packfile(char *name_buffer, const char *pack_tmp_name, struct pack_idx_entry **written_list, uint32_t nr_written, struct pack_idx_option *pack_idx_opts, unsigned char sha1[]);
+
#endif
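
create_tmp_packfile() and finish_tmp_packfile() declared above split up the bookkeeping of writing a pack to a temporary file and renaming it into place once its final name (derived from the pack SHA-1) is known. The C sketch below only illustrates the intended calling sequence, roughly what a packfile writer such as the new bulk-checkin machinery does; the surrounding function, the elided object-writing step, and the name_buffer handling are assumptions, not part of the patch.

/*
 * Illustrative sketch (not part of the patch): expected calling sequence
 * for the temporary-packfile helpers. Object writing is elided.
 */
static void write_one_pack_example(struct pack_idx_entry **written_list,
				   uint32_t nr_written,
				   struct pack_idx_option *pack_idx_opts)
{
	char name_buffer[PATH_MAX];
	char *pack_tmp_name = NULL;
	unsigned char sha1[20];
	struct sha1file *f;

	f = create_tmp_packfile(&pack_tmp_name);
	if (!write_pack_header(f, nr_written))
		die("unable to write pack header");

	/* ... write nr_written objects and record them in written_list ... */

	sha1close(f, sha1, CSUM_FSYNC);

	/*
	 * finish_tmp_packfile() appends "<sha1>.pack" and "<sha1>.idx" to
	 * the prefix left in name_buffer, so leave room for both names.
	 */
	snprintf(name_buffer, sizeof(name_buffer), "%s/pack/pack-",
		 get_object_directory());
	finish_tmp_packfile(name_buffer, pack_tmp_name, written_list,
			    nr_written, pack_idx_opts, sha1);
	free(pack_tmp_name);
}
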
return read_index_from(istate, get_index_file());
}
-static void convert_from_disk(struct ondisk_cache_entry *ondisk, struct cache_entry *ce)
+static struct cache_entry *create_from_disk(struct ondisk_cache_entry *ondisk)
{
+ struct cache_entry *ce;
size_t len;
const char *name;
+ unsigned int flags;
- ce->ce_ctime.sec = ntohl(ondisk->ctime.sec);
- ce->ce_mtime.sec = ntohl(ondisk->mtime.sec);
- ce->ce_ctime.nsec = ntohl(ondisk->ctime.nsec);
- ce->ce_mtime.nsec = ntohl(ondisk->mtime.nsec);
- ce->ce_dev = ntohl(ondisk->dev);
- ce->ce_ino = ntohl(ondisk->ino);
- ce->ce_mode = ntohl(ondisk->mode);
- ce->ce_uid = ntohl(ondisk->uid);
- ce->ce_gid = ntohl(ondisk->gid);
- ce->ce_size = ntohl(ondisk->size);
/* On-disk flags are just 16 bits */
- ce->ce_flags = ntohs(ondisk->flags);
-
- hashcpy(ce->sha1, ondisk->sha1);
+ flags = ntohs(ondisk->flags);
+ len = flags & CE_NAMEMASK;
- len = ce->ce_flags & CE_NAMEMASK;
-
- if (ce->ce_flags & CE_EXTENDED) {
+ if (flags & CE_EXTENDED) {
struct ondisk_cache_entry_extended *ondisk2;
int extended_flags;
ondisk2 = (struct ondisk_cache_entry_extended *)ondisk;
/* We do not yet understand any bit out of CE_EXTENDED_FLAGS */
if (extended_flags & ~CE_EXTENDED_FLAGS)
die("Unknown index entry format %08x", extended_flags);
- ce->ce_flags |= extended_flags;
+ flags |= extended_flags;
name = ondisk2->name;
}
else
if (len == CE_NAMEMASK)
len = strlen(name);
- /*
- * NEEDSWORK: If the original index is crafted, this copy could
- * go unchecked.
- */
- memcpy(ce->name, name, len + 1);
-}
-static inline size_t estimate_cache_size(size_t ondisk_size, unsigned int entries)
-{
- size_t fix_size_mem = offsetof(struct cache_entry, name);
- size_t fix_size_dsk = offsetof(struct ondisk_cache_entry, name);
- long per_entry = (fix_size_mem - fix_size_dsk + 7) & ~7;
+ ce = xmalloc(cache_entry_size(len));
- /*
- * Alignment can cause differences. This should be "alignof", but
- * since that's a gcc'ism, just use the size of a pointer.
- */
- per_entry += sizeof(void *);
- return ondisk_size + entries*per_entry;
+ ce->ce_ctime.sec = ntohl(ondisk->ctime.sec);
+ ce->ce_mtime.sec = ntohl(ondisk->mtime.sec);
+ ce->ce_ctime.nsec = ntohl(ondisk->ctime.nsec);
+ ce->ce_mtime.nsec = ntohl(ondisk->mtime.nsec);
+ ce->ce_dev = ntohl(ondisk->dev);
+ ce->ce_ino = ntohl(ondisk->ino);
+ ce->ce_mode = ntohl(ondisk->mode);
+ ce->ce_uid = ntohl(ondisk->uid);
+ ce->ce_gid = ntohl(ondisk->gid);
+ ce->ce_size = ntohl(ondisk->size);
+ ce->ce_flags = flags;
+
+ hashcpy(ce->sha1, ondisk->sha1);
+
+ memcpy(ce->name, name, len);
+ ce->name[len] = '\0';
+ return ce;
}
/* remember to discard_cache() before reading a different cache! */
{
int fd, i;
struct stat st;
- unsigned long src_offset, dst_offset;
+ unsigned long src_offset;
struct cache_header *hdr;
void *mmap;
size_t mmap_size;
istate->cache_nr = ntohl(hdr->hdr_entries);
istate->cache_alloc = alloc_nr(istate->cache_nr);
istate->cache = xcalloc(istate->cache_alloc, sizeof(struct cache_entry *));
-
- /*
- * The disk format is actually larger than the in-memory format,
- * due to space for nsec etc, so even though the in-memory one
- * has room for a few more flags, we can allocate using the same
- * index size
- */
- istate->alloc = xmalloc(estimate_cache_size(mmap_size, istate->cache_nr));
istate->initialized = 1;
src_offset = sizeof(*hdr);
- dst_offset = 0;
for (i = 0; i < istate->cache_nr; i++) {
struct ondisk_cache_entry *disk_ce;
struct cache_entry *ce;
disk_ce = (struct ondisk_cache_entry *)((char *)mmap + src_offset);
- ce = (struct cache_entry *)((char *)istate->alloc + dst_offset);
- convert_from_disk(disk_ce, ce);
+ ce = create_from_disk(disk_ce);
set_index_entry(istate, i, ce);
src_offset += ondisk_ce_size(ce);
- dst_offset += ce_size(ce);
}
istate->timestamp.sec = st.st_mtime;
istate->timestamp.nsec = ST_MTIME_NSEC(st);
int is_index_unborn(struct index_state *istate)
{
- return (!istate->cache_nr && !istate->alloc && !istate->timestamp.sec);
+ return (!istate->cache_nr && !istate->timestamp.sec);
}
int discard_index(struct index_state *istate)
{
+ int i;
+
+ for (i = 0; i < istate->cache_nr; i++)
+ free(istate->cache[i]);
resolve_undo_clear_index(istate);
istate->cache_nr = 0;
istate->cache_changed = 0;
istate->name_hash_initialized = 0;
free_hash(&istate->name_hash);
cache_tree_free(&(istate->cache_tree));
- free(istate->alloc);
- istate->alloc = NULL;
istate->initialized = 0;
/* no need to throw away allocated active_cache */
if (reflogs->nr == 0) {
unsigned char sha1[20];
const char *name = resolve_ref(ref, sha1, 1, NULL);
- if (name)
+ if (name) {
+ name = xstrdup(name);
for_each_reflog_ent(name, read_one_reflog, reflogs);
+ free((char *)name);
+ }
}
if (reflogs->nr == 0) {
int len = strlen(ref);
hashclr(sha1);
flag |= REF_ISBROKEN;
}
- } else if (!resolve_ref(ref, sha1, 1, &flag)) {
+ } else if (read_ref_full(ref, sha1, 1, &flag)) {
hashclr(sha1);
flag |= REF_ISBROKEN;
}
void *cb_data;
};
-int read_ref(const char *ref, unsigned char *sha1)
+int read_ref_full(const char *ref, unsigned char *sha1, int reading, int *flags)
{
- if (resolve_ref(ref, sha1, 1, NULL))
+ if (resolve_ref(ref, sha1, reading, flags))
return 0;
return -1;
}
+int read_ref(const char *ref, unsigned char *sha1)
+{
+ return read_ref_full(ref, sha1, 1, NULL);
+}
+
#define DO_FOR_EACH_INCLUDE_BROKEN 01
static int do_one_ref(const char *base, each_ref_fn fn, int trim,
int flags, void *cb_data, struct ref_entry *entry)
goto fallback;
}
- if (!resolve_ref(ref, base, 1, &flag))
+ if (read_ref_full(ref, base, 1, &flag))
return -1;
if ((flag & REF_ISPACKED)) {
return 0;
}
- if (resolve_ref("HEAD", sha1, 1, &flag))
+ if (!read_ref_full("HEAD", sha1, 1, &flag))
return fn("HEAD", sha1, flag, cb_data);
return 0;
int flag;
strbuf_addf(&buf, "%sHEAD", get_git_namespace());
- if (resolve_ref(buf.buf, sha1, 1, &flag))
+ if (!read_ref_full(buf.buf, sha1, 1, &flag))
ret = fn(buf.buf, sha1, flag, cb_data);
strbuf_release(&buf);
NULL
};
-const char *ref_fetch_rules[] = {
- "%.*s",
- "refs/%.*s",
- "refs/heads/%.*s",
- NULL
-};
-
int refname_match(const char *abbrev_name, const char *full_name, const char **rules)
{
const char **p;
static struct ref_lock *verify_lock(struct ref_lock *lock,
const unsigned char *old_sha1, int mustexist)
{
- if (!resolve_ref(lock->ref_name, lock->old_sha1, mustexist, NULL)) {
+ if (read_ref_full(lock->ref_name, lock->old_sha1, mustexist, NULL)) {
error("Can't verify ref %s", lock->ref_name);
unlock_ref(lock);
return NULL;
goto rollback;
}
- if (resolve_ref(newref, sha1, 1, &flag) && delete_ref(newref, sha1, REF_NODEREF)) {
+ if (!read_ref_full(newref, sha1, 1, &flag) &&
+ delete_ref(newref, sha1, REF_NODEREF)) {
if (errno==EISDIR) {
if (remove_empty_directories(git_path("%s", newref))) {
error("Directory not empty: %s", newref);
retval = do_for_each_reflog(log, fn, cb_data);
} else {
unsigned char sha1[20];
- if (!resolve_ref(log, sha1, 0, NULL))
+ if (read_ref_full(log, sha1, 0, NULL))
retval = error("bad ref for %s", log);
else
retval = fn(log, sha1, 0, cb_data);
*/
for (j = 0; j < rules_to_fail; j++) {
const char *rule = ref_rev_parse_rules[j];
- unsigned char short_objectname[20];
char refname[PATH_MAX];
/* skip matched rule */
*/
mksnpath(refname, sizeof(refname),
rule, short_name_len, short_name);
- if (!read_ref(refname, short_objectname))
+ if (ref_exists(refname))
break;
}
* nothing to report.
*/
base = branch->merge[0]->dst;
- if (!resolve_ref(base, sha1, 1, NULL))
+ if (read_ref(base, sha1))
return 0;
theirs = lookup_commit_reference(sha1);
if (!theirs)
return 0;
- if (!resolve_ref(branch->refname, sha1, 1, NULL))
+ if (read_ref(branch->refname, sha1))
return 0;
ours = lookup_commit_reference(sha1);
if (!ours)
#include "refs.h"
#include "pack-revindex.h"
#include "sha1-lookup.h"
+#include "bulk-checkin.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
}
/*
- * This creates one packfile per large blob, because the caller
- * immediately wants the result sha1, and fast-import can report the
- * object name via marks mechanism only by closing the created
- * packfile.
+ * This creates one packfile per large blob unless bulk-checkin
+ * machinery is "plugged".
*
* This also bypasses the usual "convert-to-git" dance, and that is on
* purpose. We could write a streaming version of the converting
enum object_type type, const char *path,
unsigned flags)
{
- struct child_process fast_import;
- char export_marks[512];
- const char *argv[] = { "fast-import", "--quiet", export_marks, NULL };
- char tmpfile[512];
- char fast_import_cmd[512];
- char buf[512];
- int len, tmpfd;
-
- strcpy(tmpfile, git_path("hashstream_XXXXXX"));
- tmpfd = git_mkstemp_mode(tmpfile, 0600);
- if (tmpfd < 0)
- die_errno("cannot create tempfile: %s", tmpfile);
- if (close(tmpfd))
- die_errno("cannot close tempfile: %s", tmpfile);
- sprintf(export_marks, "--export-marks=%s", tmpfile);
-
- memset(&fast_import, 0, sizeof(fast_import));
- fast_import.in = -1;
- fast_import.argv = argv;
- fast_import.git_cmd = 1;
- if (start_command(&fast_import))
- die_errno("index-stream: git fast-import failed");
-
- len = sprintf(fast_import_cmd, "blob\nmark :1\ndata %lu\n",
- (unsigned long) size);
- write_or_whine(fast_import.in, fast_import_cmd, len,
- "index-stream: feeding fast-import");
- while (size) {
- char buf[10240];
- size_t sz = size < sizeof(buf) ? size : sizeof(buf);
- ssize_t actual;
-
- actual = read_in_full(fd, buf, sz);
- if (actual < 0)
- die_errno("index-stream: reading input");
- if (write_in_full(fast_import.in, buf, actual) != actual)
- die_errno("index-stream: feeding fast-import");
- size -= actual;
- }
- if (close(fast_import.in))
- die_errno("index-stream: closing fast-import");
- if (finish_command(&fast_import))
- die_errno("index-stream: finishing fast-import");
-
- tmpfd = open(tmpfile, O_RDONLY);
- if (tmpfd < 0)
- die_errno("index-stream: cannot open fast-import mark");
- len = read(tmpfd, buf, sizeof(buf));
- if (len < 0)
- die_errno("index-stream: reading fast-import mark");
- if (close(tmpfd) < 0)
- die_errno("index-stream: closing fast-import mark");
- if (unlink(tmpfile))
- die_errno("index-stream: unlinking fast-import mark");
- if (len != 44 ||
- memcmp(":1 ", buf, 3) ||
- get_sha1_hex(buf + 3, sha1))
- die_errno("index-stream: unexpected fast-import mark: <%s>", buf);
- return 0;
+ return index_bulk_checkin(sha1, fd, size, type, path, flags);
}
int index_fd(unsigned char *sha1, int fd, struct stat *st,
return len;
}
+
+void strbuf_add_lines(struct strbuf *out, const char *prefix,
+ const char *buf, size_t size)
+{
+ while (size) {
+ const char *next = memchr(buf, '\n', size);
+ next = next ? (next + 1) : (buf + size);
+ strbuf_addstr(out, prefix);
+ strbuf_add(out, buf, next - buf);
+ size -= next - buf;
+ buf = next;
+ }
+ strbuf_complete_line(out);
+}
+
+static int is_rfc3986_reserved(char ch)
+{
+ switch (ch) {
+ case '!': case '*': case '\'': case '(': case ')': case ';':
+ case ':': case '@': case '&': case '=': case '+': case '$':
+ case ',': case '/': case '?': case '#': case '[': case ']':
+ return 1;
+ }
+ return 0;
+}
+
+static int is_rfc3986_unreserved(char ch)
+{
+ return isalnum(ch) ||
+ ch == '-' || ch == '_' || ch == '.' || ch == '~';
+}
+
+void strbuf_add_urlencode(struct strbuf *sb, const char *s, size_t len,
+ int reserved)
+{
+ strbuf_grow(sb, len);
+ while (len--) {
+ char ch = *s++;
+ if (is_rfc3986_unreserved(ch) ||
+ (!reserved && is_rfc3986_reserved(ch)))
+ strbuf_addch(sb, ch);
+ else
+ strbuf_addf(sb, "%%%02x", (unsigned char)ch);
+ }
+}
+
+void strbuf_addstr_urlencode(struct strbuf *sb, const char *s,
+ int reserved)
+{
+ strbuf_add_urlencode(sb, s, strlen(s), reserved);
+}
__attribute__((format (printf,2,0)))
extern void strbuf_vaddf(struct strbuf *sb, const char *fmt, va_list ap);
+extern void strbuf_add_lines(struct strbuf *sb, const char *prefix, const char *buf, size_t size);
+
+static inline void strbuf_complete_line(struct strbuf *sb)
+{
+ if (sb->len && sb->buf[sb->len - 1] != '\n')
+ strbuf_addch(sb, '\n');
+}
+
extern size_t strbuf_fread(struct strbuf *, size_t, FILE *);
/* XXX: if read fails, any partial read is undone */
extern ssize_t strbuf_read(struct strbuf *, int fd, size_t hint);
extern int strbuf_branchname(struct strbuf *sb, const char *name);
extern int strbuf_check_branch_ref(struct strbuf *sb, const char *name);
+extern void strbuf_add_urlencode(struct strbuf *, const char *, size_t,
+ int reserved);
+extern void strbuf_addstr_urlencode(struct strbuf *, const char *,
+ int reserved);
+
#endif /* STRBUF_H */
cp.out = -1;
cp.dir = path;
if (start_command(&cp))
- die("Could not run git status --porcelain");
+ die("Could not run 'git status --porcelain' in submodule %s", path);
len = strbuf_read(&buf, cp.out, 1024);
line = buf.buf;
close(cp.out);
if (finish_command(&cp))
- die("git status --porcelain failed");
+ die("'git status --porcelain' failed in submodule %s", path);
strbuf_release(&buf);
return dirty_submodule;
--- /dev/null
+#!/bin/sh
+
+# Try a set of credential helpers; the expected stdin,
+# stdout and stderr should be provided on stdin,
+# separated by "--".
+check() {
+ read_chunk >stdin &&
+ read_chunk >expect-stdout &&
+ read_chunk >expect-stderr &&
+ test-credential "$@" <stdin >stdout 2>stderr &&
+ test_cmp expect-stdout stdout &&
+ test_cmp expect-stderr stderr
+}
+
+read_chunk() {
+ while read line; do
+ case "$line" in
+ --) break ;;
+ *) echo "$line" ;;
+ esac
+ done
+}
+
+# Clear any residual data from previous tests. We only
+# need this when testing third-party helpers which read and
+# write outside of our trash-directory sandbox.
+#
+# Don't bother checking for success here, as it is
+# outside the scope of tests and represents a best effort to
+# clean up after ourselves.
+helper_test_clean() {
+ reject $1 https example.com store-user
+ reject $1 https example.com user1
+ reject $1 https example.com user2
+ reject $1 http path.tld user
+ reject $1 https timeout.tld user
+}
+
+reject() {
+ (
+ echo protocol=$2
+ echo host=$3
+ echo username=$4
+ ) | test-credential reject $1
+}
+
+helper_test() {
+ HELPER=$1
+
+ test_expect_success "helper ($HELPER) has no existing data" '
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''https://example.com'\'':
+ askpass: Password for '\''https://askpass-username@example.com'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) stores password" '
+ check approve $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ username=store-user
+ password=store-pass
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) can retrieve password" '
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ --
+ username=store-user
+ password=store-pass
+ --
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) requires matching protocol" '
+ check fill $HELPER <<-\EOF
+ protocol=http
+ host=example.com
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''http://example.com'\'':
+ askpass: Password for '\''http://askpass-username@example.com'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) requires matching host" '
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=other.tld
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''https://other.tld'\'':
+ askpass: Password for '\''https://askpass-username@other.tld'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) requires matching username" '
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ username=other
+ --
+ username=other
+ password=askpass-password
+ --
+ askpass: Password for '\''https://other@example.com'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) requires matching path" '
+ test_config credential.usehttppath true &&
+ check approve $HELPER <<-\EOF &&
+ protocol=http
+ host=path.tld
+ path=foo.git
+ username=user
+ password=pass
+ EOF
+ check fill $HELPER <<-\EOF
+ protocol=http
+ host=path.tld
+ path=bar.git
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''http://path.tld/bar.git'\'':
+ askpass: Password for '\''http://askpass-username@path.tld/bar.git'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) can forget host" '
+ check reject $HELPER <<-\EOF &&
+ protocol=https
+ host=example.com
+ EOF
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''https://example.com'\'':
+ askpass: Password for '\''https://askpass-username@example.com'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) can store multiple users" '
+ check approve $HELPER <<-\EOF &&
+ protocol=https
+ host=example.com
+ username=user1
+ password=pass1
+ EOF
+ check approve $HELPER <<-\EOF &&
+ protocol=https
+ host=example.com
+ username=user2
+ password=pass2
+ EOF
+ check fill $HELPER <<-\EOF &&
+ protocol=https
+ host=example.com
+ username=user1
+ --
+ username=user1
+ password=pass1
+ EOF
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ username=user2
+ --
+ username=user2
+ password=pass2
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) can forget user" '
+ check reject $HELPER <<-\EOF &&
+ protocol=https
+ host=example.com
+ username=user1
+ EOF
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ username=user1
+ --
+ username=user1
+ password=askpass-password
+ --
+ askpass: Password for '\''https://user1@example.com'\'':
+ EOF
+ '
+
+ test_expect_success "helper ($HELPER) remembers other user" '
+ check fill $HELPER <<-\EOF
+ protocol=https
+ host=example.com
+ username=user2
+ --
+ username=user2
+ password=pass2
+ EOF
+ '
+}
+
+helper_test_timeout() {
+ HELPER="$*"
+
+ test_expect_success "helper ($HELPER) times out" '
+ check approve "$HELPER" <<-\EOF &&
+ protocol=https
+ host=timeout.tld
+ username=user
+ password=pass
+ EOF
+ sleep 2 &&
+ check fill "$HELPER" <<-\EOF
+ protocol=https
+ host=timeout.tld
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''https://timeout.tld'\'':
+ askpass: Password for '\''https://askpass-username@timeout.tld'\'':
+ EOF
+ '
+}
+
+cat >askpass <<\EOF
+#!/bin/sh
+echo >&2 askpass: $*
+what=`echo $1 | cut -d" " -f1 | tr A-Z a-z | tr -cd a-z`
+echo "askpass-$what"
+EOF
+chmod +x askpass
+GIT_ASKPASS="$PWD/askpass"
+export GIT_ASKPASS
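
All of the helpers exercised by check() speak the same line-oriented
protocol: "key=value" attributes on stdin, and whatever attributes the
helper can supply echoed back on stdout.  As a rough illustration (not
part of the patch), the "store" helper from this series can be driven by
hand, assuming git-credential-store is installed; note that it keeps its
state in ~/.git-credentials by default:

    # illustrative only: save a credential, then ask for it back
    printf '%s\n' protocol=https host=example.com \
            username=store-user password=store-pass |
    git credential-store store

    printf '%s\n' protocol=https host=example.com |
    git credential-store get
    # expected to print something like:
    #   username=store-user
    #   password=store-pass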
--- /dev/null
+#!/bin/sh
+
+test_description="Test whether cache-tree is properly updated
+
+Tests whether various commands properly update and/or rewrite the
+cache-tree extension.
+"
+ . ./test-lib.sh
+
+cmp_cache_tree () {
+ test-dump-cache-tree >actual &&
+ sed "s/$_x40/SHA/" <actual >filtered &&
+ test_cmp "$1" filtered
+}
+
+# We don't bother with actually checking the SHA1:
+# test-dump-cache-tree already verifies that all existing data is
+# correct.
+test_shallow_cache_tree () {
+ echo "SHA " \
+ "($(git ls-files|wc -l) entries, 0 subtrees)" >expect &&
+ cmp_cache_tree expect
+}
+
+test_invalid_cache_tree () {
+ echo "invalid (0 subtrees)" >expect &&
+ echo "SHA #(ref) " \
+ "($(git ls-files|wc -l) entries, 0 subtrees)" >>expect &&
+ cmp_cache_tree expect
+}
+
+test_no_cache_tree () {
+ : >expect &&
+ cmp_cache_tree expect
+}
+
+test_expect_failure 'initial commit has cache-tree' '
+ test_commit foo &&
+ test_shallow_cache_tree
+'
+
+test_expect_success 'read-tree HEAD establishes cache-tree' '
+ git read-tree HEAD &&
+ test_shallow_cache_tree
+'
+
+test_expect_success 'git-add invalidates cache-tree' '
+ test_when_finished "git reset --hard; git read-tree HEAD" &&
+ echo "I changed this file" > foo &&
+ git add foo &&
+ test_invalid_cache_tree
+'
+
+test_expect_success 'update-index invalidates cache-tree' '
+ test_when_finished "git reset --hard; git read-tree HEAD" &&
+ echo "I changed this file" > foo &&
+ git update-index --add foo &&
+ test_invalid_cache_tree
+'
+
+test_expect_success 'write-tree establishes cache-tree' '
+ test-scrap-cache-tree &&
+ git write-tree &&
+ test_shallow_cache_tree
+'
+
+test_expect_success 'test-scrap-cache-tree works' '
+ git read-tree HEAD &&
+ test-scrap-cache-tree &&
+ test_no_cache_tree
+'
+
+test_expect_success 'second commit has cache-tree' '
+ test_commit bar &&
+ test_shallow_cache_tree
+'
+
+test_expect_success 'reset --hard gives cache-tree' '
+ test-scrap-cache-tree &&
+ git reset --hard &&
+ test_shallow_cache_tree
+'
+
+test_expect_success 'reset --hard without index gives cache-tree' '
+ rm -f .git/index &&
+ git reset --hard &&
+ test_shallow_cache_tree
+'
+
+test_expect_failure 'checkout gives cache-tree' '
+ git checkout HEAD^ &&
+ test_shallow_cache_tree
+'
+
+test_done
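
To get a concrete feel for what cmp_cache_tree compares, the helper can
be run by hand against a scratch repository (illustrative only;
test-dump-cache-tree is built in the git source tree, not installed):

    # illustrative only: prime and dump the cache-tree of a scratch repo
    git init demo && cd demo
    echo content >file && git add file && git commit -m one
    git read-tree HEAD        # writes the cache-tree extension into the index
    test-dump-cache-tree      # prints e.g. "<sha1>  (1 entries, 0 subtrees)"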
--- /dev/null
+#!/bin/sh
+
+test_description='basic credential helper tests'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
+
+test_expect_success 'setup helper scripts' '
+ cat >dump <<-\EOF &&
+ whoami=`echo $0 | sed s/.*git-credential-//`
+ echo >&2 "$whoami: $*"
+ while IFS== read key value; do
+ echo >&2 "$whoami: $key=$value"
+ eval "$key=$value"
+ done
+ EOF
+
+ cat >git-credential-useless <<-\EOF &&
+ #!/bin/sh
+ . ./dump
+ exit 0
+ EOF
+ chmod +x git-credential-useless &&
+
+ cat >git-credential-verbatim <<-\EOF &&
+ #!/bin/sh
+ user=$1; shift
+ pass=$1; shift
+ . ./dump
+ test -z "$user" || echo username=$user
+ test -z "$pass" || echo password=$pass
+ EOF
+ chmod +x git-credential-verbatim &&
+
+ PATH="$PWD:$PATH"
+'
+
+test_expect_success 'credential_fill invokes helper' '
+ check fill "verbatim foo bar" <<-\EOF
+ --
+ username=foo
+ password=bar
+ --
+ verbatim: get
+ EOF
+'
+
+test_expect_success 'credential_fill invokes multiple helpers' '
+ check fill useless "verbatim foo bar" <<-\EOF
+ --
+ username=foo
+ password=bar
+ --
+ useless: get
+ verbatim: get
+ EOF
+'
+
+test_expect_success 'credential_fill stops when we get a full response' '
+ check fill "verbatim one two" "verbatim three four" <<-\EOF
+ --
+ username=one
+ password=two
+ --
+ verbatim: get
+ EOF
+'
+
+test_expect_success 'credential_fill continues through partial response' '
+ check fill "verbatim one \"\"" "verbatim two three" <<-\EOF
+ --
+ username=two
+ password=three
+ --
+ verbatim: get
+ verbatim: get
+ verbatim: username=one
+ EOF
+'
+
+test_expect_success 'credential_fill passes along metadata' '
+ check fill "verbatim one two" <<-\EOF
+ protocol=ftp
+ host=example.com
+ path=foo.git
+ --
+ username=one
+ password=two
+ --
+ verbatim: get
+ verbatim: protocol=ftp
+ verbatim: host=example.com
+ verbatim: path=foo.git
+ EOF
+'
+
+test_expect_success 'credential_approve calls all helpers' '
+ check approve useless "verbatim one two" <<-\EOF
+ username=foo
+ password=bar
+ --
+ --
+ useless: store
+ useless: username=foo
+ useless: password=bar
+ verbatim: store
+ verbatim: username=foo
+ verbatim: password=bar
+ EOF
+'
+
+test_expect_success 'do not bother storing password-less credential' '
+ check approve useless <<-\EOF
+ username=foo
+ --
+ --
+ EOF
+'
+
+test_expect_success 'credential_reject calls all helpers' '
+ check reject useless "verbatim one two" <<-\EOF
+ username=foo
+ password=bar
+ --
+ --
+ useless: erase
+ useless: username=foo
+ useless: password=bar
+ verbatim: erase
+ verbatim: username=foo
+ verbatim: password=bar
+ EOF
+'
+
+test_expect_success 'usernames can be preserved' '
+ check fill "verbatim \"\" three" <<-\EOF
+ username=one
+ --
+ username=one
+ password=three
+ --
+ verbatim: get
+ verbatim: username=one
+ EOF
+'
+
+test_expect_success 'usernames can be overridden' '
+ check fill "verbatim two three" <<-\EOF
+ username=one
+ --
+ username=two
+ password=three
+ --
+ verbatim: get
+ verbatim: username=one
+ EOF
+'
+
+test_expect_success 'do not bother completing already-full credential' '
+ check fill "verbatim three four" <<-\EOF
+ username=one
+ password=two
+ --
+ username=one
+ password=two
+ --
+ EOF
+'
+
+# We can't test the basic terminal password prompt here because
+# getpass() tries too hard to find the real terminal. But if our
+# askpass helper is run, we know the internal getpass is working.
+test_expect_success 'empty helper list falls back to internal getpass' '
+ check fill <<-\EOF
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username:
+ askpass: Password:
+ EOF
+'
+
+test_expect_success 'internal getpass does not ask for known username' '
+ check fill <<-\EOF
+ username=foo
+ --
+ username=foo
+ password=askpass-password
+ --
+ askpass: Password:
+ EOF
+'
+
+HELPER="!f() {
+ cat >/dev/null
+ echo username=foo
+ echo password=bar
+ }; f"
+test_expect_success 'respect configured credentials' '
+ test_config credential.helper "$HELPER" &&
+ check fill <<-\EOF
+ --
+ username=foo
+ password=bar
+ --
+ EOF
+'
+
+test_expect_success 'match configured credential' '
+ test_config credential.https://example.com.helper "$HELPER" &&
+ check fill <<-\EOF
+ protocol=https
+ host=example.com
+ path=repo.git
+ --
+ username=foo
+ password=bar
+ --
+ EOF
+'
+
+test_expect_success 'do not match configured credential' '
+ test_config credential.https://foo.helper "$HELPER" &&
+ check fill <<-\EOF
+ protocol=https
+ host=bar
+ --
+ username=askpass-username
+ password=askpass-password
+ --
+ askpass: Username for '\''https://bar'\'':
+ askpass: Password for '\''https://askpass-username@bar'\'':
+ EOF
+'
+
+test_expect_success 'pull username from config' '
+ test_config credential.https://example.com.username foo &&
+ check fill <<-\EOF
+ protocol=https
+ host=example.com
+ --
+ username=foo
+ password=askpass-password
+ --
+ askpass: Password for '\''https://foo@example.com'\'':
+ EOF
+'
+
+test_expect_success 'http paths can be part of context' '
+ check fill "verbatim foo bar" <<-\EOF &&
+ protocol=https
+ host=example.com
+ path=foo.git
+ --
+ username=foo
+ password=bar
+ --
+ verbatim: get
+ verbatim: protocol=https
+ verbatim: host=example.com
+ EOF
+ test_config credential.https://example.com.useHttpPath true &&
+ check fill "verbatim foo bar" <<-\EOF
+ protocol=https
+ host=example.com
+ path=foo.git
+ --
+ username=foo
+ password=bar
+ --
+ verbatim: get
+ verbatim: protocol=https
+ verbatim: host=example.com
+ verbatim: path=foo.git
+ EOF
+'
+
+test_done
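
Outside the test suite the same machinery is selected through
configuration, and the inline "!f() ..." form used for $HELPER above
works there too.  A minimal sketch with a deliberately fixed
username/password (illustrative only, not something to configure for
real use):

    # illustrative only: an inline helper that answers every request
    # with a fixed username/password, mirroring the test's $HELPER
    git config credential.helper \
            '!f() { cat >/dev/null; echo username=foo; echo password=bar; }; f'

    # helpers can also be limited to a URL, as in the
    # "match configured credential" test above
    git config credential.https://example.com.helper store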
--- /dev/null
+#!/bin/sh
+
+test_description='credential-cache tests'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
+
+test -z "$NO_UNIX_SOCKETS" || {
+ skip_all='skipping credential-cache tests, unix sockets not available'
+ test_done
+}
+
+# don't leave a stale daemon running
+trap 'code=$?; git credential-cache exit; (exit $code); die' EXIT
+
+helper_test cache
+helper_test_timeout cache --timeout=1
+
+# we can't rely on our "trap" above working after test_done,
+# as test_done will delete the trash directory containing
+# our socket, leaving us with no way to access the daemon.
+git credential-cache exit
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='credential-store tests'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
+
+helper_test store
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='external credential helper tests'
+. ./test-lib.sh
+. "$TEST_DIRECTORY"/lib-credential.sh
+
+pre_test() {
+ test -z "$GIT_TEST_CREDENTIAL_HELPER_SETUP" ||
+ eval "$GIT_TEST_CREDENTIAL_HELPER_SETUP"
+
+ # clean before the test in case there is cruft left
+ # over from a previous run that would impact results
+ helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+}
+
+post_test() {
+ # clean afterwards so that we are good citizens
+ # and don't leave cruft in the helper's storage, which
+ # might be long-term system storage
+ helper_test_clean "$GIT_TEST_CREDENTIAL_HELPER"
+}
+
+if test -z "$GIT_TEST_CREDENTIAL_HELPER"; then
+ say "# skipping external helper tests (set GIT_TEST_CREDENTIAL_HELPER)"
+else
+ pre_test
+ helper_test "$GIT_TEST_CREDENTIAL_HELPER"
+ post_test
+fi
+
+if test -z "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"; then
+ say "# skipping external helper timeout tests"
+else
+ pre_test
+ helper_test_timeout "$GIT_TEST_CREDENTIAL_HELPER_TIMEOUT"
+ post_test
+fi
+
+test_done
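
These external tests are opt-in: with neither variable set, both halves
are skipped.  A sketch of running them against a hypothetical helper
(the helper name below is made up; any installed git-credential-<name>
will do, and the timeout value mirrors the one used for the cache
helper above):

    # illustrative only: run from the t/ directory
    GIT_TEST_CREDENTIAL_HELPER=mystore \
    GIT_TEST_CREDENTIAL_HELPER_TIMEOUT='cache --timeout=1' \
            ./t0303-credential-external.sh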
test_expect_success setup '
git config core.bigfilethreshold 200k &&
- echo X | dd of=large bs=1k seek=2000
+ echo X | dd of=large1 bs=1k seek=2000 &&
+ echo X | dd of=large2 bs=1k seek=2000 &&
+ echo X | dd of=large3 bs=1k seek=2000 &&
+ echo Y | dd of=huge bs=1k seek=2500
'
-test_expect_success 'add a large file' '
- git add large &&
- # make sure we got a packfile and no loose objects
- test -f .git/objects/pack/pack-*.pack &&
- test ! -f .git/objects/??/??????????????????????????????????????
+test_expect_success 'add a large file or two' '
+ git add large1 huge large2 &&
+ # make sure we got a single packfile and no loose objects
+ bad= count=0 idx= &&
+ for p in .git/objects/pack/pack-*.pack
+ do
+ count=$(( $count + 1 ))
+ if test -f "$p" && idx=${p%.pack}.idx && test -f "$idx"
+ then
+ continue
+ fi
+ bad=t
+ done &&
+ test -z "$bad" &&
+ test $count = 1 &&
+ cnt=$(git show-index <"$idx" | wc -l) &&
+ test $cnt = 2 &&
+ for l in .git/objects/??/??????????????????????????????????????
+ do
+ test -f "$l" || continue
+ bad=t
+ done &&
+ test -z "$bad" &&
+
+ # attempt to add another copy of the same
+ git add large3 &&
+ bad= count=0 &&
+ for p in .git/objects/pack/pack-*.pack
+ do
+ count=$(( $count + 1 ))
+ if test -f "$p" && idx=${p%.pack}.idx && test -f "$idx"
+ then
+ continue
+ fi
+ bad=t
+ done &&
+ test -z "$bad" &&
+ test $count = 1
'
test_expect_success 'checkout a large file' '
- large=$(git rev-parse :large) &&
- git update-index --add --cacheinfo 100644 $large another &&
+ large1=$(git rev-parse :large1) &&
+ git update-index --add --cacheinfo 100644 $large1 another &&
git checkout another &&
- cmp large another ;# this must not be test_cmp
+ cmp large1 another ;# this must not be test_cmp
+'
+
+test_expect_success 'packsize limit' '
+ test_create_repo mid &&
+ (
+ cd mid &&
+ git config core.bigfilethreshold 64k &&
+ git config pack.packsizelimit 256k &&
+
+ # mid1 and mid2 will fit within 256k limit but
+ # appending mid3 will bust the limit and will
+ # result in a separate packfile.
+ test-genrandom "a" $(( 66 * 1024 )) >mid1 &&
+ test-genrandom "b" $(( 80 * 1024 )) >mid2 &&
+ test-genrandom "c" $(( 128 * 1024 )) >mid3 &&
+ git add mid1 mid2 mid3 &&
+
+ count=0
+ for pi in .git/objects/pack/pack-*.idx
+ do
+ test -f "$pi" && count=$(( $count + 1 ))
+ done &&
+ test $count = 2 &&
+
+ (
+ git hash-object --stdin <mid1
+ git hash-object --stdin <mid2
+ git hash-object --stdin <mid3
+ ) |
+ sort >expect &&
+
+ for pi in .git/objects/pack/pack-*.idx
+ do
+ git show-index <"$pi"
+ done |
+ sed -e "s/^[0-9]* \([0-9a-f]*\) .*/\1/" |
+ sort >actual &&
+
+ test_cmp expect actual
+ )
'
test_done
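
Both knobs exercised here are plain configuration, so the behaviour is
easy to reproduce by hand.  A small sketch with sizes chosen only for
illustration (test-genrandom is a helper built in the git source tree):

    # illustrative only: big files go straight into a pack on "git add",
    # and pack.packsizelimit caps how large each such pack may grow
    git init big && cd big
    git config core.bigfilethreshold 64k
    git config pack.packsizelimit 256k
    test-genrandom seed $((128 * 1024)) >blob
    git add blob
    ls .git/objects/pack/    # the blob lands in a pack, not as a loose object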
test_cmp expect actual
'
-test_expect_success 'checkout -B to the current branch fails before merging' '
+test_expect_success 'checkout -B to the current branch works' '
git checkout branch1 &&
+ git checkout -B branch1-scratch &&
+
setup_dirty_mergeable &&
- git commit -mfooble &&
- test_must_fail git checkout -B branch1 initial &&
- test_must_fail test_dirty_mergeable
+ git checkout -B branch1-scratch initial &&
+ test_dirty_mergeable
'
test_done
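
The relaxed rule is easy to see outside the test as well: "checkout -B"
on the branch that is currently checked out now resets it in place
instead of refusing.  A sketch with made-up branch names (illustrative
only):

    # illustrative only
    git checkout topic
    git checkout -B topic origin/topic    # reset the checked-out branch, stay on it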
git branch -M baz bam
'
+test_expect_success 'git branch -M master should work when master is checked out' '
+ git checkout master &&
+ git branch -M master
+'
+
+test_expect_success 'git branch -M master master should work when master is checked out' '
+ git checkout master &&
+ git branch -M master master
+'
+
+test_expect_success 'git branch -M master2 master2 should work when master is checked out' '
+ git checkout master &&
+ git branch master2 &&
+ git branch -M master2 master2
+'
+
test_expect_success 'git branch -v -d t should work' '
git branch t &&
test_path_is_file .git/refs/heads/t &&
test_commit 1 &&
test_commit 2 &&
mkdir sub &&
- test_commit 3 sub/3 &&
+ test_commit 3 sub/3.t &&
test_commit 4
'
--- /dev/null
+#!/bin/sh
+
+test_description='git apply should exit non-zero with unrecognized input.'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ test_commit 1
+'
+
+test_expect_success 'apply --check exits non-zero with unrecognized input' '
+ test_must_fail git apply --check - <<-\EOF
+ I am not a patch
+ I look nothing like a patch
+ git apply must fail
+ EOF
+'
+
+test_done
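
Because the command now promises a non-zero exit for input it does not
recognize, callers can branch on its status directly.  A tiny sketch
("maybe-patch" is a made-up filename, illustrative only):

    # illustrative only: probe whether a file holds something git apply
    # can handle before trying to apply it for real
    if git apply --check - <maybe-patch 2>/dev/null
    then
            echo "looks like a usable patch"
    else
            echo "not something git apply understands"
    fi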
'
cat > expect <<\EOF
-* Merge commit 'reach'
+* Merge tag 'reach'
|\
| \
| \
-*-. \ Merge commit 'octopus-a'; commit 'octopus-b'
+*-. \ Merge tags 'octopus-a' and 'octopus-b'
|\ \ \
* | | | seventh
| | * | octopus-b
'git archive --output=b4.tar HEAD &&
test_cmp b.tar b4.tar'
-test_expect_success NOT_MINGW 'git archive --remote' \
+test_expect_success 'git archive --remote' \
'git archive --remote=. HEAD >b5.tar &&
test_cmp b.tar b5.tar'
'git archive --list outside of a git repo' \
'GIT_DIR=some/non-existing/directory git archive --list'
+test_expect_success 'clients cannot access unreachable commits' '
+ test_commit unreachable &&
+ sha1=`git rev-parse HEAD` &&
+ git reset --hard HEAD^ &&
+ git archive $sha1 >remote.tar &&
+ test_must_fail git archive --remote=. $sha1 >remote.tar
+'
+
test_expect_success 'git-archive --prefix=olde-' '
git archive --prefix=olde- >h.tar HEAD &&
(
grep "^bar\$" output
'
-test_expect_success NOT_MINGW 'archive --list shows only enabled remote filters' '
+test_expect_success 'archive --list shows only enabled remote filters' '
git archive --list --remote=. >output &&
! grep "^tar\.foo\$" output &&
grep "^bar\$" output
test_cmp b.tar config-implicittar.foo
'
-test_expect_success NOT_MINGW 'only enabled filters are available remotely' '
+test_expect_success 'only enabled filters are available remotely' '
test_must_fail git archive --remote=. --format=tar.foo HEAD \
>remote.tar.foo &&
git archive --remote=. --format=bar >remote.bar HEAD &&
test_cmp b.tar j.tar
'
-test_expect_success GZIP,NOT_MINGW 'remote tar.gz is allowed by default' '
+test_expect_success GZIP 'remote tar.gz is allowed by default' '
git archive --remote=. --format=tar.gz HEAD >remote.tar.gz &&
test_cmp j.tgz remote.tar.gz
'
-test_expect_success GZIP,NOT_MINGW 'remote tar.gz can be disabled' '
+test_expect_success GZIP 'remote tar.gz can be disabled' '
git config tar.tar.gz.remote false &&
test_must_fail git archive --remote=. --format=tar.gz HEAD \
>remote.tar.gz
s/$downstream_url_for_sed/URL/g
s/for-upstream/BRANCH/g
s/mnemonic.txt/FILENAME/g
+ s/^version [0-9]/VERSION/
/^ FILENAME | *[0-9]* [-+]*\$/ b diffstat
/^AUTHOR ([0-9]*):\$/ b shortlog
p
SUBJECT (DATE)
are available in the git repository at:
+
URL BRANCH
+ for you to fetch changes up to OBJECT_NAME:
+
+ SUBJECT (DATE)
+
+ ----------------------------------------------------------------
+ VERSION
+
+ ----------------------------------------------------------------
SHORTLOG
DIFFSTAT
'
-test_expect_success 'fetch must not resolve short remote name' '
+test_expect_success 'fetch can now resolve short remote name' '
cd "$D" &&
git update-ref refs/remotes/six/HEAD HEAD &&
cd six &&
git init &&
- test_must_fail git fetch .. six:six
-
+ git fetch .. six:six
'
test_expect_success 'create bundle 1' '
# br-branches-default
754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-default-merge
754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-default-merge branches-default
754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge branch 'master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-default branches-default
754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one-merge
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one-merge branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one-octopus
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one-octopus branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-branches-one branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-unconfig --tags ../.git
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 tag 'tag-one' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-unconfig ../.git tag tag-one-tree tag tag-three-file
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-unconfig ../.git tag tag-one tag tag-three
8e32a6d901327a23ef831511badce7bf3bf46689 tag 'tag-one' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b tag 'tag-three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 tag 'tag-three' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-unconfig branches-default
754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# br-unconfig branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# master --tags ../.git
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 tag 'tag-one' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# master ../.git tag tag-one-tree tag tag-three-file
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# master ../.git tag tag-one tag tag-three
8e32a6d901327a23ef831511badce7bf3bf46689 tag 'tag-one' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b tag 'tag-three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 tag 'tag-three' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# master branches-default
754b754407bf032e9a2f9d5a9ad05ca79a6b228f branch 'master' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
# master branches-one
8e32a6d901327a23ef831511badce7bf3bf46689 branch 'one' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge branch 'one' of ../
0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge branch 'three' of ../
6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge branch 'two' of ../
-754b754407bf032e9a2f9d5a9ad05ca79a6b228f not-for-merge tag 'tag-master' of ../
+6c9dec2b923228c9ff994c6cfe4ae16c12408dc5 not-for-merge tag 'tag-master' of ../
8e32a6d901327a23ef831511badce7bf3bf46689 not-for-merge tag 'tag-one' of ../
22feea448b023a2d864ef94b013735af34d238ba not-for-merge tag 'tag-one-tree' of ../
-0567da4d5edd2ff4bb292a465ba9e64dcad9536b not-for-merge tag 'tag-three' of ../
+c61a82b60967180544e3c19f819ddbd0c9f89899 not-for-merge tag 'tag-three' of ../
0e3b14047d3ee365f4f2a1b673db059c3972589c not-for-merge tag 'tag-three-file' of ../
-6134ee8f857693b96ff1cc98d3e2fd62b199e5a8 not-for-merge tag 'tag-two' of ../
+525b7fb068d59950d185a8779dc957c77eed73ba not-for-merge tag 'tag-two' of ../
test $(git rev-parse HEAD^) = $(git rev-parse copy) &&
test new = $(git show HEAD:file2)
'
+test_expect_success 'pull.rebase' '
+ git reset --hard before-rebase &&
+ git config --bool pull.rebase true &&
+ test_when_finished "git config --unset pull.rebase" &&
+ git pull . copy &&
+ test $(git rev-parse HEAD^) = $(git rev-parse copy) &&
+ test new = $(git show HEAD:file2)
+'
test_expect_success 'branch.to-rebase.rebase' '
git reset --hard before-rebase &&
- git config branch.to-rebase.rebase 1 &&
+ git config --bool branch.to-rebase.rebase true &&
+ test_when_finished "git config --unset branch.to-rebase.rebase" &&
git pull . copy &&
- git config branch.to-rebase.rebase 0 &&
test $(git rev-parse HEAD^) = $(git rev-parse copy) &&
test new = $(git show HEAD:file2)
'
+test_expect_success 'branch.to-rebase.rebase should override pull.rebase' '
+ git reset --hard before-rebase &&
+ git config --bool pull.rebase true &&
+ test_when_finished "git config --unset pull.rebase" &&
+ git config --bool branch.to-rebase.rebase false &&
+ test_when_finished "git config --unset branch.to-rebase.rebase" &&
+ git pull . copy &&
+ test $(git rev-parse HEAD^) != $(git rev-parse copy) &&
+ test new = $(git show HEAD:file2)
+'
+
test_expect_success '--rebase with rebased upstream' '
git remote add -f me . &&
EOF
chmod +x askpass &&
GIT_ASKPASS="$PWD/askpass" &&
- export GIT_ASKPASS &&
- >askpass-expect-none &&
- echo "askpass: Password for '\''$HTTPD_DEST'\'': " >askpass-expect-pass &&
- { echo "askpass: Username for '\''$HTTPD_DEST'\'': " &&
- cat askpass-expect-pass
- } >askpass-expect-both
-'
+ export GIT_ASKPASS
+'
+
+expect_askpass() {
+ dest=$HTTPD_DEST
+ {
+ case "$1" in
+ none)
+ ;;
+ pass)
+ echo "askpass: Password for 'http://$2@$dest': "
+ ;;
+ both)
+ echo "askpass: Username for 'http://$dest': "
+ echo "askpass: Password for 'http://$2@$dest': "
+ ;;
+ *)
+ false
+ ;;
+ esac
+ } >askpass-expect &&
+ test_cmp askpass-expect askpass-query
+}
test_expect_success 'cloning password-protected repository can fail' '
>askpass-query &&
echo wrong >askpass-response &&
test_must_fail git clone "$HTTPD_URL/auth/repo.git" clone-auth-fail &&
- test_cmp askpass-expect-both askpass-query
+ expect_askpass both wrong
'
test_expect_success 'http auth can use user/pass in URL' '
>askpass-query &&
- echo wrong >askpass-reponse &&
+ echo wrong >askpass-response &&
git clone "$HTTPD_URL_USER_PASS/auth/repo.git" clone-auth-none &&
- test_cmp askpass-expect-none askpass-query
+ expect_askpass none
'
test_expect_success 'http auth can use just user in URL' '
>askpass-query &&
echo user@host >askpass-response &&
git clone "$HTTPD_URL_USER/auth/repo.git" clone-auth-pass &&
- test_cmp askpass-expect-pass askpass-query
+ expect_askpass pass user@host
'
test_expect_success 'http auth can request both user and pass' '
>askpass-query &&
echo user@host >askpass-response &&
git clone "$HTTPD_URL/auth/repo.git" clone-auth-both &&
- test_cmp askpass-expect-both askpass-query
+ expect_askpass both user@host
+'
+
+test_expect_success 'http auth respects credential helper config' '
+ test_config_global credential.helper "!f() {
+ cat >/dev/null
+ echo username=user@host
+ echo password=user@host
+ }; f" &&
+ >askpass-query &&
+ echo wrong >askpass-response &&
+ git clone "$HTTPD_URL/auth/repo.git" clone-auth-helper &&
+ expect_askpass none
+'
+
+test_expect_success 'http auth can get username from config' '
+ test_config_global "credential.$HTTPD_URL.username" user@host &&
+ >askpass-query &&
+ echo user@host >askpass-response &&
+ git clone "$HTTPD_URL/auth/repo.git" clone-auth-user &&
+ expect_askpass pass user@host
+'
+
+test_expect_success 'configured username does not override URL' '
+ test_config_global "credential.$HTTPD_URL.username" wrong &&
+ >askpass-query &&
+ echo user@host >askpass-response &&
+ git clone "$HTTPD_URL_USER/auth/repo.git" clone-auth-user2 &&
+ expect_askpass pass user@host
'
test_expect_success 'fetch changes via http' '
test_description='git commit'
. ./test-lib.sh
+. "$TEST_DIRECTORY/diff-lib.sh"
-test_tick
+author='The Real Author <someguy@his.email.org>'
-test_expect_success \
- "initial status" \
- "echo 'bongo bongo' >file &&
- git add file"
+test_tick
-test_expect_success "Constructing initial commit" '
+test_expect_success 'initial status' '
+ echo bongo bongo >file &&
+ git add file &&
git status >actual &&
test_i18ngrep "Initial commit" actual
'
-test_expect_success \
- "fail initial amend" \
- "test_must_fail git commit --amend"
+test_expect_success 'fail initial amend' '
+ test_must_fail git commit --amend
+'
-test_expect_success \
- "initial commit" \
- "git commit -m initial"
+test_expect_success 'setup: initial commit' '
+ git commit -m initial
+'
-test_expect_success \
- "invalid options 1" \
- "test_must_fail git commit -m foo -m bar -F file"
+test_expect_success '-m and -F do not mix' '
+ test_must_fail git commit -m foo -m bar -F file
+'
-test_expect_success \
- "invalid options 2" \
- "test_must_fail git commit -C HEAD -m illegal"
+test_expect_success '-m and -C do not mix' '
+ test_must_fail git commit -C HEAD -m illegal
+'
-test_expect_success \
- "using paths with -a" \
- "echo King of the bongo >file &&
- test_must_fail git commit -m foo -a file"
+test_expect_success 'paths and -a do not mix' '
+ echo King of the bongo >file &&
+ test_must_fail git commit -m foo -a file
+'
test_expect_success PERL 'can use paths with --interactive' '
echo bong-o-bong >file &&
git reset --hard HEAD^
'
-test_expect_success \
- "using invalid commit with -C" \
- "test_must_fail git commit -C bogus"
+test_expect_success 'using invalid commit with -C' '
+ test_must_fail git commit -C bogus
+'
-test_expect_success \
- "testing nothing to commit" \
- "test_must_fail git commit -m initial"
+test_expect_success 'nothing to commit' '
+ test_must_fail git commit -m initial
+'
-test_expect_success \
- "next commit" \
- "echo 'bongo bongo bongo' >file \
- git commit -m next -a"
+test_expect_success 'setup: non-initial commit' '
+ echo bongo bongo bongo >file &&
+ git commit -m next -a
+'
-test_expect_success \
- "commit message from non-existing file" \
- "echo 'more bongo: bongo bongo bongo bongo' >file && \
- test_must_fail git commit -F gah -a"
+test_expect_success 'commit message from non-existing file' '
+ echo more bongo: bongo bongo bongo bongo >file &&
+ test_must_fail git commit -F gah -a
+'
-# Empty except stray tabs and spaces on a few lines.
-sed -e 's/@$//' >msg <<EOF
- @
+test_expect_success 'empty commit message' '
+ # Empty except stray tabs and spaces on a few lines.
+ sed -e "s/@//g" >msg <<-\EOF &&
+ @ @
+ @@
+ @ @
+ @Signed-off-by: hula@
+ EOF
+ test_must_fail git commit -F msg -a
+'
- @
-Signed-off-by: hula
-EOF
-test_expect_success \
- "empty commit message" \
- "test_must_fail git commit -F msg -a"
+test_expect_success 'setup: commit message from file' '
+ echo this is the commit message, coming from a file >msg &&
+ git commit -F msg -a
+'
-test_expect_success \
- "commit message from file" \
- "echo 'this is the commit message, coming from a file' >msg && \
- git commit -F msg -a"
+test_expect_success 'amend commit' '
+ cat >editor <<-\EOF &&
+ #!/bin/sh
+ sed -e "s/a file/an amend commit/g" < "$1" > "$1-"
+ mv "$1-" "$1"
+ EOF
+ chmod 755 editor &&
+ EDITOR=./editor git commit --amend
+'
-cat >editor <<\EOF
-#!/bin/sh
-sed -e "s/a file/an amend commit/g" < "$1" > "$1-"
-mv "$1-" "$1"
-EOF
-chmod 755 editor
+test_expect_success 'set up editor' '
+ cat >editor <<-\EOF &&
+ #!/bin/sh
+ sed -e "s/unamended/amended/g" <"$1" >"$1-"
+ mv "$1-" "$1"
+ EOF
+ chmod 755 editor
+'
-test_expect_success \
- "amend commit" \
- "EDITOR=./editor git commit --amend"
+test_expect_success 'amend without launching editor' '
+ echo unamended >expect &&
+ git commit --allow-empty -m "unamended" &&
+ echo needs more bongo >file &&
+ git add file &&
+ EDITOR=./editor git commit --no-edit --amend &&
+ git diff --exit-code HEAD -- file &&
+ git diff-tree -s --format=%s HEAD >msg &&
+ test_cmp expect msg
+'
-test_expect_success \
- "passing -m and -F" \
- "echo 'enough with the bongos' >file && \
- test_must_fail git commit -F msg -m amending ."
+test_expect_success '--amend --edit' '
+ echo amended >expect &&
+ git commit --allow-empty -m "unamended" &&
+ echo bongo again >file &&
+ git add file &&
+ EDITOR=./editor git commit --edit --amend &&
+ git diff-tree -s --format=%s HEAD >msg &&
+ test_cmp expect msg
+'
-test_expect_success \
- "using message from other commit" \
- "git commit -C HEAD^ ."
+test_expect_success '-m --edit' '
+ echo amended >expect &&
+ git commit --allow-empty -m buffer &&
+ echo bongo bongo >file &&
+ git add file &&
+ EDITOR=./editor git commit -m unamended --edit &&
+ git diff-tree -s --format=%s HEAD >msg &&
+ test_cmp expect msg
+'
-cat >editor <<\EOF
-#!/bin/sh
-sed -e "s/amend/older/g" < "$1" > "$1-"
-mv "$1-" "$1"
-EOF
-chmod 755 editor
-
-test_expect_success \
- "editing message from other commit" \
- "echo 'hula hula' >file && \
- EDITOR=./editor git commit -c HEAD^ -a"
-
-test_expect_success \
- "message from stdin" \
- "echo 'silly new contents' >file && \
- echo commit message from stdin | git commit -F - -a"
-
-test_expect_success \
- "overriding author from command line" \
- "echo 'gak' >file && \
- git commit -m 'author' --author 'Rubber Duck <rduck@convoy.org>' -a >output 2>&1"
-
-test_expect_success \
- "commit --author output mentions author" \
- "grep Rubber.Duck output"
-
-test_expect_success PERL \
- "interactive add" \
- "echo 7 | git commit --interactive | grep 'What now'"
-
-test_expect_success PERL \
- "commit --interactive doesn't change index if editor aborts" \
- "echo zoo >file &&
+test_expect_success '-m and -F do not mix' '
+ echo enough with the bongos >file &&
+ test_must_fail git commit -F msg -m amending .
+'
+
+test_expect_success 'using message from other commit' '
+ git commit -C HEAD^ .
+'
+
+test_expect_success 'editing message from other commit' '
+ cat >editor <<-\EOF &&
+ #!/bin/sh
+ sed -e "s/amend/older/g" < "$1" > "$1-"
+ mv "$1-" "$1"
+ EOF
+ chmod 755 editor &&
+ echo hula hula >file &&
+ EDITOR=./editor git commit -c HEAD^ -a
+'
+
+test_expect_success 'message from stdin' '
+ echo silly new contents >file &&
+ echo commit message from stdin |
+ git commit -F - -a
+'
+
+test_expect_success 'overriding author from command line' '
+ echo gak >file &&
+ git commit -m author \
+ --author "Rubber Duck <rduck@convoy.org>" -a >output 2>&1 &&
+ grep Rubber.Duck output
+'
+
+test_expect_success PERL 'interactive add' '
+ echo 7 |
+ git commit --interactive |
+ grep "What now"
+'
+
+test_expect_success PERL "commit --interactive doesn't change index if editor aborts" '
+ echo zoo >file &&
test_must_fail git diff --exit-code >diff1 &&
- (echo u ; echo '*' ; echo q) |
- (EDITOR=: && export EDITOR &&
- test_must_fail git commit --interactive) &&
+ (echo u ; echo "*" ; echo q) |
+ (
+ EDITOR=: &&
+ export EDITOR &&
+ test_must_fail git commit --interactive
+ ) &&
git diff >diff2 &&
- test_cmp diff1 diff2"
-
-test_expect_success \
- "showing committed revisions" \
- "git rev-list HEAD >current"
+ compare_diff_patch diff1 diff2
+'
-cat >editor <<\EOF
-#!/bin/sh
-sed -e "s/good/bad/g" < "$1" > "$1-"
-mv "$1-" "$1"
-EOF
-chmod 755 editor
-
-cat >msg <<EOF
-A good commit message.
-EOF
-
-test_expect_success \
- 'editor not invoked if -F is given' '
- echo "moo" >file &&
- EDITOR=./editor git commit -a -F msg &&
- git show -s --pretty=format:"%s" | grep -q good &&
- echo "quack" >file &&
- echo "Another good message." | EDITOR=./editor git commit -a -F - &&
- git show -s --pretty=format:"%s" | grep -q good
- '
-# We could just check the head sha1, but checking each commit makes it
-# easier to isolate bugs.
-
-cat >expected <<\EOF
-72c0dc9855b0c9dadcbfd5a31cab072e0cb774ca
-9b88fc14ce6b32e3d9ee021531a54f18a5cf38a2
-3536bbb352c3a1ef9a420f5b4242d48578b92aa7
-d381ac431806e53f3dd7ac2f1ae0534f36d738b9
-4fd44095ad6334f3ef72e4c5ec8ddf108174b54a
-402702b49136e7587daa9280e91e4bb7cb2179f7
-EOF
-
-test_expect_success \
- 'validate git rev-list output.' \
- 'test_cmp expected current'
+test_expect_success 'editor not invoked if -F is given' '
+ cat >editor <<-\EOF &&
+ #!/bin/sh
+ sed -e s/good/bad/g <"$1" >"$1-"
+ mv "$1-" "$1"
+ EOF
+ chmod 755 editor &&
+
+ echo A good commit message. >msg &&
+ echo moo >file &&
+
+ EDITOR=./editor git commit -a -F msg &&
+ git show -s --pretty=format:%s >subject &&
+ grep -q good subject &&
+
+ echo quack >file &&
+ echo Another good message. |
+ EDITOR=./editor git commit -a -F - &&
+ git show -s --pretty=format:%s >subject &&
+ grep -q good subject
+'
test_expect_success 'partial commit that involves removal (1)' '
'
-author="The Real Author <someguy@his.email.org>"
test_expect_success 'amend commit to fix author' '
oldtick=$GIT_AUTHOR_DATE &&
'
-author="The Real Author <someguy@his.email.org>"
test_expect_success 'amend commit to fix author' '
oldtick=$GIT_AUTHOR_DATE &&
test_expect_success 'same tree (single parent)' '
- git reset --hard
-
- if git commit -m empty
- then
- echo oops -- should have complained
- false
- else
- : happy
- fi
+ git reset --hard &&
+ test_must_fail git commit -m empty
'
>empty
create_merge_msgs () {
- echo "Merge commit 'c2'" >msg.1-5 &&
- echo "Merge commit 'c2'; commit 'c3'" >msg.1-5-9 &&
+ echo "Merge tag 'c2'" >msg.1-5 &&
+ echo "Merge tags 'c2' and 'c3'" >msg.1-5-9 &&
{
echo "Squashed commit of the following:" &&
echo &&
} >squash.1-5-9 &&
echo >msg.nolog &&
{
- echo "* commit 'c3':" &&
+ echo "* tag 'c3':" &&
echo " commit 3" &&
echo
} >msg.log
verify_mergeheads () {
printf '%s\n' "$@" >mergehead.expected &&
- test_cmp mergehead.expected .git/MERGE_HEAD
+ while read sha1 rest
+ do
+ git rev-parse $sha1
+ done <.git/MERGE_HEAD >mergehead.actual &&
+ test_cmp mergehead.expected mergehead.actual
}
verify_no_mergehead () {
cp exp.subject exp.log &&
echo >>exp.log "" &&
- echo >>exp.log "* commit 'c2':" &&
+ echo >>exp.log "* tag 'c2':" &&
echo >>exp.log " c2"
}
git checkout master &&
test_commit master-3 &&
git merge tag-1 &&
- check_oneline "Merge commit Qtag-1Q"
+ check_oneline "Merge tag Qtag-1Q"
'
test_expect_success 'ambiguous tag' '
git checkout master &&
test_commit master-4 &&
git merge ambiguous &&
- check_oneline "Merge commit QambiguousQ"
+ check_oneline "Merge tag QambiguousQ"
'
test_expect_success 'remote-tracking branch' '
test_cmp expect_non-note3 actual
'
+
+# Change the notes for the three top commits
+test_tick
+cat >input <<INPUT_END
+commit refs/notes/many_notes
+committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+data <<COMMIT
+changing notes for the top three commits
+COMMIT
+from refs/notes/many_notes^0
+INPUT_END
+
+rm expect
+i=$num_commits
+j=0
+while test $j -lt 3
+do
+ cat >>input <<INPUT_END
+N inline refs/heads/many_commits~$j
+data <<EOF
+changed note for commit #$i
+EOF
+INPUT_END
+ cat >>expect <<EXPECT_END
+ commit #$i
+ changed note for commit #$i
+EXPECT_END
+ i=$(($i - 1))
+ j=$(($j + 1))
+done
+
+test_expect_success 'change a few existing notes' '
+
+ git fast-import <input &&
+ GIT_NOTES_REF=refs/notes/many_notes git log -n3 refs/heads/many_commits |
+ grep "^ " > actual &&
+ test_cmp expect actual
+
+'
+
+test_expect_success 'verify that changing notes respect existing fanout' '
+
+ # None of the entries in the top-level notes tree should be a full SHA1
+ git ls-tree --name-only refs/notes/many_notes |
+ while read path
+ do
+ if test $(expr length "$path") -ge 40
+ then
+ return 1
+ fi
+ done
+
+'
+
remaining_notes=10
test_tick
-cat >>input <<INPUT_END
+cat >input <<INPUT_END
commit refs/notes/many_notes
committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
data <<COMMIT
from refs/notes/many_notes^0
INPUT_END
-i=$remaining_notes
-while test $i -lt $num_commits
+i=$(($num_commits - $remaining_notes))
+for sha1 in $(git rev-list -n $i refs/heads/many_commits)
do
- i=$(($i + 1))
cat >>input <<INPUT_END
-N 0000000000000000000000000000000000000000 :$i
+N 0000000000000000000000000000000000000000 $sha1
INPUT_END
done
'commitdiff(2): directory becomes symlink' \
'gitweb_run "p=.git;a=commitdiff;hp=foo-becomes-a-directory;h=foo-symlinked-to-bar"'
+# ----------------------------------------------------------------------
+# commitdiff testing (incomplete lines)
+
+test_expect_success 'setup incomplete lines' '
+ cat >file<<-\EOF &&
+ Dominus regit me,
+ et nihil mihi deerit.
+ In loco pascuae ibi me collocavit,
+ super aquam refectionis educavit me;
+ animam meam convertit,
+ deduxit me super semitas jusitiae,
+ propter nomen suum.
+ CHANGE_ME
+ EOF
+ git commit -a -m "Preparing for incomplete lines" &&
+ echo "incomplete" | tr -d "\\012" >>file &&
+ git commit -a -m "Add incomplete line" &&
+ git tag incomplete_lines_add &&
+ sed -e s/CHANGE_ME/change_me/ <file >file+ &&
+ mv -f file+ file &&
+ git commit -a -m "Incomplete context line" &&
+ git tag incomplete_lines_ctx &&
+ echo "Dominus regit me," >file &&
+ echo "incomplete line" | tr -d "\\012" >>file &&
+ git commit -a -m "Change incomplete line" &&
+ git tag incomplete_lines_chg
+ echo "Dominus regit me," >file &&
+ git commit -a -m "Remove incomplete line" &&
+ git tag incomplete_lines_rem
+'
+
+test_expect_success 'commitdiff(1): addition of incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_add"
+'
+
+test_expect_success 'commitdiff(1): incomplete line as context line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_ctx"
+'
+
+test_expect_success 'commitdiff(1): change incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_chg"
+'
+
+test_expect_success 'commitdiff(1): removal of incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_rem"
+'
+
# ----------------------------------------------------------------------
# commit, commitdiff: merge, large
test_expect_success \
git add b &&
git commit -a -m "On branch" &&
git checkout master &&
- git pull . b'
+ git pull . b &&
+ git tag merge_commit'
test_expect_success \
'commit(0): merge commit' \
'commitdiff(1): large commit' \
'gitweb_run "p=.git;a=commitdiff;h=b"'
+# ----------------------------------------------------------------------
+# side-by-side diff
+
+test_expect_success 'side-by-side: addition of incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_add;ds=sidebyside"
+'
+
+test_expect_success 'side-by-side: incomplete line as context line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_ctx;ds=sidebyside"
+'
+
+test_expect_success 'side-by-side: changed incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_chg;ds=sidebyside"
+'
+
+test_expect_success 'side-by-side: removal of incomplete line' '
+ gitweb_run "p=.git;a=commitdiff;h=incomplete_lines_rem;ds=sidebyside"
+'
+
+test_expect_success 'side-by-side: merge commit' '
+ gitweb_run "p=.git;a=commitdiff;h=merge_commit;ds=sidebyside"
+'
+
# ----------------------------------------------------------------------
# tags testing
cd "$git" &&
echo line >>file1 &&
git commit -a -m "change 3 (not really)" &&
- printf "bad response\nn\n" | "$GITP4" submit
+ printf "bad response\nn\n" | "$GITP4" submit &&
p4 changes //depot/... >wc &&
test_line_count = 2 wc
)
)
'
+# check the normal case, where the template really is edited
+test_expect_success 'no config, edited' '
+ "$GITP4" clone --dest="$git" //depot &&
+ test_when_finished cleanup_git &&
+ ed="$TRASH_DIRECTORY/ed.sh" &&
+ test_when_finished "rm \"$ed\"" &&
+ cat >"$ed" <<-EOF &&
+ #!$SHELL_PATH
+ sleep 1
+ touch "\$1"
+ exit 0
+ EOF
+ chmod 755 "$ed" &&
+ (
+ cd "$git" &&
+ echo line >>file1 &&
+ git commit -a -m "change 5" &&
+ EDITOR="\"$ed\"" "$GITP4" submit &&
+ p4 changes //depot/... >wc &&
+ test_line_count = 5 wc
+ )
+'
test_expect_success 'kill p4d' '
kill_p4d
--- /dev/null
+#!/bin/sh
+
+test_description='git-p4 submit'
+
+. ./lib-git-p4.sh
+
+test_expect_success 'start p4d' '
+ start_p4d
+'
+
+test_expect_success 'init depot' '
+ (
+ cd "$cli" &&
+ echo file1 >file1 &&
+ p4 add file1 &&
+ p4 submit -d "change 1"
+ )
+'
+
+test_expect_success 'submit with no client dir' '
+ test_when_finished cleanup_git &&
+ "$GITP4" clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo file2 >file2 &&
+ git add file2 &&
+ git commit -m "git commit 2" &&
+ rm -rf "$cli" &&
+ git config git-p4.skipSubmitEdit true &&
+ "$GITP4" submit
+ )
+'
+
+test_expect_success 'kill p4d' '
+ kill_p4d
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='git-p4 relative chdir'
+
+. ./lib-git-p4.sh
+
+test_expect_success 'start p4d' '
+ start_p4d
+'
+
+test_expect_success 'init depot' '
+ (
+ cd "$cli" &&
+ echo file1 >file1 &&
+ p4 add file1 &&
+ p4 submit -d "change 1"
+ )
+'
+
+# P4 reads from P4CONFIG file to find its server params, if the
+# environment variable is set
+test_expect_success 'P4CONFIG and absolute dir clone' '
+ printf "P4PORT=$P4PORT\nP4CLIENT=$P4CLIENT\n" >p4config &&
+ test_when_finished "rm \"$TRASH_DIRECTORY/p4config\"" &&
+ test_when_finished cleanup_git &&
+ (
+ P4CONFIG=p4config && export P4CONFIG &&
+ unset P4PORT P4CLIENT &&
+ "$GITP4" clone --verbose --dest="$git" //depot
+ )
+'
+
+# same thing, but with relative directory name, note missing $ on --dest
+test_expect_success 'P4CONFIG and relative dir clone' '
+ printf "P4PORT=$P4PORT\nP4CLIENT=$P4CLIENT\n" >p4config &&
+ test_when_finished "rm \"$TRASH_DIRECTORY/p4config\"" &&
+ test_when_finished cleanup_git &&
+ (
+ P4CONFIG=p4config && export P4CONFIG &&
+ unset P4PORT P4CLIENT &&
+ "$GITP4" clone --verbose --dest="git" //depot
+ )
+'
+
+test_expect_success 'kill p4d' '
+ kill_p4d
+'
+
+test_done
git config "$@"
}
+test_config_global () {
+ test_when_finished "test_unconfig --global '$1'" &&
+ git config --global "$@"
+}
+
# Use test_set_prereq to tell that a particular prerequisite is available.
# The prerequisite can later be checked for in two ways:
#
return ret;
}
+/*
+ * Look at a signed tag object, and return the offset where
+ * the embedded detached signature begins, or the end of the
+ * data when there is no such signature.
+ */
size_t parse_signature(const char *buf, unsigned long size)
{
char *eol;
--- /dev/null
+#include "cache.h"
+#include "credential.h"
+#include "string-list.h"
+
+static const char usage_msg[] =
+"test-credential <fill|approve|reject> [helper...]";
+
+int main(int argc, const char **argv)
+{
+ const char *op;
+ struct credential c = CREDENTIAL_INIT;
+ int i;
+
+ op = argv[1];
+ if (!op)
+ usage(usage_msg);
+ for (i = 2; i < argc; i++)
+ string_list_append(&c.helpers, argv[i]);
+
+ if (credential_read(&c, stdin) < 0)
+ die("unable to read credential from stdin");
+
+ if (!strcmp(op, "fill")) {
+ credential_fill(&c);
+ if (c.username)
+ printf("username=%s\n", c.username);
+ if (c.password)
+ printf("password=%s\n", c.password);
+ }
+ else if (!strcmp(op, "approve"))
+ credential_approve(&c);
+ else if (!strcmp(op, "reject"))
+ credential_reject(&c);
+ else
+ usage(usage_msg);
+
+ return 0;
+}
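(Editorial aside, not part of the patch.) The test helper above doubles as a compact tour of the new C-level credential API. The sketch below shows how another internal caller might drive the same calls; it assumes only what is visible in test-credential.c plus git's usual xstrdup(), and the helper name, username value, and success flag are placeholders.

#include "cache.h"
#include "credential.h"
#include "string-list.h"

/* Sketch only: obtain a credential via a named helper, pretend to
 * use it, and report the outcome back to the helpers.
 */
static int request_with_helper(const char *helper)
{
	struct credential c = CREDENTIAL_INIT;
	int ok;

	string_list_append(&c.helpers, helper);
	c.username = xstrdup("user@host");	/* placeholder; may be left unset */

	credential_fill(&c);	/* consults helpers, then prompts if needed */

	/* ... perform the request with c.username / c.password ... */
	ok = 1;	/* pretend it succeeded for this sketch */

	if (ok)
		credential_approve(&c);	/* let helpers store the working credential */
	else
		credential_reject(&c);	/* let helpers drop the bad one */
	return !ok;
}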
struct cache_tree *another = cache_tree();
if (read_cache() < 0)
die("unable to read index file");
- cache_tree_update(another, active_cache, active_nr, 0, 1);
+ cache_tree_update(another, active_cache, active_nr, 0, 1, 0);
return dump_cache_tree(active_cache_tree, another, "");
}
--- /dev/null
+#include "cache.h"
+#include "tree.h"
+#include "cache-tree.h"
+
+static struct lock_file index_lock;
+
+int main(int ac, char **av)
+{
+ int fd = hold_locked_index(&index_lock, 1);
+ if (read_cache() < 0)
+ die("unable to read index file");
+ active_cache_tree = NULL;
+ if (write_cache(fd, active_cache, active_nr)
+ || commit_lock_file(&index_lock))
+ die("unable to write index file");
+ return 0;
+}
--- /dev/null
+#include "cache.h"
+#include "unix-socket.h"
+
+static int unix_stream_socket(void)
+{
+ int fd = socket(AF_UNIX, SOCK_STREAM, 0);
+ if (fd < 0)
+ die_errno("unable to create socket");
+ return fd;
+}
+
+static void unix_sockaddr_init(struct sockaddr_un *sa, const char *path)
+{
+ int size = strlen(path) + 1;
+ if (size > sizeof(sa->sun_path))
+ die("socket path is too long to fit in sockaddr");
+ memset(sa, 0, sizeof(*sa));
+ sa->sun_family = AF_UNIX;
+ memcpy(sa->sun_path, path, size);
+}
+
+int unix_stream_connect(const char *path)
+{
+ int fd;
+ struct sockaddr_un sa;
+
+ unix_sockaddr_init(&sa, path);
+ fd = unix_stream_socket();
+ if (connect(fd, (struct sockaddr *)&sa, sizeof(sa)) < 0) {
+ close(fd);
+ return -1;
+ }
+ return fd;
+}
+
+int unix_stream_listen(const char *path)
+{
+ int fd;
+ struct sockaddr_un sa;
+
+ unix_sockaddr_init(&sa, path);
+ fd = unix_stream_socket();
+
+ unlink(path);
+ if (bind(fd, (struct sockaddr *)&sa, sizeof(sa)) < 0) {
+ close(fd);
+ return -1;
+ }
+
+ if (listen(fd, 5) < 0) {
+ close(fd);
+ return -1;
+ }
+
+ return fd;
+}
--- /dev/null
+#ifndef UNIX_SOCKET_H
+#define UNIX_SOCKET_H
+
+int unix_stream_connect(const char *path);
+int unix_stream_listen(const char *path);
+
+#endif /* UNIX_SOCKET_H */
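(Editorial aside, not part of the patch.) A minimal sketch of how a daemon and its client might use the two entry points declared in unix-socket.h. The socket path, reply text, and buffer size are placeholders, and the system headers are assumed to come in through cache.h, just as they do for unix-socket.c above.

#include "cache.h"
#include "unix-socket.h"

/* Sketch of a one-shot server: listen on a path, answer one client. */
static void serve_one(const char *path)
{
	int client_fd;
	int listen_fd = unix_stream_listen(path);

	if (listen_fd < 0)
		die_errno("unable to listen on '%s'", path);
	client_fd = accept(listen_fd, NULL, NULL);
	if (client_fd >= 0) {
		write_in_full(client_fd, "ok\n", 3);
		close(client_fd);
	}
	close(listen_fd);
}

/* Sketch of the matching client: connect to the path and read the reply. */
static void query_one(const char *path)
{
	char buf[64];
	int fd = unix_stream_connect(path);

	if (fd < 0)
		die_errno("unable to connect to '%s'", path);
	if (read_in_full(fd, buf, sizeof(buf)) < 0)
		die_errno("read error from '%s'", path);
	close(fd);
}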
/* Jump targets or access declarations */
"!^[ \t]*[A-Za-z_][A-Za-z_0-9]*:.*$\n"
/* C/++ functions/methods at top level */
- "^([A-Za-z_][A-Za-z_0-9]*([ \t]+[A-Za-z_][A-Za-z_0-9]*([ \t]*::[ \t]*[^[:space:]]+)?){1,}[ \t]*\\([^;]*)$\n"
+ "^([A-Za-z_][A-Za-z_0-9]*([ \t*]+[A-Za-z_][A-Za-z_0-9]*([ \t]*::[ \t]*[^[:space:]]+)?){1,}[ \t]*\\([^;]*)$\n"
/* compound type at top level */
"^((struct|class|enum)[^;]*)$",
/* -- */
strm->z.msg ? strm->z.msg : "no message");
}
-void git_deflate_end(git_zstream *strm)
+int git_deflate_abort(git_zstream *strm)
{
int status;
zlib_pre_call(strm);
status = deflateEnd(&strm->z);
zlib_post_call(strm);
+ return status;
+}
+
+void git_deflate_end(git_zstream *strm)
+{
+ int status = git_deflate_abort(strm);
+
if (status == Z_OK)
return;
error("deflateEnd: %s (%s)", zerr_to_string(status),