git-imap-send
git-index-pack
git-init-db
+git-instaweb
git-local-fetch
git-log
git-lost-found
git-ssh-upload
git-status
git-stripspace
+git-svn
git-svnimport
git-symbolic-ref
git-tag
git-whatchanged
git-write-tree
git-core-*/?*
+gitweb/gitweb.cgi
test-date
test-delta
test-dump-cache-tree
*.[ao]
*.py[co]
config.mak
+autom4te.cache
+config.log
+config.status
+config.mak.autogen
+config.mak.append
+configure
git-blame
DOC_MAN7=$(patsubst %.txt,%.7,$(MAN7_TXT))
prefix?=$(HOME)
-bin=$(prefix)/bin
-mandir=$(prefix)/man
-man1=$(mandir)/man1
-man7=$(mandir)/man7
+bindir?=$(prefix)/bin
+mandir?=$(prefix)/man
+man1dir=$(mandir)/man1
+man7dir=$(mandir)/man7
# DESTDIR=
INSTALL?=install
html: $(DOC_HTML)
+$(DOC_HTML) $(DOC_MAN1) $(DOC_MAN7): asciidoc.conf
man: man1 man7
man1: $(DOC_MAN1)
man7: $(DOC_MAN7)
install: man
- $(INSTALL) -d -m755 $(DESTDIR)$(man1) $(DESTDIR)$(man7)
- $(INSTALL) $(DOC_MAN1) $(DESTDIR)$(man1)
- $(INSTALL) $(DOC_MAN7) $(DESTDIR)$(man7)
+ $(INSTALL) -d -m755 $(DESTDIR)$(man1dir) $(DESTDIR)$(man7dir)
+ $(INSTALL) $(DOC_MAN1) $(DESTDIR)$(man1dir)
+ $(INSTALL) $(DOC_MAN7) $(DESTDIR)$(man7dir)
#
comment on the changes you are submitting. It is important for
a developer to be able to "quote" your changes, using standard
e-mail tools, so that they may comment on specific portions of
-your code. For this reason, all patches should be submited
+your code. For this reason, all patches should be submitted
"inline". WARNING: Be wary of your MUAs word-wrap
corrupting your patch. Do not cut-n-paste your patch; you can
lose tabs that way if you are not careful.
[attributes]
caret=^
+startsb=[
+endsb=]
ifdef::backend-docbook[]
[gitlink-inlinemacro]
version.
core.sharedRepository::
- If true, the repository is made shareable between several users
- in a group (making sure all the files and objects are group-writable).
- See gitlink:git-init-db[1]. False by default.
+ When 'group' (or 'true'), the repository is made shareable between
+ several users in a group (making sure all the files and objects are
+ group-writable). When 'all' (or 'world' or 'everybody'), the
+	repository will be readable by all users, in addition to being
+ group-shareable. When 'umask' (or 'false'), git will use permissions
+ reported by umask(2). See gitlink:git-init-db[1]. False by default.
core.warnAmbiguousRefs::
If true, git will warn you if the ref name you passed it is ambiguous
and might match multiple refs in the .git/refs/ tree. True by default.
+core.compression::
+ An integer -1..9, indicating the compression level for objects that
+ are not in a pack file. -1 is the zlib and git default. 0 means no
+ compression, and 1..9 are various speed/size tradeoffs, 9 being
+ slowest.
+
+core.legacyheaders::
+ A boolean which enables the legacy object header format in case
+ you want to interoperate with old clients accessing the object
+ database directly (where the "http://" and "rsync://" protocols
+ count as direct access).
+
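+As a rough sketch (only the variable names come from this document; the
+values below are arbitrary), these could be set with
+gitlink:git-repo-config[1]:
+
+------------
+$ git repo-config core.compression 1
+$ git repo-config core.legacyheaders true
+------------
+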
alias.*::
Command aliases for the gitlink:git[1] command wrapper - e.g.
after defining "alias.last = cat-file commit HEAD", the invocation
	Tells `git-apply` how to handle whitespace, in the same way
as the '--whitespace' option. See gitlink:git-apply[1].
+pager.color::
+ A boolean to enable/disable colored output when the pager is in
+ use (default is true).
+
+diff.color::
+ When true (or `always`), always use colors in patch.
+ When false (or `never`), never. When set to `auto`, use
+ colors only when the output is to the terminal.
+
+diff.color.<slot>::
+ Use customized color for diff colorization. `<slot>`
+	specifies which part of the patch to use the specified
+	color for, and is one of `plain` (context text), `meta`
+ (metainformation), `frag` (hunk header), `old` (removed
+ lines), or `new` (added lines). The value for these
+ configuration variables can be one of: `normal`, `bold`,
+ `dim`, `ul`, `blink`, `reverse`, `reset`, `black`,
+ `red`, `green`, `yellow`, `blue`, `magenta`, `cyan`, or
+ `white`.
+
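+For illustration (the slot and color choices are arbitrary, and it is
+assumed here that gitlink:git-repo-config[1] accepts the dotted `<slot>`
+names directly):
+
+------------
+$ git repo-config diff.color auto
+$ git repo-config diff.color.old red
+$ git repo-config diff.color.new green
+------------
+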
diff.renameLimit::
The number of files to consider when performing the copy/rename
detection; equivalent to the git diff option '-l'.
+diff.renames::
+ Tells git to detect renames. If set to any boolean value, it
+ will enable basic rename detection. If set to "copies" or
+ "copy", it will detect copies, as well.
+
format.headers::
Additional email headers to include in a patch to be submitted
by mail. See gitlink:git-format-patch[1].
Whether to include summaries of merged commits in newly created
merge commit messages. False by default.
+pack.window::
+ The size of the window used by gitlink:git-pack-objects[1] when no
+ window size is given on the command line. Defaults to 10.
+
pull.octopus::
The default merge strategy to use when pulling multiple branches
at once.
The default set of branches for gitlink:git-show-branch[1].
See gitlink:git-show-branch[1].
+tar.umask::
+	By default, gitlink:git-tar-tree[1] sets file and directory modes
+ to 0666 or 0777. While this is both useful and acceptable for projects
+ such as the Linux Kernel, it might be excessive for other projects.
+ With this variable, it becomes possible to tell
+ gitlink:git-tar-tree[1] to apply a specific umask to the modes above.
+ The special value "user" indicates that the user's current umask will
+ be used. This should be enough for most projects, as it will lead to
+ the same permissions as gitlink:git-checkout[1] would use. The default
+ value remains 0, which means world read-write.
+
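+For example, to get group-friendly permissions in generated tar archives
+(the value is illustrative):
+
+------------
+$ git repo-config tar.umask 002
+------------
+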
user.email::
Your email address to be recorded in any newly created commits.
Can be overridden by the 'GIT_AUTHOR_EMAIL' and 'GIT_COMMITTER_EMAIL'
full shell on the machine, there is a restricted shell which only allows
users to do git pushes and pulls; see gitlink:git-shell[1].
-Put all the committers should in the same group, and make the repository
+Put all the committers in the same group, and make the repository
writable by that group:
------------------------------------------------
-u::
Synonym for "-p".
+--raw::
+ Generate the raw format.
+
--patch-with-raw::
- Generate patch but keep also the default raw diff output.
+ Synonym for "-p --raw".
--stat::
- Generate a diffstat instead of a patch.
+ Generate a diffstat.
--summary::
Output a condensed summary of extended header information
such as creations, renames and mode changes.
--patch-with-stat::
- Generate patch and prepend its diffstat.
+ Synonym for "-p --stat".
-z::
\0 line termination on output
--name-status::
Show only names and status of changed files.
+--color::
+ Show colored diff.
+
+--no-color::
+ Turn off colored diff, even when the configuration file
+ gives the default to color output.
+
+--color-words::
+ Show colored word diff, i.e. color words which have changed.
+
+--no-renames::
+ Turn off rename detection, even when the configuration
+ file gives the default to do so.
+
--full-index::
	Instead of the first handful of characters, show the full
object name of pre- and post-image blob on the "index"
- line when generating a patch format output.
+ line when generating a patch format output.
+
+--binary::
+ In addition to --full-index, output "binary diff" that
+ can be applied with "git apply".
--abbrev[=<n>]::
Instead of showing the full 40-byte hexadecimal object
Swap two inputs; that is, show differences from index or
on-disk file to tree contents.
+--text::
+ Treat all files as text.
+
+-a::
+ Shorthand for "--text".
+
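+As a quick illustration of the new options (the revisions are
+placeholders):
+
+------------
+$ git diff --stat HEAD^ HEAD         ;# diffstat only
+$ git diff --color-words HEAD^ HEAD  ;# word-level colored diff
+$ git diff --binary HEAD^ HEAD       ;# patch usable by "git apply" for binaries
+------------
+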
For more detailed explanation on these common options, see also
link:diffcore.html[diffcore documentation].
DESCRIPTION
-----------
Updates the index file for given paths, or all modified files if
-'-a' is specified, and makes a commit object. The command
-VISUAL and EDITOR environment variables to edit the commit log
-message.
+'-a' is specified, and makes a commit object. The command specified
+by either the VISUAL or EDITOR environment variable is used to edit
+the commit log message.
Several environment variables are used during commits. They are
documented in gitlink:git-commit-tree[1].
SYNOPSIS
--------
-'git-cvsexportcommmit' [-h] [-v] [-c] [-p] [-f] [-m msgprefix] [PARENTCOMMIT] COMMITID
+'git-cvsexportcommit' [-h] [-v] [-c] [-p] [-a] [-f] [-m msgprefix] [PARENTCOMMIT] COMMITID
DESCRIPTION
commit if any hunks fail to apply or there were other problems.
-p::
- Be pedantic (paranoid) when applying patches. Invokes patch with
+ Be pedantic (paranoid) when applying patches. Invokes patch with
--fuzz=0
+-a::
+ Add authorship information. Adds Author line, and Committer (if
+ different from Author) to the message.
+
-f::
Force the merge even if the files are not up to date.
+
It is not recommended to use this feature if you intend to
export changes back to CVS again later with
-git-link[1]::git-cvsexportcommit.
+gitlink:git-cvsexportcommit[1].
OUTPUT
------
'git-daemon' [--verbose] [--syslog] [--inetd | --port=n] [--export-all]
[--timeout=n] [--init-timeout=n] [--strict-paths]
[--base-path=path] [--user-path | --user-path=path]
- [directory...]
+ [--reuseaddr] [--detach] [--pid-file=file] [directory...]
DESCRIPTION
-----------
--verbose::
Log details about the incoming connections and requested files.
+--reuseaddr::
+ Use SO_REUSEADDR when binding the listening socket.
+ This allows the server to restart without waiting for
+ old connections to time out.
+
+--detach::
+ Detach from the shell. Implies --syslog.
+
+--pid-file=file::
+ Save the process id in 'file'.
+
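+For instance, a standalone daemon could be started with the new options
+like this (the paths are illustrative only):
+
+------------
+$ git-daemon --verbose --detach --reuseaddr \
+	--pid-file=/var/run/git-daemon.pid \
+	--base-path=/pub/scm /pub/scm
+------------
+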
<directory>::
A directory to add to the whitelist of allowed directories. Unless
--strict-paths is specified this will also include subdirectories
commit with these flags.
-q::
- Remain silent even on nonexisting files
+ Remain silent even on nonexistent files
Output format
-------------
SYNOPSIS
--------
-'git-diff' [ --diff-options ] <ent>{0,2} [<path>...]
+'git-diff' [ --diff-options ] <tree-ish>{0,2} [<path>...]
DESCRIPTION
-----------
-Show changes between two ents, an ent and the working tree, an
-ent and the index file, or the index file and the working tree.
+Show changes between two trees, a tree and the working tree, a
+tree and the index file, or the index file and the working tree.
The combination of what is compared with what is determined by
-the number of ents given to the command.
+the number of trees given to the command.
-* When no <ent> is given, the working tree and the index
- file is compared, using `git-diff-files`.
+* When no <tree-ish> is given, the working tree and the index
+ file are compared, using `git-diff-files`.
-* When one <ent> is given, the working tree and the named
- tree is compared, using `git-diff-index`. The option
+* When one <tree-ish> is given, the working tree and the named
+ tree are compared, using `git-diff-index`. The option
`--cached` can be given to compare the index file and
the named tree.
-* When two <ent>s are given, these two trees are compared
+* When two <tree-ish>s are given, these two trees are compared
using `git-diff-tree`.
OPTIONS
SYNOPSIS
--------
[verse]
-'git-format-patch' [-n | -k] [-o <dir> | --stdout] [--attach]
+'git-format-patch' [-n | -k] [-o <dir> | --stdout] [--attach] [--thread]
[-s | --signoff] [--diff-options] [--start-number <n>]
+ [--in-reply-to=Message-Id]
<since>[..<until>]
DESCRIPTION
If -n is specified, instead of "[PATCH] Subject", the first line
is formatted as "[PATCH n/m] Subject".
+If given --thread, git-format-patch will generate In-Reply-To and
+References headers to make the second and subsequent patch mails appear
+as replies to the first mail; this also generates a Message-Id header to
+reference.
OPTIONS
-------
--attach::
Create attachments instead of inlining patches.
+--thread::
+ Add In-Reply-To and References headers to make the second and
+ subsequent mails appear as replies to the first. Also generates
+ the Message-Id header to reference.
+
+--in-reply-to=Message-Id::
+ Make the first mail (or all the mails with --no-thread) appear as a
+ reply to the given Message-Id, which avoids breaking threads to
+ provide a new patch series.
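+
+For example (the Message-Id and output directory are placeholders), a
+series could be threaded onto an earlier discussion with:
+
+------------
+$ git format-patch --thread --in-reply-to='<message.id@example.com>' \
+	-o patches/ origin..HEAD
+------------
+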
CONFIGURATION
-------------
[verse]
'git-grep' [--cached]
[-a | --text] [-I] [-i | --ignore-case] [-w | --word-regexp]
- [-v | --invert-match]
+ [-v | --invert-match] [--full-name]
[-E | --extended-regexp] [-G | --basic-regexp] [-F | --fixed-strings]
[-n] [-l | --files-with-matches] [-L | --files-without-match]
[-c | --count]
[-A <post-context>] [-B <pre-context>] [-C <context>]
- [-f <file>] [-e] <pattern>
+ [-f <file>] [-e] <pattern> [--and|--or|--not|(|)|-e <pattern>...]
[<tree>...]
[--] [<path>...]
-v | --invert-match::
Select non-matching lines.
+--full-name::
+ When run from a subdirectory, the command usually
+ outputs paths relative to the current directory. This
+ option forces paths to be output relative to the project
+ top directory.
+
-E | --extended-regexp | -G | --basic-regexp::
Use POSIX extended/basic regexp for patterns. Default
is to use basic regexp.
-e::
The next parameter is the pattern. This option has to be
used for patterns starting with - and should be used in
- scripts passing user input to grep.
+ scripts passing user input to grep. Multiple patterns are
+ combined by 'or'.
+
+--and | --or | --not | ( | )::
+ Specify how multiple patterns are combined using boolean
+ expressions. `--or` is the default operator. `--and` has
+ higher precedence than `--or`. `-e` has to be used for all
+ patterns.
`<tree>...`::
Search blobs in the trees for specified patterns.
-`--`::
+\--::
Signals the end of options; the rest of the parameters
are <path> limiters.
+Example
+-------
+
+git grep -e \'#define\' --and \( -e MAX_PATH -e PATH_MAX \)::
+ Looks for a line that has `#define` and either `MAX_PATH` or
+ `PATH_MAX`.
+
Author
------
Originally written by Linus Torvalds <torvalds@osdl.org>, later
SYNOPSIS
--------
-'git-http-fetch' [-c] [-t] [-a] [-d] [-v] [-w filename] [--recover] <commit> <url>
+'git-http-fetch' [-c] [-t] [-a] [-d] [-v] [-w filename] [--recover] [--stdin] <commit> <url>
DESCRIPTION
-----------
Writes the commit-id into the filename under $GIT_DIR/refs/<filename> on
the local end after the transfer is complete.
+--stdin::
+ Instead of a commit id on the commandline (which is not expected in this
+ case), 'git-http-fetch' expects lines on stdin in the format
+
+ <commit-id>['\t'<filename-as-in--w>]
+
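+A minimal sketch of feeding such lines (the URL, object name and target
+filename are placeholders, and it is assumed that only the URL remains on
+the command line when '--stdin' is used):
+
+------------
+$ printf '%s\theads/master\n' "$commit" |
+	git-http-fetch --stdin http://example.com/repo.git/
+------------
+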
Author
------
Written by Linus Torvalds <torvalds@osdl.org>
SYNOPSIS
--------
-'git-init-db' [--template=<template_directory>] [--shared]
+'git-init-db' [--template=<template_directory>] [--shared[=<permissions>]]
OPTIONS
-------
+
+--
+
--template=<template_directory>::
- Provide the directory from which templates will be used.
- The default template directory is `/usr/share/git-core/templates`.
---shared::
- Specify that the git repository is to be shared amongst several users.
+Provide the directory from which templates will be used. The default template
+directory is `/usr/share/git-core/templates`.
+
+When specified, `<template_directory>` is used as the source of the template
+files rather than the default. The template files include some directory
+structure, some suggested "exclude patterns", and copies of non-executing
+"hook" files. The suggested patterns and hook files are all modifiable and
+extensible.
+
+--shared[={false|true|umask|group|all|world|everybody}]::
+
+Specify that the git repository is to be shared amongst several users. This
+allows users belonging to the same group to push into that
+repository. When specified, the config variable "core.sharedRepository" is
+set so that files and directories under `$GIT_DIR` are created with the
+requested permissions. When not specified, git will use permissions reported
+by umask(2).
+
+The option can have the following values, defaulting to 'group' if no value
+is given:
+
+ - 'umask' (or 'false'): Use permissions reported by umask(2). The default,
+ when `--shared` is not specified.
+
+ - 'group' (or 'true'): Make the repository group-writable (and g+sx, since
+   the git group may not be the primary group of all users).
+
+ - 'all' (or 'world' or 'everybody'): Same as 'group', but make the repository
+ readable by all users.
+
+--
DESCRIPTION
-----------
This command creates an empty git repository - basically a `.git` directory
with subdirectories for `objects`, `refs/heads`, `refs/tags`, and
-templated files.
+template files.
An initial `HEAD` file that references the HEAD of the master branch
is also created.
-If `--template=<template_directory>` is specified, `<template_directory>`
-is used as the source of the template files rather than the default.
-The template files include some directory structure, some suggested
-"exclude patterns", and copies of non-executing "hook" files. The
-suggested patterns and hook files are all modifiable and extensible.
-
If the `$GIT_DIR` environment variable is set then it specifies a path
to use instead of `./.git` for the base of the repository.
environment variable then the sha1 directories are created underneath -
otherwise the default `$GIT_DIR/objects` directory is used.
-A shared repository allows users belonging to the same group to push into that
-repository. When specifying `--shared` the config variable "core.sharedRepository"
-is set to 'true' so that directories under `$GIT_DIR` are made group writable
-(and g+sx, since the git group may be not the primary group of all users).
-
Running `git-init-db` in an existing repository is safe. It will not overwrite
things that are already there. The primary reason for rerunning `git-init-db`
is to pick up newly added templates.
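+
+For example, a repository meant to be pushed into by a whole group could
+be set up like this (the path is just an illustration):
+
+------------
+$ mkdir /pub/scm/project.git && cd /pub/scm/project.git
+$ git --bare init-db --shared=group
+------------
+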
--- /dev/null
+git-instaweb(1)
+===============
+
+NAME
+----
+git-instaweb - instantly browse your working repository in gitweb
+
+SYNOPSIS
+--------
+'git-instaweb' [--local] [--httpd=<httpd>] [--port=<port>] [--browser=<browser>]
+
+'git-instaweb' [--start] [--stop] [--restart]
+
+DESCRIPTION
+-----------
+A simple script to set up gitweb and a web server for browsing the local
+repository.
+
+OPTIONS
+-------
+
+-l|--local::
+ Only bind the web server to the local IP (127.0.0.1).
+
+-d|--httpd::
+ The HTTP daemon command-line that will be executed.
+ Command-line options may be specified here, and the
+ configuration file will be added at the end of the command-line.
+ Currently, lighttpd and apache2 are the only supported servers.
+ (Default: lighttpd)
+
+-m|--module-path::
+ The module path (only needed if httpd is Apache).
+ (Default: /usr/lib/apache2/modules)
+
+-p|--port::
+ The port number to bind the httpd to. (Default: 1234)
+
+-b|--browser::
+ The web browser command-line to execute to view the gitweb page.
+ If blank, the URL of the gitweb instance will be printed to
+ stdout. (Default: 'firefox')
+
+--start::
+ Start the httpd instance and exit. This does not generate
+ any of the configuration files for spawning a new instance.
+
+--stop::
+ Stop the httpd instance and exit. This does not generate
+ any of the configuration files for spawning a new instance,
+ nor does it close the browser.
+
+--restart::
+ Restart the httpd instance and exit. This does not generate
+ any of the configuration files for spawning a new instance.
+
+CONFIGURATION
+-------------
+
+You may specify configuration in your .git/config file:
+
+-----------------------------------------------------------------------
+[instaweb]
+ local = true
+ httpd = apache2 -f
+ port = 4321
+ browser = konqueror
+ modulepath = /usr/lib/apache2/modules
+
+-----------------------------------------------------------------------
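+
+For example, assuming lighttpd is installed, the following starts and
+later stops an instance on an arbitrary port:
+
+------------
+$ git-instaweb --httpd=lighttpd --port=4321   ;# start browsing
+$ git-instaweb --stop                         ;# shut the instance down
+------------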
+
+Author
+------
+Written by Eric Wong <normalperson@yhbt.net>
+
+Documentation
+--------------
+Documentation by Eric Wong <normalperson@yhbt.net>.
+
+GIT
+---
+Part of the gitlink:git[7] suite
+
Writes the commit-id into the filename under $GIT_DIR/refs/<filename> on
the local end after the transfer is complete.
+--stdin::
+ Instead of a commit id on the commandline (which is not expected in this
+ case), 'git-local-fetch' expects lines on stdin in the format
+
+ <commit-id>['\t'<filename-as-in--w>]
+
Author
------
Written by Junio C Hamano <junkio@cox.net>
An example:
--------------------------------------------------------------
- $ cat .git/ignore
+ $ cat .git/info/exclude
# ignore objects and archives, anywhere in the tree.
*.[oa]
$ cat Documentation/.gitignore
!foo.html
$ git-ls-files --ignored \
--exclude='Documentation/*.[0-9]' \
- --exclude-from=.git/ignore \
+ --exclude-from=.git/info/exclude \
--exclude-per-directory=.gitignore
--------------------------------------------------------------
-b::
If any file doesn't begin with a From line, assume it is a
- single mail message instead of signalling error.
+ single mail message instead of signaling error.
-d<prec>::
Instead of the default 4 digits with leading zeros,
stops before touching anything.
So in the above two "failed merge" cases, you do not have to
-worry about lossage of data --- you simply were not ready to do
+worry about loss of data --- you simply were not ready to do
a merge, so no merge happened at all. You may want to finish
whatever you were in the middle of doing, and retry the same
pull after you are done and ready.
List all commits reachable from all refs
--stdin::
- Read from stdin, append "(<rev_name>)" to all sha1's of name'able
+ Read from stdin, append "(<rev_name>)" to all sha1's of nameable
commits, and pass to stdout
EXAMPLE
-------
Given a commit, find out where it is relative to the local refs. Say somebody
-wrote you about that phantastic commit 33db5f4d9027a10e477ccf054b2c1ab94f74c85a.
+wrote you about that fantastic commit 33db5f4d9027a10e477ccf054b2c1ab94f74c85a.
Of course, you look into the commit, but that only tells you what happened, but
not the context.
A git tag of the form p4/xx is created for every change imported from
the Perforce repository where xx is the Perforce changeset number.
Therefore after the import you can use git to access any commit by its
-Perforce number, eg. git show p4/327.
+Perforce number, e.g. git show p4/327.
The tag associated with the HEAD commit is also how `git-p4import`
determines if there are new changes to incrementally import from the
Notes
-----
-You can interrupt the import (eg. ctrl-c) at any time and restart it
+You can interrupt the import (e.g. ctrl-c) at any time and restart it
without worry.
Author information is automatically determined by querying the
--all::
- Processes all packs. Any filenames on the commandline are ignored.
+ Processes all packs. Any filenames on the command line are ignored.
--alt-odb::
Don't require objects present in packs from alternate object
SYNOPSIS
--------
-'git-push' [--all] [--tags] [--force] <repository> <refspec>...
+'git-push' [--all] [--tags] [-f | --force] <repository> <refspec>...
DESCRIPTION
-----------
-f, \--force::
Usually, the command refuses to update a remote ref that is
- not a descendent of the local ref used to overwrite it.
+ not a descendant of the local ref used to overwrite it.
This flag disables the check. This can cause the
remote repository to lose commits; use it with care.
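+
+For example (the repository nickname and branch are placeholders), a
+deliberately rewound branch could be pushed out with:
+
+------------
+$ git push --force origin master
+------------
+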
% git repo-config core.filemode true
------------
-The hypothetic proxy command entries actually have a postfix to discern
-to what URL they apply. Here is how to change the entry for kernel.org
+The hypothetical proxy command entries actually have a postfix to discern
+what URL they apply to. Here is how to change the entry for kernel.org
to "ssh".
------------
[ \--sparse ]
[ \--no-merges ]
[ \--remove-empty ]
+ [ \--not ]
[ \--all ]
[ \--topo-order ]
[ \--parents ]
[ [\--objects | \--objects-edge] [ \--unpacked ] ]
[ \--pretty | \--header ]
[ \--bisect ]
+ [ \--merge ]
<commit>... [ \-- <paths>... ]
DESCRIPTION
A special notation <commit1>..<commit2> can be used as a
short-hand for {caret}<commit1> <commit2>.
+Another special notation is <commit1>...<commit2> which is useful for
+merges. The resulting set of commits is the symmetric difference
+between the two operands. The following two commands are equivalent:
+
+------------
+$ git-rev-list A B --not $(git-merge-base --all A B)
+$ git-rev-list A...B
+------------
OPTIONS
-------
Print the contents of the commit in raw-format; each
record is separated with a NUL character.
+--parents::
+ Print the parents of the commit.
+
--objects::
Print the object IDs of any object referenced by the listed commits.
'git-rev-list --objects foo ^bar' thus means "send me all object IDs
--objects-edge::
Similar to `--objects`, but also print the IDs of
- excluded commits refixed with a `-` character. This is
+ excluded commits prefixed with a `-` character. This is
	used by `git-pack-objects` to build a 'thin' pack, which
records objects in deltified form based on objects
contained in these excluded commits to reduce network
--remove-empty::
Stop when a given path disappears from the tree.
+--no-merges::
+ Do not print commits with more than one parent.
+
+--not::
+ Reverses the meaning of the '{caret}' prefix (or lack
+ thereof) for all following revision specifiers, up to
+ the next `--not`.
+
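+For example (A, B and C are placeholder refs), the following two
+commands name the same set of commits:
+
+------------
+$ git-rev-list A ^B ^C
+$ git-rev-list A --not B C
+------------
+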
--all::
Pretend as if all the refs in `$GIT_DIR/refs/` are
listed on the command line as <commit>.
topological order (i.e. descendant commits are shown
before their parents).
+--merge::
+ After a failed merge, show refs that touch files having a
+ conflict and don't exist on all heads to merge.
+
Author
------
Written by Linus Torvalds <torvalds@osdl.org>
and dereference the tag recursively until a non-tag object is
found.
-'git-rev-parse' also accepts a prefix '{caret}' to revision parameter,
-which is passed to 'git-rev-list'. Two revision parameters
-concatenated with '..' is a short-hand for writing a range
-between them. I.e. 'r1..r2' is equivalent to saying '{caret}r1 r2'
-
Here is an illustration, by Jon Loeliger.  Both commit nodes B
and C are parents of commit node A.  Parent commits are ordered
left-to-right.
G H I J
\ / \ /
D E F
- \ | /
- \ | /
- \|/
+ \ | / \
+ \ | / |
+ \|/ |
B C
\ /
\ /
J = F^2 = B^3^2 = A^^3^2
+SPECIFYING RANGES
+-----------------
+
+History traversing commands such as `git-log` operate on a set
+of commits, not just a single commit. To these commands,
+specifying a single revision with the notation described in the
+previous section means the set of commits reachable from that
+commit, following the commit ancestry chain.
+
+To exclude commits reachable from a commit, a prefix `{caret}`
+notation is used. E.g. "`{caret}r1 r2`" means commits reachable
+from `r2` but exclude the ones reachable from `r1`.
+
+This set operation appears so often that there is a shorthand
+for it. "`r1..r2`" is equivalent to "`{caret}r1 r2`". It is
+the difference of two sets (subtract the set of commits
+reachable from `r1` from the set of commits reachable from
+`r2`).
+
+A similar notation "`r1\...r2`" is called symmetric difference
+of `r1` and `r2` and is defined as
+"`r1 r2 --not $(git-merge-base --all r1 r2)`".
+It is the set of commits that are reachable from either one of
+`r1` or `r2` but not from both.
+
+Here are a few examples:
+
+ D A B D
+ D F A B C D F
+ ^A G B D
+ ^A F B C F
+ G...I C D F G I
+ ^B G I C D F G I
+
Author
------
Written by Linus Torvalds <torvalds@osdl.org> and
appear in topological order (i.e., descendant commits
are shown before their parents).
+--sparse::
+ By default, the output omits merges that are reachable
+ from only one tip being shown. This option makes them
+ visible.
+
--more=<n>::
Usually the command stops output upon showing the commit
that is the common ancestor of all the branches. This
SYNOPSIS
--------
-'git-status'
+'git-status' <options>...
DESCRIPTION
-----------
the current HEAD commit, the command exits with non-zero
status.
+The command takes the same set of options as `git-commit`; it
+shows what would be committed if the same options are given to
+`git-commit`.
+
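+For example, assuming `-a` is among the accepted `git-commit` options,
+the following previews an "update all tracked files" commit:
+
+------------
+$ git status -a
+------------
+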
OUTPUT
------
--- /dev/null
+git-svn(1)
+==========
+
+NAME
+----
+git-svn - bidirectional operation between a single Subversion branch and git
+
+SYNOPSIS
+--------
+'git-svn' <command> [options] [arguments]
+
+DESCRIPTION
+-----------
+git-svn is a simple conduit for changesets between a single Subversion
+branch and git.
+
+git-svn is not to be confused with git-svnimport. They were designed
+with very different goals in mind.
+
+git-svn is designed for an individual developer who wants a
+bidirectional flow of changesets between a single branch in Subversion
+and an arbitrary number of branches in git. git-svnimport is designed
+for read-only operation on repositories that match a particular layout
+(albeit the recommended one by SVN developers).
+
+For importing svn, git-svnimport is potentially more powerful when
+operating on repositories organized under the recommended
+trunk/branch/tags structure, and should be faster, too.
+
+git-svn mostly ignores the very limited view of branching that
+Subversion has. This allows git-svn to be much easier to use,
+especially on repositories that are not organized in a manner that
+git-svnimport is designed for.
+
+COMMANDS
+--------
+init::
+ Creates an empty git repository with additional metadata
+ directories for git-svn. The Subversion URL must be specified
+ as a command-line argument.
+
+fetch::
+ Fetch unfetched revisions from the Subversion URL we are
+ tracking. refs/remotes/git-svn will be updated to the
+ latest revision.
+
+ Note: You should never attempt to modify the remotes/git-svn
+ branch outside of git-svn. Instead, create a branch from
+ remotes/git-svn and work on that branch. Use the 'commit'
+ command (see below) to write git commits back to
+ remotes/git-svn.
+
+ See 'Additional Fetch Arguments' if you are interested in
+ manually joining branches on commit.
+
+commit::
+ Commit specified commit or tree objects to SVN. This relies on
+ your imported fetch data being up-to-date. This makes
+ absolutely no attempts to do patching when committing to SVN, it
+	absolutely no attempt to do patching when committing to SVN; it
+ commit. All merging is assumed to have taken place
+ independently of git-svn functions.
+
+rebuild::
+ Not a part of daily usage, but this is a useful command if
+ you've just cloned a repository (using git-clone) that was
+ tracked with git-svn. Unfortunately, git-clone does not clone
+ git-svn metadata and the svn working tree that git-svn uses for
+ its operations. This rebuilds the metadata so git-svn can
+ resume fetch operations. A Subversion URL may be optionally
+ specified at the command-line if the directory/repository you're
+ tracking has moved or changed protocols.
+
+show-ignore::
+ Recursively finds and lists the svn:ignore property on
+ directories. The output is suitable for appending to
+ the $GIT_DIR/info/exclude file.
+
+OPTIONS
+-------
+-r <ARG>::
+--revision <ARG>::
+ Only used with the 'fetch' command.
+
+ Takes any valid -r<argument> svn would accept and passes it
+ directly to svn. -r<ARG1>:<ARG2> ranges and "{" DATE "}" syntax
+	are also supported. This is passed directly to svn; see the svn
+ documentation for more details.
+
+ This can allow you to make partial mirrors when running fetch.
+
+-::
+--stdin::
+ Only used with the 'commit' command.
+
+ Read a list of commits from stdin and commit them in reverse
+ order. Only the leading sha1 is read from each line, so
+ git-rev-list --pretty=oneline output can be used.
+
+--rmdir::
+ Only used with the 'commit' command.
+
+ Remove directories from the SVN tree if there are no files left
+ behind. SVN can version empty directories, and they are not
+ removed by default if there are no files left in them. git
+ cannot version empty directories. Enabling this flag will make
+ the commit to SVN act like git.
+
+ repo-config key: svn.rmdir
+
+-e::
+--edit::
+ Only used with the 'commit' command.
+
+ Edit the commit message before committing to SVN. This is off by
+ default for objects that are commits, and forced on when committing
+ tree objects.
+
+ repo-config key: svn.edit
+
+-l<num>::
+--find-copies-harder::
+ Both of these are only used with the 'commit' command.
+
+	They are both passed directly to git-diff-tree; see
+ git-diff-tree(1) for more information.
+
+ repo-config key: svn.l
+ repo-config key: svn.findcopiesharder
+
+-A<filename>::
+--authors-file=<filename>::
+
+ Syntax is compatible with the files used by git-svnimport and
+ git-cvsimport:
+
+------------------------------------------------------------------------
+loginname = Joe User <user@example.com>
+------------------------------------------------------------------------
+
+ If this option is specified and git-svn encounters an SVN
+ committer name that does not exist in the authors-file, git-svn
+ will abort operation. The user will then have to add the
+ appropriate entry. Re-running the previous git-svn command
+ after the authors-file is modified should continue operation.
+
+ repo-config key: svn.authors-file
+
+ADVANCED OPTIONS
+----------------
+-b<refname>::
+--branch <refname>::
+ Used with 'fetch' or 'commit'.
+
+ This can be used to join arbitrary git branches to remotes/git-svn
+ on new commits where the tree object is equivalent.
+
+ When used with different GIT_SVN_ID values, tags and branches in
+ SVN can be tracked this way, as can some merges where the heads
+ end up having completely equivalent content. This can even be
+ used to track branches across multiple SVN _repositories_.
+
+ This option may be specified multiple times, once for each
+ branch.
+
+ repo-config key: svn.branch
+
+-i<GIT_SVN_ID>::
+--id <GIT_SVN_ID>::
+ This sets GIT_SVN_ID (instead of using the environment). See
+ the section on "Tracking Multiple Repositories or Branches" for
+ more information on using GIT_SVN_ID.
+
+COMPATIBILITY OPTIONS
+---------------------
+--upgrade::
+ Only used with the 'rebuild' command.
+
+ Run this if you used an old version of git-svn that used
+ "git-svn-HEAD" instead of "remotes/git-svn" as the branch
+ for tracking the remote.
+
+--no-ignore-externals::
+	Only used with the 'fetch' and 'rebuild' commands.
+
+ By default, git-svn passes --ignore-externals to svn to avoid
+ fetching svn:external trees into git. Pass this flag to enable
+ externals tracking directly via git.
+
+ Versions of svn that do not support --ignore-externals are
+ automatically detected and this flag will be automatically
+ enabled for them.
+
+ Otherwise, do not enable this flag unless you know what you're
+ doing.
+
+ repo-config key: svn.noignoreexternals
+
+Basic Examples
+~~~~~~~~~~~~~~
+
+Tracking and contributing to a Subversion-managed project:
+
+------------------------------------------------------------------------
+# Initialize a tree (like git init-db):
+ git-svn init http://svn.foo.org/project/trunk
+# Fetch remote revisions:
+ git-svn fetch
+# Create your own branch to hack on:
+ git checkout -b my-branch remotes/git-svn
+# Commit only the git commits you want to SVN:
+ git-svn commit <tree-ish> [<tree-ish_2> ...]
+# Commit all the git commits from my-branch that don't exist in SVN:
+ git-svn commit remotes/git-svn..my-branch
+# Something is committed to SVN, pull the latest into your branch:
+ git-svn fetch && git pull . remotes/git-svn
+# Append svn:ignore settings to the default git exclude file:
+ git-svn show-ignore >> .git/info/exclude
+------------------------------------------------------------------------
+
+DESIGN PHILOSOPHY
+-----------------
+Merge tracking in Subversion is lacking and doing branched development
+with Subversion is cumbersome as a result. git-svn completely forgoes
+any automated merge/branch tracking on the Subversion side and leaves it
+entirely up to the user on the git side. It's simply not worth it to do
+a useful translation when the original signal is weak.
+
+TRACKING MULTIPLE REPOSITORIES OR BRANCHES
+------------------------------------------
+This is for advanced users, most users should ignore this section.
+
+Because git-svn does not care about relationships between different
+branches or directories in a Subversion repository, git-svn has a simple
+hack to allow it to track an arbitrary number of related _or_ unrelated
+SVN repositories via one git repository. Simply set the GIT_SVN_ID
+environment variable to a name other than "git-svn" (the default)
+and git-svn will ignore the contents of the $GIT_DIR/git-svn directory
+and instead do all of its work in $GIT_DIR/$GIT_SVN_ID for that
+invocation. The interface branch will be remotes/$GIT_SVN_ID, instead of
+remotes/git-svn. Any remotes/$GIT_SVN_ID branch should never be modified
+by the user outside of git-svn commands.
+
+ADDITIONAL FETCH ARGUMENTS
+--------------------------
+This is for advanced users, most users should ignore this section.
+
+Unfetched SVN revisions may be imported as children of existing commits
+by specifying additional arguments to 'fetch'. Additional parents may
+optionally be specified in the form of sha1 hex sums at the
+command-line. Unfetched SVN revisions may also be tied to particular
+git commits with the following syntax:
+
+ svn_revision_number=git_commit_sha1
+
+This allows you to tie unfetched SVN revision 375 to your current HEAD::
+
+ `git-svn fetch 375=$(git-rev-parse HEAD)`
+
+Advanced Example: Tracking a Reorganized Repository
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+If you're tracking a directory that has moved, or otherwise been
+branched or tagged off of another directory in the repository and you
+care about the full history of the project, then you can read this
+section.
+
+This is how Yann Dirson tracked the trunk of the ufoai directory when
+the /trunk directory of his repository was moved to /ufoai/trunk and
+he needed to continue tracking /ufoai/trunk where /trunk left off.
+
+------------------------------------------------------------------------
+ # This log message shows when the repository was reorganized:
+ r166 | ydirson | 2006-03-02 01:36:55 +0100 (Thu, 02 Mar 2006) | 1 line
+ Changed paths:
+ D /trunk
+ A /ufoai/trunk (from /trunk:165)
+
+ # First we start tracking the old revisions:
+ GIT_SVN_ID=git-oldsvn git-svn init \
+ https://svn.sourceforge.net/svnroot/ufoai/trunk
+ GIT_SVN_ID=git-oldsvn git-svn fetch -r1:165
+
+ # And now, we continue tracking the new revisions:
+ GIT_SVN_ID=git-newsvn git-svn init \
+ https://svn.sourceforge.net/svnroot/ufoai/ufoai/trunk
+ GIT_SVN_ID=git-newsvn git-svn fetch \
+ 166=`git-rev-parse refs/remotes/git-oldsvn`
+------------------------------------------------------------------------
+
+BUGS
+----
+If somebody commits a conflicting changeset to SVN at a bad moment
+(right before you commit) causing a conflict and your commit to fail,
+your svn working tree ($GIT_DIR/git-svn/tree) may be dirtied. The
+easiest thing to do is probably just to rm -rf $GIT_DIR/git-svn/tree and
+run 'rebuild'.
+
+We ignore all SVN properties except svn:executable. They are too
+difficult to map since we rely heavily on git write-tree being _exactly_ the
+same on both the SVN and git working trees and I prefer not to clutter
+working trees with metadata files.
+
+svn:keywords can't be ignored in Subversion (at least I don't know of
+a way to ignore them).
+
+Renamed and copied directories are not detected by git and hence not
+tracked when committing to SVN. I do not plan on adding support for
+this as it's quite difficult and time-consuming to get working for all
+the possible corner cases (git doesn't do it, either). Renamed and
+copied files are fully supported if they're similar enough for git to
+detect them.
+
+Author
+------
+Written by Eric Wong <normalperson@yhbt.net>.
+
+Documentation
+-------------
+Written by Eric Wong <normalperson@yhbt.net>.
Instead of making a tar archive from local repository,
retrieve a tar archive from a remote repository.
-Examples
+CONFIGURATION
+-------------
+By default, file and directory modes are set to 0666 or 0777. It is
+possible to change this by setting the "umask" variable in the
+repository configuration as follows:
+
+[tar]
+ umask = 002 ;# group friendly
+
+The special umask value "user" indicates that the user's current umask
+will be used instead. The default value remains 0, which means world
+readable/writable files and directories.
+
+EXAMPLES
--------
git tar-tree HEAD junk | (cd /var/tmp/ && tar xf -)::
Get a tarball v1.4.0 from example.com.
+git tar-tree HEAD:Documentation/ git-docs > git-1.4.0-docs.tar::
+
+ Put everything in the current head's Documentation/ directory
+ into 'git-1.4.0-docs.tar', with the prefix 'git-docs/'.
+
Author
------
Written by Rene Scharfe.
- *gitk* (shipped with git-core)
- gitk is a simple TK GUI for browsing history of GIT repositories easily.
+ gitk is a simple Tk GUI for browsing history of GIT repositories easily.
- *gitview* (contrib/)
This command is usually not invoked directly by the end user.
The UI for the protocol is on the 'git-tar-tree' side, and the
-program pair is meant to be used to get a tar achive from a
+program pair is meant to be used to get a tar archive from a
remote repository.
SYNOPSIS
--------
-'git' [--version] [--exec-path[=GIT_EXEC_PATH]] [--help] COMMAND [ARGS]
+'git' [--version] [--exec-path[=GIT_EXEC_PATH]] [-p|--paginate]
+ [--bare] [--git-dir=GIT_DIR] [--help] COMMAND [ARGS]
DESCRIPTION
-----------
environment variable. If no path is given 'git' will print
the current setting and then exit.
+-p|--paginate::
+ Pipe all output into 'less' (or if set, $PAGER).
+
+--git-dir=<path>::
+ Set the path to the repository. This can also be controlled by
+ setting the GIT_DIR environment variable.
+
+--bare::
+ Same as --git-dir=`pwd`.
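+
+For example (the path is a placeholder), a repository elsewhere on the
+filesystem can be inspected without changing directory, and any command
+can be forced through the pager:
+
+------------
+$ git --git-dir=/pub/scm/project.git log
+$ git -p diff
+------------
+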
FURTHER DOCUMENTATION
---------------------
gitlink:git-relink[1]::
Hardlink common objects in local repositories.
+gitlink:git-svn[1]::
+ Bidirectional operation between a single Subversion branch and git.
+
gitlink:git-svnimport[1]::
Import a SVN repository into git.
gitlink:git-imap-send[1]::
Dump a mailbox from stdin into an imap folder.
+gitlink:git-instaweb[1]::
+ Instantly browse your working repository in gitweb.
+
gitlink:git-mailinfo[1]::
Extracts patch and authorship information from a single
e-mail message, optionally transliterating the commit
Starting from 0.99.9 (actually mid 0.99.8.GIT), `.git/config` file
is used to hold per-repository configuration options. It is a
-simple text file modelled after `.ini` format familiar to some
+simple text file modeled after `.ini` format familiar to some
people. Here is an example:
------------
gitlink:git-diff-files[1];
gitlink:git-diff-tree[1]
+other
+~~~~~
+'GIT_PAGER'::
+ This environment variable overrides `$PAGER`.
+
+'GIT_TRACE'::
+ If this variable is set git will print `trace:` messages on
+ stderr telling about alias expansion, built-in command
+ execution and external command execution.
+
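+For instance, to watch what a command does under the hood ('1' is simply
+a non-empty value):
+
+------------
+$ GIT_TRACE=1 git status
+------------
+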
Discussion[[Discussion]]
------------------------
include::README[]
ent::
Favorite synonym to "tree-ish" by some total geeks. See
`http://en.wikipedia.org/wiki/Ent_(Middle-earth)` for an in-depth
- explanation.
+	explanation. Avoid this term, so as not to confuse people.
fast forward::
A fast-forward is a special type of merge where you have
and at this point "git bisect" will churn for a while, and tell you what
the mid-point between those two commits is, and check that state out as
-the head of the bew "bisect" branch.
+the head of the new "bisect" branch.
Compile and reboot.
The two commits #2' and #3' in the above picture record the same
changes your e-mail submission for #2 and #3 contained, but
-probably with the new sign-off line added by the upsteam
+probably with the new sign-off line added by the upstream
maintainer and definitely with different committer and ancestry
information, they are different objects from #2 and #3 commits.
--- /dev/null
+From: Rutger Nijlunsing <rutger@nospam.com>
+Subject: Setting up a git repository which can be pushed into and pulled from over HTTP.
+Date: Thu, 10 Aug 2006 22:00:26 +0200
+
+Since Apache is one of those packages people like to compile
+themselves while others prefer the bureaucrat's dream Debian, it is
+impossible to give guidelines which will work for everyone. Just send
+some feedback to the mailing list at git@vger.kernel.org to get this
+document tailored to your favorite distro.
+
+
+What's needed:
+
+- Have an Apache web-server
+
+ On Debian:
+ $ apt-get install apache2
+  To get apache2 started by default,
+ edit /etc/default/apache2 and set NO_START=0
+
+- can edit its configuration.
+
+ This could be found under /etc/httpd, or refer to your Apache documentation.
+
+ On Debian: this means being able to edit files under /etc/apache2
+
+- can restart it.
+
+ 'apachectl --graceful' might do. If it doesn't, just stop and
+  restart apache. Be warned that active connections to your server
+ might be aborted by this.
+
+ On Debian:
+ $ /etc/init.d/apache2 restart
+ or
+ $ /etc/init.d/apache2 force-reload
+ (which seems to do the same)
+ This adds symlinks from the /etc/apache2/mods-enabled to
+ /etc/apache2/mods-available.
+
+- have permissions to chown a directory
+
+- have git installed at the server _and_ client
+
+In effect, this probably means you're going to be root.
+
+
+Step 1: setup a bare GIT repository
+-----------------------------------
+
+At the time of writing, git-http-push cannot remotely create a GIT
+repository. So we have to do that at the server side with git. Another
+option would be to generate an empty repository at the client and copy
+it to the server with WebDAV. But then you're probably the first to
+try that out :)
+
+Create the directory under the DocumentRoot of the directories served
+by Apache. As an example we take /usr/local/apache2, but try "grep
+DocumentRoot /where/ever/httpd.conf" to find your root:
+
+ $ cd /usr/local/apache/htdocs
+ $ mkdir my-new-repo.git
+
+ On Debian:
+
+ $ cd /var/www
+ $ mkdir my-new-repo.git
+
+
+Initialize a bare repository
+
+ $ cd my-new-repo.git
+ $ git --bare init-db
+
+
+Change the ownership to your web-server's credentials. Use "grep ^User
+httpd.conf" and "grep ^Group httpd.conf" to find out:
+
+ $ chown -R www.www .
+
+ On Debian:
+
+ $ chown -R www-data.www-data .
+
+
+If you do not know which user Apache runs as, you can alternatively do
+a "chmod -R a+w .", inspect the files which are created later on, and
+set the permissions appropriately.
+
+Restart apache2, and check whether http://server/my-new-repo.git gives
+a directory listing. If not, check whether apache started up
+successfully.
+
+
+Step 2: enable DAV on this repository
+-------------------------------------
+
+First make sure the dav_module is loaded. For this, insert in httpd.conf:
+
+ LoadModule dav_module libexec/httpd/libdav.so
+ AddModule mod_dav.c
+
+Also make sure that this line exists; it names the file used for
+locking DAV operations:
+
+ DAVLockDB "/usr/local/apache2/temp/DAV.lock"
+
+ On Debian these steps can be performed with:
+
+ Enable the dav and dav_fs modules of apache:
+ $ a2enmod dav_fs
+ (just to be sure. dav_fs might be unneeded, I don't know)
+ $ a2enmod dav
+ The DAV lock is located in /etc/apache2/mods-available/dav_fs.conf:
+ DAVLockDB /var/lock/apache2/DAVLock
+
+Of course, it can point somewhere else, but the string is actually just a
+prefix in some Apache configurations, and therefore the _directory_ has to
+be writable by the user Apache runs as.
+
+Then, add something like this to your httpd.conf
+
+ <Location /my-new-repo.git>
+ DAV on
+ AuthType Basic
+ AuthName "Git"
+ AuthUserFile /usr/local/apache2/conf/passwd.git
+ Require valid-user
+ </Location>
+
+ On Debian:
+ Create (or add to) /etc/apache2/conf.d/git.conf :
+
+ <Location /my-new-repo.git>
+ DAV on
+ AuthType Basic
+ AuthName "Git"
+ AuthUserFile /etc/apache2/passwd.git
+ Require valid-user
+ </Location>
+
+  Debian automatically reads all files under /etc/apache2/conf.d.
+
+The password file can be somewhere else, but it has to be readable by
+Apache and preferably not readable by the world.
+
+Create this file by
+ $ htpasswd -c /usr/local/apache2/conf/passwd.git <user>
+
+ On Debian:
+ $ htpasswd -c /etc/apache2/passwd.git <user>
+
+You will be asked for a password, and the file is created. Subsequent calls
+to htpasswd should omit the '-c' option, since you want to append to the
+existing file.
+
+You need to restart Apache.
+
+Now go to http://<username>@<servername>/my-new-repo.git in your
+browser to check whether it asks for a password and accepts the right
+password.
+
+On Debian:
+
+ To test the WebDAV part, do:
+
+ $ apt-get install litmus
+ $ litmus http://<servername>/my-new-repo.git <username> <password>
+
+ Most tests should pass.
+
+A command line tool to test WebDAV is cadaver.
+
+If you're into Windows, from XP onwards Internet Explorer supports
+WebDAV. For this, do Internet Explorer -> Open Location ->
+http://<servername>/my-new-repo.git [x] Open as webfolder -> login .
+
+
+Step 3: setup the client
+------------------------
+
+Make sure that you have HTTP support, i.e. your git was built with curl.
+The easiest way to check is to look for the executable 'git-http-push'.
+
+Then, add the following to your $HOME/.netrc (you can do without, but will be
+asked to input your password a _lot_ of times):
+
+ machine <servername>
+ login <username>
+ password <password>
+
+...and set permissions:
+ chmod 600 ~/.netrc
+
+If you want to access the web-server by its IP, you have to type that in,
+instead of the server name.
+
+To check whether all is OK, do:
+
+ curl --netrc --location -v http://<username>@<servername>/my-new-repo.git/
+
+...this should give a directory listing in HTML of /var/www/my-new-repo.git .
+
+
+Now, add the remote in your existing repository which contains the project
+you want to export:
+
+ $ git-repo-config remote.upload.url \
+ http://<username>@<servername>/my-new-repo.git/
+
+It is important to put the trailing '/'; without it, the server will send
+a redirect which git-http-push does not (yet) understand, and git-http-push
+will repeat the request infinitely.
+
+
+Step 4: make the initial push
+-----------------------------
+
+From your client repository, do
+
+ $ git push upload master
+
+This pushes branch 'master' (which is assumed to be the branch you
+want to export) to the repository called 'upload', which we previously
+defined with git-repo-config.
+
+
+Troubleshooting:
+----------------
+
+If git-http-push says
+
+ Error: no DAV locking support on remote repo http://...
+
+then it means the web-server did not accept your authentication. Make sure
+that the user name and password match in httpd.conf, .netrc and the URL
+you are uploading to.
+
+If git-http-push shows you an error (22/502) when trying to MOVE a blob,
+it means that your web-server somehow does not recognize its name in the
+request; this can happen when you start Apache, but then disable the
+network interface. A simple restart of Apache helps.
+
+Errors like (22/502) are of format (curl error code/http error
+code). So (22/404) means something like 'not found' at the server.
+
+Reading /usr/local/apache2/logs/error_log is often helpful.
+
+ On Debian: Read /var/log/apache2/error.log instead.
+
+
+Debian References: http://www.debian-administration.org/articles/285
+
+Authors
+ Johannes Schindelin <Johannes.Schindelin@gmx.de>
+ Rutger Nijlunsing <git@wingding.demon.nl>
info/exclude::
This file, by convention among Porcelains, stores the
- exclude pattern list. `git status` looks at it, but
- otherwise it is not looked at by any of the core git
- commands.
+ exclude pattern list. `.gitignore` is the per-directory
+ ignore file. `git status`, `git add`, `git rm` and `git
+ clean` look at it but the core git commands do not look
+ at it. See also: gitlink:git-ls-files[1] `--exclude-from`
+ and `--exclude-per-directory`.
remotes::
Stores shorthands to be used to give URL and default
<pasky> yes
-And Bable-like confusion flowed.
+And Babel-like confusion flowed.
<njs`> oh, hmm, and I'm not sure what this sliding window means either
(type, basename, size)).
Then we walk through this list, and calculate a delta of
- each object against the last n (tunable paramater) objects,
+ each object against the last n (tunable parameter) objects,
and pick the smallest of these deltas.
Vastly simplified, but the essence is there!
do "object name->location in packfile" translation.
<njs`> I'm assuming the real win for delta-ing large->small is
- more homogenous statistics for gzip to run over?
+ more homogeneous statistics for gzip to run over?
(You have to put the bytes in one place or another, but
putting them in a larger blob wins on compression)
Bugs happen, but they are "simple" bugs. And bugs that
actually get some object store detail wrong are almost always
- so obious that they never go anywhere.
+ so obvious that they never go anywhere.
<njs`> Yeah.
$ git cat-file -t 513feba2
blob
$ git cat-file blob 513feba2
+hello world!
hello world, again
------------------------------------------------
- https://host.xz/path/to/repo.git/
- git://host.xz/path/to/repo.git/
- git://host.xz/~user/path/to/repo.git/
-- ssh://host.xz/path/to/repo.git/
-- ssh://host.xz/~user/path/to/repo.git/
-- ssh://host.xz/~/path/to/repo.git
+- ssh://{startsb}user@{endsb}host.xz/path/to/repo.git/
+- ssh://{startsb}user@{endsb}host.xz/~user/path/to/repo.git/
+- ssh://{startsb}user@{endsb}host.xz/~/path/to/repo.git
===============================================================
-SSH Is the default transport protocol and also supports an
-scp-like syntax. Both syntaxes support username expansion,
+SSH is the default transport protocol. You can optionally specify
+which user to log-in as, and an alternate, scp-like syntax is also
+supported. Both syntaxes support username expansion,
as does the native git protocol. The following three are
identical to the last three above, respectively:
===============================================================
-- host.xz:/path/to/repo.git/
-- host.xz:~user/path/to/repo.git/
-- host.xz:path/to/repo.git
+- {startsb}user@{endsb}host.xz:/path/to/repo.git/
+- {startsb}user@{endsb}host.xz:~user/path/to/repo.git/
+- {startsb}user@{endsb}host.xz:path/to/repo.git
===============================================================
To sync with a local directory, use:
<repository> without <refspec> parameters on the command
line, <refspec> specified on `Push:` lines or `Pull:`
lines are used for `git-push` and `git-fetch`/`git-pull`,
-respectively. Multiple `Push:` and and `Pull:` lines may
+respectively. Multiple `Push:` and `Pull:` lines may
be specified for additional branch mappings.
The name of a file in `$GIT_DIR/branches` directory can be
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.4.GIT
+DEF_VER=v1.4.2.GIT
+
+LF='
+'
# First try git-describe, then see if there is a version file
# (included in release tarballs), then default
-if VN=$(git describe --abbrev=4 HEAD 2>/dev/null); then
+if VN=$(git describe --abbrev=4 HEAD 2>/dev/null) &&
+ case "$VN" in
+ *$LF*) (exit 1) ;;
+ v[0-9]*) : happy ;;
+ esac
+then
VN=$(echo "$VN" | sed -e 's/-/./g');
elif test -f version
then
which are derived from $prefix, so "make all; make prefix=/usr
install" would not work.
+Alternatively you can use the autoconf-generated ./configure script to
+set up install paths (via config.mak.autogen), so you can write instead
+
+ $ make configure ;# as yourself
+ $ ./configure --prefix=/usr ;# as yourself
+ $ make all doc ;# as yourself
+ # make install install-doc ;# as root
+
+
Issues of note:
- git normally installs a helper script wrapper called "git", which
- "libcurl" and "curl" executable. git-http-fetch and
git-fetch use them. If you do not use http
- transfer, you are probabaly OK if you do not have
+ transfer, you are probably OK if you do not have
them.
- expat library; git-http-push uses it for remote lock
git, and if you only use git to track other peoples work you'll
never notice the lack of it.
- - "wish", the TCL/Tk windowing shell is used in gitk to show the
+ - "wish", the Tcl/Tk windowing shell is used in gitk to show the
history graphically
- "ssh" is used to push and pull over the net
# Define NO_D_TYPE_IN_DIRENT if your platform defines DT_UNKNOWN but lacks
# d_type in struct dirent (latest Cygwin -- will be fixed soonish).
#
+# Define NO_C99_FORMAT if your formatted IO functions (printf/scanf et.al.)
+# do not support the 'size specifiers' introduced by C99, namely ll, hh,
+# j, z, t. (representing long long int, char, intmax_t, size_t, ptrdiff_t).
+# Some C compilers supported these specifiers prior to C99 as an extension.
+#
# Define NO_STRCASESTR if you don't have strcasestr.
#
# Define NO_STRLCPY if you don't have strlcpy.
# Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link.
# Enable it on Windows. By default, symrefs are still used.
#
+# Define NO_SVN_TESTS if you want to skip time-consuming SVN interoperability
+# tests. These tests take up a significant amount of the total test time
+# but are not needed unless you plan to talk to SVN repos.
+#
+# Define NO_FINK if you are building on Darwin/Mac OS X, have Fink
+# installed in /sw, but don't want GIT to link against any libraries
+# installed there. If defined you may specify your own (or Fink's)
+# include directories and library directories by defining CFLAGS
+# and LDFLAGS appropriately.
+#
+# Define NO_DARWIN_PORTS if you are building on Darwin/Mac OS X,
+# have DarwinPorts installed in /opt/local, but don't want GIT to
+# link against any libraries installed there. If defined you may
+# specify your own (or DarwinPorts') include directories and
+# library directories by defining CFLAGS and LDFLAGS appropriately.
+#
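+# For example, a hand-written config.mak (values below are only
+# illustrative; the same variables can be passed on the make command
+# line) could contain:
+#
+#	NO_SVN_TESTS = YesPlease
+#	NO_FINK = YesPlease
+#	CFLAGS = -g -O2 -Wall
+#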
# Define PPC_SHA1 environment variable when running make to make use of
# a bundled SHA1 routine optimized for PowerPC.
#
# Define NO_ACCURATE_DIFF if your diff program at least sometimes misses
# a missing newline at the end of the file.
#
-# Define NO_PYTHON if you want to loose all benefits of the recursive merge.
+# Define NO_PYTHON if you want to lose all benefits of the recursive merge.
#
# Define COLLISION_CHECK below if you believe that SHA1's
# 1461501637330902918203684832716283019655932542976 hashes do not give you
GIT_PYTHON_DIR = $(prefix)/share/git-core/python
# DESTDIR=
+# default configuration for gitweb
+GITWEB_CONFIG = gitweb_config.perl
+GITWEB_SITENAME =
+GITWEB_PROJECTROOT = /pub/git
+GITWEB_LIST =
+GITWEB_HOMETEXT = indextext.html
+GITWEB_CSS = gitweb.css
+GITWEB_LOGO = git-logo.png
+
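+# These can be overridden from the command line when building the CGI,
+# e.g. (values are only illustrative):
+#
+#	make GITWEB_PROJECTROOT=/srv/git \
+#	     GITWEB_SITENAME="example.org git" gitweb/gitweb.cgi
+#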
+export prefix bindir gitexecdir template_dir GIT_PYTHON_DIR
+
CC = gcc
AR = ar
TAR = tar
git-fetch.sh \
git-ls-remote.sh \
git-merge-one-file.sh git-parse-remote.sh \
- git-prune.sh git-pull.sh git-rebase.sh \
+ git-pull.sh git-rebase.sh \
git-repack.sh git-request-pull.sh git-reset.sh \
git-resolve.sh git-revert.sh git-sh-setup.sh \
git-tag.sh git-verify-tag.sh \
SCRIPT_PERL = \
git-archimport.perl git-cvsimport.perl git-relink.perl \
- git-shortlog.perl git-fmt-merge-msg.perl git-rerere.perl \
+ git-shortlog.perl git-rerere.perl \
git-annotate.perl git-cvsserver.perl \
- git-svnimport.perl git-mv.perl git-cvsexportcommit.perl \
- git-send-email.perl
+ git-svnimport.perl git-cvsexportcommit.perl \
+ git-send-email.perl git-svn.perl
SCRIPT_PYTHON = \
git-merge-recursive.py
SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.perl,%,$(SCRIPT_PERL)) \
$(patsubst %.py,%,$(SCRIPT_PYTHON)) \
- git-cherry-pick git-status
+ git-cherry-pick git-status git-instaweb
# The ones that do not have to link with lcrypto, lz nor xdiff.
SIMPLE_PROGRAMS = \
# ... and all the rest that could be moved out of bindir to gitexecdir
PROGRAMS = \
- git-checkout-index$X \
git-convert-objects$X git-fetch-pack$X git-fsck-objects$X \
git-hash-object$X git-index-pack$X git-local-fetch$X \
git-merge-base$X \
- git-merge-index$X git-mktag$X git-mktree$X git-pack-objects$X git-patch-id$X \
- git-peek-remote$X git-prune-packed$X git-receive-pack$X \
+ git-merge-index$X git-mktag$X git-mktree$X git-patch-id$X \
+ git-peek-remote$X git-receive-pack$X \
git-send-pack$X git-shell$X \
git-show-index$X git-ssh-fetch$X \
git-ssh-upload$X git-unpack-file$X \
- git-unpack-objects$X git-update-server-info$X \
+ git-update-server-info$X \
git-upload-pack$X git-verify-pack$X \
- git-symbolic-ref$X \
- git-name-rev$X git-pack-redundant$X git-repo-config$X git-var$X \
+ git-pack-redundant$X git-var$X \
git-describe$X git-merge-tree$X git-blame$X git-imap-send$X
-BUILT_INS = git-log$X git-whatchanged$X git-show$X git-update-ref$X \
- git-count-objects$X git-diff$X git-push$X git-mailsplit$X \
- git-grep$X git-add$X git-rm$X git-rev-list$X git-stripspace$X \
- git-check-ref-format$X git-rev-parse$X git-mailinfo$X \
- git-init-db$X git-tar-tree$X git-upload-tar$X git-format-patch$X \
- git-ls-files$X git-ls-tree$X git-get-tar-commit-id$X \
- git-read-tree$X git-commit-tree$X git-write-tree$X \
- git-apply$X git-show-branch$X git-diff-files$X git-update-index$X \
- git-diff-index$X git-diff-stages$X git-diff-tree$X git-cat-file$X
+BUILT_INS = \
+ git-format-patch$X git-show$X git-whatchanged$X \
+ git-get-tar-commit-id$X \
+ $(patsubst builtin-%.o,git-%$X,$(BUILTIN_OBJS))
# what 'all' will build and 'install' will install, in gitexecdir
ALL_PROGRAMS = $(PROGRAMS) $(SIMPLE_PROGRAMS) $(SCRIPTS)
blob.h cache.h commit.h csum-file.h delta.h \
diff.h object.h pack.h pkt-line.h quote.h refs.h \
run-command.h strbuf.h tag.h tree.h git-compat-util.h revision.h \
- tree-walk.h log-tree.h dir.h
+ tree-walk.h log-tree.h dir.h path-list.h unpack-trees.h builtin.h
DIFF_OBJS = \
diff.o diff-lib.o diffcore-break.o diffcore-order.o \
server-info.o setup.o sha1_file.o sha1_name.o strbuf.o \
tag.o tree.o usage.o config.o environment.o ctype.o copy.o \
fetch-clone.o revision.o pager.o tree-walk.o xdiff-interface.o \
- alloc.o $(DIFF_OBJS)
+ alloc.o merge-file.o path-list.o help.o unpack-trees.o $(DIFF_OBJS)
BUILTIN_OBJS = \
- builtin-log.o builtin-help.o builtin-count.o builtin-diff.o builtin-push.o \
- builtin-grep.o builtin-add.o builtin-rev-list.o builtin-check-ref-format.o \
- builtin-rm.o builtin-init-db.o builtin-rev-parse.o \
- builtin-tar-tree.o builtin-upload-tar.o builtin-update-index.o \
- builtin-ls-files.o builtin-ls-tree.o builtin-write-tree.o \
- builtin-read-tree.o builtin-commit-tree.o builtin-mailinfo.o \
- builtin-apply.o builtin-show-branch.o builtin-diff-files.o \
- builtin-diff-index.o builtin-diff-stages.o builtin-diff-tree.o \
- builtin-cat-file.o builtin-mailsplit.o builtin-stripspace.o \
- builtin-update-ref.o
+ builtin-add.o \
+ builtin-apply.o \
+ builtin-cat-file.o \
+ builtin-checkout-index.o \
+ builtin-check-ref-format.o \
+ builtin-commit-tree.o \
+ builtin-count-objects.o \
+ builtin-diff.o \
+ builtin-diff-files.o \
+ builtin-diff-index.o \
+ builtin-diff-stages.o \
+ builtin-diff-tree.o \
+ builtin-fmt-merge-msg.o \
+ builtin-grep.o \
+ builtin-init-db.o \
+ builtin-log.o \
+ builtin-ls-files.o \
+ builtin-ls-tree.o \
+ builtin-mailinfo.o \
+ builtin-mailsplit.o \
+ builtin-mv.o \
+ builtin-name-rev.o \
+ builtin-pack-objects.o \
+ builtin-prune.o \
+ builtin-prune-packed.o \
+ builtin-push.o \
+ builtin-read-tree.o \
+ builtin-repo-config.o \
+ builtin-rev-list.o \
+ builtin-rev-parse.o \
+ builtin-rm.o \
+ builtin-show-branch.o \
+ builtin-stripspace.o \
+ builtin-symbolic-ref.o \
+ builtin-tar-tree.o \
+ builtin-unpack-objects.o \
+ builtin-update-index.o \
+ builtin-update-ref.o \
+ builtin-upload-tar.o \
+ builtin-verify-pack.o \
+ builtin-write-tree.o
GITLIBS = $(LIB_FILE) $(XDIFF_LIB)
LIBS = $(GITLIBS) -lz
ifeq ($(uname_S),Linux)
NO_STRLCPY = YesPlease
endif
+ifeq ($(uname_S),GNU/kFreeBSD)
+ NO_STRLCPY = YesPlease
+endif
ifeq ($(uname_S),Darwin)
NEEDS_SSL_WITH_CRYPTO = YesPlease
NEEDS_LIBICONV = YesPlease
NO_STRLCPY = YesPlease
- ## fink
- ifeq ($(shell test -d /sw/lib && echo y),y)
- ALL_CFLAGS += -I/sw/include
- ALL_LDFLAGS += -L/sw/lib
+ ifndef NO_FINK
+ ifeq ($(shell test -d /sw/lib && echo y),y)
+ ALL_CFLAGS += -I/sw/include
+ ALL_LDFLAGS += -L/sw/lib
+ endif
endif
- ## darwinports
- ifeq ($(shell test -d /opt/local/lib && echo y),y)
- ALL_CFLAGS += -I/opt/local/include
- ALL_LDFLAGS += -L/opt/local/lib
+ ifndef NO_DARWIN_PORTS
+ ifeq ($(shell test -d /opt/local/lib && echo y),y)
+ ALL_CFLAGS += -I/opt/local/include
+ ALL_LDFLAGS += -L/opt/local/lib
+ endif
endif
endif
ifeq ($(uname_S),SunOS)
NO_D_TYPE_IN_DIRENT = YesPlease
NO_D_INO_IN_DIRENT = YesPlease
NO_STRCASESTR = YesPlease
- NO_STRLCPY = YesPlease
NO_SYMLINK_HEAD = YesPlease
NEEDS_LIBICONV = YesPlease
+ NO_C99_FORMAT = YesPlease
# There are conflicting reports about this.
# On some boxes NO_MMAP is needed, and not so elsewhere.
# Try uncommenting this if you see things break -- YMMV.
ARM_SHA1 = YesPlease
endif
+-include config.mak.autogen
-include config.mak
ifdef WITH_OWN_SUBPROCESS_PY
ifdef NO_D_INO_IN_DIRENT
ALL_CFLAGS += -DNO_D_INO_IN_DIRENT
endif
+ifdef NO_C99_FORMAT
+ ALL_CFLAGS += -DNO_C99_FORMAT
+endif
ifdef NO_SYMLINK_HEAD
ALL_CFLAGS += -DNO_SYMLINK_HEAD
endif
ALL_CFLAGS += -DNO_ACCURATE_DIFF
endif
-# Shell quote (do not use $(call) to accomodate ancient setups);
+# Shell quote (do not use $(call) to accommodate ancient setups);
SHA1_HEADER_SQ = $(subst ','\'',$(SHA1_HEADER))
export prefix TAR INSTALL DESTDIR SHELL_PATH template_dir
### Build rules
-all: $(ALL_PROGRAMS) $(BUILT_INS) git$X gitk
+all: $(ALL_PROGRAMS) $(BUILT_INS) git$X gitk gitweb/gitweb.cgi
all:
$(MAKE) -C templates
$(ALL_CFLAGS) -o $@ $(filter %.c,$^) \
$(BUILTIN_OBJS) $(ALL_LDFLAGS) $(LIBS)
-builtin-help.o: common-cmds.h
+help.o: common-cmds.h
$(BUILT_INS): git$X
rm -f $@ && ln git$X $@
$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh
rm -f $@ $@+
sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
+ -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
-e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \
cp $< $@+
mv $@+ $@
+gitweb/gitweb.cgi: gitweb/gitweb.perl
+ rm -f $@ $@+
+ sed -e '1s|#!.*perl|#!$(PERL_PATH_SQ)|' \
+ -e 's|++GIT_VERSION++|$(GIT_VERSION)|g' \
+ -e 's|++GIT_BINDIR++|$(bindir)|g' \
+ -e 's|++GITWEB_CONFIG++|$(GITWEB_CONFIG)|g' \
+ -e 's|++GITWEB_SITENAME++|$(GITWEB_SITENAME)|g' \
+ -e 's|++GITWEB_PROJECTROOT++|$(GITWEB_PROJECTROOT)|g' \
+ -e 's|++GITWEB_LIST++|$(GITWEB_LIST)|g' \
+ -e 's|++GITWEB_HOMETEXT++|$(GITWEB_HOMETEXT)|g' \
+ -e 's|++GITWEB_CSS++|$(GITWEB_CSS)|g' \
+ -e 's|++GITWEB_LOGO++|$(GITWEB_LOGO)|g' \
+ $< >$@+
+ chmod +x $@+
+ mv $@+ $@
+
+git-instaweb: git-instaweb.sh gitweb/gitweb.cgi gitweb/gitweb.css
+ rm -f $@ $@+
+ sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
+ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
+ -e 's/@@NO_CURL@@/$(NO_CURL)/g' \
+ -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \
+ -e '/@@GITWEB_CGI@@/r gitweb/gitweb.cgi' \
+ -e '/@@GITWEB_CGI@@/d' \
+ -e '/@@GITWEB_CSS@@/r gitweb/gitweb.css' \
+ -e '/@@GITWEB_CSS@@/d' \
+ $@.sh > $@+
+ chmod +x $@+
+ mv $@+ $@
+
+configure: configure.ac
+ rm -f $@ $<+
+ sed -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
+ $< > $<+
+ autoconf -o $@ $<+
+ rm -f $<+
+
# These can record GIT_VERSION
git$X git.spec \
$(patsubst %.sh,%,$(SCRIPT_SH)) \
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
$(LIB_FILE) $(SIMPLE_LIB)
+ssh-pull.o: ssh-fetch.c
+ssh-push.o: ssh-upload.c
git-local-fetch$X: fetch.o
git-ssh-fetch$X: rsh.o fetch.o
git-ssh-upload$X: rsh.o
git-imap-send$X: imap-send.o $(LIB_FILE)
http.o http-fetch.o http-push.o: http.h
-git-http-fetch$X: fetch.o http.o http-fetch.o $(LIB_FILE)
+git-http-fetch$X: fetch.o http.o http-fetch.o $(GITLIBS)
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
$(LIBS) $(CURL_LIBCURL) $(EXPAT_LIBEXPAT)
-git-http-push$X: revision.o http.o http-push.o $(LIB_FILE)
+git-http-push$X: revision.o http.o http-push.o $(GITLIBS)
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) \
$(LIBS) $(CURL_LIBCURL) $(EXPAT_LIBEXPAT)
# with that.
export NO_PYTHON
+export NO_SVN_TESTS
test: all
$(MAKE) -C t/ all
test-dump-cache-tree$X: dump-cache-tree.o $(GITLIBS)
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS)
+test-sha1$X: test-sha1.o $(GITLIBS)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS)
+
+check-sha1:: test-sha1$X
+ ./test-sha1.sh
+
check:
for i in *.c; do sparse $(ALL_CFLAGS) $(SPARSE_FLAGS) $$i || exit; done
$(INSTALL) -d -m755 '$(DESTDIR_SQ)$(gitexecdir_SQ)'
$(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
$(INSTALL) git$X gitk '$(DESTDIR_SQ)$(bindir_SQ)'
- $(MAKE) -C templates install
+ $(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install
$(INSTALL) -d -m755 '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
$(INSTALL) $(PYMODULES) '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
if test 'z$(bindir_SQ)' != 'z$(gitexecdir_SQ)'; \
rm -fr .doc-tmp-dir
mkdir .doc-tmp-dir .doc-tmp-dir/man1 .doc-tmp-dir/man7
$(MAKE) -C Documentation DESTDIR=./ \
- man1=../.doc-tmp-dir/man1 \
- man7=../.doc-tmp-dir/man7 \
+ man1dir=../.doc-tmp-dir/man1 \
+ man7dir=../.doc-tmp-dir/man7 \
install
cd .doc-tmp-dir && $(TAR) cf ../$(manpages).tar .
gzip -n -9 -f $(manpages).tar
$(LIB_FILE) $(XDIFF_LIB)
rm -f $(ALL_PROGRAMS) $(BUILT_INS) git$X
rm -f *.spec *.pyc *.pyo */*.pyc */*.pyo common-cmds.h TAGS tags
+ rm -rf autom4te.cache
+ rm -f configure config.log config.mak.autogen config.mak.append config.status config.cache
rm -rf $(GIT_TARNAME) .doc-tmp-dir
rm -f $(GIT_TARNAME).tar.gz git-core_$(GIT_VERSION)-*.tar.gz
rm -f $(htmldocs).tar.gz $(manpages).tar.gz
+ rm -f gitweb/gitweb.cgi
$(MAKE) -C Documentation/ clean
$(MAKE) -C templates clean
$(MAKE) -C t/ clean
DEFINE_ALLOCATOR(commit)
DEFINE_ALLOCATOR(tag)
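+/*
+ * The report() helper below picks its printf format at compile time,
+ * so builds with NO_C99_FORMAT (no "%zu" support, see the Makefile)
+ * fall back to a plain "%u".
+ */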
+#ifdef NO_C99_FORMAT
+#define SZ_FMT "%u"
+#else
+#define SZ_FMT "%zu"
+#endif
+
+static void report(const char* name, unsigned int count, size_t size)
+{
+ fprintf(stderr, "%10s: %8u (" SZ_FMT " kB)\n", name, count, size);
+}
+
+#undef SZ_FMT
+
#define REPORT(name) \
- fprintf(stderr, "%10s: %8u (%zu kB)\n", #name, name##_allocs, name##_allocs*sizeof(struct name) >> 10)
+ report(#name, name##_allocs, name##_allocs*sizeof(struct name) >> 10)
void alloc_report(void)
{
#define DEBUG 0
-static const char blame_usage[] = "[-c] [-l] [-t] [-S <revs-file>] [--] file [commit]\n"
- " -c, --compability Use the same output mode as git-annotate (Default: off)\n"
- " -l, --long Show long commit SHA1 (Default: off)\n"
- " -t, --time Show raw timestamp (Default: off)\n"
- " -S, --revs-file Use revisions from revs-file instead of calling git-rev-list\n"
- " -h, --help This message";
+static const char blame_usage[] = "git-blame [-c] [-l] [-t] [-S <revs-file>] [--] file [commit]\n"
+ " -c, --compatibility Use the same output mode as git-annotate (Default: off)\n"
+ " -l, --long Show long commit SHA1 (Default: off)\n"
+ " -t, --time Show raw timestamp (Default: off)\n"
+ " -S, --revs-file Use revisions from revs-file instead of calling git-rev-list\n"
+ " -h, --help This message";
static struct commit **blame_lines;
static int num_blame_lines;
};
struct chunk {
- int off1, len1; // ---
- int off2, len2; // +++
+ int off1, len1; /* --- */
+ int off2, len2; /* +++ */
};
struct patch {
}
#endif
-// p is a patch from commit to other.
+/* p is a patch from commit to other. */
static void fill_line_map(struct commit *commit, struct commit *other,
struct patch *p)
{
const char *filename = NULL, *commit = NULL;
char filename_buf[256];
int sha1_len = 8;
- int compability = 0;
+ int compatibility = 0;
int show_raw_time = 0;
int options = 1;
struct commit* start_commit;
sha1_len = 40;
continue;
} else if(!strcmp(argv[i], "-c") ||
- !strcmp(argv[i], "--compability")) {
- compability = 1;
+ !strcmp(argv[i], "--compatibility")) {
+ compatibility = 1;
continue;
} else if(!strcmp(argv[i], "-t") ||
!strcmp(argv[i], "--time")) {
} else if(!strcmp(argv[i], "-S")) {
if (i + 1 < argc &&
!read_ancestry(argv[i + 1], &sha1_p)) {
- compability = 1;
+ compatibility = 1;
i++;
continue;
}
}
- init_revisions(&rev);
+ init_revisions(&rev, setup_git_directory());
rev.remove_empty_trees = 1;
rev.topo_order = 1;
rev.prune_fn = simplify_commit;
u = c->util;
get_commit_info(c, &ci);
fwrite(sha1_to_hex(c->object.sha1), sha1_len, 1, stdout);
- if(compability) {
+ if(compatibility) {
printf("\t(%10s\t%10s\t%d)", ci.author,
format_time(ci.author_time, ci.author_tz,
show_raw_time),
if (!obj) {
struct blob *ret = alloc_blob_node();
created_object(sha1, &ret->object);
- ret->object.type = TYPE_BLOB;
+ ret->object.type = OBJ_BLOB;
return ret;
}
if (!obj->type)
- obj->type = TYPE_BLOB;
- if (obj->type != TYPE_BLOB) {
+ obj->type = OBJ_BLOB;
+ if (obj->type != OBJ_BLOB) {
error("Object %s is a %s, not a blob",
sha1_to_hex(sha1), typename(obj->type));
return NULL;
for (specs = 0; pathspec[specs]; specs++)
/* nothing */;
- seen = xmalloc(specs);
- memset(seen, 0, specs);
+ seen = xcalloc(specs, 1);
src = dst = dir->entries;
i = dir->nr;
prune_directory(dir, pathspec, baselen);
}
-static int add_file_to_index(const char *path, int verbose)
-{
- int size, namelen;
- struct stat st;
- struct cache_entry *ce;
-
- if (lstat(path, &st))
- die("%s: unable to stat (%s)", path, strerror(errno));
-
- if (!S_ISREG(st.st_mode) && !S_ISLNK(st.st_mode))
- die("%s: can only add regular files or symbolic links", path);
-
- namelen = strlen(path);
- size = cache_entry_size(namelen);
- ce = xcalloc(1, size);
- memcpy(ce->name, path, namelen);
- ce->ce_flags = htons(namelen);
- fill_stat_cache_info(ce, &st);
-
- ce->ce_mode = create_ce_mode(st.st_mode);
- if (!trust_executable_bit) {
- /* If there is an existing entry, pick the mode bits
- * from it.
- */
- int pos = cache_name_pos(path, namelen);
- if (pos >= 0)
- ce->ce_mode = active_cache[pos]->ce_mode;
- }
-
- if (index_path(ce->sha1, path, &st, 1))
- die("unable to index file %s", path);
- if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD))
- die("unable to add %s to index",path);
- if (verbose)
- printf("add '%s'\n", path);
- cache_tree_invalidate_path(active_cache_tree, path);
- return 0;
-}
-
static struct lock_file lock_file;
-int cmd_add(int argc, const char **argv, char **envp)
+int cmd_add(int argc, const char **argv, const char *prefix)
{
int i, newfd;
int verbose = 0, show_only = 0;
- const char *prefix = setup_git_directory();
const char **pathspec;
struct dir_struct dir;
git_config(git_default_config);
- newfd = hold_lock_file_for_update(&lock_file, get_index_file());
- if (newfd < 0)
- die("unable to create new index file");
+ newfd = hold_lock_file_for_update(&lock_file, get_index_file(), 1);
if (read_cache() < 0)
die("index file corrupt");
verbose = 1;
continue;
}
- die(builtin_add_usage);
+ usage(builtin_add_usage);
}
- git_config(git_default_config);
pathspec = get_pathspec(prefix, argv + i);
fill_directory(&dir, pathspec);
if (active_cache_changed) {
if (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(&lock_file))
+ close(newfd) || commit_lock_file(&lock_file))
die("Unable to write new index file");
}
#include "delta.h"
#include "builtin.h"
-// --check turns on checking that the working tree matches the
-// files that are being modified, but doesn't apply the patch
-// --stat does just a diffstat, and doesn't actually apply
-// --numstat does numeric diffstat, and doesn't actually apply
-// --index-info shows the old and new index info for paths if available.
-// --index updates the cache as well.
-// --cached updates only the cache without ever touching the working tree.
-//
+/*
+ * --check turns on checking that the working tree matches the
+ * files that are being modified, but doesn't apply the patch
+ * --stat does just a diffstat, and doesn't actually apply
+ * --numstat does numeric diffstat, and doesn't actually apply
+ * --index-info shows the old and new index info for paths if available.
+ * --index updates the cache as well.
+ * --cached updates only the cache without ever touching the working tree.
+ */
static const char *prefix;
static int prefix_length = -1;
static int newfd = -1;
struct patch {
char *new_name, *old_name, *def_name;
unsigned int old_mode, new_mode;
- int is_rename, is_copy, is_new, is_delete, is_binary;
+ int is_rename, is_copy, is_new, is_delete, is_binary, is_reverse;
#define BINARY_DELTA_DEFLATED 1
#define BINARY_LITERAL_DEFLATED 2
unsigned long deflate_origlen;
{
char *name;
- first += 4; // skip "--- "
- second += 4; // skip "+++ "
+ first += 4; /* skip "--- " */
+ second += 4; /* skip "+++ " */
if (is_dev_null(first)) {
patch->is_new = 1;
patch->is_delete = 0;
continue;
/*
- * Make sure we don't find any unconnected patch fragmants.
+ * Make sure we don't find any unconnected patch fragments.
* That's a sign that we didn't find a header, and that a
* patch has become corrupted/broken up.
*/
* so one line can fit up to 13 groups that would decode
* to 52 bytes max. The length byte 'A'-'Z' corresponds
* to 1-26 bytes, and 'a'-'z' corresponds to 27-52 bytes.
- * The end of binary is signalled with an empty line.
+ * The end of binary is signaled with an empty line.
*/
int llen, used;
struct fragment *fragment;
return offset + hdrsize + patchsize;
}
+#define swap(a,b) myswap((a),(b),sizeof(a))
+
+#define myswap(a, b, size) do { \
+ unsigned char mytmp[size]; \
+ memcpy(mytmp, &a, size); \
+ memcpy(&a, &b, size); \
+ memcpy(&b, mytmp, size); \
+} while (0)
+
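+/*
+ * Swap the old and new sides of a parsed patch in place; this is what
+ * the --reverse (-R) option handled further below relies on.
+ */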
+static void reverse_patches(struct patch *p)
+{
+ for (; p; p = p->next) {
+ struct fragment *frag = p->fragments;
+
+ swap(p->new_name, p->old_name);
+ swap(p->new_mode, p->old_mode);
+ swap(p->is_new, p->is_delete);
+ swap(p->lines_added, p->lines_deleted);
+ swap(p->old_sha1_prefix, p->new_sha1_prefix);
+
+ for (; frag; frag = frag->next) {
+ swap(frag->newpos, frag->oldpos);
+ swap(frag->newlines, frag->oldlines);
+ }
+ p->is_reverse = !p->is_reverse;
+ }
+}
+
static const char pluses[] = "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++";
static const char minuses[]= "----------------------------------------------------------------------";
}
static int apply_one_fragment(struct buffer_desc *desc, struct fragment *frag,
- int inaccurate_eof)
+ int reverse, int inaccurate_eof)
{
int match_beginning, match_end;
char *buf = desc->buffer;
int pos, lines;
while (size > 0) {
+ char first;
int len = linelen(patch, size);
int plen;
plen = len-1;
if (len < size && patch[len] == '\\')
plen--;
- switch (*patch) {
+ first = *patch;
+ if (reverse) {
+ if (first == '-')
+ first = '+';
+ else if (first == '+')
+ first = '-';
+ }
+ switch (first) {
case ' ':
case '-':
memcpy(old + oldsize, patch + 1, plen);
oldsize += plen;
- if (*patch == '-')
+ if (first == '-')
break;
/* Fall-through for ' ' */
case '+':
- if (*patch != '+' || !no_add)
+ if (first != '+' || !no_add)
newsize += apply_line(new + newsize, patch,
plen);
break;
void *data;
void *result;
+ /* Binary patch is irreversible */
+ if (patch->is_reverse)
+ return error("cannot reverse-apply a binary patch to '%s'",
+ patch->new_name
+ ? patch->new_name : patch->old_name);
+
data = inflate_it(fragment->patch, fragment->size,
patch->deflate_origlen);
if (!data)
return apply_binary(desc, patch);
while (frag) {
- if (apply_one_fragment(desc, frag, patch->inaccurate_eof) < 0)
+ if (apply_one_fragment(desc, frag, patch->is_reverse,
+ patch->inaccurate_eof) < 0)
return error("patch failed: %s:%ld",
name, frag->oldpos);
frag = frag->next;
desc.buffer = buf;
if (apply_fragments(&desc, patch) < 0)
return -1;
+
+ /* NUL terminate the result */
+ if (desc.alloc <= desc.size)
+ desc.buffer = xrealloc(desc.buffer, desc.size + 1);
+ desc.buffer[desc.size] = 0;
+
patch->result = desc.buffer;
patch->resultsize = desc.size;
return 0;
}
-static int check_patch(struct patch *patch)
+static int check_patch(struct patch *patch, struct patch *prev_patch)
{
struct stat st;
const char *old_name = patch->old_name;
const char *new_name = patch->new_name;
const char *name = old_name ? old_name : new_name;
struct cache_entry *ce = NULL;
+ int ok_if_exists;
if (old_name) {
int changed = 0;
old_name, st_mode, patch->old_mode);
}
+ if (new_name && prev_patch && prev_patch->is_delete &&
+ !strcmp(prev_patch->old_name, new_name))
+ /* A type-change diff is always split into a patch to
+ * delete old, immediately followed by a patch to
+ * create new (see diff.c::run_diff()); in such a case
+ * it is Ok that the entry to be deleted by the
+ * previous patch is still in the working tree and in
+ * the index.
+ */
+ ok_if_exists = 1;
+ else
+ ok_if_exists = 0;
+
if (new_name && (patch->is_new | patch->is_rename | patch->is_copy)) {
- if (check_index && cache_name_pos(new_name, strlen(new_name)) >= 0)
+ if (check_index &&
+ cache_name_pos(new_name, strlen(new_name)) >= 0 &&
+ !ok_if_exists)
return error("%s: already exists in index", new_name);
if (!cached) {
- if (!lstat(new_name, &st))
- return error("%s: already exists in working directory", new_name);
- if (errno != ENOENT)
+ struct stat nst;
+ if (!lstat(new_name, &nst)) {
+ if (S_ISDIR(nst.st_mode) || ok_if_exists)
+ ; /* ok */
+ else
+ return error("%s: already exists in working directory", new_name);
+ }
+ else if ((errno != ENOENT) && (errno != ENOTDIR))
return error("%s: %s", new_name, strerror(errno));
}
if (!patch->new_mode) {
static int check_patch_list(struct patch *patch)
{
+ struct patch *prev_patch = NULL;
int error = 0;
- for (;patch ; patch = patch->next)
- error |= check_patch(patch);
+ for (prev_patch = NULL; patch ; patch = patch->next) {
+ error |= check_patch(patch, prev_patch);
+ prev_patch = patch;
+ }
return error;
}
int fd;
if (S_ISLNK(mode))
+ /* Although buf:size is counted string, it also is NUL
+ * terminated.
+ */
return symlink(buf, path);
fd = open(path, O_CREAT | O_EXCL | O_WRONLY, (mode & 0100) ? 0777 : 0666);
if (fd < 0)
return;
}
+ if (errno == EEXIST || errno == EACCES) {
+ /* We may be trying to create a file where a directory
+ * used to be.
+ */
+ struct stat st;
+ errno = 0;
+ if (!lstat(path, &st) && S_ISDIR(st.st_mode) && !rmdir(path))
+ errno = EEXIST;
+ }
+
if (errno == EEXIST) {
unsigned int nr = getpid();
cache_tree_invalidate_path(active_cache_tree, path);
}
-static void write_out_one_result(struct patch *patch)
+/* phase zero is to remove, phase one is to create */
+static void write_out_one_result(struct patch *patch, int phase)
{
if (patch->is_delete > 0) {
- remove_file(patch);
+ if (phase == 0)
+ remove_file(patch);
return;
}
if (patch->is_new > 0 || patch->is_copy) {
- create_file(patch);
+ if (phase == 1)
+ create_file(patch);
return;
}
/*
* Rename or modification boils down to the same
* thing: remove the old, write the new
*/
- remove_file(patch);
+ if (phase == 0)
+ remove_file(patch);
+ if (phase == 1)
create_file(patch);
}
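+/*
+ * Phase 0 performs every removal before phase 1 creates anything, so
+ * that e.g. a pair of patches moving content between two existing
+ * paths does not trip over files that are yet to be removed.
+ */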
static void write_out_results(struct patch *list, int skipped_patch)
{
+ int phase;
+
if (!list && !skipped_patch)
die("No changes");
- while (list) {
- write_out_one_result(list);
- list = list->next;
+ for (phase = 0; phase < 2; phase++) {
+ struct patch *l = list;
+ while (l) {
+ write_out_one_result(l, phase);
+ l = l->next;
+ }
}
}
return 1;
}
-static int apply_patch(int fd, const char *filename, int inaccurate_eof)
+static int apply_patch(int fd, const char *filename,
+ int reverse, int inaccurate_eof)
{
unsigned long offset, size;
char *buffer = read_patch_file(fd, &size);
nr = parse_chunk(buffer + offset, size, patch);
if (nr < 0)
break;
+ if (reverse)
+ reverse_patches(patch);
if (use_patch(patch)) {
patch_stats(patch);
*listp = patch;
apply = 0;
write_index = check_index && apply;
- if (write_index && newfd < 0) {
+ if (write_index && newfd < 0)
newfd = hold_lock_file_for_update(&lock_file,
- get_index_file());
- if (newfd < 0)
- die("unable to create new index file");
- }
+ get_index_file(), 1);
if (check_index) {
if (read_cache() < 0)
die("unable to read index file");
}
-int cmd_apply(int argc, const char **argv, char **envp)
+int cmd_apply(int argc, const char **argv, const char *prefix)
{
int i;
int read_stdin = 1;
+ int reverse = 0;
int inaccurate_eof = 0;
const char *whitespace_option = NULL;
int fd;
if (!strcmp(arg, "-")) {
- apply_patch(0, "<stdin>", inaccurate_eof);
+ apply_patch(0, "<stdin>", reverse, inaccurate_eof);
read_stdin = 0;
continue;
}
parse_whitespace_option(arg + 13);
continue;
}
+ if (!strcmp(arg, "-R") || !strcmp(arg, "--reverse")) {
+ reverse = 1;
+ continue;
+ }
if (!strcmp(arg, "--inaccurate-eof")) {
inaccurate_eof = 1;
continue;
usage(apply_usage);
read_stdin = 0;
set_default_whitespace_mode(whitespace_option);
- apply_patch(fd, arg, inaccurate_eof);
+ apply_patch(fd, arg, reverse, inaccurate_eof);
close(fd);
}
set_default_whitespace_mode(whitespace_option);
if (read_stdin)
- apply_patch(0, "<stdin>", inaccurate_eof);
+ apply_patch(0, "<stdin>", reverse, inaccurate_eof);
if (whitespace_error) {
if (squelch_whitespace_errors &&
squelch_whitespace_errors < whitespace_error) {
if (write_index) {
if (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(&lock_file))
+ close(newfd) || commit_lock_file(&lock_file))
die("Unable to write new index file");
}
return 0;
}
-int cmd_cat_file(int argc, const char **argv, char **envp)
+int cmd_cat_file(int argc, const char **argv, const char *prefix)
{
unsigned char sha1[20];
char type[20];
unsigned long size;
int opt;
- setup_git_directory();
git_config(git_default_config);
if (argc != 3)
usage("git-cat-file [-t|-s|-e|-p|<type>] <sha1>");
#include "refs.h"
#include "builtin.h"
-int cmd_check_ref_format(int argc, const char **argv, char **envp)
+int cmd_check_ref_format(int argc, const char **argv, const char *prefix)
{
if (argc != 2)
- usage("git check-ref-format refname");
+ usage("git-check-ref-format refname");
return !!check_ref_format(argv[1]);
}
--- /dev/null
+/*
+ * Check-out files from the "current cache directory"
+ *
+ * Copyright (C) 2005 Linus Torvalds
+ *
+ * Careful: order of argument flags does matter. For example,
+ *
+ * git-checkout-index -a -f file.c
+ *
+ * Will first check out all files listed in the cache (but not
+ * overwrite any old ones), and then force-checkout "file.c" a
+ * second time (ie that one _will_ overwrite any old contents
+ * with the same filename).
+ *
+ * Also, just doing "git-checkout-index" does nothing. You probably
+ * meant "git-checkout-index -a". And if you want to force it, you
+ * want "git-checkout-index -f -a".
+ *
+ * Intuitiveness is not the goal here. Repeatability is. The
+ * reason for the "no arguments means no work" thing is that
+ * from scripts you are supposed to be able to do things like
+ *
+ * find . -name '*.h' -print0 | xargs -0 git-checkout-index -f --
+ *
+ * or:
+ *
+ * find . -name '*.h' -print0 | git-checkout-index -f -z --stdin
+ *
+ * which will force all existing *.h files to be replaced with
+ * their cached copies. If an empty command line implied "all",
+ * then this would force-refresh everything in the cache, which
+ * was not the point.
+ *
+ * Oh, and the "--" is just a good idea when you know the rest
+ * will be filenames. Just so that you wouldn't have a filename
+ * of "-a" causing problems (not possible in the above example,
+ * but get used to it in scripting!).
+ */
+#include "cache.h"
+#include "strbuf.h"
+#include "quote.h"
+#include "cache-tree.h"
+
+#define CHECKOUT_ALL 4
+static int line_termination = '\n';
+static int checkout_stage; /* default to checkout stage0 */
+static int to_tempfile;
+static char topath[4][MAXPATHLEN+1];
+
+static struct checkout state;
+
+static void write_tempfile_record(const char *name, int prefix_length)
+{
+ int i;
+
+ if (CHECKOUT_ALL == checkout_stage) {
+ for (i = 1; i < 4; i++) {
+ if (i > 1)
+ putchar(' ');
+ if (topath[i][0])
+ fputs(topath[i], stdout);
+ else
+ putchar('.');
+ }
+ } else
+ fputs(topath[checkout_stage], stdout);
+
+ putchar('\t');
+ write_name_quoted("", 0, name + prefix_length,
+ line_termination, stdout);
+ putchar(line_termination);
+
+ for (i = 0; i < 4; i++) {
+ topath[i][0] = 0;
+ }
+}
+
+static int checkout_file(const char *name, int prefix_length)
+{
+ int namelen = strlen(name);
+ int pos = cache_name_pos(name, namelen);
+ int has_same_name = 0;
+ int did_checkout = 0;
+ int errs = 0;
+
+ if (pos < 0)
+ pos = -pos - 1;
+
+ while (pos < active_nr) {
+ struct cache_entry *ce = active_cache[pos];
+ if (ce_namelen(ce) != namelen ||
+ memcmp(ce->name, name, namelen))
+ break;
+ has_same_name = 1;
+ pos++;
+ if (ce_stage(ce) != checkout_stage
+ && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
+ continue;
+ did_checkout = 1;
+ if (checkout_entry(ce, &state,
+ to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
+ errs++;
+ }
+
+ if (did_checkout) {
+ if (to_tempfile)
+ write_tempfile_record(name, prefix_length);
+ return errs > 0 ? -1 : 0;
+ }
+
+ if (!state.quiet) {
+ fprintf(stderr, "git-checkout-index: %s ", name);
+ if (!has_same_name)
+ fprintf(stderr, "is not in the cache");
+ else if (checkout_stage)
+ fprintf(stderr, "does not exist at stage %d",
+ checkout_stage);
+ else
+ fprintf(stderr, "is unmerged");
+ fputc('\n', stderr);
+ }
+ return -1;
+}
+
+static int checkout_all(const char *prefix, int prefix_length)
+{
+ int i, errs = 0;
+ struct cache_entry* last_ce = NULL;
+
+ for (i = 0; i < active_nr ; i++) {
+ struct cache_entry *ce = active_cache[i];
+ if (ce_stage(ce) != checkout_stage
+ && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
+ continue;
+ if (prefix && *prefix &&
+ (ce_namelen(ce) <= prefix_length ||
+ memcmp(prefix, ce->name, prefix_length)))
+ continue;
+ if (last_ce && to_tempfile) {
+ if (ce_namelen(last_ce) != ce_namelen(ce)
+ || memcmp(last_ce->name, ce->name, ce_namelen(ce)))
+ write_tempfile_record(last_ce->name, prefix_length);
+ }
+ if (checkout_entry(ce, &state,
+ to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
+ errs++;
+ last_ce = ce;
+ }
+ if (last_ce && to_tempfile)
+ write_tempfile_record(last_ce->name, prefix_length);
+ if (errs)
+ /* we have already done our error reporting.
+ * exit with the same code as die().
+ */
+ exit(128);
+ return 0;
+}
+
+static const char checkout_cache_usage[] =
+"git-checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]|all] [--prefix=<string>] [--temp] [--] <file>...";
+
+static struct lock_file lock_file;
+
+int cmd_checkout_index(int argc, const char **argv, const char *prefix)
+{
+ int i;
+ int newfd = -1;
+ int all = 0;
+ int read_from_stdin = 0;
+ int prefix_length;
+
+ git_config(git_default_config);
+ state.base_dir = "";
+ prefix_length = prefix ? strlen(prefix) : 0;
+
+ if (read_cache() < 0) {
+ die("invalid cache");
+ }
+
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+
+ if (!strcmp(arg, "--")) {
+ i++;
+ break;
+ }
+ if (!strcmp(arg, "-a") || !strcmp(arg, "--all")) {
+ all = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-f") || !strcmp(arg, "--force")) {
+ state.force = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-q") || !strcmp(arg, "--quiet")) {
+ state.quiet = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-n") || !strcmp(arg, "--no-create")) {
+ state.not_new = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-u") || !strcmp(arg, "--index")) {
+ state.refresh_cache = 1;
+ if (newfd < 0)
+ newfd = hold_lock_file_for_update
+ (&lock_file, get_index_file(), 1);
+ if (newfd < 0)
+ die("cannot open index.lock file.");
+ continue;
+ }
+ if (!strcmp(arg, "-z")) {
+ line_termination = 0;
+ continue;
+ }
+ if (!strcmp(arg, "--stdin")) {
+ if (i != argc - 1)
+ die("--stdin must be at the end");
+ read_from_stdin = 1;
+ i++; /* do not consider arg as a file name */
+ break;
+ }
+ if (!strcmp(arg, "--temp")) {
+ to_tempfile = 1;
+ continue;
+ }
+ if (!strncmp(arg, "--prefix=", 9)) {
+ state.base_dir = arg+9;
+ state.base_dir_len = strlen(state.base_dir);
+ continue;
+ }
+ if (!strncmp(arg, "--stage=", 8)) {
+ if (!strcmp(arg + 8, "all")) {
+ to_tempfile = 1;
+ checkout_stage = CHECKOUT_ALL;
+ } else {
+ int ch = arg[8];
+ if ('1' <= ch && ch <= '3')
+ checkout_stage = arg[8] - '0';
+ else
+ die("stage should be between 1 and 3 or all");
+ }
+ continue;
+ }
+ if (arg[0] == '-')
+ usage(checkout_cache_usage);
+ break;
+ }
+
+ if (state.base_dir_len || to_tempfile) {
+ /* when --prefix is specified we do not
+ * want to update cache.
+ */
+ if (state.refresh_cache) {
+ close(newfd); newfd = -1;
+ rollback_lock_file(&lock_file);
+ }
+ state.refresh_cache = 0;
+ }
+
+ /* Check out named files first */
+ for ( ; i < argc; i++) {
+ const char *arg = argv[i];
+ const char *p;
+
+ if (all)
+ die("git-checkout-index: don't mix '--all' and explicit filenames");
+ if (read_from_stdin)
+ die("git-checkout-index: don't mix '--stdin' and explicit filenames");
+ p = prefix_path(prefix, prefix_length, arg);
+ checkout_file(p, prefix_length);
+ if (p < arg || p > arg + strlen(arg))
+ free((char*)p);
+ }
+
+ if (read_from_stdin) {
+ struct strbuf buf;
+ if (all)
+ die("git-checkout-index: don't mix '--all' and '--stdin'");
+ strbuf_init(&buf);
+ while (1) {
+ char *path_name;
+ const char *p;
+
+ read_line(&buf, stdin, line_termination);
+ if (buf.eof)
+ break;
+ if (line_termination && buf.buf[0] == '"')
+ path_name = unquote_c_style(buf.buf, NULL);
+ else
+ path_name = buf.buf;
+ p = prefix_path(prefix, prefix_length, path_name);
+ checkout_file(p, prefix_length);
+ if (p < path_name || p > path_name + strlen(path_name))
+ free((char *)p);
+ if (path_name != buf.buf)
+ free(path_name);
+ }
+ }
+
+ if (all)
+ checkout_all(prefix, prefix_length);
+
+ if (0 <= newfd &&
+ (write_cache(newfd, active_cache, active_nr) ||
+ close(newfd) || commit_lock_file(&lock_file)))
+ die("Unable to write new index file");
+ return 0;
+}
return 1;
}
-int cmd_commit_tree(int argc, const char **argv, char **envp)
+int cmd_commit_tree(int argc, const char **argv, const char *prefix)
{
int i;
int parents = 0;
unsigned int size;
setup_ident();
- setup_git_directory();
-
git_config(git_default_config);
if (argc < 2)
--- /dev/null
+/*
+ * Builtin "git count-objects".
+ *
+ * Copyright (c) 2006 Junio C Hamano
+ */
+
+#include "cache.h"
+#include "builtin.h"
+
+static const char count_objects_usage[] = "git-count-objects [-v]";
+
+static void count_objects(DIR *d, char *path, int len, int verbose,
+ unsigned long *loose,
+ unsigned long *loose_size,
+ unsigned long *packed_loose,
+ unsigned long *garbage)
+{
+ struct dirent *ent;
+ while ((ent = readdir(d)) != NULL) {
+ char hex[41];
+ unsigned char sha1[20];
+ const char *cp;
+ int bad = 0;
+
+ if ((ent->d_name[0] == '.') &&
+ (ent->d_name[1] == 0 ||
+ ((ent->d_name[1] == '.') && (ent->d_name[2] == 0))))
+ continue;
+ for (cp = ent->d_name; *cp; cp++) {
+ int ch = *cp;
+ if (('0' <= ch && ch <= '9') ||
+ ('a' <= ch && ch <= 'f'))
+ continue;
+ bad = 1;
+ break;
+ }
+ if (cp - ent->d_name != 38)
+ bad = 1;
+ else {
+ struct stat st;
+ memcpy(path + len + 3, ent->d_name, 38);
+ path[len + 2] = '/';
+ path[len + 41] = 0;
+ if (lstat(path, &st) || !S_ISREG(st.st_mode))
+ bad = 1;
+ else
+ (*loose_size) += st.st_blocks;
+ }
+ if (bad) {
+ if (verbose) {
+ error("garbage found: %.*s/%s",
+ len + 2, path, ent->d_name);
+ (*garbage)++;
+ }
+ continue;
+ }
+ (*loose)++;
+ if (!verbose)
+ continue;
+ memcpy(hex, path+len, 2);
+ memcpy(hex+2, ent->d_name, 38);
+ hex[40] = 0;
+ if (get_sha1_hex(hex, sha1))
+ die("internal error");
+ if (has_sha1_pack(sha1))
+ (*packed_loose)++;
+ }
+}
+
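+/*
+ * In -v mode the output reports "count"/"size" for loose objects,
+ * "in-pack" for objects in local packs, "prune-packable" for loose
+ * objects that also exist in a pack (what git-prune-packed would
+ * remove), and "garbage" for entries that do not look like loose
+ * objects.
+ */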
+int cmd_count_objects(int ac, const char **av, const char *prefix)
+{
+ int i;
+ int verbose = 0;
+ const char *objdir = get_object_directory();
+ int len = strlen(objdir);
+ char *path = xmalloc(len + 50);
+ unsigned long loose = 0, packed = 0, packed_loose = 0, garbage = 0;
+ unsigned long loose_size = 0;
+
+ for (i = 1; i < ac; i++) {
+ const char *arg = av[i];
+ if (*arg != '-')
+ break;
+ else if (!strcmp(arg, "-v"))
+ verbose = 1;
+ else
+ usage(count_objects_usage);
+ }
+
+ /* we do not take arguments other than flags for now */
+ if (i < ac)
+ usage(count_objects_usage);
+ memcpy(path, objdir, len);
+ if (len && objdir[len-1] != '/')
+ path[len++] = '/';
+ for (i = 0; i < 256; i++) {
+ DIR *d;
+ sprintf(path + len, "%02x", i);
+ d = opendir(path);
+ if (!d)
+ continue;
+ count_objects(d, path, len, verbose,
+ &loose, &loose_size, &packed_loose, &garbage);
+ closedir(d);
+ }
+ if (verbose) {
+ struct packed_git *p;
+ if (!packed_git)
+ prepare_packed_git();
+ for (p = packed_git; p; p = p->next) {
+ if (!p->pack_local)
+ continue;
+ packed += num_packed_objects(p);
+ }
+ printf("count: %lu\n", loose);
+ printf("size: %lu\n", loose_size / 2);
+ printf("in-pack: %lu\n", packed);
+ printf("prune-packable: %lu\n", packed_loose);
+ printf("garbage: %lu\n", garbage);
+ }
+ else
+ printf("%lu objects, %lu kilobytes\n",
+ loose, loose_size / 2);
+ return 0;
+}
+++ /dev/null
-/*
- * Builtin "git count-objects".
- *
- * Copyright (c) 2006 Junio C Hamano
- */
-
-#include "cache.h"
-#include "builtin.h"
-
-static const char count_objects_usage[] = "git-count-objects [-v]";
-
-static void count_objects(DIR *d, char *path, int len, int verbose,
- unsigned long *loose,
- unsigned long *loose_size,
- unsigned long *packed_loose,
- unsigned long *garbage)
-{
- struct dirent *ent;
- while ((ent = readdir(d)) != NULL) {
- char hex[41];
- unsigned char sha1[20];
- const char *cp;
- int bad = 0;
-
- if ((ent->d_name[0] == '.') &&
- (ent->d_name[1] == 0 ||
- ((ent->d_name[1] == '.') && (ent->d_name[2] == 0))))
- continue;
- for (cp = ent->d_name; *cp; cp++) {
- int ch = *cp;
- if (('0' <= ch && ch <= '9') ||
- ('a' <= ch && ch <= 'f'))
- continue;
- bad = 1;
- break;
- }
- if (cp - ent->d_name != 38)
- bad = 1;
- else {
- struct stat st;
- memcpy(path + len + 3, ent->d_name, 38);
- path[len + 2] = '/';
- path[len + 41] = 0;
- if (lstat(path, &st) || !S_ISREG(st.st_mode))
- bad = 1;
- else
- (*loose_size) += st.st_blocks;
- }
- if (bad) {
- if (verbose) {
- error("garbage found: %.*s/%s",
- len + 2, path, ent->d_name);
- (*garbage)++;
- }
- continue;
- }
- (*loose)++;
- if (!verbose)
- continue;
- memcpy(hex, path+len, 2);
- memcpy(hex+2, ent->d_name, 38);
- hex[40] = 0;
- if (get_sha1_hex(hex, sha1))
- die("internal error");
- if (has_sha1_pack(sha1))
- (*packed_loose)++;
- }
-}
-
-int cmd_count_objects(int ac, const char **av, char **ep)
-{
- int i;
- int verbose = 0;
- const char *objdir = get_object_directory();
- int len = strlen(objdir);
- char *path = xmalloc(len + 50);
- unsigned long loose = 0, packed = 0, packed_loose = 0, garbage = 0;
- unsigned long loose_size = 0;
-
- for (i = 1; i < ac; i++) {
- const char *arg = av[i];
- if (*arg != '-')
- break;
- else if (!strcmp(arg, "-v"))
- verbose = 1;
- else
- usage(count_objects_usage);
- }
-
- /* we do not take arguments other than flags for now */
- if (i < ac)
- usage(count_objects_usage);
- memcpy(path, objdir, len);
- if (len && objdir[len-1] != '/')
- path[len++] = '/';
- for (i = 0; i < 256; i++) {
- DIR *d;
- sprintf(path + len, "%02x", i);
- d = opendir(path);
- if (!d)
- continue;
- count_objects(d, path, len, verbose,
- &loose, &loose_size, &packed_loose, &garbage);
- closedir(d);
- }
- if (verbose) {
- struct packed_git *p;
- if (!packed_git)
- prepare_packed_git();
- for (p = packed_git; p; p = p->next) {
- if (!p->pack_local)
- continue;
- packed += num_packed_objects(p);
- }
- printf("count: %lu\n", loose);
- printf("size: %lu\n", loose_size / 2);
- printf("in-pack: %lu\n", packed);
- printf("prune-packable: %lu\n", packed_loose);
- printf("garbage: %lu\n", garbage);
- }
- else
- printf("%lu objects, %lu kilobytes\n",
- loose, loose_size / 2);
- return 0;
-}
"git-diff-files [-q] [-0/-1/2/3 |-c|--cc] [<common diff options>] [<path>...]"
COMMON_DIFF_OPTIONS_HELP;
-int cmd_diff_files(int argc, const char **argv, char **envp)
+int cmd_diff_files(int argc, const char **argv, const char *prefix)
{
struct rev_info rev;
int silent = 0;
- git_config(git_diff_config);
- init_revisions(&rev);
+ init_revisions(&rev, prefix);
+ git_config(git_default_config); /* no "diff" UI options */
rev.abbrev = 0;
argc = setup_revisions(argc, argv, &rev, NULL);
"[<common diff options>] <tree-ish> [<path>...]"
COMMON_DIFF_OPTIONS_HELP;
-int cmd_diff_index(int argc, const char **argv, char **envp)
+int cmd_diff_index(int argc, const char **argv, const char *prefix)
{
struct rev_info rev;
int cached = 0;
int i;
- git_config(git_diff_config);
- init_revisions(&rev);
+ init_revisions(&rev, prefix);
+ git_config(git_default_config); /* no "diff" UI options */
rev.abbrev = 0;
argc = setup_revisions(argc, argv, &rev, NULL);
}
}
-int cmd_diff_stages(int ac, const char **av, char **envp)
+int cmd_diff_stages(int ac, const char **av, const char *prefix)
{
int stage1, stage2;
- const char *prefix = setup_git_directory();
const char **pathspec = NULL;
- git_config(git_diff_config);
+ git_config(git_default_config); /* no "diff" UI options */
read_cache();
diff_setup(&diff_options);
while (1 < ac && av[1][0] == '-') {
" --root include the initial commit as diff against /dev/null\n"
COMMON_DIFF_OPTIONS_HELP;
-int cmd_diff_tree(int argc, const char **argv, char **envp)
+int cmd_diff_tree(int argc, const char **argv, const char *prefix)
{
int nr_sha1;
char line[1000];
static struct rev_info *opt = &log_tree_opt;
int read_stdin = 0;
- git_config(git_diff_config);
+ init_revisions(opt, prefix);
+ git_config(git_default_config); /* no "diff" UI options */
nr_sha1 = 0;
- init_revisions(opt);
opt->abbrev = 0;
opt->diff = 1;
argc = setup_revisions(argc, argv, opt, NULL);
};
static const char builtin_diff_usage[] =
-"diff <options> <rev>{0,2} -- <path>*";
+"git-diff <options> <rev>{0,2} -- <path>*";
static int builtin_diff_files(struct rev_info *revs,
int argc, const char **argv)
int argc, const char **argv,
struct blobinfo *blob)
{
- /* Blobs: the arguments are reversed when setup_revisions()
- * picked them up.
- */
unsigned mode = canon_mode(S_IFREG | 0644);
if (argc > 1)
stuff_change(&revs->diffopt,
mode, mode,
- blob[1].sha1, blob[0].sha1,
- blob[0].name, blob[0].name);
+ blob[0].sha1, blob[1].sha1,
+ blob[0].name, blob[1].name);
diffcore_std(&revs->diffopt);
diff_flush(&revs->diffopt);
return 0;
usage(builtin_diff_usage);
/* We saw two trees, ent[0] and ent[1].
- * if ent[1] is unintesting, they are swapped
+ * if ent[1] is uninteresting, they are swapped
*/
if (ent[1].item->flags & UNINTERESTING)
swap = 1;
add_pending_object(revs, obj, "HEAD");
}
-int cmd_diff(int argc, const char **argv, char **envp)
+int cmd_diff(int argc, const char **argv, const char *prefix)
{
int i;
struct rev_info rev;
* Other cases are errors.
*/
- git_config(git_diff_config);
- init_revisions(&rev);
+ git_config(git_diff_ui_config);
+ init_revisions(&rev, prefix);
argc = setup_revisions(argc, argv, &rev, NULL);
if (!rev.diffopt.output_format) {
rev.diffopt.output_format = DIFF_FORMAT_PATCH;
- diff_setup_done(&rev.diffopt);
+ if (diff_setup_done(&rev.diffopt) < 0)
+ die("diff_setup_done failed");
}
/* Do we have --cached and not have a pending object, then
obj = deref_tag(obj, NULL, 0);
if (!obj)
die("invalid object '%s' given.", name);
- if (obj->type == TYPE_COMMIT)
+ if (obj->type == OBJ_COMMIT)
obj = &((struct commit *)obj)->tree->object;
- if (obj->type == TYPE_TREE) {
+ if (obj->type == OBJ_TREE) {
if (ARRAY_SIZE(ent) <= ents)
die("more than %d trees given: '%s'",
(int) ARRAY_SIZE(ent), name);
ents++;
continue;
}
- if (obj->type == TYPE_BLOB) {
+ if (obj->type == OBJ_BLOB) {
if (2 <= blobs)
die("more than two blobs given: '%s'", name);
memcpy(blob[blobs].sha1, obj->sha1, 20);
return builtin_diff_index(&rev, argc, argv);
else if (ents == 2)
return builtin_diff_tree(&rev, argc, argv, ent);
+ else if ((ents == 3) && (ent[0].item->flags & UNINTERESTING)) {
+ /* diff A...B where there is one sane merge base between
+ * A and B. We have ent[0] == merge-base, ent[1] == A,
+ * and ent[2] == B. Show diff between the base and B.
+ */
+ ent[1] = ent[2];
+ return builtin_diff_tree(&rev, argc, argv, ent);
+ }
else
- return builtin_diff_combined(&rev, argc, argv, ent, ents);
+ return builtin_diff_combined(&rev, argc, argv,
+ ent, ents);
usage(builtin_diff_usage);
}
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include "commit.h"
+#include "diff.h"
+#include "revision.h"
+#include "tag.h"
+
+static const char *fmt_merge_msg_usage =
+ "git-fmt-merge-msg [--summary] [--no-summary] [--file <file>]";
+
+static int merge_summary = 0;
+
+static int fmt_merge_msg_config(const char *key, const char *value)
+{
+ if (!strcmp("merge.summary", key))
+ merge_summary = git_config_bool(key, value);
+ return 0;
+}
+
+struct list {
+ char **list;
+ void **payload;
+ unsigned nr, alloc;
+};
+
+static void append_to_list(struct list *list, char *value, void *payload)
+{
+ if (list->nr == list->alloc) {
+ list->alloc += 32;
+ list->list = realloc(list->list, sizeof(char *) * list->alloc);
+ list->payload = realloc(list->payload,
+ sizeof(char *) * list->alloc);
+ }
+ list->payload[list->nr] = payload;
+ list->list[list->nr++] = value;
+}
+
+static int find_in_list(struct list *list, char *value)
+{
+ int i;
+
+ for (i = 0; i < list->nr; i++)
+ if (!strcmp(list->list[i], value))
+ return i;
+
+ return -1;
+}
+
+static void free_list(struct list *list)
+{
+ int i;
+
+ if (list->alloc == 0)
+ return;
+
+ for (i = 0; i < list->nr; i++) {
+ free(list->list[i]);
+ if (list->payload[i])
+ free(list->payload[i]);
+ }
+ free(list->list);
+ free(list->payload);
+ list->nr = list->alloc = 0;
+}
+
+struct src_data {
+ struct list branch, tag, r_branch, generic;
+ int head_status;
+};
+
+static struct list srcs = { NULL, NULL, 0, 0};
+static struct list origins = { NULL, NULL, 0, 0};
+
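+/*
+ * Each input line is expected to look like a FETCH_HEAD entry, e.g.
+ * (illustrative):
+ *
+ *   <sha1> TAB TAB branch 'topic' of git://host.xz/repo
+ *
+ * or "<sha1> TAB not-for-merge TAB ..." for a ref that is fetched but
+ * not meant to be merged.
+ */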
+static int handle_line(char *line)
+{
+ int i, len = strlen(line);
+ unsigned char *sha1;
+ char *src, *origin;
+ struct src_data *src_data;
+ int pulling_head = 0;
+
+ if (len < 43 || line[40] != '\t')
+ return 1;
+
+ if (!strncmp(line + 41, "not-for-merge", 13))
+ return 0;
+
+ if (line[41] != '\t')
+ return 2;
+
+ line[40] = 0;
+ sha1 = xmalloc(20);
+ i = get_sha1(line, sha1);
+ line[40] = '\t';
+ if (i)
+ return 3;
+
+ if (line[len - 1] == '\n')
+ line[len - 1] = 0;
+ line += 42;
+
+ src = strstr(line, " of ");
+ if (src) {
+ *src = 0;
+ src += 4;
+ pulling_head = 0;
+ } else {
+ src = line;
+ pulling_head = 1;
+ }
+
+ i = find_in_list(&srcs, src);
+ if (i < 0) {
+ i = srcs.nr;
+ append_to_list(&srcs, strdup(src),
+ xcalloc(1, sizeof(struct src_data)));
+ }
+ src_data = srcs.payload[i];
+
+ if (pulling_head) {
+ origin = strdup(src);
+ src_data->head_status |= 1;
+ } else if (!strncmp(line, "branch ", 7)) {
+ origin = strdup(line + 7);
+ append_to_list(&src_data->branch, origin, NULL);
+ src_data->head_status |= 2;
+ } else if (!strncmp(line, "tag ", 4)) {
+ origin = line;
+ append_to_list(&src_data->tag, strdup(origin + 4), NULL);
+ src_data->head_status |= 2;
+ } else if (!strncmp(line, "remote branch ", 14)) {
+ origin = strdup(line + 14);
+ append_to_list(&src_data->r_branch, origin, NULL);
+ src_data->head_status |= 2;
+ } else {
+ origin = strdup(src);
+ append_to_list(&src_data->generic, strdup(line), NULL);
+ src_data->head_status |= 2;
+ }
+
+ if (!strcmp(".", src) || !strcmp(src, origin)) {
+ int len = strlen(origin);
+ if (origin[0] == '\'' && origin[len - 1] == '\'') {
+ char *new_origin = malloc(len - 1);
+ memcpy(new_origin, origin + 1, len - 2);
+ new_origin[len - 1] = 0;
+ origin = new_origin;
+ } else
+ origin = strdup(origin);
+ } else {
+ char *new_origin = malloc(strlen(origin) + strlen(src) + 5);
+ sprintf(new_origin, "%s of %s", origin, src);
+ origin = new_origin;
+ }
+ append_to_list(&origins, origin, sha1);
+ return 0;
+}
+
+static void print_joined(const char *singular, const char *plural,
+ struct list *list)
+{
+ if (list->nr == 0)
+ return;
+ if (list->nr == 1) {
+ printf("%s%s", singular, list->list[0]);
+ } else {
+ int i;
+ printf("%s", plural);
+ for (i = 0; i < list->nr - 1; i++)
+ printf("%s%s", i > 0 ? ", " : "", list->list[i]);
+ printf(" and %s", list->list[list->nr - 1]);
+ }
+}
+
+static void shortlog(const char *name, unsigned char *sha1,
+ struct commit *head, struct rev_info *rev, int limit)
+{
+ int i, count = 0;
+ struct commit *commit;
+ struct object *branch;
+ struct list subjects = { NULL, NULL, 0, 0 };
+ int flags = UNINTERESTING | TREECHANGE | SEEN | SHOWN | ADDED;
+
+ branch = deref_tag(parse_object(sha1), sha1_to_hex(sha1), 40);
+ if (!branch || branch->type != OBJ_COMMIT)
+ return;
+
+ setup_revisions(0, NULL, rev, NULL);
+ rev->ignore_merges = 1;
+ add_pending_object(rev, branch, name);
+ add_pending_object(rev, &head->object, "^HEAD");
+ head->object.flags |= UNINTERESTING;
+ prepare_revision_walk(rev);
+ while ((commit = get_revision(rev)) != NULL) {
+ char *oneline, *bol, *eol;
+
+ /* ignore merges */
+ if (commit->parents && commit->parents->next)
+ continue;
+
+ count++;
+ if (subjects.nr > limit)
+ continue;
+
+ bol = strstr(commit->buffer, "\n\n");
+ if (!bol) {
+ append_to_list(&subjects, strdup(sha1_to_hex(
+ commit->object.sha1)),
+ NULL);
+ continue;
+ }
+
+ bol += 2;
+ eol = strchr(bol, '\n');
+
+ if (eol) {
+ int len = eol - bol;
+ oneline = malloc(len + 1);
+ memcpy(oneline, bol, len);
+ oneline[len] = 0;
+ } else
+ oneline = strdup(bol);
+ append_to_list(&subjects, oneline, NULL);
+ }
+
+ if (count > limit)
+ printf("\n* %s: (%d commits)\n", name, count);
+ else
+ printf("\n* %s:\n", name);
+
+ for (i = 0; i < subjects.nr; i++)
+ if (i >= limit)
+ printf(" ...\n");
+ else
+ printf(" %s\n", subjects.list[i]);
+
+ clear_commit_marks((struct commit *)branch, flags);
+ clear_commit_marks(head, flags);
+ free_commit_list(rev->commits);
+ rev->commits = NULL;
+ rev->pending.nr = 0;
+
+ free_list(&subjects);
+}
+
+int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix)
+{
+ int limit = 20, i = 0;
+ char line[1024];
+ FILE *in = stdin;
+ const char *sep = "";
+ unsigned char head_sha1[20];
+ const char *head, *current_branch;
+
+ git_config(fmt_merge_msg_config);
+
+ while (argc > 1) {
+ if (!strcmp(argv[1], "--summary"))
+ merge_summary = 1;
+ else if (!strcmp(argv[1], "--no-summary"))
+ merge_summary = 0;
+ else if (!strcmp(argv[1], "-F") || !strcmp(argv[1], "--file")) {
+ if (argc < 2)
+ die ("Which file?");
+ if (!strcmp(argv[2], "-"))
+ in = stdin;
+ else {
+ fclose(in);
+ in = fopen(argv[2], "r");
+ }
+ argc--; argv++;
+ } else
+ break;
+ argc--; argv++;
+ }
+
+ if (argc > 1)
+ usage(fmt_merge_msg_usage);
+
+ /* get current branch */
+ head = strdup(git_path("HEAD"));
+ current_branch = resolve_ref(head, head_sha1, 1);
+ current_branch += strlen(head) - 4;
+ free((char *)head);
+ if (!strncmp(current_branch, "refs/heads/", 11))
+ current_branch += 11;
+
+ while (fgets(line, sizeof(line), in)) {
+ i++;
+ if (line[0] == 0)
+ continue;
+ if (handle_line(line))
+ die ("Error in line %d: %s", i, line);
+ }
+
+ printf("Merge ");
+ for (i = 0; i < srcs.nr; i++) {
+ struct src_data *src_data = srcs.payload[i];
+ const char *subsep = "";
+
+ printf(sep);
+ sep = "; ";
+
+ if (src_data->head_status == 1) {
+ printf(srcs.list[i]);
+ continue;
+ }
+ if (src_data->head_status == 3) {
+ subsep = ", ";
+ printf("HEAD");
+ }
+ if (src_data->branch.nr) {
+ printf(subsep);
+ subsep = ", ";
+ print_joined("branch ", "branches ", &src_data->branch);
+ }
+ if (src_data->r_branch.nr) {
+ printf(subsep);
+ subsep = ", ";
+ print_joined("remote branch ", "remote branches ",
+ &src_data->r_branch);
+ }
+ if (src_data->tag.nr) {
+ printf(subsep);
+ subsep = ", ";
+ print_joined("tag ", "tags ", &src_data->tag);
+ }
+ if (src_data->generic.nr) {
+ printf(subsep);
+ print_joined("commit ", "commits ", &src_data->generic);
+ }
+ if (strcmp(".", srcs.list[i]))
+ printf(" of %s", srcs.list[i]);
+ }
+
+ if (!strcmp("master", current_branch))
+ putchar('\n');
+ else
+ printf(" into %s\n", current_branch);
+
+ if (merge_summary) {
+ struct commit *head;
+ struct rev_info rev;
+
+ head = lookup_commit(head_sha1);
+ init_revisions(&rev, prefix);
+ rev.commit_format = CMIT_FMT_ONELINE;
+ rev.ignore_merges = 1;
+ rev.limited = 1;
+
+ for (i = 0; i < origins.nr; i++)
+ shortlog(origins.list[i], origins.payload[i],
+ head, &rev, limit);
+ }
+
+ /* No cleanup yet; is standalone anyway */
+
+ return 0;
+}
+
return 0;
}
+enum grep_pat_token {
+ GREP_PATTERN,
+ GREP_AND,
+ GREP_OPEN_PAREN,
+ GREP_CLOSE_PAREN,
+ GREP_NOT,
+ GREP_OR,
+};
+
struct grep_pat {
struct grep_pat *next;
const char *origin;
int no;
+ enum grep_pat_token token;
const char *pattern;
regex_t regexp;
};
+enum grep_expr_node {
+ GREP_NODE_ATOM,
+ GREP_NODE_NOT,
+ GREP_NODE_AND,
+ GREP_NODE_OR,
+};
+
+struct grep_expr {
+ enum grep_expr_node node;
+ union {
+ struct grep_pat *atom;
+ struct grep_expr *unary;
+ struct {
+ struct grep_expr *left;
+ struct grep_expr *right;
+ } binary;
+ } u;
+};
+
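+/*
+ * The expression tree above backs git-grep's extended pattern syntax,
+ * where patterns are combined per line with --and, --or, --not and
+ * grouping, e.g. (illustrative invocation):
+ *
+ *   git grep -e foo --and \( -e bar --or --not -e baz \)
+ */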
struct grep_opt {
struct grep_pat *pattern_list;
struct grep_pat **pattern_tail;
+ struct grep_expr *pattern_expression;
+ int prefix_length;
regex_t regexp;
unsigned linenum:1;
unsigned invert:1;
#define GREP_BINARY_NOMATCH 1
#define GREP_BINARY_TEXT 2
unsigned binary:2;
+ unsigned extended:1;
+ unsigned relative:1;
int regflags;
unsigned pre_context;
unsigned post_context;
};
static void add_pattern(struct grep_opt *opt, const char *pat,
- const char *origin, int no)
+ const char *origin, int no, enum grep_pat_token t)
{
struct grep_pat *p = xcalloc(1, sizeof(*p));
p->pattern = pat;
p->origin = origin;
p->no = no;
+ p->token = t;
*opt->pattern_tail = p;
opt->pattern_tail = &p->next;
p->next = NULL;
}
+static void compile_regexp(struct grep_pat *p, struct grep_opt *opt)
+{
+ int err = regcomp(&p->regexp, p->pattern, opt->regflags);
+ if (err) {
+ char errbuf[1024];
+ char where[1024];
+ if (p->no)
+ sprintf(where, "In '%s' at %d, ",
+ p->origin, p->no);
+ else if (p->origin)
+ sprintf(where, "%s, ", p->origin);
+ else
+ where[0] = 0;
+ regerror(err, &p->regexp, errbuf, 1024);
+ regfree(&p->regexp);
+ die("%s'%s': %s", where, p->pattern, errbuf);
+ }
+}
+
+#if DEBUG
+static inline void indent(int in)
+{
+ int i;
+ for (i = 0; i < in; i++) putchar(' ');
+}
+
+static void dump_pattern_exp(struct grep_expr *x, int in)
+{
+ switch (x->node) {
+ case GREP_NODE_ATOM:
+ indent(in);
+ puts(x->u.atom->pattern);
+ break;
+ case GREP_NODE_NOT:
+ indent(in);
+ puts("--not");
+ dump_pattern_exp(x->u.unary, in+1);
+ break;
+ case GREP_NODE_AND:
+ dump_pattern_exp(x->u.binary.left, in+1);
+ indent(in);
+ puts("--and");
+ dump_pattern_exp(x->u.binary.right, in+1);
+ break;
+ case GREP_NODE_OR:
+ dump_pattern_exp(x->u.binary.left, in+1);
+ indent(in);
+ puts("--or");
+ dump_pattern_exp(x->u.binary.right, in+1);
+ break;
+ }
+}
+
+static void looking_at(const char *msg, struct grep_pat **list)
+{
+ struct grep_pat *p = *list;
+ fprintf(stderr, "%s: looking at ", msg);
+ if (!p)
+ fprintf(stderr, "empty\n");
+ else
+ fprintf(stderr, "<%s>\n", p->pattern);
+}
+#else
+#define looking_at(a,b) do {} while(0)
+#endif
+
+static struct grep_expr *compile_pattern_expr(struct grep_pat **);
+static struct grep_expr *compile_pattern_atom(struct grep_pat **list)
+{
+ struct grep_pat *p;
+ struct grep_expr *x;
+
+ looking_at("atom", list);
+
+ p = *list;
+ switch (p->token) {
+ case GREP_PATTERN: /* atom */
+ x = xcalloc(1, sizeof (struct grep_expr));
+ x->node = GREP_NODE_ATOM;
+ x->u.atom = p;
+ *list = p->next;
+ return x;
+ case GREP_OPEN_PAREN:
+ *list = p->next;
+ x = compile_pattern_expr(list);
+ if (!x)
+ return NULL;
+ if (!*list || (*list)->token != GREP_CLOSE_PAREN)
+ die("unmatched parenthesis");
+ *list = (*list)->next;
+ return x;
+ default:
+ return NULL;
+ }
+}
+
+static struct grep_expr *compile_pattern_not(struct grep_pat **list)
+{
+ struct grep_pat *p;
+ struct grep_expr *x;
+
+ looking_at("not", list);
+
+ p = *list;
+ switch (p->token) {
+ case GREP_NOT:
+ if (!p->next)
+ die("--not not followed by pattern expression");
+ *list = p->next;
+ x = xcalloc(1, sizeof (struct grep_expr));
+ x->node = GREP_NODE_NOT;
+ x->u.unary = compile_pattern_not(list);
+ if (!x->u.unary)
+ die("--not followed by non pattern expression");
+ return x;
+ default:
+ return compile_pattern_atom(list);
+ }
+}
+
+static struct grep_expr *compile_pattern_and(struct grep_pat **list)
+{
+ struct grep_pat *p;
+ struct grep_expr *x, *y, *z;
+
+ looking_at("and", list);
+
+ x = compile_pattern_not(list);
+ p = *list;
+ if (p && p->token == GREP_AND) {
+ if (!p->next)
+ die("--and not followed by pattern expression");
+ *list = p->next;
+ y = compile_pattern_and(list);
+ if (!y)
+ die("--and not followed by pattern expression");
+ z = xcalloc(1, sizeof (struct grep_expr));
+ z->node = GREP_NODE_AND;
+ z->u.binary.left = x;
+ z->u.binary.right = y;
+ return z;
+ }
+ return x;
+}
+
+static struct grep_expr *compile_pattern_or(struct grep_pat **list)
+{
+ struct grep_pat *p;
+ struct grep_expr *x, *y, *z;
+
+ looking_at("or", list);
+
+ x = compile_pattern_and(list);
+ p = *list;
+ if (x && p && p->token != GREP_CLOSE_PAREN) {
+ y = compile_pattern_or(list);
+ if (!y)
+ die("not a pattern expression %s", p->pattern);
+ z = xcalloc(1, sizeof (struct grep_expr));
+ z->node = GREP_NODE_OR;
+ z->u.binary.left = x;
+ z->u.binary.right = y;
+ return z;
+ }
+ return x;
+}
+
+static struct grep_expr *compile_pattern_expr(struct grep_pat **list)
+{
+ looking_at("expr", list);
+
+ return compile_pattern_or(list);
+}
+
static void compile_patterns(struct grep_opt *opt)
{
struct grep_pat *p;
+
+ /* First compile regexps */
for (p = opt->pattern_list; p; p = p->next) {
- int err = regcomp(&p->regexp, p->pattern, opt->regflags);
- if (err) {
- char errbuf[1024];
- char where[1024];
- if (p->no)
- sprintf(where, "In '%s' at %d, ",
- p->origin, p->no);
- else if (p->origin)
- sprintf(where, "%s, ", p->origin);
- else
- where[0] = 0;
- regerror(err, &p->regexp, errbuf, 1024);
- regfree(&p->regexp);
- die("%s'%s': %s", where, p->pattern, errbuf);
- }
+ if (p->token == GREP_PATTERN)
+ compile_regexp(p, opt);
+ else
+ opt->extended = 1;
}
+
+ if (!opt->extended)
+ return;
+
+ /* Then bundle them up in an expression.
+ * A classic recursive descent parser would do.
+ */
+ p = opt->pattern_list;
+ opt->pattern_expression = compile_pattern_expr(&p);
+#if DEBUG
+ dump_pattern_exp(opt->pattern_expression, 0);
+#endif
+ if (p)
+ die("incomplete pattern expression: %s", p->pattern);
}
static char *end_of_line(char *cp, unsigned long *left)
}
}
+static int match_one_pattern(struct grep_opt *opt, struct grep_pat *p, char *bol, char *eol)
+{
+ int hit = 0;
+ int at_true_bol = 1;
+ regmatch_t pmatch[10];
+
+ again:
+ if (!opt->fixed) {
+ regex_t *exp = &p->regexp;
+ hit = !regexec(exp, bol, ARRAY_SIZE(pmatch),
+ pmatch, 0);
+ }
+ else {
+ hit = !fixmatch(p->pattern, bol, pmatch);
+ }
+
+ if (hit && opt->word_regexp) {
+ if ((pmatch[0].rm_so < 0) ||
+ (eol - bol) <= pmatch[0].rm_so ||
+ (pmatch[0].rm_eo < 0) ||
+ (eol - bol) < pmatch[0].rm_eo)
+ die("regexp returned nonsense");
+
+ /* Match beginning must be either beginning of the
+ * line, or at word boundary (i.e. the last char must
+ * not be a word char). Similarly, match end must be
+ * either end of the line, or at word boundary
+ * (i.e. the next char must not be a word char).
+ */
+ if ( ((pmatch[0].rm_so == 0 && at_true_bol) ||
+ !word_char(bol[pmatch[0].rm_so-1])) &&
+ ((pmatch[0].rm_eo == (eol-bol)) ||
+ !word_char(bol[pmatch[0].rm_eo])) )
+ ;
+ else
+ hit = 0;
+
+ if (!hit && pmatch[0].rm_so + bol + 1 < eol) {
+ /* There could be more than one match on the
+ * line, and the first match might not be
+ * strict word match. But later ones could be!
+ */
+ bol = pmatch[0].rm_so + bol + 1;
+ at_true_bol = 0;
+ goto again;
+ }
+ }
+ return hit;
+}
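
To illustrate why the retry loop above is needed for the word-regexp match (-w): an earlier hit that starts inside a word must not hide a later, properly delimited hit on the same line. A standalone sketch under that assumption follows; word_char() is re-declared here and a plain regexec() stands in for the compiled pattern list.

#include <ctype.h>
#include <regex.h>
#include <stdio.h>
#include <string.h>

static int word_char(char ch)
{
	return isalnum((unsigned char)ch) || ch == '_';
}

int main(void)
{
	const char *line = "xfoo foo";
	const char *bol = line;
	const char *eol = line + strlen(line);
	regex_t re;
	regmatch_t m;

	regcomp(&re, "foo", REG_EXTENDED);
	while (!regexec(&re, bol, 1, &m, 0)) {
		const char *s = bol + m.rm_so, *e = bol + m.rm_eo;
		int at_bol = (s == line);

		if ((at_bol || !word_char(s[-1])) &&
		    (e == eol || !word_char(*e))) {
			/* the hit inside "xfoo" was rejected; this one,
			 * at offset 5, is properly delimited */
			printf("word match at offset %ld\n", (long)(s - line));
			break;
		}
		bol = s + 1;	/* retry past the rejected start */
	}
	regfree(&re);
	return 0;
}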
+
+static int match_expr_eval(struct grep_opt *opt,
+ struct grep_expr *x,
+ char *bol, char *eol)
+{
+ switch (x->node) {
+ case GREP_NODE_ATOM:
+ return match_one_pattern(opt, x->u.atom, bol, eol);
+ break;
+ case GREP_NODE_NOT:
+ return !match_expr_eval(opt, x->u.unary, bol, eol);
+ case GREP_NODE_AND:
+ return (match_expr_eval(opt, x->u.binary.left, bol, eol) &&
+ match_expr_eval(opt, x->u.binary.right, bol, eol));
+ case GREP_NODE_OR:
+ return (match_expr_eval(opt, x->u.binary.left, bol, eol) ||
+ match_expr_eval(opt, x->u.binary.right, bol, eol));
+ }
+ die("Unexpected node type (internal error) %d\n", x->node);
+}
+
+static int match_expr(struct grep_opt *opt, char *bol, char *eol)
+{
+ struct grep_expr *x = opt->pattern_expression;
+ return match_expr_eval(opt, x, bol, eol);
+}
+
+static int match_line(struct grep_opt *opt, char *bol, char *eol)
+{
+ struct grep_pat *p;
+ if (opt->extended)
+ return match_expr(opt, bol, eol);
+ for (p = opt->pattern_list; p; p = p->next) {
+ if (match_one_pattern(opt, p, bol, eol))
+ return 1;
+ }
+ return 0;
+}
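
The parser above gives OR (implied by simply listing patterns next to each other, since --or is a no-op token) the loosest binding, --and a tighter one, and --not and parentheses the tightest; match_expr_eval() then walks the resulting tree once per line. A standalone sketch of that evaluation with simplified types, using strstr() in place of the compiled regexps, for the expression A AND (B OR C) -- roughly what an invocation like "git grep -e A --and \( -e B --or -e C \)" would build:

#include <stdio.h>
#include <string.h>

enum node_kind { ATOM, NOT, AND, OR };

struct expr {
	enum node_kind kind;
	const char *pat;		/* ATOM only */
	struct expr *left, *right;	/* NOT uses left only */
};

static int eval(const struct expr *x, const char *line)
{
	switch (x->kind) {
	case ATOM: return strstr(line, x->pat) != NULL;
	case NOT:  return !eval(x->left, line);
	case AND:  return eval(x->left, line) && eval(x->right, line);
	case OR:   return eval(x->left, line) || eval(x->right, line);
	}
	return 0;
}

int main(void)
{
	struct expr a = { ATOM, "A" }, b = { ATOM, "B" }, c = { ATOM, "C" };
	struct expr b_or_c = { OR, NULL, &b, &c };
	struct expr e = { AND, NULL, &a, &b_or_c };

	printf("%d\n", eval(&e, "A and C here"));	/* 1: has A and C */
	printf("%d\n", eval(&e, "only B here"));	/* 0: B alone, no A */
	return 0;
}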
+
static int grep_buffer(struct grep_opt *opt, const char *name,
char *buf, unsigned long size)
{
hunk_mark = "--\n";
while (left) {
- regmatch_t pmatch[10];
char *eol, ch;
int hit = 0;
- struct grep_pat *p;
eol = end_of_line(bol, &left);
ch = *eol;
*eol = 0;
- for (p = opt->pattern_list; p; p = p->next) {
- if (!opt->fixed) {
- regex_t *exp = &p->regexp;
- hit = !regexec(exp, bol, ARRAY_SIZE(pmatch),
- pmatch, 0);
- }
- else {
- hit = !fixmatch(p->pattern, bol, pmatch);
- }
+ hit = match_line(opt, bol, eol);
- if (hit && opt->word_regexp) {
- /* Match beginning must be either
- * beginning of the line, or at word
- * boundary (i.e. the last char must
- * not be alnum or underscore).
- */
- if ((pmatch[0].rm_so < 0) ||
- (eol - bol) <= pmatch[0].rm_so ||
- (pmatch[0].rm_eo < 0) ||
- (eol - bol) < pmatch[0].rm_eo)
- die("regexp returned nonsense");
- if (pmatch[0].rm_so != 0 &&
- word_char(bol[pmatch[0].rm_so-1]))
- hit = 0;
- if (pmatch[0].rm_eo != (eol-bol) &&
- word_char(bol[pmatch[0].rm_eo]))
- hit = 0;
- }
- if (hit)
- break;
- }
/* "grep -v -e foo -e bla" should list lines
* that do not have either, so inversion should
* be done outside.
return !!last_hit;
}
-static int grep_sha1(struct grep_opt *opt, const unsigned char *sha1, const char *name)
+static int grep_sha1(struct grep_opt *opt, const unsigned char *sha1, const char *name, int tree_name_len)
{
unsigned long size;
char *data;
char type[20];
+ char *to_free = NULL;
int hit;
+
data = read_sha1_file(sha1, type, &size);
if (!data) {
error("'%s': unable to read %s", name, sha1_to_hex(sha1));
return 0;
}
+ if (opt->relative && opt->prefix_length) {
+ static char name_buf[PATH_MAX];
+ char *cp;
+ int name_len = strlen(name) - opt->prefix_length + 1;
+
+ if (!tree_name_len)
+ name += opt->prefix_length;
+ else {
+ if (ARRAY_SIZE(name_buf) <= name_len)
+ cp = to_free = xmalloc(name_len);
+ else
+ cp = name_buf;
+ memcpy(cp, name, tree_name_len);
+ strcpy(cp + tree_name_len,
+ name + tree_name_len + opt->prefix_length);
+ name = cp;
+ }
+ }
hit = grep_buffer(opt, name, data, size);
free(data);
+ free(to_free);
return hit;
}
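
The splice above turns a tree-qualified name such as "HEAD:sub/dir/file.c" into "HEAD:dir/file.c" when the user sits in "sub/": the "<tree>:" part is kept and the working-directory prefix is dropped from the path that follows. A standalone sketch with made-up names and lengths:

#include <stdio.h>
#include <string.h>

int main(void)
{
	const char *name = "HEAD:sub/dir/file.c";
	int tree_name_len = 5;		/* strlen("HEAD:") */
	int prefix_length = 4;		/* strlen("sub/") */
	char buf[64];

	/* keep "HEAD:", skip "sub/", keep the rest */
	memcpy(buf, name, tree_name_len);
	strcpy(buf + tree_name_len, name + tree_name_len + prefix_length);
	printf("%s\n", buf);		/* HEAD:dir/file.c */
	return 0;
}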
return 0;
}
close(i);
+ if (opt->relative && opt->prefix_length)
+ filename += opt->prefix_length;
i = grep_buffer(opt, filename, data, st.st_size);
free(data);
return i;
static int external_grep(struct grep_opt *opt, const char **paths, int cached)
{
- int i, nr, argc, hit, len;
+ int i, nr, argc, hit, len, status;
const char *argv[MAXARGS+1];
char randarg[ARGBUF];
char *argptr = randarg;
struct grep_pat *p;
+ if (opt->extended || (opt->relative && opt->prefix_length))
+ return -1;
len = nr = 0;
push_arg("grep");
if (opt->fixed)
argv[argc++] = name;
if (argc < MAXARGS)
continue;
- hit += exec_grep(argc, argv);
+ status = exec_grep(argc, argv);
+ if (0 < status)
+ hit = 1;
argc = nr;
}
- if (argc > nr)
- hit += exec_grep(argc, argv);
- return 0;
+ if (argc > nr) {
+ status = exec_grep(argc, argv);
+ if (0 < status)
+ hit = 1;
+ }
+ return hit;
}
static int grep_cache(struct grep_opt *opt, const char **paths, int cached)
if (!pathspec_matches(paths, ce->name))
continue;
if (cached)
- hit |= grep_sha1(opt, ce->sha1, ce->name);
+ hit |= grep_sha1(opt, ce->sha1, ce->name, 0);
else
hit |= grep_file(opt, ce->name);
}
int hit = 0;
struct name_entry entry;
char *down;
- char *path_buf = xmalloc(PATH_MAX + strlen(tree_name) + 100);
+ int tn_len = strlen(tree_name);
+ char *path_buf = xmalloc(PATH_MAX + tn_len + 100);
- if (tree_name[0]) {
- int offset = sprintf(path_buf, "%s:", tree_name);
- down = path_buf + offset;
+ if (tn_len) {
+ tn_len = sprintf(path_buf, "%s:", tree_name);
+ down = path_buf + tn_len;
strcat(down, base);
}
else {
if (!pathspec_matches(paths, down))
;
else if (S_ISREG(entry.mode))
- hit |= grep_sha1(opt, entry.sha1, path_buf);
+ hit |= grep_sha1(opt, entry.sha1, path_buf, tn_len);
else if (S_ISDIR(entry.mode)) {
char type[20];
struct tree_desc sub;
static int grep_object(struct grep_opt *opt, const char **paths,
struct object *obj, const char *name)
{
- if (obj->type == TYPE_BLOB)
- return grep_sha1(opt, obj->sha1, name);
- if (obj->type == TYPE_COMMIT || obj->type == TYPE_TREE) {
+ if (obj->type == OBJ_BLOB)
+ return grep_sha1(opt, obj->sha1, name, 0);
+ if (obj->type == OBJ_COMMIT || obj->type == OBJ_TREE) {
struct tree_desc tree;
void *data;
int hit;
static const char builtin_grep_usage[] =
"git-grep <option>* <rev>* [-e] <pattern> [<path>...]";
-int cmd_grep(int argc, const char **argv, char **envp)
+static const char emsg_invalid_context_len[] =
+"%s: invalid context length argument";
+static const char emsg_missing_context_len[] =
+"missing context length argument";
+static const char emsg_missing_argument[] =
+"option requires an argument -%s";
+
+int cmd_grep(int argc, const char **argv, const char *prefix)
{
int hit = 0;
int cached = 0;
int seen_dashdash = 0;
struct grep_opt opt;
struct object_array list = { 0, 0, NULL };
- const char *prefix = setup_git_directory();
const char **paths = NULL;
int i;
memset(&opt, 0, sizeof(opt));
+ opt.prefix_length = (prefix && *prefix) ? strlen(prefix) : 0;
+ opt.relative = 1;
opt.pattern_tail = &opt.pattern_list;
opt.regflags = REG_NEWLINE;
* pattern, but then what follows it must be zero or more
* valid refs up to the -- (if exists), and then existing
* paths. If there is an explicit pattern, then the first
- * unrecocnized non option is the beginning of the refs list
+ * unrecognized non option is the beginning of the refs list
* that continues up to the -- (if exists), and then paths.
*/
case 'A': case 'B': case 'C':
if (!arg[2]) {
if (argc <= 1)
- usage(builtin_grep_usage);
+ die(emsg_missing_context_len);
scan = *++argv;
argc--;
}
break;
}
if (sscanf(scan, "%u", &num) != 1)
- usage(builtin_grep_usage);
+ die(emsg_invalid_context_len, scan);
switch (arg[1]) {
case 'A':
opt.post_context = num;
int lno = 0;
char buf[1024];
if (argc <= 1)
- usage(builtin_grep_usage);
+ die(emsg_missing_argument, arg);
patterns = fopen(argv[1], "r");
if (!patterns)
die("'%s': %s", argv[1], strerror(errno));
/* ignore empty line like grep does */
if (!buf[0])
continue;
- add_pattern(&opt, strdup(buf), argv[1], ++lno);
+ add_pattern(&opt, strdup(buf), argv[1], ++lno,
+ GREP_PATTERN);
}
fclose(patterns);
argv++;
argc--;
continue;
}
+ if (!strcmp("--not", arg)) {
+ add_pattern(&opt, arg, "command line", 0, GREP_NOT);
+ continue;
+ }
+ if (!strcmp("--and", arg)) {
+ add_pattern(&opt, arg, "command line", 0, GREP_AND);
+ continue;
+ }
+ if (!strcmp("--or", arg))
+ continue; /* no-op */
+ if (!strcmp("(", arg)) {
+ add_pattern(&opt, arg, "command line", 0, GREP_OPEN_PAREN);
+ continue;
+ }
+ if (!strcmp(")", arg)) {
+ add_pattern(&opt, arg, "command line", 0, GREP_CLOSE_PAREN);
+ continue;
+ }
if (!strcmp("-e", arg)) {
if (1 < argc) {
- add_pattern(&opt, argv[1], "-e option", 0);
+ add_pattern(&opt, argv[1], "-e option", 0,
+ GREP_PATTERN);
argv++;
argc--;
continue;
}
- usage(builtin_grep_usage);
+ die(emsg_missing_argument, arg);
+ }
+ if (!strcmp("--full-name", arg)) {
+ opt.relative = 0;
+ continue;
}
- if (!strcmp("--", arg))
+ if (!strcmp("--", arg)) {
+ /* later processing wants to have this at argv[1] */
+ argv--;
+ argc++;
break;
+ }
if (*arg == '-')
usage(builtin_grep_usage);
/* First unrecognized non-option token */
if (!opt.pattern_list) {
- add_pattern(&opt, arg, "command line", 0);
+ add_pattern(&opt, arg, "command line", 0,
+ GREP_PATTERN);
break;
}
else {
verify_filename(prefix, argv[j]);
}
- if (i < argc)
+ if (i < argc) {
paths = get_pathspec(prefix, argv + i);
+ if (opt.prefix_length && opt.relative) {
+ /* Make sure we do not get outside of paths */
+ for (i = 0; paths[i]; i++)
+ if (strncmp(prefix, paths[i], opt.prefix_length))
+ die("git-grep: cannot generate relative filenames containing '..'");
+ }
+ }
else if (prefix) {
paths = xcalloc(2, sizeof(const char *));
paths[0] = prefix;
+++ /dev/null
-/*
- * builtin-help.c
- *
- * Builtin help-related commands (help, usage, version)
- */
-#include <sys/ioctl.h>
-#include "cache.h"
-#include "builtin.h"
-#include "exec_cmd.h"
-#include "common-cmds.h"
-
-static const char git_usage[] =
- "Usage: git [--version] [--exec-path[=GIT_EXEC_PATH]] [--help] COMMAND [ ARGS ]";
-
-/* most gui terms set COLUMNS (although some don't export it) */
-static int term_columns(void)
-{
- char *col_string = getenv("COLUMNS");
- int n_cols = 0;
-
- if (col_string && (n_cols = atoi(col_string)) > 0)
- return n_cols;
-
-#ifdef TIOCGWINSZ
- {
- struct winsize ws;
- if (!ioctl(1, TIOCGWINSZ, &ws)) {
- if (ws.ws_col)
- return ws.ws_col;
- }
- }
-#endif
-
- return 80;
-}
-
-static void oom(void)
-{
- fprintf(stderr, "git: out of memory\n");
- exit(1);
-}
-
-static inline void mput_char(char c, unsigned int num)
-{
- while(num--)
- putchar(c);
-}
-
-static struct cmdname {
- size_t len;
- char name[1];
-} **cmdname;
-static int cmdname_alloc, cmdname_cnt;
-
-static void add_cmdname(const char *name, int len)
-{
- struct cmdname *ent;
- if (cmdname_alloc <= cmdname_cnt) {
- cmdname_alloc = cmdname_alloc + 200;
- cmdname = realloc(cmdname, cmdname_alloc * sizeof(*cmdname));
- if (!cmdname)
- oom();
- }
- ent = malloc(sizeof(*ent) + len);
- if (!ent)
- oom();
- ent->len = len;
- memcpy(ent->name, name, len);
- ent->name[len] = 0;
- cmdname[cmdname_cnt++] = ent;
-}
-
-static int cmdname_compare(const void *a_, const void *b_)
-{
- struct cmdname *a = *(struct cmdname **)a_;
- struct cmdname *b = *(struct cmdname **)b_;
- return strcmp(a->name, b->name);
-}
-
-static void pretty_print_string_list(struct cmdname **cmdname, int longest)
-{
- int cols = 1, rows;
- int space = longest + 1; /* min 1 SP between words */
- int max_cols = term_columns() - 1; /* don't print *on* the edge */
- int i, j;
-
- if (space < max_cols)
- cols = max_cols / space;
- rows = (cmdname_cnt + cols - 1) / cols;
-
- qsort(cmdname, cmdname_cnt, sizeof(*cmdname), cmdname_compare);
-
- for (i = 0; i < rows; i++) {
- printf(" ");
-
- for (j = 0; j < cols; j++) {
- int n = j * rows + i;
- int size = space;
- if (n >= cmdname_cnt)
- break;
- if (j == cols-1 || n + rows >= cmdname_cnt)
- size = 1;
- printf("%-*s", size, cmdname[n]->name);
- }
- putchar('\n');
- }
-}
-
-static void list_commands(const char *exec_path, const char *pattern)
-{
- unsigned int longest = 0;
- char path[PATH_MAX];
- int dirlen;
- DIR *dir = opendir(exec_path);
- struct dirent *de;
-
- if (!dir) {
- fprintf(stderr, "git: '%s': %s\n", exec_path, strerror(errno));
- exit(1);
- }
-
- dirlen = strlen(exec_path);
- if (PATH_MAX - 20 < dirlen) {
- fprintf(stderr, "git: insanely long exec-path '%s'\n",
- exec_path);
- exit(1);
- }
-
- memcpy(path, exec_path, dirlen);
- path[dirlen++] = '/';
-
- while ((de = readdir(dir)) != NULL) {
- struct stat st;
- int entlen;
-
- if (strncmp(de->d_name, "git-", 4))
- continue;
- strcpy(path+dirlen, de->d_name);
- if (stat(path, &st) || /* stat, not lstat */
- !S_ISREG(st.st_mode) ||
- !(st.st_mode & S_IXUSR))
- continue;
-
- entlen = strlen(de->d_name);
- if (4 < entlen && !strcmp(de->d_name + entlen - 4, ".exe"))
- entlen -= 4;
-
- if (longest < entlen)
- longest = entlen;
-
- add_cmdname(de->d_name + 4, entlen-4);
- }
- closedir(dir);
-
- printf("git commands available in '%s'\n", exec_path);
- printf("----------------------------");
- mput_char('-', strlen(exec_path));
- putchar('\n');
- pretty_print_string_list(cmdname, longest - 4);
- putchar('\n');
-}
-
-static void list_common_cmds_help(void)
-{
- int i, longest = 0;
-
- for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
- if (longest < strlen(common_cmds[i].name))
- longest = strlen(common_cmds[i].name);
- }
-
- puts("The most commonly used git commands are:");
- for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
- printf(" %s", common_cmds[i].name);
- mput_char(' ', longest - strlen(common_cmds[i].name) + 4);
- puts(common_cmds[i].help);
- }
- puts("(use 'git help -a' to get a list of all installed git commands)");
-}
-
-void cmd_usage(int show_all, const char *exec_path, const char *fmt, ...)
-{
- if (fmt) {
- va_list ap;
-
- va_start(ap, fmt);
- printf("git: ");
- vprintf(fmt, ap);
- va_end(ap);
- putchar('\n');
- }
- else
- puts(git_usage);
-
- if (exec_path) {
- putchar('\n');
- if (show_all)
- list_commands(exec_path, "git-*");
- else
- list_common_cmds_help();
- }
-
- exit(1);
-}
-
-static void show_man_page(const char *git_cmd)
-{
- const char *page;
-
- if (!strncmp(git_cmd, "git", 3))
- page = git_cmd;
- else {
- int page_len = strlen(git_cmd) + 4;
- char *p = malloc(page_len + 1);
- strcpy(p, "git-");
- strcpy(p + 4, git_cmd);
- p[page_len] = 0;
- page = p;
- }
-
- execlp("man", "man", page, NULL);
-}
-
-int cmd_version(int argc, const char **argv, char **envp)
-{
- printf("git version %s\n", git_version_string);
- return 0;
-}
-
-int cmd_help(int argc, const char **argv, char **envp)
-{
- const char *help_cmd = argv[1];
- if (!help_cmd)
- cmd_usage(0, git_exec_path(), NULL);
- else if (!strcmp(help_cmd, "--all") || !strcmp(help_cmd, "-a"))
- cmd_usage(1, git_exec_path(), NULL);
- else
- show_man_page(help_cmd);
- return 0;
-}
-
-
* On the other hand, it might just make lookup slower and messier. You
* be the judge. The default case is to have one DB per managed directory.
*/
-int cmd_init_db(int argc, const char **argv, char **envp)
+int cmd_init_db(int argc, const char **argv, const char *prefix)
{
const char *git_dir;
const char *sha1_dir;
else if (!strncmp(arg, "--shared=", 9))
shared_repository = git_config_perm("arg", arg+9);
else
- die(init_db_usage);
+ usage(init_db_usage);
}
/*
#include "revision.h"
#include "log-tree.h"
#include "builtin.h"
+#include <time.h>
+#include <sys/time.h>
/* this is in builtin-diff.c */
void add_head(struct rev_info *revs);
-static void cmd_log_init(int argc, const char **argv, char **envp,
+static void cmd_log_init(int argc, const char **argv, const char *prefix,
struct rev_info *rev)
{
rev->abbrev = DEFAULT_ABBREV;
struct commit *commit;
prepare_revision_walk(rev);
- setup_pager();
while ((commit = get_revision(rev)) != NULL) {
log_tree_commit(rev, commit);
free(commit->buffer);
return 0;
}
-int cmd_whatchanged(int argc, const char **argv, char **envp)
+int cmd_whatchanged(int argc, const char **argv, const char *prefix)
{
struct rev_info rev;
- init_revisions(&rev);
+ git_config(git_diff_ui_config);
+ init_revisions(&rev, prefix);
rev.diff = 1;
rev.diffopt.recursive = 1;
rev.simplify_history = 0;
- cmd_log_init(argc, argv, envp, &rev);
+ cmd_log_init(argc, argv, prefix, &rev);
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
return cmd_log_walk(&rev);
}
-int cmd_show(int argc, const char **argv, char **envp)
+int cmd_show(int argc, const char **argv, const char *prefix)
{
struct rev_info rev;
- init_revisions(&rev);
+ git_config(git_diff_ui_config);
+ init_revisions(&rev, prefix);
rev.diff = 1;
rev.diffopt.recursive = 1;
rev.combine_merges = 1;
rev.always_show_header = 1;
rev.ignore_merges = 0;
rev.no_walk = 1;
- cmd_log_init(argc, argv, envp, &rev);
+ cmd_log_init(argc, argv, prefix, &rev);
return cmd_log_walk(&rev);
}
-int cmd_log(int argc, const char **argv, char **envp)
+int cmd_log(int argc, const char **argv, const char *prefix)
{
struct rev_info rev;
- init_revisions(&rev);
+ git_config(git_diff_ui_config);
+ init_revisions(&rev, prefix);
rev.always_show_header = 1;
- cmd_log_init(argc, argv, envp, &rev);
+ cmd_log_init(argc, argv, prefix, &rev);
return cmd_log_walk(&rev);
}
strcat(extra_headers, value);
return 0;
}
- return git_default_config(var, value);
+ if (!strcmp(var, "diff.color")) {
+ return 0;
+ }
+ return git_diff_ui_config(var, value);
}
freopen(filename, "w", stdout);
}
-int cmd_format_patch(int argc, const char **argv, char **envp)
+static int get_patch_id(struct commit *commit, struct diff_options *options,
+ unsigned char *sha1)
+{
+ diff_tree_sha1(commit->parents->item->object.sha1, commit->object.sha1,
+ "", options);
+ diffcore_std(options);
+ return diff_flush_patch_id(options, sha1);
+}
+
+static void get_patch_ids(struct rev_info *rev, struct diff_options *options, const char *prefix)
+{
+ struct rev_info check_rev;
+ struct commit *commit;
+ struct object *o1, *o2;
+ unsigned flags1, flags2;
+ unsigned char sha1[20];
+
+ if (rev->pending.nr != 2)
+ die("Need exactly one range.");
+
+ o1 = rev->pending.objects[0].item;
+ flags1 = o1->flags;
+ o2 = rev->pending.objects[1].item;
+ flags2 = o2->flags;
+
+ if ((flags1 & UNINTERESTING) == (flags2 & UNINTERESTING))
+ die("Not a range.");
+
+ diff_setup(options);
+ options->recursive = 1;
+ if (diff_setup_done(options) < 0)
+ die("diff_setup_done failed");
+
+ /* given a range a..b get all patch ids for b..a */
+ init_revisions(&check_rev, prefix);
+ o1->flags ^= UNINTERESTING;
+ o2->flags ^= UNINTERESTING;
+ add_pending_object(&check_rev, o1, "o1");
+ add_pending_object(&check_rev, o2, "o2");
+ prepare_revision_walk(&check_rev);
+
+ while ((commit = get_revision(&check_rev)) != NULL) {
+ /* ignore merges */
+ if (commit->parents && commit->parents->next)
+ continue;
+
+ if (!get_patch_id(commit, options, sha1))
+ created_object(sha1, xcalloc(1, sizeof(struct object)));
+ }
+
+ /* reset for next revision walk */
+ clear_commit_marks((struct commit *)o1,
+ SEEN | UNINTERESTING | SHOWN | ADDED);
+ clear_commit_marks((struct commit *)o2,
+ SEEN | UNINTERESTING | SHOWN | ADDED);
+ o1->flags = flags1;
+ o2->flags = flags2;
+}
+
+static void gen_message_id(char *dest, unsigned int length, char *base)
+{
+ const char *committer = git_committer_info(1);
+ const char *email_start = strrchr(committer, '<');
+ const char *email_end = strrchr(committer, '>');
+ if(!email_start || !email_end || email_start > email_end - 1)
+ die("Could not extract email from committer identity.");
+ snprintf(dest, length, "%s.%lu.git.%.*s", base,
+ (unsigned long) time(NULL),
+ (int)(email_end - email_start - 1), email_start + 1);
+}
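
The generated Message-Id therefore has the shape "<base>.<epoch>.git.<address>", where base is the commit's hex object name and the address is whatever sits between '<' and '>' in the committer identity. A standalone sketch with a made-up committer line, timestamp and object name:

#include <stdio.h>
#include <string.h>

int main(void)
{
	const char *committer = "A U Thor <a.u.thor@example.com> 1160000000 +0000";
	const char *start = strrchr(committer, '<');
	const char *end = strrchr(committer, '>');
	char id[1024];

	snprintf(id, sizeof(id), "%s.%lu.git.%.*s",
		 "0123456789abcdef0123456789abcdef01234567", /* fake sha1 */
		 (unsigned long)1160000000,
		 (int)(end - start - 1), start + 1);
	printf("Message-Id: <%s>\n", id);
	/* Message-Id: <0123456789abcdef0123456789abcdef01234567.1160000000.git.a.u.thor@example.com> */
	return 0;
}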
+
+int cmd_format_patch(int argc, const char **argv, const char *prefix)
{
struct commit *commit;
struct commit **list = NULL;
int numbered = 0;
int start_number = -1;
int keep_subject = 0;
+ int ignore_if_in_upstream = 0;
+ int thread = 0;
+ const char *in_reply_to = NULL;
+ struct diff_options patch_id_opts;
char *add_signoff = NULL;
+ char message_id[1024];
+ char ref_message_id[1024];
- init_revisions(&rev);
+ setup_ident();
+ git_config(git_format_config);
+ init_revisions(&rev, prefix);
rev.commit_format = CMIT_FMT_EMAIL;
rev.verbose_header = 1;
rev.diff = 1;
rev.diffopt.msg_sep = "";
rev.diffopt.recursive = 1;
- git_config(git_format_config);
rev.extra_headers = extra_headers;
/*
!strcmp(argv[i], "-s")) {
const char *committer;
const char *endpos;
- setup_ident();
committer = git_committer_info(1);
endpos = strchr(committer, '>');
if (!endpos)
rev.mime_boundary = git_version_string;
else if (!strncmp(argv[i], "--attach=", 9))
rev.mime_boundary = argv[i] + 9;
+ else if (!strcmp(argv[i], "--ignore-if-in-upstream"))
+ ignore_if_in_upstream = 1;
+ else if (!strcmp(argv[i], "--thread"))
+ thread = 1;
+ else if (!strncmp(argv[i], "--in-reply-to=", 14))
+ in_reply_to = argv[i] + 14;
+ else if (!strcmp(argv[i], "--in-reply-to")) {
+ i++;
+ if (i == argc)
+ die("Need a Message-Id for --in-reply-to");
+ in_reply_to = argv[i];
+ }
else
argv[j++] = argv[i];
}
add_head(&rev);
}
+ if (ignore_if_in_upstream)
+ get_patch_ids(&rev, &patch_id_opts, prefix);
+
if (!use_stdout)
realstdout = fdopen(dup(1), "w");
prepare_revision_walk(&rev);
while ((commit = get_revision(&rev)) != NULL) {
+ unsigned char sha1[20];
+
/* ignore merges */
if (commit->parents && commit->parents->next)
continue;
+
+ if (ignore_if_in_upstream &&
+ !get_patch_id(commit, &patch_id_opts, sha1) &&
+ lookup_object(sha1))
+ continue;
+
nr++;
list = realloc(list, nr * sizeof(list[0]));
list[nr - 1] = commit;
if (numbered)
rev.total = total + start_number - 1;
rev.add_signoff = add_signoff;
+ rev.ref_message_id = in_reply_to;
while (0 <= --nr) {
int shown;
commit = list[nr];
rev.nr = total - nr + (start_number - 1);
+ /* Make the second and subsequent mails replies to the first */
+ if (thread) {
+ if (nr == (total - 2)) {
+ strncpy(ref_message_id, message_id,
+ sizeof(ref_message_id));
+ ref_message_id[sizeof(ref_message_id)-1]='\0';
+ rev.ref_message_id = ref_message_id;
+ }
+ gen_message_id(message_id, sizeof(message_id),
+ sha1_to_hex(commit->object.sha1));
+ rev.message_id = message_id;
+ }
if (!use_stdout)
reopen_stdout(commit, rev.nr, keep_subject);
shown = log_tree_commit(&rev, commit);
static int line_terminator = '\n';
static int prefix_len = 0, prefix_offset = 0;
-static const char *prefix = NULL;
static const char **pathspec = NULL;
static int error_unmatch = 0;
static char *ps_matched = NULL;
}
}
-static void show_files(struct dir_struct *dir)
+static void show_files(struct dir_struct *dir, const char *prefix)
{
int i;
/*
* Prune the index to only contain stuff starting with "prefix"
*/
-static void prune_cache(void)
+static void prune_cache(const char *prefix)
{
int pos = cache_name_pos(prefix, prefix_len);
unsigned int first, last;
active_nr = last;
}
-static void verify_pathspec(void)
+static const char *verify_pathspec(const char *prefix)
{
const char **p, *n, *prev;
char *real_prefix;
memcpy(real_prefix, prev, max);
real_prefix[max] = 0;
}
- prefix = real_prefix;
+ return real_prefix;
}
static const char ls_files_usage[] =
"[ --exclude-per-directory=<filename> ] [--full-name] [--abbrev] "
"[--] [<file>]*";
-int cmd_ls_files(int argc, const char **argv, char** envp)
+int cmd_ls_files(int argc, const char **argv, const char *prefix)
{
int i;
int exc_given = 0;
struct dir_struct dir;
memset(&dir, 0, sizeof(dir));
- prefix = setup_git_directory();
if (prefix)
prefix_offset = strlen(prefix);
git_config(git_default_config);
/* Verify that the pathspec matches the prefix */
if (pathspec)
- verify_pathspec();
+ prefix = verify_pathspec(prefix);
/* Treat unmatching pathspec elements as errors */
if (pathspec && error_unmatch) {
read_cache();
if (prefix)
- prune_cache();
- show_files(&dir);
+ prune_cache(prefix);
+ show_files(&dir, prefix);
if (ps_matched) {
/* We need to make sure all pathspec matched otherwise
static int ls_options = 0;
static const char **pathspec;
static int chomp_prefix = 0;
-static const char *prefix;
+static const char *ls_tree_prefix;
static const char ls_tree_usage[] =
"git-ls-tree [-d] [-r] [-t] [-z] [--name-only] [--name-status] [--full-name] [--abbrev[=<n>]] <tree-ish> [path...]";
return 0;
if (chomp_prefix &&
- (baselen < chomp_prefix || memcmp(prefix, base, chomp_prefix)))
+ (baselen < chomp_prefix || memcmp(ls_tree_prefix, base, chomp_prefix)))
return 0;
if (!(ls_options & LS_NAME_ONLY))
return retval;
}
-int cmd_ls_tree(int argc, const char **argv, char **envp)
+int cmd_ls_tree(int argc, const char **argv, const char *prefix)
{
unsigned char sha1[20];
struct tree *tree;
- prefix = setup_git_directory();
git_config(git_default_config);
+ ls_tree_prefix = prefix;
if (prefix && *prefix)
chomp_prefix = strlen(prefix);
while (1 < argc && argv[1][0] == '-') {
static int slurp_attr(const char *line, const char *name, char *attr)
{
- char *ends, *ap = strcasestr(line, name);
+ const char *ends, *ap = strcasestr(line, name);
size_t sz;
if (!ap) {
}
}
-static void decode_header_bq(char *it);
+static void decode_header(char *it);
typedef int (*header_fn_t)(char *);
struct header_def {
const char *name;
/* Unwrap inline B and Q encoding, and optionally
* normalize the meta information to utf8.
*/
- decode_header_bq(line + len + 2);
+ decode_header(line + len + 2);
header[i].func(line + len + 2);
break;
}
break;
}
/* Count mbox From headers as headers */
- if (!ofs && !memcmp(line, "From ", 5))
+ if (!ofs && (!memcmp(line, "From ", 5) || !memcmp(line, ">From ", 6)))
ofs = 1;
return ofs;
}
#endif
}
-static void decode_header_bq(char *it)
+static int decode_header_bq(char *it)
{
char *in, *out, *ep, *cp, *sp;
char outbuf[1000];
+ int rfc2047 = 0;
in = it;
out = outbuf;
while ((ep = strstr(in, "=?")) != NULL) {
int sz, encoding;
char charset_q[256], piecebuf[256];
+ rfc2047 = 1;
+
if (in != ep) {
sz = ep - in;
memcpy(out, in, sz);
ep += 2;
cp = strchr(ep, '?');
if (!cp)
- return; /* no munging */
+ return rfc2047; /* no munging */
for (sp = ep; sp < cp; sp++)
charset_q[sp - ep] = tolower(*sp);
charset_q[cp - ep] = 0;
encoding = cp[1];
if (!encoding || cp[2] != '?')
- return; /* no munging */
+ return rfc2047; /* no munging */
ep = strstr(cp + 3, "?=");
if (!ep)
- return; /* no munging */
+ return rfc2047; /* no munging */
switch (tolower(encoding)) {
default:
- return; /* no munging */
+ return rfc2047; /* no munging */
case 'b':
sz = decode_b_segment(cp + 3, piecebuf, ep);
break;
break;
}
if (sz < 0)
- return;
+ return rfc2047;
if (metainfo_charset)
convert_to_utf8(piecebuf, charset_q);
strcpy(out, piecebuf);
}
strcpy(out, in);
strcpy(it, outbuf);
+ return rfc2047;
+}
+
+static void decode_header(char *it)
+{
+
+ if (decode_header_bq(it))
+ return;
+ /* otherwise "it" is a straight copy of the input.
+ * This can be binary guck but there is no charset specified.
+ */
+ if (metainfo_charset)
+ convert_to_utf8(it, "");
}
static void decode_transfer_encoding(char *line)
static const char mailinfo_usage[] =
"git-mailinfo [-k] [-u | --encoding=<encoding>] msg patch <mail >info";
-int cmd_mailinfo(int argc, const char **argv, char **envp)
+int cmd_mailinfo(int argc, const char **argv, const char *prefix)
{
/* NEEDSWORK: might want to do the optional .git/ directory
* discovery
free(name);
return ret;
}
-int cmd_mailsplit(int argc, const char **argv, char **envp)
+int cmd_mailsplit(int argc, const char **argv, const char *prefix)
{
int nr = 0, nr_prec = 4, ret;
int allow_bare = 0;
--- /dev/null
+/*
+ * "git mv" builtin command
+ *
+ * Copyright (C) 2006 Johannes Schindelin
+ */
+#include <fnmatch.h>
+
+#include "cache.h"
+#include "builtin.h"
+#include "dir.h"
+#include "cache-tree.h"
+#include "path-list.h"
+
+static const char builtin_mv_usage[] =
+"git-mv [-n] [-f] (<source> <destination> | [-k] <source>... <destination>)";
+
+static const char **copy_pathspec(const char *prefix, const char **pathspec,
+ int count, int base_name)
+{
+ const char **result = xmalloc((count + 1) * sizeof(const char *));
+ memcpy(result, pathspec, count * sizeof(const char *));
+ result[count] = NULL;
+ if (base_name) {
+ int i;
+ for (i = 0; i < count; i++) {
+ const char *last_slash = strrchr(result[i], '/');
+ if (last_slash)
+ result[i] = last_slash + 1;
+ }
+ }
+ return get_pathspec(prefix, result);
+}
+
+static void show_list(const char *label, struct path_list *list)
+{
+ if (list->nr > 0) {
+ int i;
+ printf("%s", label);
+ for (i = 0; i < list->nr; i++)
+ printf("%s%s", i > 0 ? ", " : "", list->items[i].path);
+ putchar('\n');
+ }
+}
+
+static const char *add_slash(const char *path)
+{
+ int len = strlen(path);
+ if (path[len - 1] != '/') {
+ char *with_slash = xmalloc(len + 2);
+ memcpy(with_slash, path, len);
+ with_slash[len++] = '/';
+ with_slash[len] = 0;
+ return with_slash;
+ }
+ return path;
+}
+
+static struct lock_file lock_file;
+
+int cmd_mv(int argc, const char **argv, const char *prefix)
+{
+ int i, newfd, count;
+ int verbose = 0, show_only = 0, force = 0, ignore_errors = 0;
+ const char **source, **destination, **dest_path;
+ enum update_mode { BOTH = 0, WORKING_DIRECTORY, INDEX } *modes;
+ struct stat st;
+ struct path_list overwritten = {NULL, 0, 0, 0};
+ struct path_list src_for_dst = {NULL, 0, 0, 0};
+ struct path_list added = {NULL, 0, 0, 0};
+ struct path_list deleted = {NULL, 0, 0, 0};
+ struct path_list changed = {NULL, 0, 0, 0};
+
+ git_config(git_default_config);
+
+ newfd = hold_lock_file_for_update(&lock_file, get_index_file(), 1);
+ if (read_cache() < 0)
+ die("index file corrupt");
+
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+
+ if (arg[0] != '-')
+ break;
+ if (!strcmp(arg, "--")) {
+ i++;
+ break;
+ }
+ if (!strcmp(arg, "-n")) {
+ show_only = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-f")) {
+ force = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-k")) {
+ ignore_errors = 1;
+ continue;
+ }
+ usage(builtin_mv_usage);
+ }
+ count = argc - i - 1;
+ if (count < 1)
+ usage(builtin_mv_usage);
+
+ source = copy_pathspec(prefix, argv + i, count, 0);
+ modes = xcalloc(count, sizeof(enum update_mode));
+ dest_path = copy_pathspec(prefix, argv + argc - 1, 1, 0);
+
+ if (!lstat(dest_path[0], &st) &&
+ S_ISDIR(st.st_mode)) {
+ dest_path[0] = add_slash(dest_path[0]);
+ destination = copy_pathspec(dest_path[0], argv + i, count, 1);
+ } else {
+ if (count != 1)
+ usage(builtin_mv_usage);
+ destination = dest_path;
+ }
+
+ /* Checking */
+ for (i = 0; i < count; i++) {
+ const char *bad = NULL;
+
+ if (show_only)
+ printf("Checking rename of '%s' to '%s'\n",
+ source[i], destination[i]);
+
+ if (lstat(source[i], &st) < 0)
+ bad = "bad source";
+
+		if (!bad && S_ISDIR(st.st_mode)) {
+ const char *dir = source[i], *dest_dir = destination[i];
+ int first, last, len = strlen(dir);
+
+ if (lstat(dest_dir, &st) == 0) {
+ bad = "cannot move directory over file";
+ goto next;
+ }
+
+ modes[i] = WORKING_DIRECTORY;
+
+ first = cache_name_pos(source[i], len);
+ if (first >= 0)
+ die ("Huh? %s/ is in index?", dir);
+
+ first = -1 - first;
+ for (last = first; last < active_nr; last++) {
+ const char *path = active_cache[last]->name;
+ if (strncmp(path, dir, len) || path[len] != '/')
+ break;
+ }
+
+ if (last - first < 1)
+ bad = "source directory is empty";
+ else if (!bad) {
+ int j, dst_len = strlen(dest_dir);
+
+ if (last - first > 0) {
+ source = realloc(source,
+ (count + last - first)
+ * sizeof(char *));
+ destination = realloc(destination,
+ (count + last - first)
+ * sizeof(char *));
+ modes = realloc(modes,
+ (count + last - first)
+ * sizeof(enum update_mode));
+ }
+
+ dest_dir = add_slash(dest_dir);
+
+ for (j = 0; j < last - first; j++) {
+ const char *path =
+ active_cache[first + j]->name;
+ source[count + j] = path;
+ destination[count + j] =
+ prefix_path(dest_dir, dst_len,
+ path + len);
+ modes[count + j] = INDEX;
+ }
+ count += last - first;
+ }
+
+ goto next;
+ }
+
+ if (!bad && lstat(destination[i], &st) == 0) {
+ bad = "destination exists";
+ if (force) {
+ /*
+ * only files can overwrite each other:
+ * check both source and destination
+ */
+ if (S_ISREG(st.st_mode)) {
+ fprintf(stderr, "Warning: %s;"
+ " will overwrite!\n",
+ bad);
+ bad = NULL;
+ path_list_insert(destination[i],
+ &overwritten);
+ } else
+ bad = "Cannot overwrite";
+ }
+ }
+
+ if (!bad &&
+ !strncmp(destination[i], source[i], strlen(source[i])))
+ bad = "can not move directory into itself";
+
+ if (!bad && cache_name_pos(source[i], strlen(source[i])) < 0)
+ bad = "not under version control";
+
+ if (!bad) {
+ if (path_list_has_path(&src_for_dst, destination[i]))
+ bad = "multiple sources for the same target";
+ else
+ path_list_insert(destination[i], &src_for_dst);
+ }
+
+next:
+ if (bad) {
+ if (ignore_errors) {
+ if (--count > 0) {
+ memmove(source + i, source + i + 1,
+ (count - i) * sizeof(char *));
+ memmove(destination + i,
+ destination + i + 1,
+ (count - i) * sizeof(char *));
+ }
+ } else
+ die ("%s, source=%s, destination=%s",
+ bad, source[i], destination[i]);
+ }
+ }
+
+ for (i = 0; i < count; i++) {
+ if (show_only || verbose)
+ printf("Renaming %s to %s\n",
+ source[i], destination[i]);
+ if (!show_only && modes[i] != INDEX &&
+ rename(source[i], destination[i]) < 0 &&
+ !ignore_errors)
+ die ("renaming %s failed: %s",
+ source[i], strerror(errno));
+
+ if (modes[i] == WORKING_DIRECTORY)
+ continue;
+
+ if (cache_name_pos(source[i], strlen(source[i])) >= 0) {
+ path_list_insert(source[i], &deleted);
+
+ /* destination can be a directory with 1 file inside */
+ if (path_list_has_path(&overwritten, destination[i]))
+ path_list_insert(destination[i], &changed);
+ else
+ path_list_insert(destination[i], &added);
+ } else
+ path_list_insert(destination[i], &added);
+ }
+
+ if (show_only) {
+ show_list("Changed : ", &changed);
+ show_list("Adding : ", &added);
+ show_list("Deleting : ", &deleted);
+ } else {
+ for (i = 0; i < changed.nr; i++) {
+ const char *path = changed.items[i].path;
+			int pos = cache_name_pos(path, strlen(path));
+			struct cache_entry *ce;
+
+			if (pos < 0)
+				die ("Huh? Cache entry for %s unknown?", path);
+			ce = active_cache[pos];
+			refresh_cache_entry(ce, 0);
+ }
+
+ for (i = 0; i < added.nr; i++) {
+ const char *path = added.items[i].path;
+ add_file_to_index(path, verbose);
+ }
+
+ for (i = 0; i < deleted.nr; i++) {
+ const char *path = deleted.items[i].path;
+ remove_file_from_cache(path);
+ }
+
+ if (active_cache_changed) {
+ if (write_cache(newfd, active_cache, active_nr) ||
+ close(newfd) ||
+ commit_lock_file(&lock_file))
+ die("Unable to write new index file");
+ }
+ }
+
+ return 0;
+}
--- /dev/null
+#include <stdlib.h>
+#include "builtin.h"
+#include "cache.h"
+#include "commit.h"
+#include "tag.h"
+#include "refs.h"
+
+static const char name_rev_usage[] =
+ "git-name-rev [--tags] ( --all | --stdin | committish [committish...] )\n";
+
+typedef struct rev_name {
+ const char *tip_name;
+ int merge_traversals;
+ int generation;
+} rev_name;
+
+static long cutoff = LONG_MAX;
+
+static void name_rev(struct commit *commit,
+ const char *tip_name, int merge_traversals, int generation,
+ int deref)
+{
+ struct rev_name *name = (struct rev_name *)commit->util;
+ struct commit_list *parents;
+ int parent_number = 1;
+
+ if (!commit->object.parsed)
+ parse_commit(commit);
+
+ if (commit->date < cutoff)
+ return;
+
+ if (deref) {
+ char *new_name = xmalloc(strlen(tip_name)+3);
+ strcpy(new_name, tip_name);
+ strcat(new_name, "^0");
+ tip_name = new_name;
+
+ if (generation)
+ die("generation: %d, but deref?", generation);
+ }
+
+ if (name == NULL) {
+ name = xmalloc(sizeof(rev_name));
+ commit->util = name;
+ goto copy_data;
+ } else if (name->merge_traversals > merge_traversals ||
+ (name->merge_traversals == merge_traversals &&
+ name->generation > generation)) {
+copy_data:
+ name->tip_name = tip_name;
+ name->merge_traversals = merge_traversals;
+ name->generation = generation;
+ } else
+ return;
+
+ for (parents = commit->parents;
+ parents;
+ parents = parents->next, parent_number++) {
+ if (parent_number > 1) {
+ char *new_name = xmalloc(strlen(tip_name)+8);
+
+ if (generation > 0)
+ sprintf(new_name, "%s~%d^%d", tip_name,
+ generation, parent_number);
+ else
+ sprintf(new_name, "%s^%d", tip_name, parent_number);
+
+ name_rev(parents->item, new_name,
+ merge_traversals + 1 , 0, 0);
+ } else {
+ name_rev(parents->item, tip_name, merge_traversals,
+ generation + 1, 0);
+ }
+ }
+}
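
Following a first parent only increments the generation, which get_rev_name() later renders as "~<n>"; stepping into the Nth parent of a merge bakes the current "~<n>" into the tip name, appends "^<N>" and restarts the generation count. A standalone sketch of that composition rule, with made-up inputs:

#include <stdio.h>

static void show(const char *tip, int generation, int parent_number)
{
	char name[64];

	if (parent_number > 1) {
		/* crossing into a non-first parent: fix the "~<n>" part
		 * and append "^<N>"; generation would restart at 0 */
		if (generation > 0)
			snprintf(name, sizeof(name), "%s~%d^%d",
				 tip, generation, parent_number);
		else
			snprintf(name, sizeof(name), "%s^%d",
				 tip, parent_number);
	} else if (generation > 0)
		snprintf(name, sizeof(name), "%s~%d", tip, generation);
	else
		snprintf(name, sizeof(name), "%s", tip);
	printf("%s\n", name);
}

int main(void)
{
	show("master", 0, 1);	/* master     : the tip itself        */
	show("master", 3, 1);	/* master~3   : 3rd first-parent back */
	show("master", 3, 2);	/* master~3^2 : its second parent     */
	return 0;
}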
+
+static int tags_only = 0;
+
+static int name_ref(const char *path, const unsigned char *sha1)
+{
+ struct object *o = parse_object(sha1);
+ int deref = 0;
+
+ if (tags_only && strncmp(path, "refs/tags/", 10))
+ return 0;
+
+ while (o && o->type == OBJ_TAG) {
+ struct tag *t = (struct tag *) o;
+ if (!t->tagged)
+ break; /* broken repository */
+ o = parse_object(t->tagged->sha1);
+ deref = 1;
+ }
+ if (o && o->type == OBJ_COMMIT) {
+ struct commit *commit = (struct commit *)o;
+
+ if (!strncmp(path, "refs/heads/", 11))
+ path = path + 11;
+ else if (!strncmp(path, "refs/", 5))
+ path = path + 5;
+
+ name_rev(commit, strdup(path), 0, 0, deref);
+ }
+ return 0;
+}
+
+/* returns a static buffer */
+static const char* get_rev_name(struct object *o)
+{
+ static char buffer[1024];
+ struct rev_name *n;
+ struct commit *c;
+
+ if (o->type != OBJ_COMMIT)
+ return "undefined";
+ c = (struct commit *) o;
+ n = c->util;
+ if (!n)
+ return "undefined";
+
+ if (!n->generation)
+ return n->tip_name;
+
+ snprintf(buffer, sizeof(buffer), "%s~%d", n->tip_name, n->generation);
+
+ return buffer;
+}
+
+int cmd_name_rev(int argc, const char **argv, const char *prefix)
+{
+ struct object_array revs = { 0, 0, NULL };
+ int as_is = 0, all = 0, transform_stdin = 0;
+
+ git_config(git_default_config);
+
+ if (argc < 2)
+ usage(name_rev_usage);
+
+ for (--argc, ++argv; argc; --argc, ++argv) {
+ unsigned char sha1[20];
+ struct object *o;
+ struct commit *commit;
+
+ if (!as_is && (*argv)[0] == '-') {
+ if (!strcmp(*argv, "--")) {
+ as_is = 1;
+ continue;
+ } else if (!strcmp(*argv, "--tags")) {
+ tags_only = 1;
+ continue;
+ } else if (!strcmp(*argv, "--all")) {
+ if (argc > 1)
+ die("Specify either a list, or --all, not both!");
+ all = 1;
+ cutoff = 0;
+ continue;
+ } else if (!strcmp(*argv, "--stdin")) {
+ if (argc > 1)
+ die("Specify either a list, or --stdin, not both!");
+ transform_stdin = 1;
+ cutoff = 0;
+ continue;
+ }
+ usage(name_rev_usage);
+ }
+
+ if (get_sha1(*argv, sha1)) {
+ fprintf(stderr, "Could not get sha1 for %s. Skipping.\n",
+ *argv);
+ continue;
+ }
+
+ o = deref_tag(parse_object(sha1), *argv, 0);
+ if (!o || o->type != OBJ_COMMIT) {
+ fprintf(stderr, "Could not get commit for %s. Skipping.\n",
+ *argv);
+ continue;
+ }
+
+ commit = (struct commit *)o;
+
+ if (cutoff > commit->date)
+ cutoff = commit->date;
+
+ add_object_array((struct object *)commit, *argv, &revs);
+ }
+
+ for_each_ref(name_ref);
+
+ if (transform_stdin) {
+ char buffer[2048];
+ char *p, *p_start;
+
+ while (!feof(stdin)) {
+ int forty = 0;
+ p = fgets(buffer, sizeof(buffer), stdin);
+ if (!p)
+ break;
+
+ for (p_start = p; *p; p++) {
+#define ishex(x) (isdigit((x)) || ((x) >= 'a' && (x) <= 'f'))
+ if (!ishex(*p))
+ forty = 0;
+ else if (++forty == 40 &&
+ !ishex(*(p+1))) {
+ unsigned char sha1[40];
+ const char *name = "undefined";
+ char c = *(p+1);
+
+ forty = 0;
+
+ *(p+1) = 0;
+ if (!get_sha1(p - 39, sha1)) {
+ struct object *o =
+ lookup_object(sha1);
+ if (o)
+ name = get_rev_name(o);
+ }
+ *(p+1) = c;
+
+ if (!strcmp(name, "undefined"))
+ continue;
+
+ fwrite(p_start, p - p_start + 1, 1,
+ stdout);
+ printf(" (%s)", name);
+ p_start = p + 1;
+ }
+ }
+
+ /* flush */
+ if (p_start != p)
+ fwrite(p_start, p - p_start, 1, stdout);
+ }
+ } else if (all) {
+ int i, max;
+
+ max = get_max_object_index();
+ for (i = 0; i < max; i++) {
+ struct object * obj = get_indexed_object(i);
+ if (!obj)
+ continue;
+ printf("%s %s\n", sha1_to_hex(obj->sha1), get_rev_name(obj));
+ }
+ } else {
+ int i;
+ for (i = 0; i < revs.nr; i++)
+ printf("%s %s\n",
+ revs.objects[i].name,
+ get_rev_name(revs.objects[i].item));
+ }
+
+ return 0;
+}
+
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include "object.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
+#include "delta.h"
+#include "pack.h"
+#include "csum-file.h"
+#include "tree-walk.h"
+#include <sys/time.h>
+#include <signal.h>
+
+static const char pack_usage[] = "git-pack-objects [-q] [--no-reuse-delta] [--non-empty] [--local] [--incremental] [--window=N] [--depth=N] {--stdout | base-name} < object-list";
+
+struct object_entry {
+ unsigned char sha1[20];
+ unsigned long size; /* uncompressed size */
+ unsigned long offset; /* offset into the final pack file;
+ * nonzero if already written.
+ */
+ unsigned int depth; /* delta depth */
+ unsigned int delta_limit; /* base adjustment for in-pack delta */
+ unsigned int hash; /* name hint hash */
+ enum object_type type;
+ enum object_type in_pack_type; /* could be delta */
+ unsigned long delta_size; /* delta data size (uncompressed) */
+ struct object_entry *delta; /* delta base object */
+ struct packed_git *in_pack; /* already in pack */
+ unsigned int in_pack_offset;
+	struct object_entry *delta_child; /* deltified objects that use me as base */
+	struct object_entry *delta_sibling; /* other deltified objects that
+					     * use the same base as me
+					     */
+	int preferred_base; /* we do not pack this, but it is encouraged to
+	                     * be used as the base object to delta huge
+	                     * objects against.
+	                     */
+};
+
+/*
+ * Objects we are going to pack are collected in objects array (dynamically
+ * expanded). nr_objects and nr_alloc control this array. They are stored
+ * in the order we see -- typically rev-list --objects order that gives us
+ * nice "minimum seek" order.
+ *
+ * sorted-by-sha and sorted-by-type are arrays of pointers that point at
+ * elements in the objects array. The former is used to build the pack
+ * index (lists object names in the ascending order to help offset lookup),
+ * and the latter is used to group similar things together by try_delta()
+ * heuristics.
+ */
+
+static unsigned char object_list_sha1[20];
+static int non_empty = 0;
+static int no_reuse_delta = 0;
+static int local = 0;
+static int incremental = 0;
+static struct object_entry **sorted_by_sha, **sorted_by_type;
+static struct object_entry *objects = NULL;
+static int nr_objects = 0, nr_alloc = 0, nr_result = 0;
+static const char *base_name;
+static unsigned char pack_file_sha1[20];
+static int progress = 1;
+static volatile sig_atomic_t progress_update = 0;
+static int window = 10;
+
+/*
+ * The object names in objects array are hashed with this hashtable,
+ * to help looking up the entry by object name. Binary search from
+ * sorted_by_sha is also possible but this was easier to code and faster.
+ * This hashtable is built after all the objects are seen.
+ */
+static int *object_ix = NULL;
+static int object_ix_hashsz = 0;
+
+/*
+ * The pack index for existing packs gives us easy access to the offsets into
+ * corresponding pack file where each object's data starts, but the entries
+ * do not store the size of the compressed representation (uncompressed
+ * size is easily available by examining the pack entry header). We build
+ * a hashtable of existing packs (pack_revindex), and keep reverse index
+ * here -- pack index file is sorted by object name mapping to offset; this
+ * pack_revindex[].revindex array is an ordered list of offsets, so if you
+ * know the offset of an object, the next offset is where its packed
+ * representation ends.
+ */
+struct pack_revindex {
+ struct packed_git *p;
+ unsigned long *revindex;
+} *pack_revindex = NULL;
+static int pack_revindex_hashsz = 0;
+
+/*
+ * stats
+ */
+static int written = 0;
+static int written_delta = 0;
+static int reused = 0;
+static int reused_delta = 0;
+
+static int pack_revindex_ix(struct packed_git *p)
+{
+ unsigned long ui = (unsigned long)p;
+ int i;
+
+ ui = ui ^ (ui >> 16); /* defeat structure alignment */
+ i = (int)(ui % pack_revindex_hashsz);
+ while (pack_revindex[i].p) {
+ if (pack_revindex[i].p == p)
+ return i;
+ if (++i == pack_revindex_hashsz)
+ i = 0;
+ }
+ return -1 - i;
+}
+
+static void prepare_pack_ix(void)
+{
+ int num;
+ struct packed_git *p;
+ for (num = 0, p = packed_git; p; p = p->next)
+ num++;
+ if (!num)
+ return;
+ pack_revindex_hashsz = num * 11;
+ pack_revindex = xcalloc(sizeof(*pack_revindex), pack_revindex_hashsz);
+ for (p = packed_git; p; p = p->next) {
+ num = pack_revindex_ix(p);
+ num = - 1 - num;
+ pack_revindex[num].p = p;
+ }
+ /* revindex elements are lazily initialized */
+}
+
+static int cmp_offset(const void *a_, const void *b_)
+{
+ unsigned long a = *(unsigned long *) a_;
+ unsigned long b = *(unsigned long *) b_;
+ if (a < b)
+ return -1;
+ else if (a == b)
+ return 0;
+ else
+ return 1;
+}
+
+/*
+ * Ordered list of offsets of objects in the pack.
+ */
+static void prepare_pack_revindex(struct pack_revindex *rix)
+{
+ struct packed_git *p = rix->p;
+ int num_ent = num_packed_objects(p);
+ int i;
+ void *index = p->index_base + 256;
+
+ rix->revindex = xmalloc(sizeof(unsigned long) * (num_ent + 1));
+ for (i = 0; i < num_ent; i++) {
+ unsigned int hl = *((unsigned int *)((char *) index + 24*i));
+ rix->revindex[i] = ntohl(hl);
+ }
+ /* This knows the pack format -- the 20-byte trailer
+ * follows immediately after the last object data.
+ */
+ rix->revindex[num_ent] = p->pack_size - 20;
+ qsort(rix->revindex, num_ent, sizeof(unsigned long), cmp_offset);
+}
+
+static unsigned long find_packed_object_size(struct packed_git *p,
+ unsigned long ofs)
+{
+ int num;
+ int lo, hi;
+ struct pack_revindex *rix;
+ unsigned long *revindex;
+ num = pack_revindex_ix(p);
+ if (num < 0)
+ die("internal error: pack revindex uninitialized");
+ rix = &pack_revindex[num];
+ if (!rix->revindex)
+ prepare_pack_revindex(rix);
+ revindex = rix->revindex;
+ lo = 0;
+ hi = num_packed_objects(p) + 1;
+ do {
+ int mi = (lo + hi) / 2;
+ if (revindex[mi] == ofs) {
+ return revindex[mi+1] - ofs;
+ }
+ else if (ofs < revindex[mi])
+ hi = mi;
+ else
+ lo = mi + 1;
+ } while (lo < hi);
+ die("internal error: pack revindex corrupt");
+}
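
In other words, once the offsets are sorted and the trailer position (pack_size - 20) is appended as a sentinel, each object's on-disk length is simply the next offset minus its own, which is what the binary search above returns. A standalone sketch with made-up offsets:

#include <stdio.h>
#include <stdlib.h>

static int cmp_off(const void *a_, const void *b_)
{
	unsigned long a = *(const unsigned long *)a_;
	unsigned long b = *(const unsigned long *)b_;
	return a < b ? -1 : a > b ? 1 : 0;
}

int main(void)
{
	/* three objects plus the sentinel at pack_size - 20 = 280 */
	unsigned long revindex[] = { 150, 12, 60, 280 };
	int n = 3, i;

	qsort(revindex, n + 1, sizeof(revindex[0]), cmp_off);
	for (i = 0; i < n; i++)
		printf("object at %3lu occupies %3lu bytes\n",
		       revindex[i], revindex[i + 1] - revindex[i]);
	return 0;
}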
+
+static void *delta_against(void *buf, unsigned long size, struct object_entry *entry)
+{
+ unsigned long othersize, delta_size;
+ char type[10];
+ void *otherbuf = read_sha1_file(entry->delta->sha1, type, &othersize);
+ void *delta_buf;
+
+ if (!otherbuf)
+ die("unable to read %s", sha1_to_hex(entry->delta->sha1));
+ delta_buf = diff_delta(otherbuf, othersize,
+ buf, size, &delta_size, 0);
+ if (!delta_buf || delta_size != entry->delta_size)
+ die("delta size changed");
+ free(buf);
+ free(otherbuf);
+ return delta_buf;
+}
+
+/*
+ * The per-object header is a pretty dense thing, which is
+ * - first byte: low four bits are "size", then three bits of "type",
+ * and the high bit is "size continues".
+ * - each byte afterwards: low seven bits are size continuation,
+ * with the high bit being "size continues"
+ */
+static int encode_header(enum object_type type, unsigned long size, unsigned char *hdr)
+{
+ int n = 1;
+ unsigned char c;
+
+ if (type < OBJ_COMMIT || type > OBJ_DELTA)
+ die("bad type %d", type);
+
+ c = (type << 4) | (size & 15);
+ size >>= 4;
+ while (size) {
+ *hdr++ = c | 0x80;
+ c = size & 0x7f;
+ size >>= 7;
+ n++;
+ }
+ *hdr = c;
+ return n;
+}
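
A round-trip sketch of the layout described in the comment above: encode a (type, size) pair the same way, then recover it by peeling four size bits and three type bits from the first byte and seven more size bits from every continuation byte. The decoder here is illustrative, not part of the patch.

#include <stdio.h>

static int encode(unsigned type, unsigned long size, unsigned char *hdr)
{
	int n = 1;
	unsigned char c = (type << 4) | (size & 15);

	size >>= 4;
	while (size) {
		*hdr++ = c | 0x80;	/* more size bytes follow */
		c = size & 0x7f;
		size >>= 7;
		n++;
	}
	*hdr = c;
	return n;
}

static int decode(const unsigned char *hdr, unsigned *type, unsigned long *size)
{
	int n = 1, shift = 4;
	unsigned char c = *hdr++;

	*type = (c >> 4) & 7;
	*size = c & 15;
	while (c & 0x80) {
		c = *hdr++;
		*size += (unsigned long)(c & 0x7f) << shift;
		shift += 7;
		n++;
	}
	return n;
}

int main(void)
{
	unsigned char hdr[10];
	unsigned type;
	unsigned long size;
	int n = encode(3, 123456, hdr);	/* e.g. OBJ_BLOB is 3 */

	decode(hdr, &type, &size);
	printf("%d byte header, type %u, size %lu\n", n, type, size);
	/* 3 byte header, type 3, size 123456 */
	return 0;
}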
+
+static unsigned long write_object(struct sha1file *f,
+ struct object_entry *entry)
+{
+ unsigned long size;
+ char type[10];
+ void *buf;
+ unsigned char header[10];
+ unsigned hdrlen, datalen;
+ enum object_type obj_type;
+ int to_reuse = 0;
+
+ if (entry->preferred_base)
+ return 0;
+
+ obj_type = entry->type;
+ if (! entry->in_pack)
+ to_reuse = 0; /* can't reuse what we don't have */
+ else if (obj_type == OBJ_DELTA)
+ to_reuse = 1; /* check_object() decided it for us */
+ else if (obj_type != entry->in_pack_type)
+ to_reuse = 0; /* pack has delta which is unusable */
+ else if (entry->delta)
+ to_reuse = 0; /* we want to pack afresh */
+ else
+ to_reuse = 1; /* we have it in-pack undeltified,
+ * and we do not need to deltify it.
+ */
+
+ if (!entry->in_pack && !entry->delta) {
+ unsigned char *map;
+ unsigned long mapsize;
+ map = map_sha1_file(entry->sha1, &mapsize);
+ if (map && !legacy_loose_object(map)) {
+ /* We can copy straight into the pack file */
+ sha1write(f, map, mapsize);
+ munmap(map, mapsize);
+ written++;
+ reused++;
+ return mapsize;
+ }
+ if (map)
+ munmap(map, mapsize);
+ }
+
+ if (! to_reuse) {
+ buf = read_sha1_file(entry->sha1, type, &size);
+ if (!buf)
+ die("unable to read %s", sha1_to_hex(entry->sha1));
+ if (size != entry->size)
+ die("object %s size inconsistency (%lu vs %lu)",
+ sha1_to_hex(entry->sha1), size, entry->size);
+ if (entry->delta) {
+ buf = delta_against(buf, size, entry);
+ size = entry->delta_size;
+ obj_type = OBJ_DELTA;
+ }
+ /*
+ * The object header is a byte of 'type' followed by zero or
+ * more bytes of length. For deltas, the 20 bytes of delta
+ * sha1 follows that.
+ */
+ hdrlen = encode_header(obj_type, size, header);
+ sha1write(f, header, hdrlen);
+
+ if (entry->delta) {
+ sha1write(f, entry->delta, 20);
+ hdrlen += 20;
+ }
+ datalen = sha1write_compressed(f, buf, size);
+ free(buf);
+ }
+ else {
+ struct packed_git *p = entry->in_pack;
+ use_packed_git(p);
+
+ datalen = find_packed_object_size(p, entry->in_pack_offset);
+ buf = (char *) p->pack_base + entry->in_pack_offset;
+ sha1write(f, buf, datalen);
+ unuse_packed_git(p);
+ hdrlen = 0; /* not really */
+ if (obj_type == OBJ_DELTA)
+ reused_delta++;
+ reused++;
+ }
+ if (obj_type == OBJ_DELTA)
+ written_delta++;
+ written++;
+ return hdrlen + datalen;
+}
+
+static unsigned long write_one(struct sha1file *f,
+ struct object_entry *e,
+ unsigned long offset)
+{
+ if (e->offset)
+ /* offset starts from header size and cannot be zero
+ * if it is written already.
+ */
+ return offset;
+ e->offset = offset;
+ offset += write_object(f, e);
+ /* if we are deltified, write out its base object. */
+ if (e->delta)
+ offset = write_one(f, e->delta, offset);
+ return offset;
+}
+
+static void write_pack_file(void)
+{
+ int i;
+ struct sha1file *f;
+ unsigned long offset;
+ struct pack_header hdr;
+ unsigned last_percent = 999;
+ int do_progress = 0;
+
+ if (!base_name)
+ f = sha1fd(1, "<stdout>");
+ else {
+ f = sha1create("%s-%s.%s", base_name,
+ sha1_to_hex(object_list_sha1), "pack");
+ do_progress = progress;
+ }
+ if (do_progress)
+ fprintf(stderr, "Writing %d objects.\n", nr_result);
+
+ hdr.hdr_signature = htonl(PACK_SIGNATURE);
+ hdr.hdr_version = htonl(PACK_VERSION);
+ hdr.hdr_entries = htonl(nr_result);
+ sha1write(f, &hdr, sizeof(hdr));
+ offset = sizeof(hdr);
+ if (!nr_result)
+ goto done;
+ for (i = 0; i < nr_objects; i++) {
+ offset = write_one(f, objects + i, offset);
+ if (do_progress) {
+ unsigned percent = written * 100 / nr_result;
+ if (progress_update || percent != last_percent) {
+ fprintf(stderr, "%4u%% (%u/%u) done\r",
+ percent, written, nr_result);
+ progress_update = 0;
+ last_percent = percent;
+ }
+ }
+ }
+ if (do_progress)
+ fputc('\n', stderr);
+ done:
+ sha1close(f, pack_file_sha1, 1);
+}
+
+static void write_index_file(void)
+{
+ int i;
+ struct sha1file *f = sha1create("%s-%s.%s", base_name,
+ sha1_to_hex(object_list_sha1), "idx");
+ struct object_entry **list = sorted_by_sha;
+ struct object_entry **last = list + nr_result;
+ unsigned int array[256];
+
+ /*
+ * Write the first-level table (the list is sorted,
+ * but we use a 256-entry lookup to be able to avoid
+ * having to do eight extra binary search iterations).
+ */
+ for (i = 0; i < 256; i++) {
+ struct object_entry **next = list;
+ while (next < last) {
+ struct object_entry *entry = *next;
+ if (entry->sha1[0] != i)
+ break;
+ next++;
+ }
+ array[i] = htonl(next - sorted_by_sha);
+ list = next;
+ }
+ sha1write(f, array, 256 * sizeof(int));
+
+ /*
+ * Write the actual SHA1 entries..
+ */
+ list = sorted_by_sha;
+ for (i = 0; i < nr_result; i++) {
+ struct object_entry *entry = *list++;
+ unsigned int offset = htonl(entry->offset);
+ sha1write(f, &offset, 4);
+ sha1write(f, entry->sha1, 20);
+ }
+ sha1write(f, pack_file_sha1, 20);
+ sha1close(f, NULL, 1);
+}
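
On the reading side, fanout[b] is the number of objects whose first SHA1 byte is <= b, so all entries starting with byte b live in the index range [fanout[b-1], fanout[b]) and only that slice needs the binary search. A standalone sketch with made-up counts:

#include <stdio.h>

int main(void)
{
	unsigned int fanout[256] = { 0 };
	int b, count = 0;

	for (b = 0; b < 256; b++) {
		if (b == 0x00) count += 2;	/* 2 objects start with 0x00 */
		if (b == 0x17) count += 3;	/* 3 objects start with 0x17 */
		if (b == 0xfe) count += 1;	/* 1 object starts with 0xfe */
		fanout[b] = count;
	}

	b = 0x17;
	printf("first byte 0x%02x: search entries %u..%u\n",
	       b, b ? fanout[b - 1] : 0, fanout[b]);
	/* first byte 0x17: search entries 2..5 */
	return 0;
}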
+
+static int locate_object_entry_hash(const unsigned char *sha1)
+{
+ int i;
+ unsigned int ui;
+ memcpy(&ui, sha1, sizeof(unsigned int));
+ i = ui % object_ix_hashsz;
+ while (0 < object_ix[i]) {
+ if (!memcmp(sha1, objects[object_ix[i]-1].sha1, 20))
+ return i;
+ if (++i == object_ix_hashsz)
+ i = 0;
+ }
+ return -1 - i;
+}
+
+static struct object_entry *locate_object_entry(const unsigned char *sha1)
+{
+ int i;
+
+ if (!object_ix_hashsz)
+ return NULL;
+
+ i = locate_object_entry_hash(sha1);
+ if (0 <= i)
+ return &objects[object_ix[i]-1];
+ return NULL;
+}
+
+static void rehash_objects(void)
+{
+ int i;
+ struct object_entry *oe;
+
+ object_ix_hashsz = nr_objects * 3;
+ if (object_ix_hashsz < 1024)
+ object_ix_hashsz = 1024;
+ object_ix = xrealloc(object_ix, sizeof(int) * object_ix_hashsz);
+ memset(object_ix, 0, sizeof(int) * object_ix_hashsz);
+ for (i = 0, oe = objects; i < nr_objects; i++, oe++) {
+ int ix = locate_object_entry_hash(oe->sha1);
+ if (0 <= ix)
+ continue;
+ ix = -1 - ix;
+ object_ix[ix] = i + 1;
+ }
+}
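
As an illustration (not code from the patch), here is the find-or-insert idiom used by locate_object_entry_hash() and rehash_objects(), reduced to a self-contained sketch: an empty slot is encoded as 0 (entries are stored as index + 1), probing is linear with wrap-around, and a miss returns -1 - slot so the caller can insert into that very slot without probing again. All sketch_* names are invented for the example.

#include <stdio.h>
#include <string.h>

#define SKETCH_HASHSZ 1024

static int sketch_ix[SKETCH_HASHSZ];		/* 0 = empty, else entry index + 1 */
static unsigned char sketch_keys[4][20];	/* tiny backing array of "SHA-1s" */
static int sketch_nr;

static int sketch_locate(const unsigned char *sha1)
{
	unsigned int ui;
	int i;

	memcpy(&ui, sha1, sizeof(ui));
	i = ui % SKETCH_HASHSZ;
	while (sketch_ix[i] > 0) {
		if (!memcmp(sha1, sketch_keys[sketch_ix[i] - 1], 20))
			return i;		/* hit: slot that holds the entry */
		if (++i == SKETCH_HASHSZ)
			i = 0;			/* linear probing with wrap-around */
	}
	return -1 - i;				/* miss: encodes the free slot */
}

int main(void)
{
	unsigned char a[20] = { 0x12, 0x34 };	/* remaining bytes are zero */
	int slot = sketch_locate(a);

	if (slot < 0) {				/* not found: insert at -1 - slot */
		memcpy(sketch_keys[sketch_nr], a, 20);
		sketch_ix[-1 - slot] = ++sketch_nr;
	}
	printf("second lookup finds slot %d\n", sketch_locate(a));
	return 0;
}
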
+
+static unsigned name_hash(const char *name)
+{
+ unsigned char c;
+ unsigned hash = 0;
+
+ /*
+ * This effectively just creates a sortable number from the
+ * last sixteen non-whitespace characters. Last characters
+ * count "most", so things that end in ".c" sort together.
+ */
+ while ((c = *name++) != 0) {
+ if (isspace(c))
+ continue;
+ hash = (hash >> 2) + (c << 24);
+ }
+ return hash;
+}
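
As an illustration (not code from the patch), the suffix-weighted hash above can be exercised on its own: because every earlier character is shifted two bits further down, names sharing a suffix (".c" here) tend to get nearby hash values and therefore end up next to each other in the type/size sort used later.

#include <ctype.h>
#include <stdio.h>

static unsigned sketch_name_hash(const char *name)
{
	unsigned char c;
	unsigned hash = 0;

	while ((c = *name++) != 0) {
		if (isspace(c))
			continue;
		hash = (hash >> 2) + (c << 24);
	}
	return hash;
}

int main(void)
{
	printf("diff.c   -> %08x\n", sketch_name_hash("diff.c"));
	printf("cache.c  -> %08x\n", sketch_name_hash("cache.c"));
	printf("Makefile -> %08x\n", sketch_name_hash("Makefile"));
	return 0;
}
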
+
+static int add_object_entry(const unsigned char *sha1, unsigned hash, int exclude)
+{
+ unsigned int idx = nr_objects;
+ struct object_entry *entry;
+ struct packed_git *p;
+ unsigned int found_offset = 0;
+ struct packed_git *found_pack = NULL;
+ int ix, status = 0;
+
+ if (!exclude) {
+ for (p = packed_git; p; p = p->next) {
+ struct pack_entry e;
+ if (find_pack_entry_one(sha1, &e, p)) {
+ if (incremental)
+ return 0;
+ if (local && !p->pack_local)
+ return 0;
+ if (!found_pack) {
+ found_offset = e.offset;
+ found_pack = e.p;
+ }
+ }
+ }
+ }
+ if ((entry = locate_object_entry(sha1)) != NULL)
+ goto already_added;
+
+ if (idx >= nr_alloc) {
+ unsigned int needed = (idx + 1024) * 3 / 2;
+ objects = xrealloc(objects, needed * sizeof(*entry));
+ nr_alloc = needed;
+ }
+ entry = objects + idx;
+ nr_objects = idx + 1;
+ memset(entry, 0, sizeof(*entry));
+ memcpy(entry->sha1, sha1, 20);
+ entry->hash = hash;
+
+ if (object_ix_hashsz * 3 <= nr_objects * 4)
+ rehash_objects();
+ else {
+ ix = locate_object_entry_hash(entry->sha1);
+ if (0 <= ix)
+ die("internal error in object hashing.");
+ object_ix[-1 - ix] = idx + 1;
+ }
+ status = 1;
+
+ already_added:
+ if (progress_update) {
+ fprintf(stderr, "Counting objects...%d\r", nr_objects);
+ progress_update = 0;
+ }
+ if (exclude)
+ entry->preferred_base = 1;
+ else {
+ if (found_pack) {
+ entry->in_pack = found_pack;
+ entry->in_pack_offset = found_offset;
+ }
+ }
+ return status;
+}
+
+struct pbase_tree_cache {
+ unsigned char sha1[20];
+ int ref;
+ int temporary;
+ void *tree_data;
+ unsigned long tree_size;
+};
+
+static struct pbase_tree_cache *(pbase_tree_cache[256]);
+static int pbase_tree_cache_ix(const unsigned char *sha1)
+{
+ return sha1[0] % ARRAY_SIZE(pbase_tree_cache);
+}
+static int pbase_tree_cache_ix_incr(int ix)
+{
+ return (ix+1) % ARRAY_SIZE(pbase_tree_cache);
+}
+
+static struct pbase_tree {
+ struct pbase_tree *next;
+ /* This is a phony "cache" entry; we are not
+ * going to evict it or find it through the _get()
+ * mechanism -- it is for the toplevel node, which
+ * would almost always change with any commit.
+ */
+ struct pbase_tree_cache pcache;
+} *pbase_tree;
+
+static struct pbase_tree_cache *pbase_tree_get(const unsigned char *sha1)
+{
+ struct pbase_tree_cache *ent, *nent;
+ void *data;
+ unsigned long size;
+ char type[20];
+ int neigh;
+ int my_ix = pbase_tree_cache_ix(sha1);
+ int available_ix = -1;
+
+ /* pbase_tree_cache acts as a limited hashtable: if the
+ * object is cached, it is found at its hash slot or within
+ * a few slots after it.
+ */
+ for (neigh = 0; neigh < 8; neigh++) {
+ ent = pbase_tree_cache[my_ix];
+ if (ent && !memcmp(ent->sha1, sha1, 20)) {
+ ent->ref++;
+ return ent;
+ }
+ else if (((available_ix < 0) && (!ent || !ent->ref)) ||
+ ((0 <= available_ix) &&
+ (!ent && pbase_tree_cache[available_ix])))
+ available_ix = my_ix;
+ if (!ent)
+ break;
+ my_ix = pbase_tree_cache_ix_incr(my_ix);
+ }
+
+ /* Did not find one. Either we got a bogus request or
+ * we need to read and perhaps cache.
+ */
+ data = read_sha1_file(sha1, type, &size);
+ if (!data)
+ return NULL;
+ if (strcmp(type, tree_type)) {
+ free(data);
+ return NULL;
+ }
+
+ /* We need to either cache or return a throwaway copy */
+
+ if (available_ix < 0)
+ ent = NULL;
+ else {
+ ent = pbase_tree_cache[available_ix];
+ my_ix = available_ix;
+ }
+
+ if (!ent) {
+ nent = xmalloc(sizeof(*nent));
+ nent->temporary = (available_ix < 0);
+ }
+ else {
+ /* evict and reuse */
+ free(ent->tree_data);
+ nent = ent;
+ }
+ memcpy(nent->sha1, sha1, 20);
+ nent->tree_data = data;
+ nent->tree_size = size;
+ nent->ref = 1;
+ if (!nent->temporary)
+ pbase_tree_cache[my_ix] = nent;
+ return nent;
+}
+
+static void pbase_tree_put(struct pbase_tree_cache *cache)
+{
+ if (!cache->temporary) {
+ cache->ref--;
+ return;
+ }
+ free(cache->tree_data);
+ free(cache);
+}
+
+static int name_cmp_len(const char *name)
+{
+ int i;
+ for (i = 0; name[i] && name[i] != '\n' && name[i] != '/'; i++)
+ ;
+ return i;
+}
+
+static void add_pbase_object(struct tree_desc *tree,
+ const char *name,
+ int cmplen,
+ const char *fullname)
+{
+ struct name_entry entry;
+
+ while (tree_entry(tree,&entry)) {
+ unsigned long size;
+ char type[20];
+
+ if (entry.pathlen != cmplen ||
+ memcmp(entry.path, name, cmplen) ||
+ !has_sha1_file(entry.sha1) ||
+ sha1_object_info(entry.sha1, type, &size))
+ continue;
+ if (name[cmplen] != '/') {
+ unsigned hash = name_hash(fullname);
+ add_object_entry(entry.sha1, hash, 1);
+ return;
+ }
+ if (!strcmp(type, tree_type)) {
+ struct tree_desc sub;
+ struct pbase_tree_cache *tree;
+ const char *down = name+cmplen+1;
+ int downlen = name_cmp_len(down);
+
+ tree = pbase_tree_get(entry.sha1);
+ if (!tree)
+ return;
+ sub.buf = tree->tree_data;
+ sub.size = tree->tree_size;
+
+ add_pbase_object(&sub, down, downlen, fullname);
+ pbase_tree_put(tree);
+ }
+ }
+}
+
+static unsigned *done_pbase_paths;
+static int done_pbase_paths_num;
+static int done_pbase_paths_alloc;
+static int done_pbase_path_pos(unsigned hash)
+{
+ int lo = 0;
+ int hi = done_pbase_paths_num;
+ while (lo < hi) {
+ int mi = (hi + lo) / 2;
+ if (done_pbase_paths[mi] == hash)
+ return mi;
+ if (done_pbase_paths[mi] < hash)
+ hi = mi;
+ else
+ lo = mi + 1;
+ }
+ return -lo-1;
+}
+
+static int check_pbase_path(unsigned hash)
+{
+ int pos = (!done_pbase_paths) ? -1 : done_pbase_path_pos(hash);
+ if (0 <= pos)
+ return 1;
+ pos = -pos - 1;
+ if (done_pbase_paths_alloc <= done_pbase_paths_num) {
+ done_pbase_paths_alloc = alloc_nr(done_pbase_paths_alloc);
+ done_pbase_paths = xrealloc(done_pbase_paths,
+ done_pbase_paths_alloc *
+ sizeof(unsigned));
+ }
+ done_pbase_paths_num++;
+ if (pos < done_pbase_paths_num)
+ memmove(done_pbase_paths + pos + 1,
+ done_pbase_paths + pos,
+ (done_pbase_paths_num - pos - 1) * sizeof(unsigned));
+ done_pbase_paths[pos] = hash;
+ return 0;
+}
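
As an illustration (not code from the patch), here is the "seen path hash" bookkeeping above as a self-contained sketch: the array is kept sorted in descending order, the binary search encodes a miss as a negative insertion point, and the insert shifts the tail with memmove. A fixed-size array stands in for the alloc_nr()-based growth of the real code.

#include <stdio.h>
#include <string.h>

static unsigned sketch_paths[64];
static int sketch_num;

static int sketch_seen_before(unsigned hash)
{
	int lo = 0, hi = sketch_num;

	while (lo < hi) {
		int mi = (lo + hi) / 2;
		if (sketch_paths[mi] == hash)
			return 1;		/* already recorded */
		if (sketch_paths[mi] < hash)
			hi = mi;		/* array is kept in descending order */
		else
			lo = mi + 1;
	}
	/* miss: lo is exactly the slot that keeps the order */
	memmove(sketch_paths + lo + 1, sketch_paths + lo,
		(sketch_num - lo) * sizeof(unsigned));
	sketch_paths[lo] = hash;
	sketch_num++;
	return 0;
}

int main(void)
{
	printf("%d\n", sketch_seen_before(7));	/* 0: first time seen */
	printf("%d\n", sketch_seen_before(3));	/* 0: first time seen */
	printf("%d\n", sketch_seen_before(7));	/* 1: already recorded */
	return 0;
}
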
+
+static void add_preferred_base_object(char *name, unsigned hash)
+{
+ struct pbase_tree *it;
+ int cmplen = name_cmp_len(name);
+
+ if (check_pbase_path(hash))
+ return;
+
+ for (it = pbase_tree; it; it = it->next) {
+ if (cmplen == 0) {
+ hash = name_hash("");
+ add_object_entry(it->pcache.sha1, hash, 1);
+ }
+ else {
+ struct tree_desc tree;
+ tree.buf = it->pcache.tree_data;
+ tree.size = it->pcache.tree_size;
+ add_pbase_object(&tree, name, cmplen, name);
+ }
+ }
+}
+
+static void add_preferred_base(unsigned char *sha1)
+{
+ struct pbase_tree *it;
+ void *data;
+ unsigned long size;
+ unsigned char tree_sha1[20];
+
+ data = read_object_with_reference(sha1, tree_type, &size, tree_sha1);
+ if (!data)
+ return;
+
+ for (it = pbase_tree; it; it = it->next) {
+ if (!memcmp(it->pcache.sha1, tree_sha1, 20)) {
+ free(data);
+ return;
+ }
+ }
+
+ it = xcalloc(1, sizeof(*it));
+ it->next = pbase_tree;
+ pbase_tree = it;
+
+ memcpy(it->pcache.sha1, tree_sha1, 20);
+ it->pcache.tree_data = data;
+ it->pcache.tree_size = size;
+}
+
+static void check_object(struct object_entry *entry)
+{
+ char type[20];
+
+ if (entry->in_pack && !entry->preferred_base) {
+ unsigned char base[20];
+ unsigned long size;
+ struct object_entry *base_entry;
+
+ /* We want in_pack_type even if we do not reuse the delta;
+ * a non-delta representation is always worth reusing.
+ */
+ check_reuse_pack_delta(entry->in_pack,
+ entry->in_pack_offset,
+ base, &size,
+ &entry->in_pack_type);
+
+ /* Check if it is a delta and whether its base is also an
+ * object we are going to pack; if so, we will reuse the
+ * existing delta.
+ */
+ if (!no_reuse_delta &&
+ entry->in_pack_type == OBJ_DELTA &&
+ (base_entry = locate_object_entry(base)) &&
+ (!base_entry->preferred_base)) {
+
+ /* Depth value does not matter - find_deltas()
+ * will never consider a reused delta as the
+ * base object to deltify other objects
+ * against, in order to avoid circular deltas.
+ */
+
+ /* uncompressed size of the delta data */
+ entry->size = entry->delta_size = size;
+ entry->delta = base_entry;
+ entry->type = OBJ_DELTA;
+
+ entry->delta_sibling = base_entry->delta_child;
+ base_entry->delta_child = entry;
+
+ return;
+ }
+ /* Otherwise we would do the usual */
+ }
+
+ if (sha1_object_info(entry->sha1, type, &entry->size))
+ die("unable to get type of object %s",
+ sha1_to_hex(entry->sha1));
+
+ if (!strcmp(type, commit_type)) {
+ entry->type = OBJ_COMMIT;
+ } else if (!strcmp(type, tree_type)) {
+ entry->type = OBJ_TREE;
+ } else if (!strcmp(type, blob_type)) {
+ entry->type = OBJ_BLOB;
+ } else if (!strcmp(type, tag_type)) {
+ entry->type = OBJ_TAG;
+ } else
+ die("unable to pack object %s of type %s",
+ sha1_to_hex(entry->sha1), type);
+}
+
+static unsigned int check_delta_limit(struct object_entry *me, unsigned int n)
+{
+ struct object_entry *child = me->delta_child;
+ unsigned int m = n;
+ while (child) {
+ unsigned int c = check_delta_limit(child, n + 1);
+ if (m < c)
+ m = c;
+ child = child->delta_sibling;
+ }
+ return m;
+}
+
+static void get_object_details(void)
+{
+ int i;
+ struct object_entry *entry;
+
+ prepare_pack_ix();
+ for (i = 0, entry = objects; i < nr_objects; i++, entry++)
+ check_object(entry);
+
+ if (nr_objects == nr_result) {
+ /*
+ * Compute the depth of the objects that depend on this
+ * entry -- this is subtracted from the maximum depth so
+ * that reusing delta data does not produce overly deep
+ * delta chains.
+ * However, we loosen this restriction when we know we
+ * are creating a thin pack -- it will have to be
+ * expanded on the other end anyway, so do not
+ * artificially cut the delta chain; let it go as
+ * deep as it wants.
+ */
+ for (i = 0, entry = objects; i < nr_objects; i++, entry++)
+ if (!entry->delta && entry->delta_child)
+ entry->delta_limit =
+ check_delta_limit(entry, 1);
+ }
+}
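
As an illustration (not code from the patch), a toy run of the depth computation: delta_child/delta_sibling link the objects already deltified against an entry, and the recursion returns the longest such chain, counting the entry itself. try_delta() later subtracts this value from the requested --depth, so chains reused from an existing pack cannot end up deeper than asked for.

#include <stdio.h>

struct sketch_entry {
	struct sketch_entry *delta_child;	/* first object deltified against us */
	struct sketch_entry *delta_sibling;	/* next object sharing our base */
};

static unsigned sketch_delta_limit(struct sketch_entry *e, unsigned n)
{
	struct sketch_entry *child = e->delta_child;
	unsigned m = n;

	while (child) {
		unsigned c = sketch_delta_limit(child, n + 1);
		if (m < c)
			m = c;
		child = child->delta_sibling;
	}
	return m;
}

int main(void)
{
	/* base <- a <- b and base <- c: the longest dependent chain,
	 * counting base itself, has length 3.
	 */
	struct sketch_entry b = { NULL, NULL };
	struct sketch_entry c = { NULL, NULL };
	struct sketch_entry a = { &b, &c };
	struct sketch_entry base = { &a, NULL };

	printf("delta limit of base: %u\n", sketch_delta_limit(&base, 1));
	return 0;
}
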
+
+typedef int (*entry_sort_t)(const struct object_entry *, const struct object_entry *);
+
+static entry_sort_t current_sort;
+
+static int sort_comparator(const void *_a, const void *_b)
+{
+ struct object_entry *a = *(struct object_entry **)_a;
+ struct object_entry *b = *(struct object_entry **)_b;
+ return current_sort(a,b);
+}
+
+static struct object_entry **create_sorted_list(entry_sort_t sort)
+{
+ struct object_entry **list = xmalloc(nr_objects * sizeof(struct object_entry *));
+ int i;
+
+ for (i = 0; i < nr_objects; i++)
+ list[i] = objects + i;
+ current_sort = sort;
+ qsort(list, nr_objects, sizeof(struct object_entry *), sort_comparator);
+ return list;
+}
+
+static int sha1_sort(const struct object_entry *a, const struct object_entry *b)
+{
+ return memcmp(a->sha1, b->sha1, 20);
+}
+
+static struct object_entry **create_final_object_list(void)
+{
+ struct object_entry **list;
+ int i, j;
+
+ for (i = nr_result = 0; i < nr_objects; i++)
+ if (!objects[i].preferred_base)
+ nr_result++;
+ list = xmalloc(nr_result * sizeof(struct object_entry *));
+ for (i = j = 0; i < nr_objects; i++) {
+ if (!objects[i].preferred_base)
+ list[j++] = objects + i;
+ }
+ current_sort = sha1_sort;
+ qsort(list, nr_result, sizeof(struct object_entry *), sort_comparator);
+ return list;
+}
+
+static int type_size_sort(const struct object_entry *a, const struct object_entry *b)
+{
+ if (a->type < b->type)
+ return -1;
+ if (a->type > b->type)
+ return 1;
+ if (a->hash < b->hash)
+ return -1;
+ if (a->hash > b->hash)
+ return 1;
+ if (a->preferred_base < b->preferred_base)
+ return -1;
+ if (a->preferred_base > b->preferred_base)
+ return 1;
+ if (a->size < b->size)
+ return -1;
+ if (a->size > b->size)
+ return 1;
+ return a < b ? -1 : (a > b);
+}
+
+struct unpacked {
+ struct object_entry *entry;
+ void *data;
+ struct delta_index *index;
+};
+
+/*
+ * We search for deltas _backwards_ in a list sorted by type and
+ * by size, so that we see progressively smaller and smaller files.
+ * That's because we prefer deltas to be from the bigger file
+ * to the smaller - deletes are potentially cheaper, but perhaps
+ * more importantly, the bigger file is likely the more recent
+ * one.
+ */
+static int try_delta(struct unpacked *trg, struct unpacked *src,
+ unsigned max_depth)
+{
+ struct object_entry *trg_entry = trg->entry;
+ struct object_entry *src_entry = src->entry;
+ unsigned long trg_size, src_size, delta_size, sizediff, max_size, sz;
+ char type[10];
+ void *delta_buf;
+
+ /* Don't bother doing diffs between different types */
+ if (trg_entry->type != src_entry->type)
+ return -1;
+
+ /* We do not compute delta to *create* objects we are not
+ * going to pack.
+ */
+ if (trg_entry->preferred_base)
+ return -1;
+
+ /*
+ * When reusing delta data, we do not bother to retry a
+ * delta that an earlier packing already discarded -- that
+ * is, when both objects come from the same existing pack.
+ */
+ if (!no_reuse_delta && trg_entry->in_pack &&
+ trg_entry->in_pack == src_entry->in_pack)
+ return 0;
+
+ /*
+ * If the current object is at the pack edge, take the depth of
+ * the objects that depend on it into account --
+ * otherwise they would become too deep.
+ */
+ if (trg_entry->delta_child) {
+ if (max_depth <= trg_entry->delta_limit)
+ return 0;
+ max_depth -= trg_entry->delta_limit;
+ }
+ if (src_entry->depth >= max_depth)
+ return 0;
+
+ /* Now some size filtering heuristics. */
+ trg_size = trg_entry->size;
+ max_size = trg_size/2 - 20;
+ max_size = max_size * (max_depth - src_entry->depth) / max_depth;
+ if (max_size == 0)
+ return 0;
+ if (trg_entry->delta && trg_entry->delta_size <= max_size)
+ max_size = trg_entry->delta_size-1;
+ src_size = src_entry->size;
+ sizediff = src_size < trg_size ? trg_size - src_size : 0;
+ if (sizediff >= max_size)
+ return 0;
+
+ /* Load data if not already done */
+ if (!trg->data) {
+ trg->data = read_sha1_file(trg_entry->sha1, type, &sz);
+ if (sz != trg_size)
+ die("object %s inconsistent object length (%lu vs %lu)",
+ sha1_to_hex(trg_entry->sha1), sz, trg_size);
+ }
+ if (!src->data) {
+ src->data = read_sha1_file(src_entry->sha1, type, &sz);
+ if (sz != src_size)
+ die("object %s inconsistent object length (%lu vs %lu)",
+ sha1_to_hex(src_entry->sha1), sz, src_size);
+ }
+ if (!src->index) {
+ src->index = create_delta_index(src->data, src_size);
+ if (!src->index)
+ die("out of memory");
+ }
+
+ delta_buf = create_delta(src->index, trg->data, trg_size, &delta_size, max_size);
+ if (!delta_buf)
+ return 0;
+
+ trg_entry->delta = src_entry;
+ trg_entry->delta_size = delta_size;
+ trg_entry->depth = src_entry->depth + 1;
+ free(delta_buf);
+ return 1;
+}
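
As an illustration (not code from the patch), the size filtering above worked through for one hypothetical pair of objects: the delta budget starts at half the target size minus header overhead, shrinks as the source sits deeper in a chain, is capped just below any delta already found, and the attempt is skipped outright when the plain size difference already exceeds the budget.

#include <stdio.h>

int main(void)
{
	unsigned long trg_size = 4000, src_size = 3500;
	unsigned long existing_delta = 900;	/* hypothetical current best delta */
	unsigned max_depth = 10, src_depth = 4;
	unsigned long max_size, sizediff;

	max_size = trg_size / 2 - 20;				    /* 1980 */
	max_size = max_size * (max_depth - src_depth) / max_depth; /* 1188 */
	if (existing_delta && existing_delta <= max_size)
		max_size = existing_delta - 1;			    /* 899 */
	sizediff = src_size < trg_size ? trg_size - src_size : 0;  /* 500 */

	if (sizediff >= max_size)
		printf("delta skipped: size difference %lu >= budget %lu\n",
		       sizediff, max_size);
	else
		printf("delta attempted with a %lu byte budget\n", max_size);
	return 0;
}
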
+
+static void progress_interval(int signum)
+{
+ progress_update = 1;
+}
+
+static void find_deltas(struct object_entry **list, int window, int depth)
+{
+ int i, idx;
+ unsigned int array_size = window * sizeof(struct unpacked);
+ struct unpacked *array = xmalloc(array_size);
+ unsigned processed = 0;
+ unsigned last_percent = 999;
+
+ memset(array, 0, array_size);
+ i = nr_objects;
+ idx = 0;
+ if (progress)
+ fprintf(stderr, "Deltifying %d objects.\n", nr_result);
+
+ while (--i >= 0) {
+ struct object_entry *entry = list[i];
+ struct unpacked *n = array + idx;
+ int j;
+
+ if (!entry->preferred_base)
+ processed++;
+
+ if (progress) {
+ unsigned percent = processed * 100 / nr_result;
+ if (percent != last_percent || progress_update) {
+ fprintf(stderr, "%4u%% (%u/%u) done\r",
+ percent, processed, nr_result);
+ progress_update = 0;
+ last_percent = percent;
+ }
+ }
+
+ if (entry->delta)
+ /* This happens if we decided to reuse an existing
+ * delta from a pack; "!no_reuse_delta &&" is implied.
+ */
+ continue;
+
+ if (entry->size < 50)
+ continue;
+ free_delta_index(n->index);
+ n->index = NULL;
+ free(n->data);
+ n->data = NULL;
+ n->entry = entry;
+
+ j = window;
+ while (--j > 0) {
+ unsigned int other_idx = idx + j;
+ struct unpacked *m;
+ if (other_idx >= window)
+ other_idx -= window;
+ m = array + other_idx;
+ if (!m->entry)
+ break;
+ if (try_delta(n, m, depth) < 0)
+ break;
+ }
+ /* If we made n a delta, and if n is already at the maximum
+ * depth, leaving it in the window is pointless; we
+ * should evict it first.
+ */
+ if (entry->delta && depth <= entry->depth)
+ continue;
+
+ idx++;
+ if (idx >= window)
+ idx = 0;
+ }
+
+ if (progress)
+ fputc('\n', stderr);
+
+ for (i = 0; i < window; ++i) {
+ free_delta_index(array[i].index);
+ free(array[i].data);
+ }
+ free(array);
+}
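
As an illustration (not code from the patch), the candidate walk inside the loop above is a ring buffer: for the object in slot idx, the previous window-1 slots are tried newest-first by stepping the offset down and wrapping modulo window. A sketch with window = 4 and the current object in slot 1:

#include <stdio.h>

int main(void)
{
	int window = 4, idx = 1, j;

	for (j = window - 1; j > 0; j--) {
		int other = idx + j;
		if (other >= window)
			other -= window;
		printf("compare slot %d against slot %d\n", idx, other);
	}
	/* prints slots 0, 3, 2: the entries filled most recently before slot 1 */
	return 0;
}
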
+
+static void prepare_pack(int window, int depth)
+{
+ get_object_details();
+ sorted_by_type = create_sorted_list(type_size_sort);
+ if (window && depth)
+ find_deltas(sorted_by_type, window+1, depth);
+}
+
+static int reuse_cached_pack(unsigned char *sha1, int pack_to_stdout)
+{
+ static const char cache[] = "pack-cache/pack-%s.%s";
+ char *cached_pack, *cached_idx;
+ int ifd, ofd, ifd_ix = -1;
+
+ cached_pack = git_path(cache, sha1_to_hex(sha1), "pack");
+ ifd = open(cached_pack, O_RDONLY);
+ if (ifd < 0)
+ return 0;
+
+ if (!pack_to_stdout) {
+ cached_idx = git_path(cache, sha1_to_hex(sha1), "idx");
+ ifd_ix = open(cached_idx, O_RDONLY);
+ if (ifd_ix < 0) {
+ close(ifd);
+ return 0;
+ }
+ }
+
+ if (progress)
+ fprintf(stderr, "Reusing %d objects pack %s\n", nr_objects,
+ sha1_to_hex(sha1));
+
+ if (pack_to_stdout) {
+ if (copy_fd(ifd, 1))
+ exit(1);
+ close(ifd);
+ }
+ else {
+ char name[PATH_MAX];
+ snprintf(name, sizeof(name),
+ "%s-%s.%s", base_name, sha1_to_hex(sha1), "pack");
+ ofd = open(name, O_CREAT | O_EXCL | O_WRONLY, 0666);
+ if (ofd < 0)
+ die("unable to open %s (%s)", name, strerror(errno));
+ if (copy_fd(ifd, ofd))
+ exit(1);
+ close(ifd);
+
+ snprintf(name, sizeof(name),
+ "%s-%s.%s", base_name, sha1_to_hex(sha1), "idx");
+ ofd = open(name, O_CREAT | O_EXCL | O_WRONLY, 0666);
+ if (ofd < 0)
+ die("unable to open %s (%s)", name, strerror(errno));
+ if (copy_fd(ifd_ix, ofd))
+ exit(1);
+ close(ifd_ix);
+ puts(sha1_to_hex(sha1));
+ }
+
+ return 1;
+}
+
+static void setup_progress_signal(void)
+{
+ struct sigaction sa;
+ struct itimerval v;
+
+ memset(&sa, 0, sizeof(sa));
+ sa.sa_handler = progress_interval;
+ sigemptyset(&sa.sa_mask);
+ sa.sa_flags = SA_RESTART;
+ sigaction(SIGALRM, &sa, NULL);
+
+ v.it_interval.tv_sec = 1;
+ v.it_interval.tv_usec = 0;
+ v.it_value = v.it_interval;
+ setitimer(ITIMER_REAL, &v, NULL);
+}
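
As an illustration (not code from the patch), the progress machinery reduced to a standalone program: the SIGALRM handler only sets a flag, which keeps it async-signal-safe, and the work loop polls and clears that flag -- the same reason the loops above merely test progress_update instead of printing from the handler.

#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>

static volatile sig_atomic_t tick;

static void on_alarm(int signum)
{
	(void)signum;
	tick = 1;
}

int main(void)
{
	struct sigaction sa;
	struct itimerval v;
	unsigned long i;

	memset(&sa, 0, sizeof(sa));
	sa.sa_handler = on_alarm;
	sigemptyset(&sa.sa_mask);
	sa.sa_flags = SA_RESTART;
	sigaction(SIGALRM, &sa, NULL);

	v.it_interval.tv_sec = 1;	/* one tick per second */
	v.it_interval.tv_usec = 0;
	v.it_value = v.it_interval;
	setitimer(ITIMER_REAL, &v, NULL);

	for (i = 0; i < 2000000000UL; i++) {
		if (tick) {
			fprintf(stderr, "%lu iterations done\r", i);
			tick = 0;
		}
	}
	fputc('\n', stderr);
	return 0;
}
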
+
+static int git_pack_config(const char *k, const char *v)
+{
+ if(!strcmp(k, "pack.window")) {
+ window = git_config_int(k, v);
+ return 0;
+ }
+ return git_default_config(k, v);
+}
+
+int cmd_pack_objects(int argc, const char **argv, const char *prefix)
+{
+ SHA_CTX ctx;
+ char line[40 + 1 + PATH_MAX + 2];
+ int depth = 10, pack_to_stdout = 0;
+ struct object_entry **list;
+ int num_preferred_base = 0;
+ int i;
+
+ git_config(git_pack_config);
+
+ progress = isatty(2);
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+
+ if (*arg == '-') {
+ if (!strcmp("--non-empty", arg)) {
+ non_empty = 1;
+ continue;
+ }
+ if (!strcmp("--local", arg)) {
+ local = 1;
+ continue;
+ }
+ if (!strcmp("--progress", arg)) {
+ progress = 1;
+ continue;
+ }
+ if (!strcmp("--incremental", arg)) {
+ incremental = 1;
+ continue;
+ }
+ if (!strncmp("--window=", arg, 9)) {
+ char *end;
+ window = strtoul(arg+9, &end, 0);
+ if (!arg[9] || *end)
+ usage(pack_usage);
+ continue;
+ }
+ if (!strncmp("--depth=", arg, 8)) {
+ char *end;
+ depth = strtoul(arg+8, &end, 0);
+ if (!arg[8] || *end)
+ usage(pack_usage);
+ continue;
+ }
+ if (!strcmp("--progress", arg)) {
+ progress = 1;
+ continue;
+ }
+ if (!strcmp("-q", arg)) {
+ progress = 0;
+ continue;
+ }
+ if (!strcmp("--no-reuse-delta", arg)) {
+ no_reuse_delta = 1;
+ continue;
+ }
+ if (!strcmp("--stdout", arg)) {
+ pack_to_stdout = 1;
+ continue;
+ }
+ usage(pack_usage);
+ }
+ if (base_name)
+ usage(pack_usage);
+ base_name = arg;
+ }
+
+ if (pack_to_stdout != !base_name)
+ usage(pack_usage);
+
+ prepare_packed_git();
+
+ if (progress) {
+ fprintf(stderr, "Generating pack...\n");
+ setup_progress_signal();
+ }
+
+ for (;;) {
+ unsigned char sha1[20];
+ unsigned hash;
+
+ if (!fgets(line, sizeof(line), stdin)) {
+ if (feof(stdin))
+ break;
+ if (!ferror(stdin))
+ die("fgets returned NULL, not EOF, not error!");
+ if (errno != EINTR)
+ die("fgets: %s", strerror(errno));
+ clearerr(stdin);
+ continue;
+ }
+
+ if (line[0] == '-') {
+ if (get_sha1_hex(line+1, sha1))
+ die("expected edge sha1, got garbage:\n %s",
+ line+1);
+ if (num_preferred_base++ < window)
+ add_preferred_base(sha1);
+ continue;
+ }
+ if (get_sha1_hex(line, sha1))
+ die("expected sha1, got garbage:\n %s", line);
+ hash = name_hash(line+41);
+ add_preferred_base_object(line+41, hash);
+ add_object_entry(sha1, hash, 0);
+ }
+ if (progress)
+ fprintf(stderr, "Done counting %d objects.\n", nr_objects);
+ sorted_by_sha = create_final_object_list();
+ if (non_empty && !nr_result)
+ return 0;
+
+ SHA1_Init(&ctx);
+ list = sorted_by_sha;
+ for (i = 0; i < nr_result; i++) {
+ struct object_entry *entry = *list++;
+ SHA1_Update(&ctx, entry->sha1, 20);
+ }
+ SHA1_Final(object_list_sha1, &ctx);
+ if (progress && (nr_objects != nr_result))
+ fprintf(stderr, "Result has %d objects.\n", nr_result);
+
+ if (reuse_cached_pack(object_list_sha1, pack_to_stdout))
+ ;
+ else {
+ if (nr_result)
+ prepare_pack(window, depth);
+ if (progress && pack_to_stdout) {
+ /* the other end usually displays progress itself */
+ struct itimerval v = {{0,},};
+ setitimer(ITIMER_REAL, &v, NULL);
+ signal(SIGALRM, SIG_IGN );
+ progress_update = 0;
+ }
+ write_pack_file();
+ if (!pack_to_stdout) {
+ write_index_file();
+ puts(sha1_to_hex(object_list_sha1));
+ }
+ }
+ if (progress)
+ fprintf(stderr, "Total %d, written %d (delta %d), reused %d (delta %d)\n",
+ nr_result, written, written_delta, reused, reused_delta);
+ return 0;
+}
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+
+static const char prune_packed_usage[] =
+"git-prune-packed [-n]";
+
+static int dryrun;
+
+static void prune_dir(int i, DIR *dir, char *pathname, int len)
+{
+ struct dirent *de;
+ char hex[40];
+
+ sprintf(hex, "%02x", i);
+ while ((de = readdir(dir)) != NULL) {
+ unsigned char sha1[20];
+ if (strlen(de->d_name) != 38)
+ continue;
+ memcpy(hex+2, de->d_name, 38);
+ if (get_sha1_hex(hex, sha1))
+ continue;
+ if (!has_sha1_pack(sha1))
+ continue;
+ memcpy(pathname + len, de->d_name, 38);
+ if (dryrun)
+ printf("rm -f %s\n", pathname);
+ else if (unlink(pathname) < 0)
+ error("unable to unlink %s", pathname);
+ }
+ pathname[len] = 0;
+ rmdir(pathname);
+}
+
+static void prune_packed_objects(void)
+{
+ int i;
+ static char pathname[PATH_MAX];
+ const char *dir = get_object_directory();
+ int len = strlen(dir);
+
+ if (len > PATH_MAX - 42)
+ die("impossible object directory");
+ memcpy(pathname, dir, len);
+ if (len && pathname[len-1] != '/')
+ pathname[len++] = '/';
+ for (i = 0; i < 256; i++) {
+ DIR *d;
+
+ sprintf(pathname + len, "%02x/", i);
+ d = opendir(pathname);
+ if (!d)
+ continue;
+ prune_dir(i, d, pathname, len + 3);
+ closedir(d);
+ }
+}
+
+int cmd_prune_packed(int argc, const char **argv, const char *prefix)
+{
+ int i;
+
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+
+ if (*arg == '-') {
+ if (!strcmp(arg, "-n"))
+ dryrun = 1;
+ else
+ usage(prune_packed_usage);
+ continue;
+ }
+ /* Handle arguments here .. */
+ usage(prune_packed_usage);
+ }
+ sync();
+ prune_packed_objects();
+ return 0;
+}
--- /dev/null
+#include "cache.h"
+#include "refs.h"
+#include "tag.h"
+#include "commit.h"
+#include "tree.h"
+#include "blob.h"
+#include "tree-walk.h"
+#include "diff.h"
+#include "revision.h"
+#include "builtin.h"
+#include "cache-tree.h"
+
+static const char prune_usage[] = "git-prune [-n]";
+static int show_only = 0;
+static struct rev_info revs;
+
+static int prune_object(char *path, const char *filename, const unsigned char *sha1)
+{
+ if (show_only) {
+ printf("would prune %s/%s\n", path, filename);
+ return 0;
+ }
+ unlink(mkpath("%s/%s", path, filename));
+ rmdir(path);
+ return 0;
+}
+
+static int prune_dir(int i, char *path)
+{
+ DIR *dir = opendir(path);
+ struct dirent *de;
+
+ if (!dir)
+ return 0;
+
+ while ((de = readdir(dir)) != NULL) {
+ char name[100];
+ unsigned char sha1[20];
+ int len = strlen(de->d_name);
+
+ switch (len) {
+ case 2:
+ if (de->d_name[1] != '.')
+ break;
+ case 1:
+ if (de->d_name[0] != '.')
+ break;
+ continue;
+ case 38:
+ sprintf(name, "%02x", i);
+ memcpy(name+2, de->d_name, len+1);
+ if (get_sha1_hex(name, sha1) < 0)
+ break;
+
+ /*
+ * Do we know about this object?
+ * It must have been reachable
+ */
+ if (lookup_object(sha1))
+ continue;
+
+ prune_object(path, de->d_name, sha1);
+ continue;
+ }
+ fprintf(stderr, "bad sha1 file: %s/%s\n", path, de->d_name);
+ }
+ closedir(dir);
+ return 0;
+}
+
+static void prune_object_dir(const char *path)
+{
+ int i;
+ for (i = 0; i < 256; i++) {
+ static char dir[4096];
+ sprintf(dir, "%s/%02x", path, i);
+ prune_dir(i, dir);
+ }
+}
+
+static void process_blob(struct blob *blob,
+ struct object_array *p,
+ struct name_path *path,
+ const char *name)
+{
+ struct object *obj = &blob->object;
+
+ if (obj->flags & SEEN)
+ return;
+ obj->flags |= SEEN;
+ /* Nothing to do, really .. The blob lookup was the important part */
+}
+
+static void process_tree(struct tree *tree,
+ struct object_array *p,
+ struct name_path *path,
+ const char *name)
+{
+ struct object *obj = &tree->object;
+ struct tree_desc desc;
+ struct name_entry entry;
+ struct name_path me;
+
+ if (obj->flags & SEEN)
+ return;
+ obj->flags |= SEEN;
+ if (parse_tree(tree) < 0)
+ die("bad tree object %s", sha1_to_hex(obj->sha1));
+ name = strdup(name);
+ add_object(obj, p, path, name);
+ me.up = path;
+ me.elem = name;
+ me.elem_len = strlen(name);
+
+ desc.buf = tree->buffer;
+ desc.size = tree->size;
+
+ while (tree_entry(&desc, &entry)) {
+ if (S_ISDIR(entry.mode))
+ process_tree(lookup_tree(entry.sha1), p, &me, entry.path);
+ else
+ process_blob(lookup_blob(entry.sha1), p, &me, entry.path);
+ }
+ free(tree->buffer);
+ tree->buffer = NULL;
+}
+
+static void process_tag(struct tag *tag, struct object_array *p, const char *name)
+{
+ struct object *obj = &tag->object;
+ struct name_path me;
+
+ if (obj->flags & SEEN)
+ return;
+ obj->flags |= SEEN;
+
+ me.up = NULL;
+ me.elem = "tag:/";
+ me.elem_len = 5;
+
+ if (parse_tag(tag) < 0)
+ die("bad tag object %s", sha1_to_hex(obj->sha1));
+ add_object(tag->tagged, p, NULL, name);
+}
+
+static void walk_commit_list(struct rev_info *revs)
+{
+ int i;
+ struct commit *commit;
+ struct object_array objects = { 0, 0, NULL };
+
+ /* Walk all commits, process their trees */
+ while ((commit = get_revision(revs)) != NULL)
+ process_tree(commit->tree, &objects, NULL, "");
+
+ /* Then walk all the pending objects, recursively processing them too */
+ for (i = 0; i < revs->pending.nr; i++) {
+ struct object_array_entry *pending = revs->pending.objects + i;
+ struct object *obj = pending->item;
+ const char *name = pending->name;
+ if (obj->type == OBJ_TAG) {
+ process_tag((struct tag *) obj, &objects, name);
+ continue;
+ }
+ if (obj->type == OBJ_TREE) {
+ process_tree((struct tree *)obj, &objects, NULL, name);
+ continue;
+ }
+ if (obj->type == OBJ_BLOB) {
+ process_blob((struct blob *)obj, &objects, NULL, name);
+ continue;
+ }
+ die("unknown pending object %s (%s)", sha1_to_hex(obj->sha1), name);
+ }
+}
+
+static int add_one_ref(const char *path, const unsigned char *sha1)
+{
+ struct object *object = parse_object(sha1);
+ if (!object)
+ die("bad object ref: %s:%s", path, sha1_to_hex(sha1));
+ add_pending_object(&revs, object, "");
+ return 0;
+}
+
+static void add_one_tree(const unsigned char *sha1)
+{
+ struct tree *tree = lookup_tree(sha1);
+ add_pending_object(&revs, &tree->object, "");
+}
+
+static void add_cache_tree(struct cache_tree *it)
+{
+ int i;
+
+ if (it->entry_count >= 0)
+ add_one_tree(it->sha1);
+ for (i = 0; i < it->subtree_nr; i++)
+ add_cache_tree(it->down[i]->cache_tree);
+}
+
+static void add_cache_refs(void)
+{
+ int i;
+
+ read_cache();
+ for (i = 0; i < active_nr; i++) {
+ lookup_blob(active_cache[i]->sha1);
+ /*
+ * We could add the blobs to the pending list, but quite
+ * frankly, we don't care. Once we've looked them up, and
+ * added them as objects, we've really done everything
+ * there is to do for a blob
+ */
+ }
+ if (active_cache_tree)
+ add_cache_tree(active_cache_tree);
+}
+
+int cmd_prune(int argc, const char **argv, const char *prefix)
+{
+ int i;
+
+ for (i = 1; i < argc; i++) {
+ const char *arg = argv[i];
+ if (!strcmp(arg, "-n")) {
+ show_only = 1;
+ continue;
+ }
+ usage(prune_usage);
+ }
+
+ /*
+ * Set up revision parsing, and mark us as being interested
+ * in all object types, not just commits.
+ */
+ init_revisions(&revs, prefix);
+ revs.tag_objects = 1;
+ revs.blob_objects = 1;
+ revs.tree_objects = 1;
+
+ /* Add all external refs */
+ for_each_ref(add_one_ref);
+
+ /* Add all refs from the index file */
+ add_cache_refs();
+
+ /*
+ * Set up the revision walk - this will move all commits
+ * from the pending list to the commit walking list.
+ */
+ prepare_revision_walk(&revs);
+
+ walk_commit_list(&revs);
+
+ prune_object_dir(get_object_directory());
+
+ return 0;
+}
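
As an illustration (not code from the patch), git-prune is a mark-and-sweep: the walk above sets the SEEN flag on everything reachable from the refs and the index, and prune_object_dir() then removes any loose object that was never looked up. The same shape on a toy in-memory graph:

#include <stdio.h>

#define SEEN 1

struct toy_object {
	const char *name;
	unsigned flags;
	struct toy_object *link[2];	/* e.g. commit->tree, tree->blob */
};

static void mark(struct toy_object *obj)
{
	int i;

	if (!obj || (obj->flags & SEEN))
		return;
	obj->flags |= SEEN;
	for (i = 0; i < 2; i++)
		mark(obj->link[i]);
}

int main(void)
{
	struct toy_object blob = { "blob", 0, { NULL, NULL } };
	struct toy_object tree = { "tree", 0, { &blob, NULL } };
	struct toy_object commit = { "commit", 0, { &tree, NULL } };
	struct toy_object stale = { "stale blob", 0, { NULL, NULL } };
	struct toy_object *all[] = { &commit, &tree, &blob, &stale };
	int i;

	mark(&commit);			/* the ref tip is the only root here */
	for (i = 0; i < 4; i++)
		if (!(all[i]->flags & SEEN))
			printf("would prune %s\n", all[i]->name);
	return 0;
}
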
#define MAX_URI (16)
-static const char push_usage[] = "git push [--all] [--tags] [--force] <repository> [<refspec>...]";
+static const char push_usage[] = "git-push [--all] [--tags] [-f | --force] <repository> [<refspec>...]";
static int all = 0, tags = 0, force = 0, thin = 1;
static const char *execute = NULL;
if (n < MAX_URI)
uri[n++] = strdup(s);
else
- error("more than %d URL's specified, ignoreing the rest", MAX_URI);
+ error("more than %d URL's specified, ignoring the rest", MAX_URI);
}
else if (is_refspec && !has_explicit_refspec)
add_refspec(strdup(s));
return 0;
}
-int cmd_push(int argc, const char **argv, char **envp)
+int cmd_push(int argc, const char **argv, const char *prefix)
{
int i;
- const char *repo = "origin"; // default repository
+ const char *repo = "origin"; /* default repository */
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
tags = 1;
continue;
}
- if (!strcmp(arg, "--force")) {
+ if (!strcmp(arg, "--force") || !strcmp(arg, "-f")) {
force = 1;
continue;
}
*
* Copyright (C) Linus Torvalds, 2005
*/
-#define DBRT_DEBUG 1
#include "cache.h"
-
#include "object.h"
#include "tree.h"
#include "tree-walk.h"
#include "cache-tree.h"
-#include <sys/time.h>
-#include <signal.h>
+#include "unpack-trees.h"
#include "builtin.h"
-static int reset = 0;
-static int merge = 0;
-static int update = 0;
-static int index_only = 0;
-static int nontrivial_merge = 0;
-static int trivial_merges_only = 0;
-static int aggressive = 0;
-static int verbose_update = 0;
-static volatile int progress_update = 0;
-static const char *prefix = NULL;
-
-static int head_idx = -1;
-static int merge_size = 0;
-
static struct object_list *trees = NULL;
-static struct cache_entry df_conflict_entry;
-
-struct tree_entry_list {
- struct tree_entry_list *next;
- unsigned directory : 1;
- unsigned executable : 1;
- unsigned symlink : 1;
- unsigned int mode;
- const char *name;
- const unsigned char *sha1;
-};
-
-static struct tree_entry_list df_conflict_list = {
- .name = NULL,
- .next = &df_conflict_list
-};
-
-typedef int (*merge_fn_t)(struct cache_entry **src);
-
-static struct tree_entry_list *create_tree_entry_list(struct tree *tree)
-{
- struct tree_desc desc;
- struct name_entry one;
- struct tree_entry_list *ret = NULL;
- struct tree_entry_list **list_p = &ret;
-
- desc.buf = tree->buffer;
- desc.size = tree->size;
-
- while (tree_entry(&desc, &one)) {
- struct tree_entry_list *entry;
-
- entry = xmalloc(sizeof(struct tree_entry_list));
- entry->name = one.path;
- entry->sha1 = one.sha1;
- entry->mode = one.mode;
- entry->directory = S_ISDIR(one.mode) != 0;
- entry->executable = (one.mode & S_IXUSR) != 0;
- entry->symlink = S_ISLNK(one.mode) != 0;
- entry->next = NULL;
-
- *list_p = entry;
- list_p = &entry->next;
- }
- return ret;
-}
-
-static int entcmp(const char *name1, int dir1, const char *name2, int dir2)
-{
- int len1 = strlen(name1);
- int len2 = strlen(name2);
- int len = len1 < len2 ? len1 : len2;
- int ret = memcmp(name1, name2, len);
- unsigned char c1, c2;
- if (ret)
- return ret;
- c1 = name1[len];
- c2 = name2[len];
- if (!c1 && dir1)
- c1 = '/';
- if (!c2 && dir2)
- c2 = '/';
- ret = (c1 < c2) ? -1 : (c1 > c2) ? 1 : 0;
- if (c1 && c2 && !ret)
- ret = len1 - len2;
- return ret;
-}
-
-static int unpack_trees_rec(struct tree_entry_list **posns, int len,
- const char *base, merge_fn_t fn, int *indpos)
-{
- int baselen = strlen(base);
- int src_size = len + 1;
- do {
- int i;
- const char *first;
- int firstdir = 0;
- int pathlen;
- unsigned ce_size;
- struct tree_entry_list **subposns;
- struct cache_entry **src;
- int any_files = 0;
- int any_dirs = 0;
- char *cache_name;
- int ce_stage;
-
- /* Find the first name in the input. */
-
- first = NULL;
- cache_name = NULL;
-
- /* Check the cache */
- if (merge && *indpos < active_nr) {
- /* This is a bit tricky: */
- /* If the index has a subdirectory (with
- * contents) as the first name, it'll get a
- * filename like "foo/bar". But that's after
- * "foo", so the entry in trees will get
- * handled first, at which point we'll go into
- * "foo", and deal with "bar" from the index,
- * because the base will be "foo/". The only
- * way we can actually have "foo/bar" first of
- * all the things is if the trees don't
- * contain "foo" at all, in which case we'll
- * handle "foo/bar" without going into the
- * directory, but that's fine (and will return
- * an error anyway, with the added unknown
- * file case.
- */
-
- cache_name = active_cache[*indpos]->name;
- if (strlen(cache_name) > baselen &&
- !memcmp(cache_name, base, baselen)) {
- cache_name += baselen;
- first = cache_name;
- } else {
- cache_name = NULL;
- }
- }
-
-#if DBRT_DEBUG > 1
- if (first)
- printf("index %s\n", first);
-#endif
- for (i = 0; i < len; i++) {
- if (!posns[i] || posns[i] == &df_conflict_list)
- continue;
-#if DBRT_DEBUG > 1
- printf("%d %s\n", i + 1, posns[i]->name);
-#endif
- if (!first || entcmp(first, firstdir,
- posns[i]->name,
- posns[i]->directory) > 0) {
- first = posns[i]->name;
- firstdir = posns[i]->directory;
- }
- }
- /* No name means we're done */
- if (!first)
- return 0;
-
- pathlen = strlen(first);
- ce_size = cache_entry_size(baselen + pathlen);
-
- src = xcalloc(src_size, sizeof(struct cache_entry *));
-
- subposns = xcalloc(len, sizeof(struct tree_list_entry *));
-
- if (cache_name && !strcmp(cache_name, first)) {
- any_files = 1;
- src[0] = active_cache[*indpos];
- remove_cache_entry_at(*indpos);
- }
-
- for (i = 0; i < len; i++) {
- struct cache_entry *ce;
-
- if (!posns[i] ||
- (posns[i] != &df_conflict_list &&
- strcmp(first, posns[i]->name))) {
- continue;
- }
-
- if (posns[i] == &df_conflict_list) {
- src[i + merge] = &df_conflict_entry;
- continue;
- }
-
- if (posns[i]->directory) {
- struct tree *tree = lookup_tree(posns[i]->sha1);
- any_dirs = 1;
- parse_tree(tree);
- subposns[i] = create_tree_entry_list(tree);
- posns[i] = posns[i]->next;
- src[i + merge] = &df_conflict_entry;
- continue;
- }
-
- if (!merge)
- ce_stage = 0;
- else if (i + 1 < head_idx)
- ce_stage = 1;
- else if (i + 1 > head_idx)
- ce_stage = 3;
- else
- ce_stage = 2;
-
- ce = xcalloc(1, ce_size);
- ce->ce_mode = create_ce_mode(posns[i]->mode);
- ce->ce_flags = create_ce_flags(baselen + pathlen,
- ce_stage);
- memcpy(ce->name, base, baselen);
- memcpy(ce->name + baselen, first, pathlen + 1);
-
- any_files = 1;
-
- memcpy(ce->sha1, posns[i]->sha1, 20);
- src[i + merge] = ce;
- subposns[i] = &df_conflict_list;
- posns[i] = posns[i]->next;
- }
- if (any_files) {
- if (merge) {
- int ret;
-
-#if DBRT_DEBUG > 1
- printf("%s:\n", first);
- for (i = 0; i < src_size; i++) {
- printf(" %d ", i);
- if (src[i])
- printf("%s\n", sha1_to_hex(src[i]->sha1));
- else
- printf("\n");
- }
-#endif
- ret = fn(src);
-
-#if DBRT_DEBUG > 1
- printf("Added %d entries\n", ret);
-#endif
- *indpos += ret;
- } else {
- for (i = 0; i < src_size; i++) {
- if (src[i]) {
- add_cache_entry(src[i], ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
- }
- }
- }
- }
- if (any_dirs) {
- char *newbase = xmalloc(baselen + 2 + pathlen);
- memcpy(newbase, base, baselen);
- memcpy(newbase + baselen, first, pathlen);
- newbase[baselen + pathlen] = '/';
- newbase[baselen + pathlen + 1] = '\0';
- if (unpack_trees_rec(subposns, len, newbase, fn,
- indpos))
- return -1;
- free(newbase);
- }
- free(subposns);
- free(src);
- } while (1);
-}
-
-static void reject_merge(struct cache_entry *ce)
-{
- die("Entry '%s' would be overwritten by merge. Cannot merge.",
- ce->name);
-}
-
-/* Unlink the last component and attempt to remove leading
- * directories, in case this unlink is the removal of the
- * last entry in the directory -- empty directories are removed.
- */
-static void unlink_entry(char *name)
-{
- char *cp, *prev;
-
- if (unlink(name))
- return;
- prev = NULL;
- while (1) {
- int status;
- cp = strrchr(name, '/');
- if (prev)
- *prev = '/';
- if (!cp)
- break;
-
- *cp = 0;
- status = rmdir(name);
- if (status) {
- *cp = '/';
- break;
- }
- prev = cp;
- }
-}
-
-static void progress_interval(int signum)
-{
- progress_update = 1;
-}
-
-static void setup_progress_signal(void)
-{
- struct sigaction sa;
- struct itimerval v;
-
- memset(&sa, 0, sizeof(sa));
- sa.sa_handler = progress_interval;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART;
- sigaction(SIGALRM, &sa, NULL);
-
- v.it_interval.tv_sec = 1;
- v.it_interval.tv_usec = 0;
- v.it_value = v.it_interval;
- setitimer(ITIMER_REAL, &v, NULL);
-}
-
-static void check_updates(struct cache_entry **src, int nr)
-{
- static struct checkout state = {
- .base_dir = "",
- .force = 1,
- .quiet = 1,
- .refresh_cache = 1,
- };
- unsigned short mask = htons(CE_UPDATE);
- unsigned last_percent = 200, cnt = 0, total = 0;
-
- if (update && verbose_update) {
- for (total = cnt = 0; cnt < nr; cnt++) {
- struct cache_entry *ce = src[cnt];
- if (!ce->ce_mode || ce->ce_flags & mask)
- total++;
- }
-
- /* Don't bother doing this for very small updates */
- if (total < 250)
- total = 0;
-
- if (total) {
- fprintf(stderr, "Checking files out...\n");
- setup_progress_signal();
- progress_update = 1;
- }
- cnt = 0;
- }
-
- while (nr--) {
- struct cache_entry *ce = *src++;
-
- if (total) {
- if (!ce->ce_mode || ce->ce_flags & mask) {
- unsigned percent;
- cnt++;
- percent = (cnt * 100) / total;
- if (percent != last_percent ||
- progress_update) {
- fprintf(stderr, "%4u%% (%u/%u) done\r",
- percent, cnt, total);
- last_percent = percent;
- progress_update = 0;
- }
- }
- }
- if (!ce->ce_mode) {
- if (update)
- unlink_entry(ce->name);
- continue;
- }
- if (ce->ce_flags & mask) {
- ce->ce_flags &= ~mask;
- if (update)
- checkout_entry(ce, &state, NULL);
- }
- }
- if (total) {
- signal(SIGALRM, SIG_IGN);
- fputc('\n', stderr);
- }
-}
-
-static int unpack_trees(merge_fn_t fn)
-{
- int indpos = 0;
- unsigned len = object_list_length(trees);
- struct tree_entry_list **posns;
- int i;
- struct object_list *posn = trees;
- merge_size = len;
-
- if (len) {
- posns = xmalloc(len * sizeof(struct tree_entry_list *));
- for (i = 0; i < len; i++) {
- posns[i] = create_tree_entry_list((struct tree *) posn->item);
- posn = posn->next;
- }
- if (unpack_trees_rec(posns, len, prefix ? prefix : "",
- fn, &indpos))
- return -1;
- }
-
- if (trivial_merges_only && nontrivial_merge)
- die("Merge requires file-level merging");
-
- check_updates(active_cache, active_nr);
- return 0;
-}
-
static int list_tree(unsigned char *sha1)
{
struct tree *tree = parse_tree_indirect(sha1);
return 0;
}
-static int same(struct cache_entry *a, struct cache_entry *b)
-{
- if (!!a != !!b)
- return 0;
- if (!a && !b)
- return 1;
- return a->ce_mode == b->ce_mode &&
- !memcmp(a->sha1, b->sha1, 20);
-}
-
-
-/*
- * When a CE gets turned into an unmerged entry, we
- * want it to be up-to-date
- */
-static void verify_uptodate(struct cache_entry *ce)
-{
- struct stat st;
-
- if (index_only || reset)
- return;
-
- if (!lstat(ce->name, &st)) {
- unsigned changed = ce_match_stat(ce, &st, 1);
- if (!changed)
- return;
- errno = 0;
- }
- if (reset) {
- ce->ce_flags |= htons(CE_UPDATE);
- return;
- }
- if (errno == ENOENT)
- return;
- die("Entry '%s' not uptodate. Cannot merge.", ce->name);
-}
-
-static void invalidate_ce_path(struct cache_entry *ce)
-{
- if (ce)
- cache_tree_invalidate_path(active_cache_tree, ce->name);
-}
-
-/*
- * We do not want to remove or overwrite a working tree file that
- * is not tracked.
- */
-static void verify_absent(const char *path, const char *action)
-{
- struct stat st;
-
- if (index_only || reset || !update)
- return;
- if (!lstat(path, &st))
- die("Untracked working tree file '%s' "
- "would be %s by merge.", path, action);
-}
-
-static int merged_entry(struct cache_entry *merge, struct cache_entry *old)
-{
- merge->ce_flags |= htons(CE_UPDATE);
- if (old) {
- /*
- * See if we can re-use the old CE directly?
- * That way we get the uptodate stat info.
- *
- * This also removes the UPDATE flag on
- * a match.
- */
- if (same(old, merge)) {
- *merge = *old;
- } else {
- verify_uptodate(old);
- invalidate_ce_path(old);
- }
- }
- else {
- verify_absent(merge->name, "overwritten");
- invalidate_ce_path(merge);
- }
-
- merge->ce_flags &= ~htons(CE_STAGEMASK);
- add_cache_entry(merge, ADD_CACHE_OK_TO_ADD);
- return 1;
-}
-
-static int deleted_entry(struct cache_entry *ce, struct cache_entry *old)
-{
- if (old)
- verify_uptodate(old);
- else
- verify_absent(ce->name, "removed");
- ce->ce_mode = 0;
- add_cache_entry(ce, ADD_CACHE_OK_TO_ADD);
- invalidate_ce_path(ce);
- return 1;
-}
-
-static int keep_entry(struct cache_entry *ce)
-{
- add_cache_entry(ce, ADD_CACHE_OK_TO_ADD);
- return 1;
-}
-
-#if DBRT_DEBUG
-static void show_stage_entry(FILE *o,
- const char *label, const struct cache_entry *ce)
-{
- if (!ce)
- fprintf(o, "%s (missing)\n", label);
- else
- fprintf(o, "%s%06o %s %d\t%s\n",
- label,
- ntohl(ce->ce_mode),
- sha1_to_hex(ce->sha1),
- ce_stage(ce),
- ce->name);
-}
-#endif
-
-static int threeway_merge(struct cache_entry **stages)
-{
- struct cache_entry *index;
- struct cache_entry *head;
- struct cache_entry *remote = stages[head_idx + 1];
- int count;
- int head_match = 0;
- int remote_match = 0;
- const char *path = NULL;
-
- int df_conflict_head = 0;
- int df_conflict_remote = 0;
-
- int any_anc_missing = 0;
- int no_anc_exists = 1;
- int i;
-
- for (i = 1; i < head_idx; i++) {
- if (!stages[i])
- any_anc_missing = 1;
- else {
- if (!path)
- path = stages[i]->name;
- no_anc_exists = 0;
- }
- }
-
- index = stages[0];
- head = stages[head_idx];
-
- if (head == &df_conflict_entry) {
- df_conflict_head = 1;
- head = NULL;
- }
-
- if (remote == &df_conflict_entry) {
- df_conflict_remote = 1;
- remote = NULL;
- }
-
- if (!path && index)
- path = index->name;
- if (!path && head)
- path = head->name;
- if (!path && remote)
- path = remote->name;
-
- /* First, if there's a #16 situation, note that to prevent #13
- * and #14.
- */
- if (!same(remote, head)) {
- for (i = 1; i < head_idx; i++) {
- if (same(stages[i], head)) {
- head_match = i;
- }
- if (same(stages[i], remote)) {
- remote_match = i;
- }
- }
- }
-
- /* We start with cases where the index is allowed to match
- * something other than the head: #14(ALT) and #2ALT, where it
- * is permitted to match the result instead.
- */
- /* #14, #14ALT, #2ALT */
- if (remote && !df_conflict_head && head_match && !remote_match) {
- if (index && !same(index, remote) && !same(index, head))
- reject_merge(index);
- return merged_entry(remote, index);
- }
- /*
- * If we have an entry in the index cache, then we want to
- * make sure that it matches head.
- */
- if (index && !same(index, head)) {
- reject_merge(index);
- }
-
- if (head) {
- /* #5ALT, #15 */
- if (same(head, remote))
- return merged_entry(head, index);
- /* #13, #3ALT */
- if (!df_conflict_remote && remote_match && !head_match)
- return merged_entry(head, index);
- }
-
- /* #1 */
- if (!head && !remote && any_anc_missing)
- return 0;
-
- /* Under the new "aggressive" rule, we resolve mostly trivial
- * cases that we historically had git-merge-one-file resolve.
- */
- if (aggressive) {
- int head_deleted = !head && !df_conflict_head;
- int remote_deleted = !remote && !df_conflict_remote;
- /*
- * Deleted in both.
- * Deleted in one and unchanged in the other.
- */
- if ((head_deleted && remote_deleted) ||
- (head_deleted && remote && remote_match) ||
- (remote_deleted && head && head_match)) {
- if (index)
- return deleted_entry(index, index);
- else if (path)
- verify_absent(path, "removed");
- return 0;
- }
- /*
- * Added in both, identically.
- */
- if (no_anc_exists && head && remote && same(head, remote))
- return merged_entry(head, index);
-
- }
-
- /* Below are "no merge" cases, which require that the index be
- * up-to-date to avoid the files getting overwritten with
- * conflict resolution files.
- */
- if (index) {
- verify_uptodate(index);
- }
- else if (path)
- verify_absent(path, "overwritten");
-
- nontrivial_merge = 1;
-
- /* #2, #3, #4, #6, #7, #9, #11. */
- count = 0;
- if (!head_match || !remote_match) {
- for (i = 1; i < head_idx; i++) {
- if (stages[i]) {
- keep_entry(stages[i]);
- count++;
- break;
- }
- }
- }
-#if DBRT_DEBUG
- else {
- fprintf(stderr, "read-tree: warning #16 detected\n");
- show_stage_entry(stderr, "head ", stages[head_match]);
- show_stage_entry(stderr, "remote ", stages[remote_match]);
- }
-#endif
- if (head) { count += keep_entry(head); }
- if (remote) { count += keep_entry(remote); }
- return count;
-}
-
-/*
- * Two-way merge.
- *
- * The rule is to "carry forward" what is in the index without losing
- * information across a "fast forward", favoring a successful merge
- * over a merge failure when it makes sense. For details of the
- * "carry forward" rule, please see <Documentation/git-read-tree.txt>.
- *
- */
-static int twoway_merge(struct cache_entry **src)
-{
- struct cache_entry *current = src[0];
- struct cache_entry *oldtree = src[1], *newtree = src[2];
-
- if (merge_size != 2)
- return error("Cannot do a twoway merge of %d trees",
- merge_size);
-
- if (current) {
- if ((!oldtree && !newtree) || /* 4 and 5 */
- (!oldtree && newtree &&
- same(current, newtree)) || /* 6 and 7 */
- (oldtree && newtree &&
- same(oldtree, newtree)) || /* 14 and 15 */
- (oldtree && newtree &&
- !same(oldtree, newtree) && /* 18 and 19*/
- same(current, newtree))) {
- return keep_entry(current);
- }
- else if (oldtree && !newtree && same(current, oldtree)) {
- /* 10 or 11 */
- return deleted_entry(oldtree, current);
- }
- else if (oldtree && newtree &&
- same(current, oldtree) && !same(current, newtree)) {
- /* 20 or 21 */
- return merged_entry(newtree, current);
- }
- else {
- /* all other failures */
- if (oldtree)
- reject_merge(oldtree);
- if (current)
- reject_merge(current);
- if (newtree)
- reject_merge(newtree);
- return -1;
- }
- }
- else if (newtree)
- return merged_entry(newtree, current);
- else
- return deleted_entry(oldtree, current);
-}
-
-/*
- * Bind merge.
- *
- * Keep the index entries at stage0, collapse stage1 but make sure
- * stage0 does not have anything there.
- */
-static int bind_merge(struct cache_entry **src)
-{
- struct cache_entry *old = src[0];
- struct cache_entry *a = src[1];
-
- if (merge_size != 1)
- return error("Cannot do a bind merge of %d trees\n",
- merge_size);
- if (a && old)
- die("Entry '%s' overlaps. Cannot bind.", a->name);
- if (!a)
- return keep_entry(old);
- else
- return merged_entry(a, NULL);
-}
-
-/*
- * One-way merge.
- *
- * The rule is:
- * - take the stat information from stage0, take the data from stage1
- */
-static int oneway_merge(struct cache_entry **src)
-{
- struct cache_entry *old = src[0];
- struct cache_entry *a = src[1];
-
- if (merge_size != 1)
- return error("Cannot do a oneway merge of %d trees",
- merge_size);
-
- if (!a)
- return deleted_entry(old, old);
- if (old && same(old, a)) {
- if (reset) {
- struct stat st;
- if (lstat(old->name, &st) ||
- ce_match_stat(old, &st, 1))
- old->ce_flags |= htons(CE_UPDATE);
- }
- return keep_entry(old);
- }
- return merged_entry(a, old);
-}
-
static int read_cache_unmerged(void)
{
int i;
if (ce_stage(ce)) {
if (last && !strcmp(ce->name, last->name))
continue;
- invalidate_ce_path(ce);
+ cache_tree_invalidate_path(active_cache_tree, ce->name);
last = ce;
ce->ce_mode = 0;
ce->ce_flags &= ~htons(CE_STAGEMASK);
static struct lock_file lock_file;
-int cmd_read_tree(int argc, const char **argv, char **envp)
+int cmd_read_tree(int argc, const char **argv, const char *unused_prefix)
{
int i, newfd, stage = 0;
unsigned char sha1[20];
- merge_fn_t fn = NULL;
+ struct unpack_trees_options opts;
+
+ memset(&opts, 0, sizeof(opts));
+ opts.head_idx = -1;
setup_git_directory();
git_config(git_default_config);
- newfd = hold_lock_file_for_update(&lock_file, get_index_file());
- if (newfd < 0)
- die("unable to create new index file");
+ newfd = hold_lock_file_for_update(&lock_file, get_index_file(), 1);
git_config(git_default_config);
- merge = 0;
- reset = 0;
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
* the working tree.
*/
if (!strcmp(arg, "-u")) {
- update = 1;
+ opts.update = 1;
continue;
}
if (!strcmp(arg, "-v")) {
- verbose_update = 1;
+ opts.verbose_update = 1;
continue;
}
* not even look at the working tree.
*/
if (!strcmp(arg, "-i")) {
- index_only = 1;
+ opts.index_only = 1;
continue;
}
* given subdirectory.
*/
if (!strncmp(arg, "--prefix=", 9)) {
- if (stage || merge || prefix)
+ if (stage || opts.merge || opts.prefix)
usage(read_tree_usage);
- prefix = arg + 9;
- merge = 1;
+ opts.prefix = arg + 9;
+ opts.merge = 1;
stage = 1;
if (read_cache_unmerged())
die("you need to resolve your current index first");
* correspond to them.
*/
if (!strcmp(arg, "--reset")) {
- if (stage || merge || prefix)
+ if (stage || opts.merge || opts.prefix)
usage(read_tree_usage);
- reset = 1;
- merge = 1;
+ opts.reset = 1;
+ opts.merge = 1;
stage = 1;
read_cache_unmerged();
continue;
}
if (!strcmp(arg, "--trivial")) {
- trivial_merges_only = 1;
+ opts.trivial_merges_only = 1;
continue;
}
if (!strcmp(arg, "--aggressive")) {
- aggressive = 1;
+ opts.aggressive = 1;
continue;
}
/* "-m" stands for "merge", meaning we start in stage 1 */
if (!strcmp(arg, "-m")) {
- if (stage || merge || prefix)
+ if (stage || opts.merge || opts.prefix)
usage(read_tree_usage);
if (read_cache_unmerged())
die("you need to resolve your current index first");
stage = 1;
- merge = 1;
+ opts.merge = 1;
continue;
}
/* using -u and -i at the same time makes no sense */
- if (1 < index_only + update)
+ if (1 < opts.index_only + opts.update)
usage(read_tree_usage);
if (get_sha1(arg, sha1))
die("failed to unpack tree object %s", arg);
stage++;
}
- if ((update||index_only) && !merge)
+ if ((opts.update||opts.index_only) && !opts.merge)
usage(read_tree_usage);
- if (prefix) {
- int pfxlen = strlen(prefix);
+ if (opts.prefix) {
+ int pfxlen = strlen(opts.prefix);
int pos;
- if (prefix[pfxlen-1] != '/')
+ if (opts.prefix[pfxlen-1] != '/')
die("prefix must end with /");
if (stage != 2)
die("binding merge takes only one tree");
- pos = cache_name_pos(prefix, pfxlen);
+ pos = cache_name_pos(opts.prefix, pfxlen);
if (0 <= pos)
die("corrupt index file");
pos = -pos-1;
if (pos < active_nr &&
- !strncmp(active_cache[pos]->name, prefix, pfxlen))
- die("subdirectory '%s' already exists.", prefix);
- pos = cache_name_pos(prefix, pfxlen-1);
+ !strncmp(active_cache[pos]->name, opts.prefix, pfxlen))
+ die("subdirectory '%s' already exists.", opts.prefix);
+ pos = cache_name_pos(opts.prefix, pfxlen-1);
if (0 <= pos)
- die("file '%.*s' already exists.", pfxlen-1, prefix);
+ die("file '%.*s' already exists.",
+ pfxlen-1, opts.prefix);
}
- if (merge) {
+ if (opts.merge) {
if (stage < 2)
die("just how do you expect me to merge %d trees?", stage-1);
switch (stage - 1) {
case 1:
- fn = prefix ? bind_merge : oneway_merge;
+ opts.fn = opts.prefix ? bind_merge : oneway_merge;
break;
case 2:
- fn = twoway_merge;
+ opts.fn = twoway_merge;
break;
case 3:
default:
- fn = threeway_merge;
+ opts.fn = threeway_merge;
cache_tree_free(&active_cache_tree);
break;
}
if (stage - 1 >= 3)
- head_idx = stage - 2;
+ opts.head_idx = stage - 2;
else
- head_idx = 1;
+ opts.head_idx = 1;
}
- unpack_trees(fn);
+ unpack_trees(trees, &opts);
/*
* When reading only one tree (either the most basic form,
* valid cache-tree because the index must match exactly
* what came from the tree.
*/
- if (trees && trees->item && !prefix && (!merge || (stage == 2))) {
+ if (trees && trees->item && !opts.prefix && (!opts.merge || (stage == 2))) {
cache_tree_free(&active_cache_tree);
prime_cache_tree();
}
if (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(&lock_file))
+ close(newfd) || commit_lock_file(&lock_file))
die("unable to write new index file");
return 0;
}
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include <regex.h>
+
+static const char git_config_set_usage[] =
+"git-repo-config [ --bool | --int ] [--get | --get-all | --get-regexp | --replace-all | --unset | --unset-all] name [value [value_regex]] | --list";
+
+static char* key = NULL;
+static regex_t* key_regexp = NULL;
+static regex_t* regexp = NULL;
+static int show_keys = 0;
+static int use_key_regexp = 0;
+static int do_all = 0;
+static int do_not_match = 0;
+static int seen = 0;
+static enum { T_RAW, T_INT, T_BOOL } type = T_RAW;
+
+static int show_all_config(const char *key_, const char *value_)
+{
+ if (value_)
+ printf("%s=%s\n", key_, value_);
+ else
+ printf("%s\n", key_);
+ return 0;
+}
+
+static int show_config(const char* key_, const char* value_)
+{
+ char value[256];
+ const char *vptr = value;
+ int dup_error = 0;
+
+ if (!use_key_regexp && strcmp(key_, key))
+ return 0;
+ if (use_key_regexp && regexec(key_regexp, key_, 0, NULL, 0))
+ return 0;
+ if (regexp != NULL &&
+ (do_not_match ^
+ regexec(regexp, (value_?value_:""), 0, NULL, 0)))
+ return 0;
+
+ if (show_keys)
+ printf("%s ", key_);
+ if (seen && !do_all)
+ dup_error = 1;
+ if (type == T_INT)
+ sprintf(value, "%d", git_config_int(key_, value_?value_:""));
+ else if (type == T_BOOL)
+ vptr = git_config_bool(key_, value_) ? "true" : "false";
+ else
+ vptr = value_?value_:"";
+ seen++;
+ if (dup_error) {
+ error("More than one value for the key %s: %s",
+ key_, vptr);
+ }
+ else
+ printf("%s\n", vptr);
+
+ return 0;
+}
+
+static int get_value(const char* key_, const char* regex_)
+{
+ int ret = -1;
+ char *tl;
+ char *global = NULL, *repo_config = NULL;
+ const char *local;
+
+ local = getenv("GIT_CONFIG");
+ if (!local) {
+ const char *home = getenv("HOME");
+ local = getenv("GIT_CONFIG_LOCAL");
+ if (!local)
+ local = repo_config = strdup(git_path("config"));
+ if (home)
+ global = strdup(mkpath("%s/.gitconfig", home));
+ }
+
+ key = strdup(key_);
+ for (tl=key+strlen(key)-1; tl >= key && *tl != '.'; --tl)
+ *tl = tolower(*tl);
+ for (tl=key; *tl && *tl != '.'; ++tl)
+ *tl = tolower(*tl);
+
+ if (use_key_regexp) {
+ key_regexp = (regex_t*)malloc(sizeof(regex_t));
+ if (regcomp(key_regexp, key, REG_EXTENDED)) {
+ fprintf(stderr, "Invalid key pattern: %s\n", key_);
+ goto free_strings;
+ }
+ }
+
+ if (regex_) {
+ if (regex_[0] == '!') {
+ do_not_match = 1;
+ regex_++;
+ }
+
+ regexp = (regex_t*)malloc(sizeof(regex_t));
+ if (regcomp(regexp, regex_, REG_EXTENDED)) {
+ fprintf(stderr, "Invalid pattern: %s\n", regex_);
+ goto free_strings;
+ }
+ }
+
+ if (do_all && global)
+ git_config_from_file(show_config, global);
+ git_config_from_file(show_config, local);
+ if (!do_all && !seen && global)
+ git_config_from_file(show_config, global);
+
+ free(key);
+ if (regexp) {
+ regfree(regexp);
+ free(regexp);
+ }
+
+ if (do_all)
+ ret = !seen;
+ else
+ ret = (seen == 1) ? 0 : 1;
+
+free_strings:
+ if (repo_config)
+ free(repo_config);
+ if (global)
+ free(global);
+ return ret;
+}
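
As an illustration (not code from the patch), the two lowercasing loops in get_value() canonicalize only the first segment (the section) and the last segment (the variable name); a subsection in the middle, such as a branch name, keeps its case:

#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *canonicalize_key(const char *key_)
{
	char *key = strdup(key_);
	char *tl;

	/* lowercase the trailing variable name ... */
	for (tl = key + strlen(key) - 1; tl >= key && *tl != '.'; --tl)
		*tl = tolower(*tl);
	/* ... and the leading section name */
	for (tl = key; *tl && *tl != '.'; ++tl)
		*tl = tolower(*tl);
	return key;
}

int main(void)
{
	char *k = canonicalize_key("Branch.MyTopic.Remote");

	printf("%s\n", k);	/* prints "branch.MyTopic.remote" */
	free(k);
	return 0;
}
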
+
+int cmd_repo_config(int argc, const char **argv, const char *prefix)
+{
+ int nongit = 0;
+ setup_git_directory_gently(&nongit);
+
+ while (1 < argc) {
+ if (!strcmp(argv[1], "--int"))
+ type = T_INT;
+ else if (!strcmp(argv[1], "--bool"))
+ type = T_BOOL;
+ else if (!strcmp(argv[1], "--list") || !strcmp(argv[1], "-l"))
+ return git_config(show_all_config);
+ else
+ break;
+ argc--;
+ argv++;
+ }
+
+ switch (argc) {
+ case 2:
+ return get_value(argv[1], NULL);
+ case 3:
+ if (!strcmp(argv[1], "--unset"))
+ return git_config_set(argv[2], NULL);
+ else if (!strcmp(argv[1], "--unset-all"))
+ return git_config_set_multivar(argv[2], NULL, NULL, 1);
+ else if (!strcmp(argv[1], "--get"))
+ return get_value(argv[2], NULL);
+ else if (!strcmp(argv[1], "--get-all")) {
+ do_all = 1;
+ return get_value(argv[2], NULL);
+ } else if (!strcmp(argv[1], "--get-regexp")) {
+ show_keys = 1;
+ use_key_regexp = 1;
+ do_all = 1;
+ return get_value(argv[2], NULL);
+ } else
+
+ return git_config_set(argv[1], argv[2]);
+ case 4:
+ if (!strcmp(argv[1], "--unset"))
+ return git_config_set_multivar(argv[2], NULL, argv[3], 0);
+ else if (!strcmp(argv[1], "--unset-all"))
+ return git_config_set_multivar(argv[2], NULL, argv[3], 1);
+ else if (!strcmp(argv[1], "--get"))
+ return get_value(argv[2], argv[3]);
+ else if (!strcmp(argv[1], "--get-all")) {
+ do_all = 1;
+ return get_value(argv[2], argv[3]);
+ } else if (!strcmp(argv[1], "--get-regexp")) {
+ show_keys = 1;
+ use_key_regexp = 1;
+ do_all = 1;
+ return get_value(argv[2], argv[3]);
+ } else if (!strcmp(argv[1], "--replace-all"))
+
+ return git_config_set_multivar(argv[2], argv[3], NULL, 1);
+ else
+
+ return git_config_set_multivar(argv[1], argv[2], argv[3], 0);
+ case 5:
+ if (!strcmp(argv[1], "--replace-all"))
+ return git_config_set_multivar(argv[2], argv[3], argv[4], 1);
+ case 1:
+ default:
+ usage(git_config_set_usage);
+ }
+ return 0;
+}
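
The two loops near the top of get_value() above canonicalize the key the way the config parser expects it: the section name and the variable name are folded to lower case, while anything in between (a subsection such as a remote or branch name) keeps its case. A minimal standalone sketch of that behaviour, not part of the patch and using a helper name of our own, would be:

#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Not part of the patch; lower_key() is an illustrative name. */
static char *lower_key(const char *key_)
{
	char *key = strdup(key_);
	char *tl;

	/* fold the trailing component (the variable name) to lower case */
	for (tl = key + strlen(key) - 1; tl >= key && *tl != '.'; --tl)
		*tl = tolower(*tl);
	/* fold the leading component (the section name) to lower case */
	for (tl = key; *tl && *tl != '.'; ++tl)
		*tl = tolower(*tl);
	return key;
}

int main(void)
{
	char *k = lower_key("Remote.MyMirror.URL");
	printf("%s\n", k);	/* prints "remote.MyMirror.url" */
	free(k);
	return 0;
}
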
const char *name = pending->name;
if (obj->flags & (UNINTERESTING | SEEN))
continue;
- if (obj->type == TYPE_TAG) {
+ if (obj->type == OBJ_TAG) {
obj->flags |= SEEN;
add_object_array(obj, name, &objects);
continue;
}
- if (obj->type == TYPE_TREE) {
+ if (obj->type == OBJ_TREE) {
process_tree((struct tree *)obj, &objects, NULL, name);
continue;
}
- if (obj->type == TYPE_BLOB) {
+ if (obj->type == OBJ_BLOB) {
process_blob((struct blob *)obj, &objects, NULL, name);
continue;
}
}
}
-int cmd_rev_list(int argc, const char **argv, char **envp)
+int cmd_rev_list(int argc, const char **argv, const char *prefix)
{
struct commit_list *list;
int i;
- init_revisions(&revs);
+ init_revisions(&revs, prefix);
revs.abbrev = 0;
revs.commit_format = CMIT_FMT_UNSPECIFIED;
argc = setup_revisions(argc, argv, &revs, NULL);
return 0;
}
-int cmd_rev_parse(int argc, const char **argv, char **envp)
+static int try_difference(const char *arg)
+{
+ char *dotdot;
+ unsigned char sha1[20];
+ unsigned char end[20];
+ const char *next;
+ const char *this;
+ int symmetric;
+
+ if (!(dotdot = strstr(arg, "..")))
+ return 0;
+ next = dotdot + 2;
+ this = arg;
+ symmetric = (*next == '.');
+
+ *dotdot = 0;
+ next += symmetric;
+
+ if (!*next)
+ next = "HEAD";
+ if (dotdot == arg)
+ this = "HEAD";
+ if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
+ show_rev(NORMAL, end, next);
+ show_rev(symmetric ? NORMAL : REVERSED, sha1, this);
+ if (symmetric) {
+ struct commit_list *exclude;
+ struct commit *a, *b;
+ a = lookup_commit_reference(sha1);
+ b = lookup_commit_reference(end);
+ exclude = get_merge_bases(a, b, 1);
+ while (exclude) {
+ struct commit_list *n = exclude->next;
+ show_rev(REVERSED,
+ exclude->item->object.sha1,NULL);
+ free(exclude);
+ exclude = n;
+ }
+ }
+ return 1;
+ }
+ *dotdot = '.';
+ return 0;
+}
+
+int cmd_rev_parse(int argc, const char **argv, const char *prefix)
{
int i, as_is = 0, verify = 0;
unsigned char sha1[20];
- const char *prefix = setup_git_directory();
git_config(git_default_config);
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
- char *dotdot;
if (as_is) {
if (show_file(arg) && as_is < 2)
}
/* Not a flag argument */
- dotdot = strstr(arg, "..");
- if (dotdot) {
- unsigned char end[20];
- char *next = dotdot + 2;
- const char *this = arg;
- *dotdot = 0;
- if (!*next)
- next = "HEAD";
- if (dotdot == arg)
- this = "HEAD";
- if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
- show_rev(NORMAL, end, next);
- show_rev(REVERSED, sha1, this);
- continue;
- }
- *dotdot = '.';
- }
+ if (try_difference(arg))
+ continue;
if (!get_sha1(arg, sha1)) {
show_rev(NORMAL, sha1, arg);
continue;
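
try_difference() above is what gives git-rev-parse its range notations: 'A..B' expands to 'B ^A', and the new symmetric form 'A...B' expands to 'B A' plus one negated rev per merge base of A and B. The standalone sketch below performs only the string surgery and prints what the real code would emit; resolving names to SHA-1s and walking merge bases is left to the functions above, and the output strings are ours, for illustration:

#include <stdio.h>
#include <string.h>

/* Not part of the patch: string handling of "A..B" / "A...B" only. */
static void split_range(char *arg)
{
	char *dotdot = strstr(arg, "..");
	const char *this, *next;
	int symmetric;

	if (!dotdot)
		return;
	next = dotdot + 2;
	this = arg;
	symmetric = (*next == '.');
	*dotdot = 0;
	next += symmetric;
	if (!*next)
		next = "HEAD";
	if (dotdot == arg)
		this = "HEAD";
	if (symmetric)
		printf("%s %s ^<each merge base of %s and %s>\n",
		       next, this, this, next);
	else
		printf("%s ^%s\n", next, this);
}

int main(void)
{
	char a[] = "maint..master", b[] = "maint...master", c[] = "..master";
	split_range(a);	/* master ^maint */
	split_range(b);	/* master maint ^<each merge base of maint and master> */
	split_range(c);	/* master ^HEAD */
	return 0;
}
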
static struct lock_file lock_file;
-int cmd_rm(int argc, const char **argv, char **envp)
+int cmd_rm(int argc, const char **argv, const char *prefix)
{
int i, newfd;
int verbose = 0, show_only = 0, force = 0;
- const char *prefix = setup_git_directory();
const char **pathspec;
char *seen;
git_config(git_default_config);
- newfd = hold_lock_file_for_update(&lock_file, get_index_file());
- if (newfd < 0)
- die("unable to create new index file");
+ newfd = hold_lock_file_for_update(&lock_file, get_index_file(), 1);
if (read_cache() < 0)
die("index file corrupt");
force = 1;
continue;
}
- die(builtin_rm_usage);
+ usage(builtin_rm_usage);
}
if (argc <= i)
usage(builtin_rm_usage);
seen = NULL;
for (i = 0; pathspec[i] ; i++)
/* nothing */;
- seen = xmalloc(i);
- memset(seen, 0, i);
+ seen = xcalloc(i, 1);
for (i = 0; i < active_nr; i++) {
struct cache_entry *ce = active_cache[i];
printf("rm '%s'\n", path);
if (remove_file_from_cache(path))
- die("git rm: unable to remove %s", path);
+ die("git-rm: unable to remove %s", path);
cache_tree_invalidate_path(active_cache_tree, path);
}
* workspace. If we fail to remove the first one, we
* abort the "git rm" (but once we've successfully removed
* any file at all, we'll go ahead and commit to it all:
- * by then we've already committed ourself and can't fail
+ * by then we've already committed ourselves and can't fail
* in the middle)
*/
if (force) {
continue;
}
if (!removed)
- die("git rm: %s: %s", path, strerror(errno));
+ die("git-rm: %s: %s", path, strerror(errno));
}
}
if (active_cache_changed) {
if (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(&lock_file))
+ close(newfd) || commit_lock_file(&lock_file))
die("Unable to write new index file");
}
#include "builtin.h"
static const char show_branch_usage[] =
-"git-show-branch [--dense] [--current] [--all] [--heads] [--tags] [--topo-order] [--more=count | --list | --independent | --merge-base ] [--topics] [<refs>...]";
+"git-show-branch [--sparse] [--current] [--all] [--heads] [--tags] [--topo-order] [--more=count | --list | --independent | --merge-base ] [--topics] [<refs>...]";
static int default_num = 0;
static int default_alloc = 0;
name_parent(c, p);
i++;
}
+ else
+ break;
c = p;
}
return i;
static int mark_seen(struct commit *commit, struct commit_list **seen_p)
{
if (!commit->object.flags) {
- insert_by_date(commit, seen_p);
+ commit_list_insert(commit, seen_p);
return 1;
}
return 0;
* Postprocess to complete well-poisoning.
*
* At this point we have all the commits we have seen in
- * seen_p list (which happens to be sorted chronologically but
- * it does not really matter). Mark anything that can be
- * reached from uninteresting commits not interesting.
+ * seen_p list. Mark anything that can be reached from
+ * uninteresting commits not interesting.
*/
for (;;) {
int changed = 0;
return 0;
}
-int cmd_show_branch(int ac, const char **av, char **envp)
+int cmd_show_branch(int ac, const char **av, const char *prefix)
{
struct commit *rev[MAX_REVS], *commit;
struct commit_list *list = NULL, *seen = NULL;
int topics = 0;
int dense = 1;
- setup_git_directory();
git_config(git_show_branch_config);
/* If nothing is specified, try the default first */
if (0 <= extra)
join_revs(&list, &seen, num_rev, extra);
+ sort_by_date(&seen);
+
if (merge_base)
return show_merge_base(seen, num_rev);
fputc('\n', out);
}
-int cmd_stripspace(int argc, const char **argv, char **envp)
+int cmd_stripspace(int argc, const char **argv, const char *prefix)
{
stripspace(stdin, stdout);
return 0;
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+
+static const char git_symbolic_ref_usage[] =
+"git-symbolic-ref name [ref]";
+
+static void check_symref(const char *HEAD)
+{
+ unsigned char sha1[20];
+ const char *git_HEAD = strdup(git_path("%s", HEAD));
+ const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 0);
+ if (git_refs_heads_master) {
+ /* we want to strip the .git/ part */
+ int pfxlen = strlen(git_HEAD) - strlen(HEAD);
+ puts(git_refs_heads_master + pfxlen);
+ }
+ else
+ die("No such ref: %s", HEAD);
+}
+
+int cmd_symbolic_ref(int argc, const char **argv, const char *prefix)
+{
+ git_config(git_default_config);
+ switch (argc) {
+ case 2:
+ check_symref(argv[1]);
+ break;
+ case 3:
+ create_symref(strdup(git_path("%s", argv[1])), argv[2]);
+ break;
+ default:
+ usage(git_symbolic_ref_usage);
+ }
+ return 0;
+}
static unsigned long offset;
static time_t archive_time;
+static int tar_umask;
/* tries hard to write, either succeeds or dies in the attempt */
static void reliable_write(const void *data, unsigned long size)
} else {
if (S_ISDIR(mode)) {
*header.typeflag = TYPEFLAG_DIR;
- mode |= 0777;
+ mode = (mode | 0777) & ~tar_umask;
} else if (S_ISLNK(mode)) {
*header.typeflag = TYPEFLAG_LNK;
mode |= 0777;
} else if (S_ISREG(mode)) {
*header.typeflag = TYPEFLAG_REG;
- mode |= (mode & 0100) ? 0777 : 0666;
+ mode = (mode | ((mode & 0100) ? 0777 : 0666)) & ~tar_umask;
} else {
error("unsupported file mode: 0%o (SHA1: %s)",
mode, sha1_to_hex(sha1));
}
}
-static int generate_tar(int argc, const char **argv, char** envp)
+int git_tar_config(const char *var, const char *value)
+{
+ if (!strcmp(var, "tar.umask")) {
+ if (!strcmp(value, "user")) {
+ tar_umask = umask(0);
+ umask(tar_umask);
+ } else {
+ tar_umask = git_config_int(var, value);
+ }
+ return 0;
+ }
+ return git_default_config(var, value);
+}
+
+static int generate_tar(int argc, const char **argv, const char *prefix)
{
unsigned char sha1[20], tree_sha1[20];
struct commit *commit;
struct tree_desc tree;
struct strbuf current_path;
+ void *buffer;
current_path.buf = xmalloc(PATH_MAX);
current_path.alloc = PATH_MAX;
current_path.len = current_path.eof = 0;
- setup_git_directory();
- git_config(git_default_config);
+ git_config(git_tar_config);
switch (argc) {
case 3:
} else
archive_time = time(NULL);
- tree.buf = read_object_with_reference(sha1, tree_type, &tree.size,
- tree_sha1);
+ tree.buf = buffer = read_object_with_reference(sha1, tree_type,
+ &tree.size, tree_sha1);
if (!tree.buf)
die("not a reference to a tag, commit or tree object: %s",
sha1_to_hex(sha1));
write_entry(tree_sha1, &current_path, 040777, NULL, 0);
traverse_tree(&tree, &current_path);
write_trailer();
+ free(buffer);
free(current_path.buf);
return 0;
}
return !!ret;
}
-int cmd_tar_tree(int argc, const char **argv, char **envp)
+int cmd_tar_tree(int argc, const char **argv, const char *prefix)
{
if (argc < 2)
usage(tar_tree_usage);
if (!strncmp("--remote=", argv[1], 9))
return remote_tar(argc, argv);
- return generate_tar(argc, argv, envp);
+ return generate_tar(argc, argv, prefix);
}
/* ustar header + extended global header content */
#define HEADERSIZE (2 * RECORDSIZE)
-int cmd_get_tar_commit_id(int argc, const char **argv, char **envp)
+int cmd_get_tar_commit_id(int argc, const char **argv, const char *prefix)
{
char buffer[HEADERSIZE];
struct ustar_header *header = (struct ustar_header *)buffer;
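
With the tar.umask support above, entry modes are widened to 0777 for directories and executables (0666 for plain files) and then masked with the configured umask; the value 'user' reuses the process umask obtained via umask(0). Below is a standalone sketch, assuming a umask of 022, of the resulting permission bits; the helper name is ours:

#include <stdio.h>

/* Not part of the patch: mirrors the mode normalization shown above. */
static unsigned int tar_mode(unsigned int mode, unsigned int tar_umask, int is_dir)
{
	if (is_dir)
		return (mode | 0777) & ~tar_umask;
	return (mode | ((mode & 0100) ? 0777 : 0666)) & ~tar_umask;
}

int main(void)
{
	printf("%04o\n", tar_mode(040755, 022, 1) & 07777);	/* 0755 directory */
	printf("%04o\n", tar_mode(0100644, 022, 0) & 07777);	/* 0644 plain file */
	printf("%04o\n", tar_mode(0100755, 022, 0) & 07777);	/* 0755 executable */
	return 0;
}
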
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include "object.h"
+#include "delta.h"
+#include "pack.h"
+#include "blob.h"
+#include "commit.h"
+#include "tag.h"
+#include "tree.h"
+
+#include <sys/time.h>
+
+static int dry_run, quiet;
+static const char unpack_usage[] = "git-unpack-objects [-n] [-q] < pack-file";
+
+/* We always read in 4kB chunks. */
+static unsigned char buffer[4096];
+static unsigned long offset, len, eof;
+static SHA_CTX ctx;
+
+/*
+ * Make sure at least "min" bytes are available in the buffer, and
+ * return the pointer to the buffer.
+ */
+static void * fill(int min)
+{
+ if (min <= len)
+ return buffer + offset;
+ if (eof)
+ die("unable to fill input");
+ if (min > sizeof(buffer))
+ die("cannot fill %d bytes", min);
+ if (offset) {
+ SHA1_Update(&ctx, buffer, offset);
+ memcpy(buffer, buffer + offset, len);
+ offset = 0;
+ }
+ do {
+ int ret = xread(0, buffer + len, sizeof(buffer) - len);
+ if (ret <= 0) {
+ if (!ret)
+ die("early EOF");
+ die("read error on input: %s", strerror(errno));
+ }
+ len += ret;
+ } while (len < min);
+ return buffer;
+}
+
+static void use(int bytes)
+{
+ if (bytes > len)
+ die("used more bytes than were available");
+ len -= bytes;
+ offset += bytes;
+}
+
+static void *get_data(unsigned long size)
+{
+ z_stream stream;
+ void *buf = xmalloc(size);
+
+ memset(&stream, 0, sizeof(stream));
+
+ stream.next_out = buf;
+ stream.avail_out = size;
+ stream.next_in = fill(1);
+ stream.avail_in = len;
+ inflateInit(&stream);
+
+ for (;;) {
+ int ret = inflate(&stream, 0);
+ use(len - stream.avail_in);
+ if (stream.total_out == size && ret == Z_STREAM_END)
+ break;
+ if (ret != Z_OK)
+ die("inflate returned %d\n", ret);
+ stream.next_in = fill(1);
+ stream.avail_in = len;
+ }
+ inflateEnd(&stream);
+ return buf;
+}
+
+struct delta_info {
+ unsigned char base_sha1[20];
+ unsigned long size;
+ void *delta;
+ struct delta_info *next;
+};
+
+static struct delta_info *delta_list;
+
+static void add_delta_to_list(unsigned char *base_sha1, void *delta, unsigned long size)
+{
+ struct delta_info *info = xmalloc(sizeof(*info));
+
+ memcpy(info->base_sha1, base_sha1, 20);
+ info->size = size;
+ info->delta = delta;
+ info->next = delta_list;
+ delta_list = info;
+}
+
+static void added_object(unsigned char *sha1, const char *type, void *data, unsigned long size);
+
+static void write_object(void *buf, unsigned long size, const char *type)
+{
+ unsigned char sha1[20];
+ if (write_sha1_file(buf, size, type, sha1) < 0)
+ die("failed to write object");
+ added_object(sha1, type, buf, size);
+}
+
+static int resolve_delta(const char *type,
+ void *base, unsigned long base_size,
+ void *delta, unsigned long delta_size)
+{
+ void *result;
+ unsigned long result_size;
+
+ result = patch_delta(base, base_size,
+ delta, delta_size,
+ &result_size);
+ if (!result)
+ die("failed to apply delta");
+ free(delta);
+ write_object(result, result_size, type);
+ free(result);
+ return 0;
+}
+
+static void added_object(unsigned char *sha1, const char *type, void *data, unsigned long size)
+{
+ struct delta_info **p = &delta_list;
+ struct delta_info *info;
+
+ while ((info = *p) != NULL) {
+ if (!memcmp(info->base_sha1, sha1, 20)) {
+ *p = info->next;
+ p = &delta_list;
+ resolve_delta(type, data, size, info->delta, info->size);
+ free(info);
+ continue;
+ }
+ p = &info->next;
+ }
+}
+
+static int unpack_non_delta_entry(enum object_type kind, unsigned long size)
+{
+ void *buf = get_data(size);
+ const char *type;
+
+ switch (kind) {
+ case OBJ_COMMIT: type = commit_type; break;
+ case OBJ_TREE: type = tree_type; break;
+ case OBJ_BLOB: type = blob_type; break;
+ case OBJ_TAG: type = tag_type; break;
+ default: die("bad type %d", kind);
+ }
+ if (!dry_run)
+ write_object(buf, size, type);
+ free(buf);
+ return 0;
+}
+
+static int unpack_delta_entry(unsigned long delta_size)
+{
+ void *delta_data, *base;
+ unsigned long base_size;
+ char type[20];
+ unsigned char base_sha1[20];
+ int result;
+
+ memcpy(base_sha1, fill(20), 20);
+ use(20);
+
+ delta_data = get_data(delta_size);
+ if (dry_run) {
+ free(delta_data);
+ return 0;
+ }
+
+ if (!has_sha1_file(base_sha1)) {
+ add_delta_to_list(base_sha1, delta_data, delta_size);
+ return 0;
+ }
+ base = read_sha1_file(base_sha1, type, &base_size);
+ if (!base)
+ die("failed to read delta-pack base object %s", sha1_to_hex(base_sha1));
+ result = resolve_delta(type, base, base_size, delta_data, delta_size);
+ free(base);
+ return result;
+}
+
+static void unpack_one(unsigned nr, unsigned total)
+{
+ unsigned shift;
+ unsigned char *pack, c;
+ unsigned long size;
+ enum object_type type;
+
+ pack = fill(1);
+ c = *pack;
+ use(1);
+ type = (c >> 4) & 7;
+ size = (c & 15);
+ shift = 4;
+ while (c & 0x80) {
+ pack = fill(1);
+ c = *pack++;
+ use(1);
+ size += (c & 0x7f) << shift;
+ shift += 7;
+ }
+ if (!quiet) {
+ static unsigned long last_sec;
+ static unsigned last_percent;
+ struct timeval now;
+ unsigned percentage = (nr * 100) / total;
+
+ gettimeofday(&now, NULL);
+ if (percentage != last_percent || now.tv_sec != last_sec) {
+ last_sec = now.tv_sec;
+ last_percent = percentage;
+ fprintf(stderr, "%4u%% (%u/%u) done\r", percentage, nr, total);
+ }
+ }
+ switch (type) {
+ case OBJ_COMMIT:
+ case OBJ_TREE:
+ case OBJ_BLOB:
+ case OBJ_TAG:
+ unpack_non_delta_entry(type, size);
+ return;
+ case OBJ_DELTA:
+ unpack_delta_entry(size);
+ return;
+ default:
+ die("bad object type %d", type);
+ }
+}
+
+static void unpack_all(void)
+{
+ int i;
+ struct pack_header *hdr = fill(sizeof(struct pack_header));
+ unsigned nr_objects = ntohl(hdr->hdr_entries);
+
+ if (ntohl(hdr->hdr_signature) != PACK_SIGNATURE)
+ die("bad pack file");
+ if (!pack_version_ok(hdr->hdr_version))
+ die("unknown pack file version %d", ntohl(hdr->hdr_version));
+ fprintf(stderr, "Unpacking %d objects\n", nr_objects);
+
+ use(sizeof(struct pack_header));
+ for (i = 0; i < nr_objects; i++)
+ unpack_one(i+1, nr_objects);
+ if (delta_list)
+ die("unresolved deltas left after unpacking");
+}
+
+int cmd_unpack_objects(int argc, const char **argv, const char *prefix)
+{
+ int i;
+ unsigned char sha1[20];
+
+ git_config(git_default_config);
+
+ quiet = !isatty(2);
+
+ for (i = 1 ; i < argc; i++) {
+ const char *arg = argv[i];
+
+ if (*arg == '-') {
+ if (!strcmp(arg, "-n")) {
+ dry_run = 1;
+ continue;
+ }
+ if (!strcmp(arg, "-q")) {
+ quiet = 1;
+ continue;
+ }
+ usage(unpack_usage);
+ }
+
+ /* We don't take any non-flag arguments now.. Maybe some day */
+ usage(unpack_usage);
+ }
+ SHA1_Init(&ctx);
+ unpack_all();
+ SHA1_Update(&ctx, buffer, offset);
+ SHA1_Final(sha1, &ctx);
+ if (memcmp(fill(20), sha1, 20))
+ die("final sha1 did not match");
+ use(20);
+
+ /* Write the last part of the buffer to stdout */
+ while (len) {
+ int ret = xwrite(1, buffer + offset, len);
+ if (ret <= 0)
+ break;
+ len -= ret;
+ offset += ret;
+ }
+
+ /* All done */
+ if (!quiet)
+ fprintf(stderr, "\n");
+ return 0;
+}
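
unpack_one() above decodes the per-entry header of the pack stream: bits 4-6 of the first byte hold the object type, its low four bits hold the low bits of the size, and while the high bit is set another byte follows, contributing seven more size bits. A standalone sketch, not part of the patch, decoding a made-up two-byte header:

#include <stdio.h>

int main(void)
{
	unsigned char pack[] = { 0x95, 0x0a };	/* hypothetical header bytes */
	unsigned shift, type;
	unsigned long size;
	int i = 0;
	unsigned char c = pack[i++];

	type = (c >> 4) & 7;	/* (0x95 >> 4) & 7 = 1, i.e. a commit */
	size = c & 15;		/* low four bits: 0x5 */
	shift = 4;
	while (c & 0x80) {	/* high bit set: another size byte follows */
		c = pack[i++];
		size += (c & 0x7f) << shift;	/* 0x0a << 4 = 160 */
		shift += 7;
	}
	printf("type %u, size %lu\n", type, size);	/* type 1, size 165 */
	return 0;
}
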
return 0;
}
-int cmd_update_index(int argc, const char **argv, char **envp)
+int cmd_update_index(int argc, const char **argv, const char *prefix)
{
int i, newfd, entries, has_errors = 0, line_termination = '\n';
int allow_options = 1;
int read_from_stdin = 0;
- const char *prefix = setup_git_directory();
int prefix_length = prefix ? strlen(prefix) : 0;
char set_executable_bit = 0;
unsigned int refresh_flags = 0;
/* We can't free this memory, it becomes part of a linked list parsed atexit() */
lock_file = xcalloc(1, sizeof(struct lock_file));
- newfd = hold_lock_file_for_update(lock_file, get_index_file());
- if (newfd < 0)
- die("unable to create new cachefile");
+ newfd = hold_lock_file_for_update(lock_file, get_index_file(), 1);
entries = read_cache();
if (entries < 0)
finish:
if (active_cache_changed) {
if (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(lock_file))
+ close(newfd) || commit_lock_file(lock_file))
die("Unable to write new index file");
}
static const char git_update_ref_usage[] =
"git-update-ref <refname> <value> [<oldval>] [-m <reason>]";
-int cmd_update_ref(int argc, const char **argv, char **envp)
+int cmd_update_ref(int argc, const char **argv, const char *prefix)
{
const char *refname=NULL, *value=NULL, *oldval=NULL, *msg=NULL;
struct ref_lock *lock;
unsigned char sha1[20], oldsha1[20];
int i;
- setup_git_directory();
+ setup_ident();
git_config(git_default_config);
for (i = 1; i < argc; i++) {
return 1;
}
-int cmd_upload_tar(int argc, const char **argv, char **envp)
+int cmd_upload_tar(int argc, const char **argv, const char *prefix)
{
int len;
const char *dir = argv[1];
--- /dev/null
+#include "builtin.h"
+#include "cache.h"
+#include "pack.h"
+
+static int verify_one_pack(const char *path, int verbose)
+{
+ char arg[PATH_MAX];
+ int len;
+ struct packed_git *pack;
+ int err;
+
+ len = strlcpy(arg, path, PATH_MAX);
+ if (len >= PATH_MAX)
+ return error("name too long: %s", path);
+
+ /*
+ * In addition to "foo.idx" we accept "foo.pack" and "foo";
+ * normalize these forms to "foo.idx" for add_packed_git().
+ */
+ if (has_extension(arg, ".pack")) {
+ strcpy(arg + len - 5, ".idx");
+ len--;
+ } else if (!has_extension(arg, ".idx")) {
+ if (len + 4 >= PATH_MAX)
+ return error("name too long: %s.idx", arg);
+ strcpy(arg + len, ".idx");
+ len += 4;
+ }
+
+ /*
+ * add_packed_git() uses our buffer (containing "foo.idx") to
+ * build the pack filename ("foo.pack"). Make sure it fits.
+ */
+ if (len + 1 >= PATH_MAX) {
+ arg[len - 4] = '\0';
+ return error("name too long: %s.pack", arg);
+ }
+
+ pack = add_packed_git(arg, len, 1);
+ if (!pack)
+ return error("packfile %s not found.", arg);
+
+ err = verify_pack(pack, verbose);
+ free(pack);
+
+ return err;
+}
+
+static const char verify_pack_usage[] = "git-verify-pack [-v] <pack>...";
+
+int cmd_verify_pack(int argc, const char **argv, const char *prefix)
+{
+ int err = 0;
+ int verbose = 0;
+ int no_more_options = 0;
+ int nothing_done = 1;
+
+ while (1 < argc) {
+ if (!no_more_options && argv[1][0] == '-') {
+ if (!strcmp("-v", argv[1]))
+ verbose = 1;
+ else if (!strcmp("--", argv[1]))
+ no_more_options = 1;
+ else
+ usage(verify_pack_usage);
+ }
+ else {
+ if (verify_one_pack(argv[1], verbose))
+ err = 1;
+ nothing_done = 0;
+ }
+ argc--; argv++;
+ }
+
+ if (nothing_done)
+ usage(verify_pack_usage);
+
+ return err;
+}
/* We can't free this memory, it becomes part of a linked list parsed atexit() */
struct lock_file *lock_file = xcalloc(1, sizeof(struct lock_file));
- newfd = hold_lock_file_for_update(lock_file, get_index_file());
+ newfd = hold_lock_file_for_update(lock_file, get_index_file(), 0);
entries = read_cache();
if (entries < 0)
missing_ok, 0) < 0)
die("git-write-tree: error building trees");
if (0 <= newfd) {
- if (!write_cache(newfd, active_cache, active_nr))
+ if (!write_cache(newfd, active_cache, active_nr)
+ && !close(newfd))
commit_lock_file(lock_file);
}
/* Not being able to write is fine -- we are only interested
return 0;
}
-int cmd_write_tree(int argc, const char **argv, char **envp)
+int cmd_write_tree(int argc, const char **argv, const char *unused_prefix)
{
int missing_ok = 0, ret;
const char *prefix = NULL;
unsigned char sha1[20];
- setup_git_directory();
-
while (1 < argc) {
const char *arg = argv[1];
if (!strcmp(arg, "--missing-ok"))
else if (!strncmp(arg, "--prefix=", 9))
prefix = arg + 9;
else
- die(write_tree_usage);
+ usage(write_tree_usage);
argc--; argv++;
}
#define BUILTIN_H
#include <stdio.h>
-
-#ifndef PATH_MAX
-# define PATH_MAX 4096
-#endif
+#include <limits.h>
extern const char git_version_string[];
+extern const char git_usage_string[];
-void cmd_usage(int show_all, const char *exec_path, const char *fmt, ...)
-#ifdef __GNUC__
- __attribute__((__format__(__printf__, 3, 4), __noreturn__))
-#endif
- ;
-
-extern int cmd_help(int argc, const char **argv, char **envp);
-extern int cmd_version(int argc, const char **argv, char **envp);
-
-extern int cmd_whatchanged(int argc, const char **argv, char **envp);
-extern int cmd_show(int argc, const char **argv, char **envp);
-extern int cmd_log(int argc, const char **argv, char **envp);
-extern int cmd_diff(int argc, const char **argv, char **envp);
-extern int cmd_format_patch(int argc, const char **argv, char **envp);
-extern int cmd_count_objects(int argc, const char **argv, char **envp);
-
-extern int cmd_push(int argc, const char **argv, char **envp);
-extern int cmd_grep(int argc, const char **argv, char **envp);
-extern int cmd_rm(int argc, const char **argv, char **envp);
-extern int cmd_add(int argc, const char **argv, char **envp);
-extern int cmd_rev_list(int argc, const char **argv, char **envp);
-extern int cmd_check_ref_format(int argc, const char **argv, char **envp);
-extern int cmd_init_db(int argc, const char **argv, char **envp);
-extern int cmd_tar_tree(int argc, const char **argv, char **envp);
-extern int cmd_upload_tar(int argc, const char **argv, char **envp);
-extern int cmd_get_tar_commit_id(int argc, const char **argv, char **envp);
-extern int cmd_ls_files(int argc, const char **argv, char **envp);
-extern int cmd_ls_tree(int argc, const char **argv, char **envp);
-extern int cmd_read_tree(int argc, const char **argv, char **envp);
-extern int cmd_commit_tree(int argc, const char **argv, char **envp);
-extern int cmd_apply(int argc, const char **argv, char **envp);
-extern int cmd_show_branch(int argc, const char **argv, char **envp);
-extern int cmd_diff_files(int argc, const char **argv, char **envp);
-extern int cmd_diff_index(int argc, const char **argv, char **envp);
-extern int cmd_diff_stages(int argc, const char **argv, char **envp);
-extern int cmd_diff_tree(int argc, const char **argv, char **envp);
-extern int cmd_cat_file(int argc, const char **argv, char **envp);
-extern int cmd_rev_parse(int argc, const char **argv, char **envp);
-extern int cmd_update_index(int argc, const char **argv, char **envp);
-extern int cmd_update_ref(int argc, const char **argv, char **envp);
-
-extern int cmd_write_tree(int argc, const char **argv, char **envp);
-extern int write_tree(unsigned char *sha1, int missing_ok, const char *prefix);
-
-extern int cmd_mailsplit(int argc, const char **argv, char **envp);
+extern void help_unknown_cmd(const char *cmd);
+extern int mailinfo(FILE *in, FILE *out, int ks, const char *encoding, const char *msg, const char *patch);
extern int split_mbox(const char **mbox, const char *dir, int allow_bare, int nr_prec, int skip);
+extern void stripspace(FILE *in, FILE *out);
+extern int write_tree(unsigned char *sha1, int missing_ok, const char *prefix);
-extern int cmd_mailinfo(int argc, const char **argv, char **envp);
-extern int mailinfo(FILE *in, FILE *out, int ks, const char *encoding, const char *msg, const char *patch);
+extern int cmd_add(int argc, const char **argv, const char *prefix);
+extern int cmd_apply(int argc, const char **argv, const char *prefix);
+extern int cmd_cat_file(int argc, const char **argv, const char *prefix);
+extern int cmd_checkout_index(int argc, const char **argv, const char *prefix);
+extern int cmd_check_ref_format(int argc, const char **argv, const char *prefix);
+extern int cmd_commit_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_count_objects(int argc, const char **argv, const char *prefix);
+extern int cmd_diff_files(int argc, const char **argv, const char *prefix);
+extern int cmd_diff_index(int argc, const char **argv, const char *prefix);
+extern int cmd_diff(int argc, const char **argv, const char *prefix);
+extern int cmd_diff_stages(int argc, const char **argv, const char *prefix);
+extern int cmd_diff_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix);
+extern int cmd_format_patch(int argc, const char **argv, const char *prefix);
+extern int cmd_get_tar_commit_id(int argc, const char **argv, const char *prefix);
+extern int cmd_grep(int argc, const char **argv, const char *prefix);
+extern int cmd_help(int argc, const char **argv, const char *prefix);
+extern int cmd_init_db(int argc, const char **argv, const char *prefix);
+extern int cmd_log(int argc, const char **argv, const char *prefix);
+extern int cmd_ls_files(int argc, const char **argv, const char *prefix);
+extern int cmd_ls_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_mailinfo(int argc, const char **argv, const char *prefix);
+extern int cmd_mailsplit(int argc, const char **argv, const char *prefix);
+extern int cmd_mv(int argc, const char **argv, const char *prefix);
+extern int cmd_name_rev(int argc, const char **argv, const char *prefix);
+extern int cmd_pack_objects(int argc, const char **argv, const char *prefix);
+extern int cmd_prune(int argc, const char **argv, const char *prefix);
+extern int cmd_prune_packed(int argc, const char **argv, const char *prefix);
+extern int cmd_push(int argc, const char **argv, const char *prefix);
+extern int cmd_read_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_repo_config(int argc, const char **argv, const char *prefix);
+extern int cmd_rev_list(int argc, const char **argv, const char *prefix);
+extern int cmd_rev_parse(int argc, const char **argv, const char *prefix);
+extern int cmd_rm(int argc, const char **argv, const char *prefix);
+extern int cmd_show_branch(int argc, const char **argv, const char *prefix);
+extern int cmd_show(int argc, const char **argv, const char *prefix);
+extern int cmd_stripspace(int argc, const char **argv, const char *prefix);
+extern int cmd_symbolic_ref(int argc, const char **argv, const char *prefix);
+extern int cmd_tar_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_unpack_objects(int argc, const char **argv, const char *prefix);
+extern int cmd_update_index(int argc, const char **argv, const char *prefix);
+extern int cmd_update_ref(int argc, const char **argv, const char *prefix);
+extern int cmd_upload_tar(int argc, const char **argv, const char *prefix);
+extern int cmd_version(int argc, const char **argv, const char *prefix);
+extern int cmd_whatchanged(int argc, const char **argv, const char *prefix);
+extern int cmd_write_tree(int argc, const char **argv, const char *prefix);
+extern int cmd_verify_pack(int argc, const char **argv, const char *prefix);
-extern int cmd_stripspace(int argc, const char **argv, char **envp);
-extern void stripspace(FILE *in, FILE *out);
#endif
extern struct cache_entry **active_cache;
extern unsigned int active_nr, active_alloc, active_cache_changed;
extern struct cache_tree *active_cache_tree;
+extern int cache_errno;
#define GIT_DIR_ENVIRONMENT "GIT_DIR"
#define DEFAULT_GIT_DIR_ENVIRONMENT ".git"
/* Initialize and use the cache information */
extern int read_cache(void);
+extern int read_cache_from(const char *path);
extern int write_cache(int newfd, struct cache_entry **cache, int entries);
extern int verify_path(const char *path);
extern int cache_name_pos(const char *name, int namelen);
#define ADD_CACHE_OK_TO_REPLACE 2 /* Ok to replace file/directory */
#define ADD_CACHE_SKIP_DFCHECK 4 /* Ok to skip DF conflict checks */
extern int add_cache_entry(struct cache_entry *ce, int option);
+extern struct cache_entry *refresh_cache_entry(struct cache_entry *ce, int really);
extern int remove_cache_entry_at(int pos);
extern int remove_file_from_cache(const char *path);
+extern int add_file_to_index(const char *path, int verbose);
extern int ce_same_name(struct cache_entry *a, struct cache_entry *b);
extern int ce_match_stat(struct cache_entry *ce, struct stat *st, int);
extern int ce_modified(struct cache_entry *ce, struct stat *st, int);
struct lock_file *next;
char filename[PATH_MAX];
};
-extern int hold_lock_file_for_update(struct lock_file *, const char *path);
+extern int hold_lock_file_for_update(struct lock_file *, const char *path, int);
extern int commit_lock_file(struct lock_file *);
extern void rollback_lock_file(struct lock_file *);
/* Environment bits from configuration mechanism */
+extern int use_legacy_headers;
extern int trust_executable_bit;
extern int assume_unchanged;
extern int prefer_symlink_refs;
extern int warn_ambiguous_refs;
extern int shared_repository;
extern const char *apply_default_whitespace;
+extern int zlib_compression_level;
#define GIT_REPO_VERSION 0
extern int repository_format_version;
char *enter_repo(char *path, int strict);
/* Read and unpack a sha1 file into memory, write memory to a sha1 file */
-extern int unpack_sha1_header(z_stream *stream, void *map, unsigned long mapsize, void *buffer, unsigned long size);
-extern int parse_sha1_header(char *hdr, char *type, unsigned long *sizep);
extern int sha1_object_info(const unsigned char *, char *, unsigned long *);
extern void * unpack_sha1_file(void *map, unsigned long mapsize, char *type, unsigned long *size);
extern void * read_sha1_file(const unsigned char *sha1, char *type, unsigned long *size);
extern int has_sha1_pack(const unsigned char *sha1);
extern int has_sha1_file(const unsigned char *sha1);
+extern void *map_sha1_file(const unsigned char *sha1, unsigned long *);
+extern int legacy_loose_object(unsigned char *);
extern int has_pack_file(const unsigned char *sha1);
extern int has_pack_index(const unsigned char *sha1);
char name[FLEX_ARRAY]; /* more */
};
+#define REF_NORMAL (1u << 0)
+#define REF_HEADS (1u << 1)
+#define REF_TAGS (1u << 2)
+
extern int git_connect(int fd[2], char *url, const char *prog);
extern int finish_connect(pid_t pid);
extern int path_match(const char *path, int nr, char **match);
extern int match_refs(struct ref *src, struct ref *dst, struct ref ***dst_tail,
int nr_refspec, char **refspec, int all);
extern int get_ack(int fd, unsigned char *result_sha1);
-extern struct ref **get_remote_heads(int in, struct ref **list, int nr_match, char **match, int ignore_funny);
+extern struct ref **get_remote_heads(int in, struct ref **list, int nr_match, char **match, unsigned int flags);
extern int server_supports(const char *feature);
extern struct packed_git *parse_pack_index(unsigned char *sha1);
/* pager.c */
extern void setup_pager(void);
+extern int pager_in_use;
+extern int pager_use_color;
/* base85 */
int decode_85(char *dst, char *line, int linelen);
+++ /dev/null
-/*
- * Check-out files from the "current cache directory"
- *
- * Copyright (C) 2005 Linus Torvalds
- *
- * Careful: order of argument flags does matter. For example,
- *
- * git-checkout-index -a -f file.c
- *
- * Will first check out all files listed in the cache (but not
- * overwrite any old ones), and then force-checkout "file.c" a
- * second time (ie that one _will_ overwrite any old contents
- * with the same filename).
- *
- * Also, just doing "git-checkout-index" does nothing. You probably
- * meant "git-checkout-index -a". And if you want to force it, you
- * want "git-checkout-index -f -a".
- *
- * Intuitiveness is not the goal here. Repeatability is. The
- * reason for the "no arguments means no work" thing is that
- * from scripts you are supposed to be able to do things like
- *
- * find . -name '*.h' -print0 | xargs -0 git-checkout-index -f --
- *
- * or:
- *
- * find . -name '*.h' -print0 | git-checkout-index -f -z --stdin
- *
- * which will force all existing *.h files to be replaced with
- * their cached copies. If an empty command line implied "all",
- * then this would force-refresh everything in the cache, which
- * was not the point.
- *
- * Oh, and the "--" is just a good idea when you know the rest
- * will be filenames. Just so that you wouldn't have a filename
- * of "-a" causing problems (not possible in the above example,
- * but get used to it in scripting!).
- */
-#include "cache.h"
-#include "strbuf.h"
-#include "quote.h"
-#include "cache-tree.h"
-
-#define CHECKOUT_ALL 4
-static const char *prefix;
-static int prefix_length;
-static int line_termination = '\n';
-static int checkout_stage; /* default to checkout stage0 */
-static int to_tempfile;
-static char topath[4][MAXPATHLEN+1];
-
-static struct checkout state = {
- .base_dir = "",
- .base_dir_len = 0,
- .force = 0,
- .quiet = 0,
- .not_new = 0,
- .refresh_cache = 0,
-};
-
-static void write_tempfile_record (const char *name)
-{
- int i;
-
- if (CHECKOUT_ALL == checkout_stage) {
- for (i = 1; i < 4; i++) {
- if (i > 1)
- putchar(' ');
- if (topath[i][0])
- fputs(topath[i], stdout);
- else
- putchar('.');
- }
- } else
- fputs(topath[checkout_stage], stdout);
-
- putchar('\t');
- write_name_quoted("", 0, name + prefix_length,
- line_termination, stdout);
- putchar(line_termination);
-
- for (i = 0; i < 4; i++) {
- topath[i][0] = 0;
- }
-}
-
-static int checkout_file(const char *name)
-{
- int namelen = strlen(name);
- int pos = cache_name_pos(name, namelen);
- int has_same_name = 0;
- int did_checkout = 0;
- int errs = 0;
-
- if (pos < 0)
- pos = -pos - 1;
-
- while (pos < active_nr) {
- struct cache_entry *ce = active_cache[pos];
- if (ce_namelen(ce) != namelen ||
- memcmp(ce->name, name, namelen))
- break;
- has_same_name = 1;
- pos++;
- if (ce_stage(ce) != checkout_stage
- && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
- continue;
- did_checkout = 1;
- if (checkout_entry(ce, &state,
- to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
- errs++;
- }
-
- if (did_checkout) {
- if (to_tempfile)
- write_tempfile_record(name);
- return errs > 0 ? -1 : 0;
- }
-
- if (!state.quiet) {
- fprintf(stderr, "git-checkout-index: %s ", name);
- if (!has_same_name)
- fprintf(stderr, "is not in the cache");
- else if (checkout_stage)
- fprintf(stderr, "does not exist at stage %d",
- checkout_stage);
- else
- fprintf(stderr, "is unmerged");
- fputc('\n', stderr);
- }
- return -1;
-}
-
-static int checkout_all(void)
-{
- int i, errs = 0;
- struct cache_entry* last_ce = NULL;
-
- for (i = 0; i < active_nr ; i++) {
- struct cache_entry *ce = active_cache[i];
- if (ce_stage(ce) != checkout_stage
- && (CHECKOUT_ALL != checkout_stage || !ce_stage(ce)))
- continue;
- if (prefix && *prefix &&
- (ce_namelen(ce) <= prefix_length ||
- memcmp(prefix, ce->name, prefix_length)))
- continue;
- if (last_ce && to_tempfile) {
- if (ce_namelen(last_ce) != ce_namelen(ce)
- || memcmp(last_ce->name, ce->name, ce_namelen(ce)))
- write_tempfile_record(last_ce->name);
- }
- if (checkout_entry(ce, &state,
- to_tempfile ? topath[ce_stage(ce)] : NULL) < 0)
- errs++;
- last_ce = ce;
- }
- if (last_ce && to_tempfile)
- write_tempfile_record(last_ce->name);
- if (errs)
- /* we have already done our error reporting.
- * exit with the same code as die().
- */
- exit(128);
- return 0;
-}
-
-static const char checkout_cache_usage[] =
-"git-checkout-index [-u] [-q] [-a] [-f] [-n] [--stage=[123]|all] [--prefix=<string>] [--temp] [--] <file>...";
-
-static struct lock_file lock_file;
-
-int main(int argc, char **argv)
-{
- int i;
- int newfd = -1;
- int all = 0;
- int read_from_stdin = 0;
-
- prefix = setup_git_directory();
- git_config(git_default_config);
- prefix_length = prefix ? strlen(prefix) : 0;
-
- if (read_cache() < 0) {
- die("invalid cache");
- }
-
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
-
- if (!strcmp(arg, "--")) {
- i++;
- break;
- }
- if (!strcmp(arg, "-a") || !strcmp(arg, "--all")) {
- all = 1;
- continue;
- }
- if (!strcmp(arg, "-f") || !strcmp(arg, "--force")) {
- state.force = 1;
- continue;
- }
- if (!strcmp(arg, "-q") || !strcmp(arg, "--quiet")) {
- state.quiet = 1;
- continue;
- }
- if (!strcmp(arg, "-n") || !strcmp(arg, "--no-create")) {
- state.not_new = 1;
- continue;
- }
- if (!strcmp(arg, "-u") || !strcmp(arg, "--index")) {
- state.refresh_cache = 1;
- if (newfd < 0)
- newfd = hold_lock_file_for_update
- (&lock_file, get_index_file());
- if (newfd < 0)
- die("cannot open index.lock file.");
- continue;
- }
- if (!strcmp(arg, "-z")) {
- line_termination = 0;
- continue;
- }
- if (!strcmp(arg, "--stdin")) {
- if (i != argc - 1)
- die("--stdin must be at the end");
- read_from_stdin = 1;
- i++; /* do not consider arg as a file name */
- break;
- }
- if (!strcmp(arg, "--temp")) {
- to_tempfile = 1;
- continue;
- }
- if (!strncmp(arg, "--prefix=", 9)) {
- state.base_dir = arg+9;
- state.base_dir_len = strlen(state.base_dir);
- continue;
- }
- if (!strncmp(arg, "--stage=", 8)) {
- if (!strcmp(arg + 8, "all")) {
- to_tempfile = 1;
- checkout_stage = CHECKOUT_ALL;
- } else {
- int ch = arg[8];
- if ('1' <= ch && ch <= '3')
- checkout_stage = arg[8] - '0';
- else
- die("stage should be between 1 and 3 or all");
- }
- continue;
- }
- if (arg[0] == '-')
- usage(checkout_cache_usage);
- break;
- }
-
- if (state.base_dir_len || to_tempfile) {
- /* when --prefix is specified we do not
- * want to update cache.
- */
- if (state.refresh_cache) {
- close(newfd); newfd = -1;
- rollback_lock_file(&lock_file);
- }
- state.refresh_cache = 0;
- }
-
- /* Check out named files first */
- for ( ; i < argc; i++) {
- const char *arg = argv[i];
- const char *p;
-
- if (all)
- die("git-checkout-index: don't mix '--all' and explicit filenames");
- if (read_from_stdin)
- die("git-checkout-index: don't mix '--stdin' and explicit filenames");
- p = prefix_path(prefix, prefix_length, arg);
- checkout_file(p);
- if (p < arg || p > arg + strlen(arg))
- free((char*)p);
- }
-
- if (read_from_stdin) {
- struct strbuf buf;
- if (all)
- die("git-checkout-index: don't mix '--all' and '--stdin'");
- strbuf_init(&buf);
- while (1) {
- char *path_name;
- const char *p;
-
- read_line(&buf, stdin, line_termination);
- if (buf.eof)
- break;
- if (line_termination && buf.buf[0] == '"')
- path_name = unquote_c_style(buf.buf, NULL);
- else
- path_name = buf.buf;
- p = prefix_path(prefix, prefix_length, path_name);
- checkout_file(p);
- if (p < path_name || p > path_name + strlen(path_name))
- free((char *)p);
- if (path_name != buf.buf)
- free(path_name);
- }
- }
-
- if (all)
- checkout_all();
-
- if (0 <= newfd &&
- (write_cache(newfd, active_cache, active_nr) ||
- commit_lock_file(&lock_file)))
- die("Unable to write new index file");
- return 0;
-}
}
static void combine_diff(const unsigned char *parent, mmfile_t *result_file,
- struct sline *sline, int cnt, int n, int num_parent)
+ struct sline *sline, unsigned int cnt, int n,
+ int num_parent)
{
unsigned int p_lno, lno;
unsigned long nmask = (1UL << n);
unsigned long mark,
unsigned long i,
unsigned long cnt,
- int uninteresting)
+ int look_for_uninteresting)
{
/* We have examined up to i-1 and are about to look at i.
* Find next interesting or uninteresting line. Here,
* that are surrounded by interesting() ones.
*/
while (i <= cnt)
- if (uninteresting
+ if (look_for_uninteresting
? !(sline[i].flag & mark)
: (sline[i].flag & mark))
return i;
unsigned long i;
/* Two groups of interesting lines may have a short gap of
- * unintersting lines. Connect such groups to give them a
+ * uninteresting lines. Connect such groups to give them a
* bit of context.
*
* We first start from what the interesting() function says,
return has_interesting;
}
-static void show_parent_lno(struct sline *sline, unsigned long l0, unsigned long l1, unsigned long cnt, int n)
+static void show_parent_lno(struct sline *sline, unsigned long l0, unsigned long l1, int n)
{
l0 = sline[l0].p_lno[n];
l1 = sline[l1].p_lno[n];
printf(" -%lu,%lu", l0, l1-l0);
}
-static void dump_sline(struct sline *sline, unsigned long cnt, int num_parent)
+static void dump_sline(struct sline *sline, unsigned long cnt, int num_parent,
+ int use_color)
{
unsigned long mark = (1UL<<num_parent);
int i;
unsigned long lno = 0;
+ const char *c_frag = diff_get_color(use_color, DIFF_FRAGINFO);
+ const char *c_new = diff_get_color(use_color, DIFF_FILE_NEW);
+ const char *c_old = diff_get_color(use_color, DIFF_FILE_OLD);
+ const char *c_plain = diff_get_color(use_color, DIFF_PLAIN);
+ const char *c_reset = diff_get_color(use_color, DIFF_RESET);
if (!cnt)
return; /* result deleted */
rlines = hunk_end - lno;
if (cnt < hunk_end)
rlines--; /* pointing at the last delete hunk */
+ fputs(c_frag, stdout);
for (i = 0; i <= num_parent; i++) putchar(combine_marker);
for (i = 0; i < num_parent; i++)
- show_parent_lno(sline, lno, hunk_end, cnt, i);
+ show_parent_lno(sline, lno, hunk_end, i);
printf(" +%lu,%lu ", lno+1, rlines);
for (i = 0; i <= num_parent; i++) putchar(combine_marker);
- putchar('\n');
+ printf("%s\n", c_reset);
while (lno < hunk_end) {
struct lline *ll;
int j;
sl = &sline[lno++];
ll = sl->lost_head;
while (ll) {
+ fputs(c_old, stdout);
for (j = 0; j < num_parent; j++) {
if (ll->parent_map & (1UL<<j))
putchar('-');
else
putchar(' ');
}
- puts(ll->line);
+ printf("%s%s\n", ll->line, c_reset);
ll = ll->next;
}
if (cnt < lno)
break;
p_mask = 1;
+ if (!(sl->flag & (mark-1)))
+ fputs(c_plain, stdout);
+ else
+ fputs(c_new, stdout);
for (j = 0; j < num_parent; j++) {
if (p_mask & sl->flag)
putchar('+');
putchar(' ');
p_mask <<= 1;
}
- printf("%.*s\n", sl->len, sl->bol);
+ printf("%.*s%s\n", sl->len, sl->bol, c_reset);
}
}
}
sline->p_lno[i] = sline->p_lno[j];
}
-static void dump_quoted_path(const char *prefix, const char *path)
+static void dump_quoted_path(const char *prefix, const char *path,
+ const char *c_meta, const char *c_reset)
{
- fputs(prefix, stdout);
+ printf("%s%s", c_meta, prefix);
if (quote_c_style(path, NULL, NULL, 0))
quote_c_style(path, NULL, stdout, 0);
else
printf("%s", path);
- putchar('\n');
+ printf("%s\n", c_reset);
}
static int show_patch_diff(struct combine_diff_path *elem, int num_parent,
if (0 <= (fd = open(elem->path, O_RDONLY)) &&
!fstat(fd, &st)) {
int len = st.st_size;
- int cnt = 0;
+ int sz = 0;
elem->mode = canon_mode(st.st_mode);
result_size = len;
result = xmalloc(len + 1);
- while (cnt < len) {
- int done = xread(fd, result+cnt, len-cnt);
+ while (sz < len) {
+ int done = xread(fd, result+sz, len-sz);
if (done == 0)
break;
if (done < 0)
die("read error '%s'", elem->path);
- cnt += done;
+ sz += done;
}
result[len] = 0;
}
/* deleted file */
result_size = 0;
elem->mode = 0;
- result = xmalloc(1);
- result[0] = 0;
+ result = xcalloc(1, 1);
}
if (0 <= fd)
close(fd);
}
- for (cnt = 0, cp = result; cp - result < result_size; cp++) {
+ for (cnt = 0, cp = result; cp < result + result_size; cp++) {
if (*cp == '\n')
cnt++;
}
sline[lno].lost_tail = &sline[lno].lost_head;
sline[lno].flag = 0;
}
- for (lno = 0, cp = result; cp - result < result_size; cp++) {
+ for (lno = 0, cp = result; cp < result + result_size; cp++) {
if (*cp == '\n') {
sline[lno].len = cp - sline[lno].bol;
lno++;
if (show_hunks || mode_differs || working_tree_file) {
const char *abb;
+ int use_color = opt->color_diff;
+ const char *c_meta = diff_get_color(use_color, DIFF_METAINFO);
+ const char *c_reset = diff_get_color(use_color, DIFF_RESET);
if (rev->loginfo)
show_log(rev, opt->msg_sep);
- dump_quoted_path(dense ? "diff --cc " : "diff --combined ", elem->path);
- printf("index ");
+ dump_quoted_path(dense ? "diff --cc " : "diff --combined ",
+ elem->path, c_meta, c_reset);
+ printf("%sindex ", c_meta);
for (i = 0; i < num_parent; i++) {
abb = find_unique_abbrev(elem->parent[i].sha1,
abbrev);
printf("%s%s", i ? "," : "", abb);
}
abb = find_unique_abbrev(elem->sha1, abbrev);
- printf("..%s\n", abb);
+ printf("..%s%s\n", abb, c_reset);
if (mode_differs) {
int added = !!elem->mode;
DIFF_STATUS_ADDED)
added = 0;
if (added)
- printf("new file mode %06o", elem->mode);
+ printf("%snew file mode %06o",
+ c_meta, elem->mode);
else {
if (!elem->mode)
- printf("deleted file ");
+ printf("%sdeleted file ", c_meta);
printf("mode ");
for (i = 0; i < num_parent; i++) {
printf("%s%06o", i ? "," : "",
if (elem->mode)
printf("..%06o", elem->mode);
}
- putchar('\n');
+ printf("%s\n", c_reset);
}
- dump_quoted_path("--- a/", elem->path);
- dump_quoted_path("+++ b/", elem->path);
- dump_sline(sline, cnt, num_parent);
+ dump_quoted_path("--- a/", elem->path, c_meta, c_reset);
+ dump_quoted_path("+++ b/", elem->path, c_meta, c_reset);
+ dump_sline(sline, cnt, num_parent, opt->color_diff);
}
free(result);
- for (i = 0; i < cnt; i++) {
- if (sline[i].lost_head) {
- struct lline *ll = sline[i].lost_head;
+ for (lno = 0; lno < cnt; lno++) {
+ if (sline[lno].lost_head) {
+ struct lline *ll = sline[lno].lost_head;
while (ll) {
struct lline *tmp = ll;
ll = ll->next;
const unsigned char *sha1,
int quiet)
{
- if (obj->type != TYPE_COMMIT) {
+ if (obj->type != OBJ_COMMIT) {
if (!quiet)
error("Object %s is a %s, not a commit",
sha1_to_hex(sha1), typename(obj->type));
if (!obj) {
struct commit *ret = alloc_commit_node();
created_object(sha1, &ret->object);
- ret->object.type = TYPE_COMMIT;
+ ret->object.type = OBJ_COMMIT;
return ret;
}
if (!obj->type)
- obj->type = TYPE_COMMIT;
+ obj->type = OBJ_COMMIT;
return check_commit(obj, sha1, 0);
}
int parse_commit_buffer(struct commit *item, void *buffer, unsigned long size)
{
+ char *tail = buffer;
char *bufptr = buffer;
unsigned char parent[20];
struct commit_list **pptr;
if (item->object.parsed)
return 0;
item->object.parsed = 1;
- if (memcmp(bufptr, "tree ", 5))
+ tail += size;
+ if (tail <= bufptr + 5 || memcmp(bufptr, "tree ", 5))
return error("bogus commit object %s", sha1_to_hex(item->object.sha1));
- if (get_sha1_hex(bufptr + 5, parent) < 0)
+ if (tail <= bufptr + 45 || get_sha1_hex(bufptr + 5, parent) < 0)
return error("bad tree pointer in commit %s",
sha1_to_hex(item->object.sha1));
item->tree = lookup_tree(parent);
pptr = &item->parents;
graft = lookup_commit_graft(item->object.sha1);
- while (!memcmp(bufptr, "parent ", 7)) {
+ while (bufptr + 48 < tail && !memcmp(bufptr, "parent ", 7)) {
struct commit *new_parent;
- if (get_sha1_hex(bufptr + 7, parent) || bufptr[47] != '\n')
+ if (tail <= bufptr + 48 ||
+ get_sha1_hex(bufptr + 7, parent) ||
+ bufptr[47] != '\n')
return error("bad parents in commit %s", sha1_to_hex(item->object.sha1));
bufptr += 48;
if (graft)
{
struct commit_list *parents;
- parents = commit->parents;
commit->object.flags &= ~mark;
+ parents = commit->parents;
while (parents) {
struct commit *parent = parents->item;
- if (parent && parent->object.parsed &&
- (parent->object.flags & mark))
+
+ /* Have we already cleared this? */
+ if (mark & parent->object.flags)
clear_commit_marks(parent, mark);
parents = parents->next;
}
const char *hex = abbrev
? find_unique_abbrev(p->object.sha1, abbrev)
: sha1_to_hex(p->object.sha1);
- char *dots = (abbrev && strlen(hex) != 40) ? "..." : "";
+ const char *dots = (abbrev && strlen(hex) != 40) ? "..." : "";
parent = parent->next;
offset += sprintf(buf + offset, " %s%s", hex, dots);
continue;
}
+ if (!subject)
+ body = 1;
+
if (is_empty_line(line, &linelen)) {
if (!body)
continue;
continue;
if (fmt == CMIT_FMT_SHORT)
break;
- } else {
- body = 1;
}
if (subject) {
/* Make sure there is an EOLN for the non-oneline case */
if (fmt != CMIT_FMT_ONELINE)
buf[offset++] = '\n';
+ /*
+ * make sure there is another EOLN to separate the headers from whatever
+ * body the caller appends if we haven't already written a body
+ */
+ if (fmt == CMIT_FMT_EMAIL && !body)
+ buf[offset++] = '\n';
buf[offset] = '\0';
return offset;
}
}
free(nodes);
}
+
+/* merge-base stuff */
+
+/* bits #0..7 in revision.h */
+#define PARENT1 (1u<< 8)
+#define PARENT2 (1u<< 9)
+#define STALE (1u<<10)
+#define RESULT (1u<<11)
+
+static struct commit *interesting(struct commit_list *list)
+{
+ while (list) {
+ struct commit *commit = list->item;
+ list = list->next;
+ if (commit->object.flags & STALE)
+ continue;
+ return commit;
+ }
+ return NULL;
+}
+
+static struct commit_list *merge_bases(struct commit *one, struct commit *two)
+{
+ struct commit_list *list = NULL;
+ struct commit_list *result = NULL;
+
+ if (one == two)
+ /* We do not mark this even with RESULT so we do not
+ * have to clean it up.
+ */
+ return commit_list_insert(one, &result);
+
+ parse_commit(one);
+ parse_commit(two);
+
+ one->object.flags |= PARENT1;
+ two->object.flags |= PARENT2;
+ insert_by_date(one, &list);
+ insert_by_date(two, &list);
+
+ while (interesting(list)) {
+ struct commit *commit;
+ struct commit_list *parents;
+ struct commit_list *n;
+ int flags;
+
+ commit = list->item;
+ n = list->next;
+ free(list);
+ list = n;
+
+ flags = commit->object.flags & (PARENT1 | PARENT2 | STALE);
+ if (flags == (PARENT1 | PARENT2)) {
+ if (!(commit->object.flags & RESULT)) {
+ commit->object.flags |= RESULT;
+ insert_by_date(commit, &result);
+ }
+ /* Mark parents of a found merge stale */
+ flags |= STALE;
+ }
+ parents = commit->parents;
+ while (parents) {
+ struct commit *p = parents->item;
+ parents = parents->next;
+ if ((p->object.flags & flags) == flags)
+ continue;
+ parse_commit(p);
+ p->object.flags |= flags;
+ insert_by_date(p, &list);
+ }
+ }
+
+ /* Clean up the result to remove stale ones */
+ list = result; result = NULL;
+ while (list) {
+ struct commit_list *n = list->next;
+ if (!(list->item->object.flags & STALE))
+ insert_by_date(list->item, &result);
+ free(list);
+ list = n;
+ }
+ return result;
+}
+
+struct commit_list *get_merge_bases(struct commit *one,
+ struct commit *two,
+ int cleanup)
+{
+ const unsigned all_flags = (PARENT1 | PARENT2 | STALE | RESULT);
+ struct commit_list *list;
+ struct commit **rslt;
+ struct commit_list *result;
+ int cnt, i, j;
+
+ result = merge_bases(one, two);
+ if (one == two)
+ return result;
+ if (!result || !result->next) {
+ if (cleanup) {
+ clear_commit_marks(one, all_flags);
+ clear_commit_marks(two, all_flags);
+ }
+ return result;
+ }
+
+ /* There are more than one */
+ cnt = 0;
+ list = result;
+ while (list) {
+ list = list->next;
+ cnt++;
+ }
+ rslt = xcalloc(cnt, sizeof(*rslt));
+ for (list = result, i = 0; list; list = list->next)
+ rslt[i++] = list->item;
+ free_commit_list(result);
+
+ clear_commit_marks(one, all_flags);
+ clear_commit_marks(two, all_flags);
+ for (i = 0; i < cnt - 1; i++) {
+ for (j = i+1; j < cnt; j++) {
+ if (!rslt[i] || !rslt[j])
+ continue;
+ result = merge_bases(rslt[i], rslt[j]);
+ clear_commit_marks(rslt[i], all_flags);
+ clear_commit_marks(rslt[j], all_flags);
+ for (list = result; list; list = list->next) {
+ if (rslt[i] == list->item)
+ rslt[i] = NULL;
+ if (rslt[j] == list->item)
+ rslt[j] = NULL;
+ }
+ }
+ }
+
+ /* Surviving ones in rslt[] are the independent results */
+ result = NULL;
+ for (i = 0; i < cnt; i++) {
+ if (rslt[i])
+ insert_by_date(rslt[i], &result);
+ }
+ free(rslt);
+ return result;
+}
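
merge_bases() above paints everything reachable from the first commit with PARENT1 and everything reachable from the second with PARENT2; a commit carrying both flags is a common ancestor, and the STALE pass then drops candidates that are themselves reachable from another candidate. The toy program below (graph and helper invented for illustration, not part of the patch) shows only the painting step on a five-commit history: 4 merges 2 and 3, both children of 1, which descends from root 0. It reports 0 and 1 as common ancestors of 2 and 3; the STALE handling of the real code would then discard 0, leaving 1 as the merge base:

#include <stdio.h>

#define PARENT1 (1u << 0)
#define PARENT2 (1u << 1)
#define NCOMMIT 5

/* parents[i] lists the parents of commit i, terminated by -1 */
static const int parents[NCOMMIT][3] = {
	{ -1 },		/* 0: root             */
	{ 0, -1 },	/* 1: child of 0       */
	{ 1, -1 },	/* 2: child of 1       */
	{ 1, -1 },	/* 3: child of 1       */
	{ 2, 3, -1 },	/* 4: merge of 2 and 3 */
};
static unsigned flags[NCOMMIT];

/* propagate a flag to a commit and all of its ancestors */
static void paint(int commit, unsigned flag)
{
	int i;
	if (flags[commit] & flag)
		return;
	flags[commit] |= flag;
	for (i = 0; parents[commit][i] >= 0; i++)
		paint(parents[commit][i], flag);
}

int main(void)
{
	int i;
	paint(2, PARENT1);
	paint(3, PARENT2);
	for (i = 0; i < NCOMMIT; i++)
		if ((flags[i] & (PARENT1 | PARENT2)) == (PARENT1 | PARENT2))
			printf("common ancestor: %d\n", i);	/* 0 and 1 */
	return 0;
}
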
int register_commit_graft(struct commit_graft *, int);
int read_graft_file(const char *graft_file);
+extern struct commit_list *get_merge_bases(struct commit *rev1, struct commit *rev2, int cleanup);
+
#endif /* COMMIT_H */
# Windows methods
#
def _get_handles(self, stdin, stdout, stderr):
- """Construct and return tupel with IO objects:
+ """Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin == None and stdout == None and stderr == None:
def _find_w9xpopen(self):
- """Find and return absolut path to w9xpopen.exe"""
+ """Find and return absolute path to w9xpopen.exe"""
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
# POSIX methods
#
def _get_handles(self, stdin, stdout, stderr):
- """Construct and return tupel with IO objects:
+ """Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = None, None
return 1;
if (!*value)
return 0;
- if (!strcasecmp(value, "true"))
+ if (!strcasecmp(value, "true") || !strcasecmp(value, "yes"))
return 1;
- if (!strcasecmp(value, "false"))
+ if (!strcasecmp(value, "false") || !strcasecmp(value, "no"))
return 0;
return git_config_int(name, value) != 0;
}
return 0;
}
+ if (!strcmp(var, "core.legacyheaders")) {
+ use_legacy_headers = git_config_bool(var, value);
+ return 0;
+ }
+
+ if (!strcmp(var, "core.compression")) {
+ int level = git_config_int(var, value);
+ if (level == -1)
+ level = Z_DEFAULT_COMPRESSION;
+ else if (level < 0 || level > Z_BEST_COMPRESSION)
+ die("bad zlib compression level %d", level);
+ zlib_compression_level = level;
+ return 0;
+ }
+
if (!strcmp(var, "user.name")) {
strlcpy(git_default_name, value, sizeof(git_default_name));
return 0;
return 0;
}
+ if (!strcmp(var, "pager.color")) {
+ pager_use_color = git_config_bool(var,value);
+ return 0;
+ }
+
/* Add other config variables here and to Documentation/config.txt. */
return 0;
}
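
git_config_bool() above now also accepts 'yes' and 'no' alongside 'true', 'false', the empty string and numeric values. A standalone approximation of the resulting logic, not part of the patch, with atoi() standing in for git_config_int():

#include <stdio.h>
#include <stdlib.h>
#include <strings.h>

static int config_bool(const char *value)
{
	if (!value)
		return 1;	/* "[section] var" with no '=' counts as true */
	if (!*value)
		return 0;
	if (!strcasecmp(value, "true") || !strcasecmp(value, "yes"))
		return 1;
	if (!strcasecmp(value, "false") || !strcasecmp(value, "no"))
		return 0;
	return atoi(value) != 0;	/* stand-in for git_config_int() */
}

int main(void)
{
	printf("%d %d %d %d\n",
	       config_bool("yes"), config_bool("No"),
	       config_bool("1"), config_bool(""));	/* 1 0 1 0 */
	return 0;
}
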
--- /dev/null
+# git Makefile configuration, included in main Makefile
+# @configure_input@
+
+CC = @CC@
+AR = @AR@
+TAR = @TAR@
+#INSTALL = @INSTALL@ # needs install-sh or install.sh in sources
+
+prefix = @prefix@
+exec_prefix = @exec_prefix@
+bindir = @bindir@
+#gitexecdir = @libexecdir@/git-core/
+datarootdir = @datarootdir@
+template_dir = @datadir@/git-core/templates/
+GIT_PYTHON_DIR = @datadir@/git-core/python
+
+mandir=@mandir@
+
+srcdir = @srcdir@
+VPATH = @srcdir@
+
+export exec_prefix mandir
+export srcdir VPATH
+
+NO_PYTHON=@NO_PYTHON@
+NEEDS_SSL_WITH_CRYPTO=@NEEDS_SSL_WITH_CRYPTO@
+NO_OPENSSL=@NO_OPENSSL@
+NO_CURL=@NO_CURL@
+NO_EXPAT=@NO_EXPAT@
+NEEDS_LIBICONV=@NEEDS_LIBICONV@
+NEEDS_SOCKET=@NEEDS_SOCKET@
+NO_D_INO_IN_DIRENT=@NO_D_INO_IN_DIRENT@
+NO_D_TYPE_IN_DIRENT=@NO_D_TYPE_IN_DIRENT@
+NO_SOCKADDR_STORAGE=@NO_SOCKADDR_STORAGE@
+NO_IPV6=@NO_IPV6@
+NO_C99_FORMAT=@NO_C99_FORMAT@
+NO_STRCASESTR=@NO_STRCASESTR@
+NO_STRLCPY=@NO_STRLCPY@
+NO_SETENV=@NO_SETENV@
+
--- /dev/null
+# -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+
+AC_PREREQ(2.59)
+AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
+
+AC_CONFIG_SRCDIR([git.c])
+
+config_file=config.mak.autogen
+config_append=config.mak.append
+config_in=config.mak.in
+
+echo "# ${config_append}. Generated by configure." > "${config_append}"
+
+
+## Definitions of macros
+# GIT_CONF_APPEND_LINE(LINE)
+# --------------------------
+# Append LINE to file ${config_append}
+AC_DEFUN([GIT_CONF_APPEND_LINE],
+[echo "$1" >> "${config_append}"])# GIT_CONF_APPEND_LINE
+#
+# GIT_ARG_SET_PATH(PROGRAM)
+# -------------------------
+# Provide --with-PROGRAM=PATH option to set PATH to PROGRAM
+AC_DEFUN([GIT_ARG_SET_PATH],
+[AC_ARG_WITH([$1],
+ [AS_HELP_STRING([--with-$1=PATH],
+ [provide PATH to $1])],
+ [GIT_CONF_APPEND_PATH($1)],[])
+])# GIT_ARG_SET_PATH
+#
+# GIT_CONF_APPEND_PATH(PROGRAM)
+# ------------------------------
+# Parse --with-PROGRAM=PATH option to set PROGRAM_PATH=PATH
+# Used by GIT_ARG_SET_PATH(PROGRAM)
+AC_DEFUN([GIT_CONF_APPEND_PATH],
+[PROGRAM=m4_toupper($1); \
+if test "$withval" = "no"; then \
+ AC_MSG_ERROR([You cannot use git without $1]); \
+else \
+ if test "$withval" = "yes"; then \
+ AC_MSG_WARN([You should provide a path for --with-$1=PATH]); \
+ else \
+ GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval); \
+ fi; \
+fi; \
+]) # GIT_CONF_APPEND_PATH
+#
+# GIT_PARSE_WITH(PACKAGE)
+# -----------------------
+# For use in AC_ARG_WITH action-if-found, for packages default ON.
+# * Set NO_PACKAGE=YesPlease for --without-PACKAGE
+# * Set PACKAGEDIR=PATH for --with-PACKAGE=PATH
+# * Unset NO_PACKAGE for --with-PACKAGE without ARG
+AC_DEFUN([GIT_PARSE_WITH],
+[PACKAGE=m4_toupper($1); \
+if test "$withval" = "no"; then \
+ m4_toupper(NO_$1)=YesPlease; \
+elif test "$withval" = "yes"; then \
+ m4_toupper(NO_$1)=; \
+else \
+ m4_toupper(NO_$1)=; \
+ GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval); \
+fi \
+])# GIT_PARSE_WITH
+
+
+## Site configuration related to programs (before tests)
+## --with-PACKAGE[=ARG] and --without-PACKAGE
+#
+# Define SHELL_PATH to provide path to shell.
+GIT_ARG_SET_PATH(shell)
+#
+# Define PERL_PATH to provide path to Perl.
+GIT_ARG_SET_PATH(perl)
+#
+# Define NO_PYTHON if you want to lose all benefits of the recursive merge.
+# Define PYTHON_PATH to provide path to Python.
+AC_ARG_WITH(python,[AS_HELP_STRING([--with-python=PATH], [provide PATH to python])
+AS_HELP_STRING([--without-python], [don't use python scripts])],
+ [if test "$withval" = "no"; then \
+ NO_PYTHON=YesPlease; \
+ elif test "$withval" = "yes"; then \
+ NO_PYTHON=; \
+ else \
+ NO_PYTHON=; \
+ PYTHON_PATH=$withval; \
+ fi; \
+ ])
+AC_SUBST(NO_PYTHON)
+AC_SUBST(PYTHON_PATH)
+
+
+## Checks for programs.
+AC_MSG_NOTICE([CHECKS for programs])
+#
+AC_PROG_CC
+#AC_PROG_INSTALL # needs install-sh or install.sh in sources
+AC_CHECK_TOOL(AR, ar, :)
+AC_CHECK_PROGS(TAR, [gtar tar])
+#
+# Define NO_PYTHON if you want to lose all benefits of the recursive merge.
+# Define PYTHON_PATH to provide path to Python.
+if test -z "$NO_PYTHON"; then
+ if test -z "$PYTHON_PATH"; then
+ AC_PATH_PROGS(PYTHON_PATH, [python python2.4 python2.3 python2])
+ fi
+ if test -n "$PYTHON_PATH"; then
+ GIT_CONF_APPEND_LINE([PYTHON_PATH=@PYTHON_PATH@])
+ NO_PYTHON=""
+ fi
+fi
+
+
+## Checks for libraries.
+AC_MSG_NOTICE([CHECKS for libraries])
+#
+# Define NO_OPENSSL environment variable if you do not have OpenSSL.
+# Define NEEDS_SSL_WITH_CRYPTO if you need -lcrypto with -lssl (Darwin).
+AC_CHECK_LIB([crypto], [SHA1_Init],
+[NEEDS_SSL_WITH_CRYPTO=],
+[AC_CHECK_LIB([ssl], [SHA1_Init],
+ [NEEDS_SSL_WITH_CRYPTO=YesPlease
+  NO_OPENSSL=],
+ [NO_OPENSSL=YesPlease])])
+AC_SUBST(NEEDS_SSL_WITH_CRYPTO)
+AC_SUBST(NO_OPENSSL)
+#
+# Define NO_CURL if you do not have curl installed. git-http-pull and
+# git-http-push are not built, and you cannot use http:// and https://
+# transports.
+AC_CHECK_LIB([curl], [curl_global_init],
+[NO_CURL=],
+[NO_CURL=YesPlease])
+AC_SUBST(NO_CURL)
+#
+# Define NO_EXPAT if you do not have expat installed. git-http-push is
+# not built, and you cannot push using http:// and https:// transports.
+AC_CHECK_LIB([expat], [XML_ParserCreate],
+[NO_EXPAT=],
+[NO_EXPAT=YesPlease])
+AC_SUBST(NO_EXPAT)
+#
+# Define NEEDS_LIBICONV if linking with libc is not enough (Darwin).
+AC_CHECK_LIB([c], [iconv],
+[NEEDS_LIBICONV=],
+[NEEDS_LIBICONV=YesPlease])
+AC_SUBST(NEEDS_LIBICONV)
+#
+# Define NEEDS_SOCKET if linking with libc is not enough (SunOS,
+# Patrick Mauritz).
+AC_CHECK_LIB([c], [socket],
+[NEEDS_SOCKET=],
+[NEEDS_SOCKET=YesPlease])
+AC_SUBST(NEEDS_SOCKET)
+
+
+## Checks for header files.
+
+
+## Checks for typedefs, structures, and compiler characteristics.
+AC_MSG_NOTICE([CHECKS for typedefs, structures, and compiler characteristics])
+#
+# Define NO_D_INO_IN_DIRENT if you don't have d_ino in your struct dirent.
+AC_CHECK_MEMBER(struct dirent.d_ino,
+[NO_D_INO_IN_DIRENT=],
+[NO_D_INO_IN_DIRENT=YesPlease],
+[#include <dirent.h>])
+AC_SUBST(NO_D_INO_IN_DIRENT)
+#
+# Define NO_D_TYPE_IN_DIRENT if your platform defines DT_UNKNOWN but lacks
+# d_type in struct dirent (latest Cygwin -- will be fixed soonish).
+AC_CHECK_MEMBER(struct dirent.d_type,
+[NO_D_TYPE_IN_DIRENT=],
+[NO_D_TYPE_IN_DIRENT=YesPlease],
+[#include <dirent.h>])
+AC_SUBST(NO_D_TYPE_IN_DIRENT)
+#
+# Define NO_SOCKADDR_STORAGE if your platform does not have struct
+# sockaddr_storage.
+AC_CHECK_TYPE(struct sockaddr_storage,
+[NO_SOCKADDR_STORAGE=],
+[NO_SOCKADDR_STORAGE=YesPlease],
+[#include <netinet/in.h>])
+AC_SUBST(NO_SOCKADDR_STORAGE)
+#
+# Define NO_IPV6 if you lack IPv6 support and getaddrinfo().
+AC_CHECK_TYPE([struct addrinfo],[
+ AC_CHECK_FUNC([getaddrinfo],
+ [NO_IPV6=],
+ [NO_IPV6=YesPlease])
+],[NO_IPV6=YesPlease],[
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netdb.h>
+])
+AC_SUBST(NO_IPV6)
+#
+# Define NO_C99_FORMAT if your formatted IO functions (printf/scanf et al.)
+# do not support the 'size specifiers' introduced by C99, namely ll, hh,
+# j, z, t (representing long long int, char, intmax_t, size_t, ptrdiff_t).
+# Some C compilers supported these specifiers prior to C99 as an extension.
+AC_CACHE_CHECK(whether formatted IO functions support C99 size specifiers,
+ ac_cv_c_c99_format,
+[# Actually git uses only %z (%zu) in alloc.c, and %t (%td) in mktag.c
+AC_RUN_IFELSE(
+ [AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT],
+ [[char buf[64];
+ if (sprintf(buf, "%lld%hhd%jd%zd%td", (long long int)1, (char)2, (intmax_t)3, (size_t)4, (ptrdiff_t)5) != 5)
+ exit(1);
+ else if (strcmp(buf, "12345"))
+ exit(2);]])],
+ [ac_cv_c_c99_format=yes],
+ [ac_cv_c_c99_format=no])
+])
+if test $ac_cv_c_c99_format = no; then
+ NO_C99_FORMAT=YesPlease
+else
+ NO_C99_FORMAT=
+fi
+AC_SUBST(NO_C99_FORMAT)
+
+
+## Checks for library functions.
+## (in default C library and libraries checked by AC_CHECK_LIB)
+AC_MSG_NOTICE([CHECKS for library functions])
+#
+# Define NO_STRCASESTR if you don't have strcasestr.
+AC_CHECK_FUNC(strcasestr,
+[NO_STRCASESTR=],
+[NO_STRCASESTR=YesPlease])
+AC_SUBST(NO_STRCASESTR)
+#
+# Define NO_STRLCPY if you don't have strlcpy.
+AC_CHECK_FUNC(strlcpy,
+[NO_STRLCPY=],
+[NO_STRLCPY=YesPlease])
+AC_SUBST(NO_STRLCPY)
+#
+# Define NO_SETENV if you don't have setenv in the C library.
+AC_CHECK_FUNC(setenv,
+[NO_SETENV=],
+[NO_SETENV=YesPlease])
+AC_SUBST(NO_SETENV)
+#
+# Define NO_MMAP if you want to avoid mmap.
+#
+# Define NO_ICONV if your libc does not properly support iconv.
+
+
+## Other checks.
+# Define USE_PIC if you need the main git objects to be built with -fPIC
+# in order to build and link perl/Git.so. x86-64 seems to need this.
+#
+# Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link.
+# Enable it on Windows. By default, symrefs are still used.
+#
+# Define WITH_OWN_SUBPROCESS_PY if you want to use it with python 2.3.
+#
+# Define NO_ACCURATE_DIFF if your diff program at least sometimes misses
+# a missing newline at the end of the file.
+
+
+## Site configuration (override autodetection)
+## --with-PACKAGE[=ARG] and --without-PACKAGE
+AC_MSG_NOTICE([CHECKS for site configuration])
+#
+# Define NO_SVN_TESTS if you want to skip time-consuming SVN interoperability
+# tests. These tests take up a significant amount of the total test time
+# but are not needed unless you plan to talk to SVN repos.
+#
+# Define MOZILLA_SHA1 environment variable when running make to make use of
+# a bundled SHA1 routine coming from Mozilla. It is GPL'd and should be fast
+# on non-x86 architectures (e.g. PowerPC), while the OpenSSL version (default
+# choice) has a very fast version optimized for i586.
+#
+# Define PPC_SHA1 environment variable when running make to make use of
+# a bundled SHA1 routine optimized for PowerPC.
+#
+# Define ARM_SHA1 environment variable when running make to make use of
+# a bundled SHA1 routine optimized for ARM.
+#
+# Define NO_OPENSSL environment variable if you do not have OpenSSL.
+# This also implies MOZILLA_SHA1.
+#
+# Define OPENSSLDIR=/foo/bar if your openssl header and library files are in
+# /foo/bar/include and /foo/bar/lib directories.
+AC_ARG_WITH(openssl,
+AS_HELP_STRING([--with-openssl],[use OpenSSL library (default is YES)])
+AS_HELP_STRING([], [ARG can be prefix for openssl library and headers]),\
+GIT_PARSE_WITH(openssl))
+#
+# Define NO_CURL if you do not have curl installed. git-http-pull and
+# git-http-push are not built, and you cannot use http:// and https://
+# transports.
+#
+# Define CURLDIR=/foo/bar if your curl header and library files are in
+# /foo/bar/include and /foo/bar/lib directories.
+AC_ARG_WITH(curl,
+AS_HELP_STRING([--with-curl],[support http(s):// transports (default is YES)])
+AS_HELP_STRING([], [ARG can be also prefix for curl library and headers]),
+GIT_PARSE_WITH(curl))
+#
+# Define NO_EXPAT if you do not have expat installed. git-http-push is
+# not built, and you cannot push using http:// and https:// transports.
+#
+# Define EXPATDIR=/foo/bar if your expat header and library files are in
+# /foo/bar/include and /foo/bar/lib directories.
+AC_ARG_WITH(expat,
+AS_HELP_STRING([--with-expat],
+[support git-push using http:// and https:// transports via WebDAV (default is YES)])
+AS_HELP_STRING([], [ARG can be also prefix for expat library and headers]),
+GIT_PARSE_WITH(expat))
+#
+# Define NO_FINK if you are building on Darwin/Mac OS X, have Fink
+# installed in /sw, but don't want GIT to link against any libraries
+# installed there. If defined you may specify your own (or Fink's)
+# include directories and library directories by defining CFLAGS
+# and LDFLAGS appropriately.
+#
+# Define NO_DARWIN_PORTS if you are building on Darwin/Mac OS X,
+# have DarwinPorts installed in /opt/local, but don't want GIT to
+# link against any libraries installed there. If defined you may
+# specify your own (or DarwinPort's) include directories and
+# library directories by defining CFLAGS and LDFLAGS appropriately.
+#
+# Define NO_MMAP if you want to avoid mmap.
+
+## --enable-FEATURE[=ARG] and --disable-FEATURE
+#
+# Define COLLISION_CHECK below if you believe that SHA1's
+# 1461501637330902918203684832716283019655932542976 hashes do not give you
+# sufficient guarantee that no collisions between objects will ever happen.
+#
+# Define USE_NSEC below if you want git to care about sub-second file mtimes
+# and ctimes. Note that you need recent glibc (at least 2.2.4) for this, and
+# it will BREAK YOUR LOCAL DIFFS! show-diff and anything using it will likely
+# randomly break unless your underlying filesystem supports those sub-second
+# times (my ext3 doesn't).
+#
+# Define USE_STDEV below if you want git to care about the underlying device
+# change being considered an inode change from the update-cache perspective.
+
+
+## Output files
+AC_CONFIG_FILES(["${config_file}":"${config_in}":"${config_append}"])
+AC_OUTPUT
+
+
+## Cleanup
+rm -f "${config_append}"
static char *server_capabilities = NULL;
+static int check_ref(const char *name, int len, unsigned int flags)
+{
+ if (!flags)
+ return 1;
+
+ if (len > 45 || memcmp(name, "refs/", 5))
+ return 0;
+
+ /* Skip the "refs/" part */
+ name += 5;
+ len -= 5;
+
+ /* REF_NORMAL means that we don't want the magic fake tag refs */
+ if ((flags & REF_NORMAL) && check_ref_format(name) < 0)
+ return 0;
+
+ /* REF_HEADS means that we want regular branch heads */
+ if ((flags & REF_HEADS) && !memcmp(name, "heads/", 6))
+ return 1;
+
+ /* REF_TAGS means that we want tags */
+ if ((flags & REF_TAGS) && !memcmp(name, "tags/", 5))
+ return 1;
+
+ /* All type bits clear means that we are ok with anything */
+ return !(flags & ~REF_NORMAL);
+}
+
/*
* Read all the refs from the other end
*/
struct ref **get_remote_heads(int in, struct ref **list,
- int nr_match, char **match, int ignore_funny)
+ int nr_match, char **match,
+ unsigned int flags)
{
*list = NULL;
for (;;) {
server_capabilities = strdup(name + name_len + 1);
}
- if (ignore_funny && 45 < len && !memcmp(name, "refs/", 5) &&
- check_ref_format(name + 5))
+ if (!check_ref(name, name_len, flags))
continue;
-
if (nr_match && !path_match(name, nr_match, match))
continue;
ref = xcalloc(1, sizeof(*ref) + len - 40);
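A usage sketch, not part of the patch itself: callers of the new get_remote_heads() signature pass flag bits instead of the old ignore_funny integer. REF_NORMAL, REF_HEADS and REF_TAGS are assumed to be the bit definitions introduced alongside check_ref() above, and fd[0] is assumed to be the read end of an established connection.

	/* sketch: read advertised refs, keeping only well-formed branches and tags */
	struct ref *refs = NULL, *r;
	get_remote_heads(fd[0], &refs, 0, NULL, REF_NORMAL | REF_HEADS | REF_TAGS);
	for (r = refs; r; r = r->next)
		printf("%s %s\n", sha1_to_hex(r->old_sha1), r->name);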
*/
static int git_tcp_connect_sock(char *host)
{
- int sockfd = -1;
+ int sockfd = -1, saved_errno = 0;
char *colon, *end;
- char *port = STR(DEFAULT_GIT_PORT);
+ const char *port = STR(DEFAULT_GIT_PORT);
struct addrinfo hints, *ai0, *ai;
int gai;
for (ai0 = ai; ai; ai = ai->ai_next) {
sockfd = socket(ai->ai_family,
ai->ai_socktype, ai->ai_protocol);
- if (sockfd < 0)
+ if (sockfd < 0) {
+ saved_errno = errno;
continue;
+ }
if (connect(sockfd, ai->ai_addr, ai->ai_addrlen) < 0) {
+ saved_errno = errno;
close(sockfd);
sockfd = -1;
continue;
freeaddrinfo(ai0);
if (sockfd < 0)
- die("unable to connect a socket (%s)", strerror(errno));
+ die("unable to connect a socket (%s)", strerror(saved_errno));
return sockfd;
}
*/
static int git_tcp_connect_sock(char *host)
{
- int sockfd = -1;
+ int sockfd = -1, saved_errno = 0;
char *colon, *end;
char *port = STR(DEFAULT_GIT_PORT), *ep;
struct hostent *he;
for (ap = he->h_addr_list; *ap; ap++) {
sockfd = socket(he->h_addrtype, SOCK_STREAM, 0);
- if (sockfd < 0)
+ if (sockfd < 0) {
+ saved_errno = errno;
continue;
+ }
memset(&sa, 0, sizeof sa);
sa.sin_family = he->h_addrtype;
memcpy(&sa.sin_addr, *ap, he->h_length);
if (connect(sockfd, (struct sockaddr *)&sa, sizeof sa) < 0) {
+ saved_errno = errno;
close(sockfd);
sockfd = -1;
continue;
}
if (sockfd < 0)
- die("unable to connect a socket (%s)", strerror(errno));
+ die("unable to connect a socket (%s)", strerror(saved_errno));
return sockfd;
}
#endif /* NO_IPV6 */
-static void git_tcp_connect(int fd[2],
- const char *prog, char *host, char *path)
+static void git_tcp_connect(int fd[2], char *host)
{
int sockfd = git_tcp_connect_sock(host);
return (git_proxy_command && *git_proxy_command);
}
-static void git_proxy_connect(int fd[2],
- const char *prog, char *host, char *path)
+static void git_proxy_connect(int fd[2], char *host)
{
- char *port = STR(DEFAULT_GIT_PORT);
+ const char *port = STR(DEFAULT_GIT_PORT);
char *colon, *end;
int pipefd[2][2];
pid_t pid;
*/
char *target_host = strdup(host);
if (git_use_proxy(host))
- git_proxy_connect(fd, prog, host, path);
+ git_proxy_connect(fd, host);
else
- git_tcp_connect(fd, prog, host, path);
+ git_tcp_connect(fd, host);
/*
* Separate original protocol components prog and path
* from extended components with a NUL byte.
}
}
-# colordiff specfic options here. Need to pre-declare if using variables
+# colordiff specific options here. Need to pre-declare if using variables
GetOptions(
"no-banner" => sub { $show_banner = 0 },
"plain-text=s" => \&set_color,
EMACS = emacs
ELC = git.elc vc-git.elc
-INSTALL = install
+INSTALL ?= install
INSTALL_ELC = $(INSTALL) -m 644
-prefix = $(HOME)
+prefix ?= $(HOME)
emacsdir = $(prefix)/share/emacs/site-lisp
all: $(ELC)
;;;; ------------------------------------------------------------
(defgroup git nil
- "Git user interface")
+ "A user interface for the git versioning system."
+ :group 'tools)
(defcustom git-committer-name nil
"User name to use for commits.
-The default is to fall back to the repository config, then to `add-log-full-name' and then to `user-full-name'."
+The default is to fall back to the repository config,
+then to `add-log-full-name' and then to `user-full-name'."
:group 'git
:type '(choice (const :tag "Default" nil)
(string :tag "Name")))
(defcustom git-committer-email nil
"Email address to use for commits.
-The default is to fall back to the git repository config, then to `add-log-mailing-address' and then to `user-mail-address'."
+The default is to fall back to the git repository config,
+then to `add-log-mailing-address' and then to `user-mail-address'."
:group 'git
:type '(choice (const :tag "Default" nil)
(string :tag "Email")))
:group 'git
:type 'boolean)
+(defcustom git-reuse-status-buffer t
+ "Whether `git-status' should try to reuse an existing buffer
+if there is already one that displays the same directory."
+ :group 'git
+ :type 'boolean)
+
(defcustom git-per-dir-ignore-file ".gitignore"
"Name of the per-directory ignore file."
:group 'git
:type 'string)
+
(defface git-status-face
'((((class color) (background light)) (:foreground "purple")))
"Git mode face used to highlight added and modified files."
(apply #'call-process "git" nil buffer nil args)))
(defun git-call-process-env-string (env &rest args)
- "Wrapper for call-process that sets environment strings, and returns the process output as a string."
+ "Wrapper for call-process that sets environment strings,
+and returns the process output as a string."
(with-temp-buffer
(and (eq 0 (apply #' git-call-process-env t env args))
(buffer-string))))
(set-buffer (find-file-noselect ignore-name))
(goto-char (point-max))
(unless (zerop (current-column)) (insert "\n"))
- (insert name "\n")
+ (insert "/" name "\n")
(sort-lines nil (point-min) (point-max))
(save-buffer))
(when created
(condition-case nil (delete-file ".git/MERGE_HEAD") (error nil))
(with-current-buffer buffer (erase-buffer))
(git-set-files-state files 'uptodate)
+ (when (file-directory-p ".git/rr-cache")
+ (git-run-command nil nil "rerere"))
(git-refresh-files)
(git-refresh-ewoc-hf git-status)
(message "Committed %s." commit))
(let ((map (make-keymap))
(diff-map (make-sparse-keymap)))
(suppress-keymap map)
+ (define-key map "?" 'git-help)
+ (define-key map "h" 'git-help)
(define-key map " " 'git-next-file)
(define-key map "a" 'git-add-file)
(define-key map "c" 'git-commit-file)
(set (make-local-variable 'list-buffers-directory) default-directory)
(run-hooks 'git-status-mode-hook)))
+(defun git-find-status-buffer (dir)
+ "Find the git status buffer handling a specified directory."
+ (let ((list (buffer-list))
+ (fulldir (expand-file-name dir))
+ found)
+ (while (and list (not found))
+ (let ((buffer (car list)))
+ (with-current-buffer buffer
+ (when (and list-buffers-directory
+ (string-equal fulldir (expand-file-name list-buffers-directory))
+ (string-match "\\*git-status\\*$" (buffer-name buffer)))
+ (setq found buffer))))
+ (setq list (cdr list)))
+ found))
+
(defun git-status (dir)
"Entry point into git-status mode."
(interactive "DSelect directory: ")
(setq dir (git-get-top-dir dir))
(if (file-directory-p (concat (file-name-as-directory dir) ".git"))
- (let ((buffer (create-file-buffer (expand-file-name "*git-status*" dir))))
+ (let ((buffer (or (and git-reuse-status-buffer (git-find-status-buffer dir))
+ (create-file-buffer (expand-file-name "*git-status*" dir)))))
(switch-to-buffer buffer)
(cd dir)
(git-status-mode)
(goto-char (point-min)))
(message "%s is not a git working tree." dir)))
+(defun git-help ()
+ "Display help for Git mode."
+ (interactive)
+ (describe-function 'git-status-mode))
+
(provide 'git)
;;; git.el ends here
"Register FILE into the git version-control system."
(vc-git--run-command file "update-index" "--add" "--"))
-(defun vc-git-print-log (file)
+(defun vc-git-print-log (file &optional buffer)
(let ((name (file-relative-name file))
(coding-system-for-read git-commits-coding-system))
- (vc-do-command nil 'async "git" name "rev-list" "--pretty" "HEAD" "--")))
+ (vc-do-command buffer 'async "git" name "rev-list" "--pretty" "HEAD" "--")))
-(defun vc-git-diff (file &optional rev1 rev2)
- (let ((name (file-relative-name file)))
+(defun vc-git-diff (file &optional rev1 rev2 buffer)
+ (let ((name (file-relative-name file))
+ (buf (or buffer "*vc-diff*")))
(if (and rev1 rev2)
- (vc-do-command "*vc-diff*" 0 "git" name "diff-tree" "-p" rev1 rev2 "--")
- (vc-do-command "*vc-diff*" 0 "git" name "diff-index" "-p" (or rev1 "HEAD") "--"))
+ (vc-do-command buf 0 "git" name "diff-tree" "-p" rev1 rev2 "--")
+ (vc-do-command buf 0 "git" name "diff-index" "-p" (or rev1 "HEAD") "--"))
; git-diff-index doesn't set exit status like diff does
(if (vc-git-workfile-unchanged-p file) 0 1)))
+++ /dev/null
-git-svn
-git-svn.xml
-git-svn.html
-git-svn.1
+++ /dev/null
-all: git-svn
-
-prefix?=$(HOME)
-bindir=$(prefix)/bin
-mandir=$(prefix)/man
-man1=$(mandir)/man1
-INSTALL?=install
-doc_conf=../../Documentation/asciidoc.conf
--include ../../config.mak
-
-git-svn: git-svn.perl
- cp $< $@
- chmod +x $@
-
-install: all
- $(INSTALL) -d -m755 $(DESTDIR)$(bindir)
- $(INSTALL) git-svn $(DESTDIR)$(bindir)
-
-install-doc: doc
- $(INSTALL) git-svn.1 $(DESTDIR)$(man1)
-
-doc: git-svn.1
-git-svn.1 : git-svn.xml
- xmlto man git-svn.xml
-git-svn.xml : git-svn.txt
- asciidoc -b docbook -d manpage \
- -f ../../Documentation/asciidoc.conf $<
-git-svn.html : git-svn.txt
- asciidoc -b xhtml11 -d manpage \
- -f ../../Documentation/asciidoc.conf $<
-test: git-svn
- cd t && for i in t????-*.sh; do $(SHELL) ./$$i $(TEST_FLAGS); done
-
-# we can test NO_OPTIMIZE_COMMITS independently of LC_ALL
-full-test:
- $(MAKE) test GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C
- $(MAKE) test GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C
- $(MAKE) test GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \
- LC_ALL=en_US.UTF-8
- $(MAKE) test GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \
- LC_ALL=en_US.UTF-8
-
-clean:
- rm -f git-svn *.xml *.html *.1
+++ /dev/null
-#!/usr/bin/env perl
-# Copyright (C) 2006, Eric Wong <normalperson@yhbt.net>
-# License: GPL v2 or later
-use warnings;
-use strict;
-use vars qw/ $AUTHOR $VERSION
- $SVN_URL $SVN_INFO $SVN_WC $SVN_UUID
- $GIT_SVN_INDEX $GIT_SVN
- $GIT_DIR $GIT_SVN_DIR $REVDB/;
-$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
-$VERSION = '1.1.1-broken';
-
-use Cwd qw/abs_path/;
-$GIT_DIR = abs_path($ENV{GIT_DIR} || '.git');
-$ENV{GIT_DIR} = $GIT_DIR;
-
-my $LC_ALL = $ENV{LC_ALL};
-my $TZ = $ENV{TZ};
-# make sure the svn binary gives consistent output between locales and TZs:
-$ENV{TZ} = 'UTC';
-$ENV{LC_ALL} = 'C';
-
-# If SVN:: library support is added, please make the dependencies
-# optional and preserve the capability to use the command-line client.
-# use eval { require SVN::... } to make it lazy load
-# We don't use any modules not in the standard Perl distribution:
-use Carp qw/croak/;
-use IO::File qw//;
-use File::Basename qw/dirname basename/;
-use File::Path qw/mkpath/;
-use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev pass_through/;
-use File::Spec qw//;
-use POSIX qw/strftime/;
-use IPC::Open3;
-use Memoize;
-memoize('revisions_eq');
-
-my ($SVN_PATH, $SVN, $SVN_LOG, $_use_lib);
-$_use_lib = 1 unless $ENV{GIT_SVN_NO_LIB};
-libsvn_load();
-my $_optimize_commits = 1 unless $ENV{GIT_SVN_NO_OPTIMIZE_COMMITS};
-my $sha1 = qr/[a-f\d]{40}/;
-my $sha1_short = qr/[a-f\d]{4,40}/;
-my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit,
- $_find_copies_harder, $_l, $_cp_similarity, $_cp_remote,
- $_repack, $_repack_nr, $_repack_flags,
- $_template, $_shared, $_no_default_regex, $_no_graft_copy,
- $_limit, $_verbose, $_incremental, $_oneline, $_l_fmt, $_show_commit,
- $_version, $_upgrade, $_authors, $_branch_all_refs, @_opt_m);
-my (@_branch_from, %tree_map, %users, %rusers, %equiv);
-my ($_svn_co_url_revs, $_svn_pg_peg_revs);
-my @repo_path_split_cache;
-
-my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext,
- 'branch|b=s' => \@_branch_from,
- 'branch-all-refs|B' => \$_branch_all_refs,
- 'authors-file|A=s' => \$_authors,
- 'repack:i' => \$_repack,
- 'repack-flags|repack-args|repack-opts=s' => \$_repack_flags);
-
-my ($_trunk, $_tags, $_branches);
-my %multi_opts = ( 'trunk|T=s' => \$_trunk,
- 'tags|t=s' => \$_tags,
- 'branches|b=s' => \$_branches );
-my %init_opts = ( 'template=s' => \$_template, 'shared' => \$_shared );
-
-# yes, 'native' sets "\n". Patches to fix this for non-*nix systems welcome:
-my %EOL = ( CR => "\015", LF => "\012", CRLF => "\015\012", native => "\012" );
-
-my %cmd = (
- fetch => [ \&fetch, "Download new revisions from SVN",
- { 'revision|r=s' => \$_revision, %fc_opts } ],
- init => [ \&init, "Initialize a repo for tracking" .
- " (requires URL argument)",
- \%init_opts ],
- commit => [ \&commit, "Commit git revisions to SVN",
- { 'stdin|' => \$_stdin,
- 'edit|e' => \$_edit,
- 'rmdir' => \$_rmdir,
- 'find-copies-harder' => \$_find_copies_harder,
- 'l=i' => \$_l,
- 'copy-similarity|C=i'=> \$_cp_similarity,
- %fc_opts,
- } ],
- 'show-ignore' => [ \&show_ignore, "Show svn:ignore listings",
- { 'revision|r=i' => \$_revision } ],
- rebuild => [ \&rebuild, "Rebuild git-svn metadata (after git clone)",
- { 'no-ignore-externals' => \$_no_ignore_ext,
- 'copy-remote|remote=s' => \$_cp_remote,
- 'upgrade' => \$_upgrade } ],
- 'graft-branches' => [ \&graft_branches,
- 'Detect merges/branches from already imported history',
- { 'merge-rx|m' => \@_opt_m,
- 'no-default-regex' => \$_no_default_regex,
- 'no-graft-copy' => \$_no_graft_copy } ],
- 'multi-init' => [ \&multi_init,
- 'Initialize multiple trees (like git-svnimport)',
- { %multi_opts, %fc_opts } ],
- 'multi-fetch' => [ \&multi_fetch,
- 'Fetch multiple trees (like git-svnimport)',
- \%fc_opts ],
- 'log' => [ \&show_log, 'Show commit logs',
- { 'limit=i' => \$_limit,
- 'revision|r=s' => \$_revision,
- 'verbose|v' => \$_verbose,
- 'incremental' => \$_incremental,
- 'oneline' => \$_oneline,
- 'show-commit' => \$_show_commit,
- 'authors-file|A=s' => \$_authors,
- } ],
-);
-
-my $cmd;
-for (my $i = 0; $i < @ARGV; $i++) {
- if (defined $cmd{$ARGV[$i]}) {
- $cmd = $ARGV[$i];
- splice @ARGV, $i, 1;
- last;
- }
-};
-
-my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd);
-
-read_repo_config(\%opts);
-my $rv = GetOptions(%opts, 'help|H|h' => \$_help,
- 'version|V' => \$_version,
- 'id|i=s' => \$GIT_SVN);
-exit 1 if (!$rv && $cmd ne 'log');
-
-set_default_vals();
-usage(0) if $_help;
-version() if $_version;
-usage(1) unless defined $cmd;
-init_vars();
-load_authors() if $_authors;
-load_all_refs() if $_branch_all_refs;
-svn_compat_check();
-migration_check() unless $cmd =~ /^(?:init|rebuild|multi-init)$/;
-$cmd{$cmd}->[0]->(@ARGV);
-exit 0;
-
-####################### primary functions ######################
-sub usage {
- my $exit = shift || 0;
- my $fd = $exit ? \*STDERR : \*STDOUT;
- print $fd <<"";
-git-svn - bidirectional operations between a single Subversion tree and git
-Usage: $0 <command> [options] [arguments]\n
-
- print $fd "Available commands:\n" unless $cmd;
-
- foreach (sort keys %cmd) {
- next if $cmd && $cmd ne $_;
- print $fd ' ',pack('A13',$_),$cmd{$_}->[1],"\n";
- foreach (keys %{$cmd{$_}->[2]}) {
- # prints out arguments as they should be passed:
- my $x = s#[:=]s$## ? '<arg>' : s#[:=]i$## ? '<num>' : '';
- print $fd ' ' x 17, join(', ', map { length $_ > 1 ?
- "--$_" : "-$_" }
- split /\|/,$_)," $x\n";
- }
- }
- print $fd <<"";
-\nGIT_SVN_ID may be set in the environment or via the --id/-i switch to an
-arbitrary identifier if you're tracking multiple SVN branches/repositories in
-one git repository and want to keep them separate. See git-svn(1) for more
-information.
-
- exit $exit;
-}
-
-sub version {
- print "git-svn version $VERSION\n";
- exit 0;
-}
-
-sub rebuild {
- if (quiet_run(qw/git-rev-parse --verify/,"refs/remotes/$GIT_SVN^0")) {
- copy_remote_ref();
- }
- $SVN_URL = shift or undef;
- my $newest_rev = 0;
- if ($_upgrade) {
- sys('git-update-ref',"refs/remotes/$GIT_SVN","$GIT_SVN-HEAD");
- } else {
- check_upgrade_needed();
- }
-
- my $pid = open(my $rev_list,'-|');
- defined $pid or croak $!;
- if ($pid == 0) {
- exec("git-rev-list","refs/remotes/$GIT_SVN") or croak $!;
- }
- my $latest;
- while (<$rev_list>) {
- chomp;
- my $c = $_;
- croak "Non-SHA1: $c\n" unless $c =~ /^$sha1$/o;
- my @commit = grep(/^git-svn-id: /,`git-cat-file commit $c`);
- next if (!@commit); # skip merges
- my ($url, $rev, $uuid) = extract_metadata($commit[$#commit]);
- if (!$rev || !$uuid) {
- croak "Unable to extract revision or UUID from ",
- "$c, $commit[$#commit]\n";
- }
-
- # if we merged or otherwise started elsewhere, this is
- # how we break out of it
- next if (defined $SVN_UUID && ($uuid ne $SVN_UUID));
- next if (defined $SVN_URL && defined $url && ($url ne $SVN_URL));
-
- unless (defined $latest) {
- if (!$SVN_URL && !$url) {
- croak "SVN repository location required: $url\n";
- }
- $SVN_URL ||= $url;
- $SVN_UUID ||= $uuid;
- setup_git_svn();
- $latest = $rev;
- }
- revdb_set($REVDB, $rev, $c);
- print "r$rev = $c\n";
- $newest_rev = $rev if ($rev > $newest_rev);
- }
- close $rev_list or croak $?;
-
- goto out if $_use_lib;
- if (!chdir $SVN_WC) {
- svn_cmd_checkout($SVN_URL, $latest, $SVN_WC);
- chdir $SVN_WC or croak $!;
- }
-
- $pid = fork;
- defined $pid or croak $!;
- if ($pid == 0) {
- my @svn_up = qw(svn up);
- push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
- sys(@svn_up,"-r$newest_rev");
- $ENV{GIT_INDEX_FILE} = $GIT_SVN_INDEX;
- index_changes();
- exec('git-write-tree') or croak $!;
- }
- waitpid $pid, 0;
- croak $? if $?;
-out:
- if ($_upgrade) {
- print STDERR <<"";
-Keeping deprecated refs/head/$GIT_SVN-HEAD for now. Please remove it
-when you have upgraded your tools and habits to use refs/remotes/$GIT_SVN
-
- }
-}
-
-sub init {
- $SVN_URL = shift or die "SVN repository location required " .
- "as a command-line argument\n";
- $SVN_URL =~ s!/+$!!; # strip trailing slash
- unless (-d $GIT_DIR) {
- my @init_db = ('git-init-db');
- push @init_db, "--template=$_template" if defined $_template;
- push @init_db, "--shared" if defined $_shared;
- sys(@init_db);
- }
- setup_git_svn();
-}
-
-sub fetch {
- check_upgrade_needed();
- $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
- my $ret = $_use_lib ? fetch_lib(@_) : fetch_cmd(@_);
- if ($ret->{commit} && quiet_run(qw(git-rev-parse --verify
- refs/heads/master^0))) {
- sys(qw(git-update-ref refs/heads/master),$ret->{commit});
- }
- return $ret;
-}
-
-sub fetch_cmd {
- my (@parents) = @_;
- my @log_args = -d $SVN_WC ? ($SVN_WC) : ($SVN_URL);
- unless ($_revision) {
- $_revision = -d $SVN_WC ? 'BASE:HEAD' : '0:HEAD';
- }
- push @log_args, "-r$_revision";
- push @log_args, '--stop-on-copy' unless $_no_stop_copy;
-
- my $svn_log = svn_log_raw(@log_args);
-
- my $base = next_log_entry($svn_log) or croak "No base revision!\n";
- # don't need last_revision from grab_base_rev() because
- # user could've specified a different revision to skip (they
- # didn't want to import certain revisions into git for whatever
- # reason, so trust $base->{revision} instead.
- my (undef, $last_commit) = svn_grab_base_rev();
- unless (-d $SVN_WC) {
- svn_cmd_checkout($SVN_URL,$base->{revision},$SVN_WC);
- chdir $SVN_WC or croak $!;
- read_uuid();
- $last_commit = git_commit($base, @parents);
- assert_tree($last_commit);
- } else {
- chdir $SVN_WC or croak $!;
- read_uuid();
- # looks like a user manually cp'd and svn switch'ed
- unless ($last_commit) {
- sys(qw/svn revert -R ./);
- assert_svn_wc_clean($base->{revision});
- $last_commit = git_commit($base, @parents);
- assert_tree($last_commit);
- }
- }
- my @svn_up = qw(svn up);
- push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
- my $last = $base;
- while (my $log_msg = next_log_entry($svn_log)) {
- if ($last->{revision} >= $log_msg->{revision}) {
- croak "Out of order: last >= current: ",
- "$last->{revision} >= $log_msg->{revision}\n";
- }
- # Revert is needed for cases like:
- # https://svn.musicpd.org/Jamming/trunk (r166:167), but
- # I can't seem to reproduce something like that on a test...
- sys(qw/svn revert -R ./);
- assert_svn_wc_clean($last->{revision});
- sys(@svn_up,"-r$log_msg->{revision}");
- $last_commit = git_commit($log_msg, $last_commit, @parents);
- $last = $log_msg;
- }
- close $svn_log->{fh};
- $last->{commit} = $last_commit;
- return $last;
-}
-
-sub fetch_lib {
- my (@parents) = @_;
- $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
- my $repo;
- ($repo, $SVN_PATH) = repo_path_split($SVN_URL);
- $SVN_LOG ||= libsvn_connect($repo);
- $SVN ||= libsvn_connect($repo);
- my ($last_rev, $last_commit) = svn_grab_base_rev();
- my ($base, $head) = libsvn_parse_revision($last_rev);
- if ($base > $head) {
- return { revision => $last_rev, commit => $last_commit }
- }
- my $index = set_index($GIT_SVN_INDEX);
-
- # limit ourselves and also fork() since get_log won't release memory
- # after processing a revision and SVN stuff seems to leak
- my $inc = 1000;
- my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc);
- read_uuid();
- if (defined $last_commit) {
- unless (-e $GIT_SVN_INDEX) {
- sys(qw/git-read-tree/, $last_commit);
- }
- chomp (my $x = `git-write-tree`);
- my ($y) = (`git-cat-file commit $last_commit`
- =~ /^tree ($sha1)/m);
- if ($y ne $x) {
- unlink $GIT_SVN_INDEX or croak $!;
- sys(qw/git-read-tree/, $last_commit);
- }
- chomp ($x = `git-write-tree`);
- if ($y ne $x) {
- print STDERR "trees ($last_commit) $y != $x\n",
- "Something is seriously wrong...\n";
- }
- }
- while (1) {
- # fork, because using SVN::Pool with get_log() still doesn't
- # seem to help enough to keep memory usage down.
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- $SVN::Error::handler = \&libsvn_skip_unknown_revs;
-
- # Yes I'm perfectly aware that the fourth argument
- # below is the limit revisions number. Unfortunately
- # performance sucks with it enabled, so it's much
- # faster to fetch revision ranges instead of relying
- # on the limiter.
- $SVN_LOG->get_log( '/'.$SVN_PATH, $min, $max, 0, 1, 1,
- sub {
- my $log_msg;
- if ($last_commit) {
- $log_msg = libsvn_fetch(
- $last_commit, @_);
- $last_commit = git_commit(
- $log_msg,
- $last_commit,
- @parents);
- } else {
- $log_msg = libsvn_new_tree(@_);
- $last_commit = git_commit(
- $log_msg, @parents);
- }
- });
- exit 0;
- }
- waitpid $pid, 0;
- croak $? if $?;
- ($last_rev, $last_commit) = svn_grab_base_rev();
- last if ($max >= $head);
- $min = $max + 1;
- $max += $inc;
- $max = $head if ($max > $head);
- }
- restore_index($index);
- return { revision => $last_rev, commit => $last_commit };
-}
-
-sub commit {
- my (@commits) = @_;
- check_upgrade_needed();
- if ($_stdin || !@commits) {
- print "Reading from stdin...\n";
- @commits = ();
- while (<STDIN>) {
- if (/\b($sha1_short)\b/o) {
- unshift @commits, $1;
- }
- }
- }
- my @revs;
- foreach my $c (@commits) {
- chomp(my @tmp = safe_qx('git-rev-parse',$c));
- if (scalar @tmp == 1) {
- push @revs, $tmp[0];
- } elsif (scalar @tmp > 1) {
- push @revs, reverse (safe_qx('git-rev-list',@tmp));
- } else {
- die "Failed to rev-parse $c\n";
- }
- }
- chomp @revs;
- $_use_lib ? commit_lib(@revs) : commit_cmd(@revs);
- print "Done committing ",scalar @revs," revisions to SVN\n";
-}
-
-sub commit_cmd {
- my (@revs) = @_;
-
- chdir $SVN_WC or croak "Unable to chdir $SVN_WC: $!\n";
- my $info = svn_info('.');
- my $fetched = fetch();
- if ($info->{Revision} != $fetched->{revision}) {
- print STDERR "There are new revisions that were fetched ",
- "and need to be merged (or acknowledged) ",
- "before committing.\n";
- exit 1;
- }
- $info = svn_info('.');
- read_uuid($info);
- my $last = $fetched;
- foreach my $c (@revs) {
- my $mods = svn_checkout_tree($last, $c);
- if (scalar @$mods == 0) {
- print "Skipping, no changes detected\n";
- next;
- }
- $last = svn_commit_tree($last, $c);
- }
-}
-
-sub commit_lib {
- my (@revs) = @_;
- my ($r_last, $cmt_last) = svn_grab_base_rev();
- defined $r_last or die "Must have an existing revision to commit\n";
- my $fetched = fetch();
- if ($r_last != $fetched->{revision}) {
- print STDERR "There are new revisions that were fetched ",
- "and need to be merged (or acknowledged) ",
- "before committing.\n",
- "last rev: $r_last\n",
- " current: $fetched->{revision}\n";
- exit 1;
- }
- read_uuid();
- my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : ();
- my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$";
-
- if (defined $LC_ALL) {
- $ENV{LC_ALL} = $LC_ALL;
- } else {
- delete $ENV{LC_ALL};
- }
- foreach my $c (@revs) {
- my $log_msg = get_commit_message($c, $commit_msg);
-
- # fork for each commit because there's a memory leak I
- # can't track down... (it's probably in the SVN code)
- defined(my $pid = open my $fh, '-|') or croak $!;
- if (!$pid) {
- my $ed = SVN::Git::Editor->new(
- { r => $r_last,
- ra => $SVN,
- c => $c,
- svn_path => $SVN_PATH
- },
- $SVN->get_commit_editor(
- $log_msg->{msg},
- sub {
- libsvn_commit_cb(
- @_, $c,
- $log_msg->{msg},
- $r_last,
- $cmt_last)
- },
- @lock)
- );
- my $mods = libsvn_checkout_tree($cmt_last, $c, $ed);
- if (@$mods == 0) {
- print "No changes\nr$r_last = $cmt_last\n";
- $ed->abort_edit;
- } else {
- $ed->close_edit;
- }
- exit 0;
- }
- my ($r_new, $cmt_new, $no);
- while (<$fh>) {
- print $_;
- chomp;
- if (/^r(\d+) = ($sha1)$/o) {
- ($r_new, $cmt_new) = ($1, $2);
- } elsif ($_ eq 'No changes') {
- $no = 1;
- }
- }
- close $fh or croak $?;
- if (! defined $r_new && ! defined $cmt_new) {
- unless ($no) {
- die "Failed to parse revision information\n";
- }
- } else {
- ($r_last, $cmt_last) = ($r_new, $cmt_new);
- }
- }
- $ENV{LC_ALL} = 'C';
- unlink $commit_msg;
-}
-
-sub show_ignore {
- $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
- $_use_lib ? show_ignore_lib() : show_ignore_cmd();
-}
-
-sub show_ignore_cmd {
- require File::Find or die $!;
- if (defined $_revision) {
- die "-r/--revision option doesn't work unless the Perl SVN ",
- "libraries are used\n";
- }
- chdir $SVN_WC or croak $!;
- my %ign;
- File::Find::find({wanted=>sub{if(lstat $_ && -d _ && -d "$_/.svn"){
- s#^\./##;
- @{$ign{$_}} = svn_propget_base('svn:ignore', $_);
- }}, no_chdir=>1},'.');
-
- print "\n# /\n";
- foreach (@{$ign{'.'}}) { print '/',$_ if /\S/ }
- delete $ign{'.'};
- foreach my $i (sort keys %ign) {
- print "\n# ",$i,"\n";
- foreach (@{$ign{$i}}) { print '/',$i,'/',$_ if /\S/ }
- }
-}
-
-sub show_ignore_lib {
- my $repo;
- ($repo, $SVN_PATH) = repo_path_split($SVN_URL);
- $SVN ||= libsvn_connect($repo);
- my $r = defined $_revision ? $_revision : $SVN->get_latest_revnum;
- libsvn_traverse_ignore(\*STDOUT, $SVN_PATH, $r);
-}
-
-sub graft_branches {
- my $gr_file = "$GIT_DIR/info/grafts";
- my ($grafts, $comments) = read_grafts($gr_file);
- my $gr_sha1;
-
- if (%$grafts) {
- # temporarily disable our grafts file to make this idempotent
- chomp($gr_sha1 = safe_qx(qw/git-hash-object -w/,$gr_file));
- rename $gr_file, "$gr_file~$gr_sha1" or croak $!;
- }
-
- my $l_map = read_url_paths();
- my @re = map { qr/$_/is } @_opt_m if @_opt_m;
- unless ($_no_default_regex) {
- push @re, ( qr/\b(?:merge|merging|merged)\s+(\S.+)/is,
- qr/\b(?:from|of)\s+(\S.+)/is );
- }
- foreach my $u (keys %$l_map) {
- if (@re) {
- foreach my $p (keys %{$l_map->{$u}}) {
- graft_merge_msg($grafts,$l_map,$u,$p);
- }
- }
- unless ($_no_graft_copy) {
- if ($_use_lib) {
- graft_file_copy_lib($grafts,$l_map,$u);
- } else {
- graft_file_copy_cmd($grafts,$l_map,$u);
- }
- }
- }
-
- write_grafts($grafts, $comments, $gr_file);
- unlink "$gr_file~$gr_sha1" if $gr_sha1;
-}
-
-sub multi_init {
- my $url = shift;
- $_trunk ||= 'trunk';
- $_trunk =~ s#/+$##;
- $url =~ s#/+$## if $url;
- if ($_trunk !~ m#^[a-z\+]+://#) {
- $_trunk = '/' . $_trunk if ($_trunk !~ m#^/#);
- unless ($url) {
- print STDERR "E: '$_trunk' is not a complete URL ",
- "and a separate URL is not specified\n";
- exit 1;
- }
- $_trunk = $url . $_trunk;
- }
- if ($GIT_SVN eq 'git-svn') {
- print "GIT_SVN_ID set to 'trunk' for $_trunk\n";
- $GIT_SVN = $ENV{GIT_SVN_ID} = 'trunk';
- }
- init_vars();
- init($_trunk);
- complete_url_ls_init($url, $_branches, '--branches/-b', '');
- complete_url_ls_init($url, $_tags, '--tags/-t', 'tags/');
-}
-
-sub multi_fetch {
- # try to do trunk first, since branches/tags
- # may be descended from it.
- if (-e "$GIT_DIR/svn/trunk/info/url") {
- fetch_child_id('trunk', @_);
- }
- rec_fetch('', "$GIT_DIR/svn", @_);
-}
-
-sub show_log {
- my (@args) = @_;
- my ($r_min, $r_max);
- my $r_last = -1; # prevent dupes
- rload_authors() if $_authors;
- if (defined $TZ) {
- $ENV{TZ} = $TZ;
- } else {
- delete $ENV{TZ};
- }
- if (defined $_revision) {
- if ($_revision =~ /^(\d+):(\d+)$/) {
- ($r_min, $r_max) = ($1, $2);
- } elsif ($_revision =~ /^\d+$/) {
- $r_min = $r_max = $_revision;
- } else {
- print STDERR "-r$_revision is not supported, use ",
- "standard \'git log\' arguments instead\n";
- exit 1;
- }
- }
-
- my $pid = open(my $log,'-|');
- defined $pid or croak $!;
- if (!$pid) {
- exec(git_svn_log_cmd($r_min,$r_max), @args) or croak $!;
- }
- setup_pager();
- my (@k, $c, $d);
-
- while (<$log>) {
- if (/^commit ($sha1_short)/o) {
- my $cmt = $1;
- if ($c && cmt_showable($c) && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k) or
- goto out;
- }
- $d = undef;
- $c = { c => $cmt };
- } elsif (/^author (.+) (\d+) ([\-\+]?\d+)$/) {
- get_author_info($c, $1, $2, $3);
- } elsif (/^(?:tree|parent|committer) /) {
- # ignore
- } elsif (/^:\d{6} \d{6} $sha1_short/o) {
- push @{$c->{raw}}, $_;
- } elsif (/^diff /) {
- $d = 1;
- push @{$c->{diff}}, $_;
- } elsif ($d) {
- push @{$c->{diff}}, $_;
- } elsif (/^ (git-svn-id:.+)$/) {
- (undef, $c->{r}, undef) = extract_metadata($1);
- } elsif (s/^ //) {
- push @{$c->{l}}, $_;
- }
- }
- if ($c && defined $c->{r} && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k);
- }
- if (@k) {
- my $swap = $r_max;
- $r_max = $r_min;
- $r_min = $swap;
- process_commit($_, $r_min, $r_max) foreach reverse @k;
- }
-out:
- close $log;
- print '-' x72,"\n" unless $_incremental || $_oneline;
-}
-
-########################### utility functions #########################
-
-sub cmt_showable {
- my ($c) = @_;
- return 1 if defined $c->{r};
- if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
- $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
- my @msg = safe_qx(qw/git-cat-file commit/, $c->{c});
- shift @msg while ($msg[0] ne "\n");
- shift @msg;
- @{$c->{l}} = grep !/^git-svn-id: /, @msg;
-
- (undef, $c->{r}, undef) = extract_metadata(
- (grep(/^git-svn-id: /, @msg))[-1]);
- }
- return defined $c->{r};
-}
-
-sub git_svn_log_cmd {
- my ($r_min, $r_max) = @_;
- my @cmd = (qw/git-log --abbrev-commit --pretty=raw
- --default/, "refs/remotes/$GIT_SVN");
- push @cmd, '--summary' if $_verbose;
- return @cmd unless defined $r_max;
- if ($r_max == $r_min) {
- push @cmd, '--max-count=1';
- if (my $c = revdb_get($REVDB, $r_max)) {
- push @cmd, $c;
- }
- } else {
- my ($c_min, $c_max);
- $c_max = revdb_get($REVDB, $r_max);
- $c_min = revdb_get($REVDB, $r_min);
- if ($c_min && $c_max) {
- if ($r_max > $r_max) {
- push @cmd, "$c_min..$c_max";
- } else {
- push @cmd, "$c_max..$c_min";
- }
- } elsif ($r_max > $r_min) {
- push @cmd, $c_max;
- } else {
- push @cmd, $c_min;
- }
- }
- return @cmd;
-}
-
-sub fetch_child_id {
- my $id = shift;
- print "Fetching $id\n";
- my $ref = "$GIT_DIR/refs/remotes/$id";
- my $ca = file_to_s($ref) if (-r $ref);
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
- init_vars();
- fetch(@_);
- exit 0;
- }
- waitpid $pid, 0;
- croak $? if $?;
- return unless $_repack || -r $ref;
-
- my $cb = file_to_s($ref);
-
- defined($pid = open my $fh, '-|') or croak $!;
- my $url = file_to_s("$GIT_DIR/svn/$id/info/url");
- $url = qr/\Q$url\E/;
- if (!$pid) {
- exec qw/git-rev-list --pretty=raw/,
- $ca ? "$ca..$cb" : $cb or croak $!;
- }
- while (<$fh>) {
- if (/^ git-svn-id: $url\@\d+ [a-f0-9\-]+$/) {
- check_repack();
- } elsif (/^ git-svn-id: \S+\@\d+ [a-f0-9\-]+$/) {
- last;
- }
- }
- close $fh;
-}
-
-sub rec_fetch {
- my ($pfx, $p, @args) = @_;
- my @dir;
- foreach (sort <$p/*>) {
- if (-r "$_/info/url") {
- $pfx .= '/' if $pfx && $pfx !~ m!/$!;
- my $id = $pfx . basename $_;
- next if $id eq 'trunk';
- fetch_child_id($id, @args);
- } elsif (-d $_) {
- push @dir, $_;
- }
- }
- foreach (@dir) {
- my $x = $_;
- $x =~ s!^\Q$GIT_DIR\E/svn/!!;
- rec_fetch($x, $_);
- }
-}
-
-sub complete_url_ls_init {
- my ($url, $var, $switch, $pfx) = @_;
- unless ($var) {
- print STDERR "W: $switch not specified\n";
- return;
- }
- $var =~ s#/+$##;
- if ($var !~ m#^[a-z\+]+://#) {
- $var = '/' . $var if ($var !~ m#^/#);
- unless ($url) {
- print STDERR "E: '$var' is not a complete URL ",
- "and a separate URL is not specified\n";
- exit 1;
- }
- $var = $url . $var;
- }
- chomp(my @ls = $_use_lib ? libsvn_ls_fullurl($var)
- : safe_qx(qw/svn ls --non-interactive/, $var));
- my $old = $GIT_SVN;
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- foreach my $u (map { "$var/$_" } (grep m!/$!, @ls)) {
- $u =~ s#/+$##;
- if ($u !~ m!\Q$var\E/(.+)$!) {
- print STDERR "W: Unrecognized URL: $u\n";
- die "This should never happen\n";
- }
- my $id = $pfx.$1;
- print "init $u => $id\n";
- $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
- init_vars();
- init($u);
- }
- exit 0;
- }
- waitpid $pid, 0;
- croak $? if $?;
-}
-
-sub common_prefix {
- my $paths = shift;
- my %common;
- foreach (@$paths) {
- my @tmp = split m#/#, $_;
- my $p = '';
- while (my $x = shift @tmp) {
- $p .= "/$x";
- $common{$p} ||= 0;
- $common{$p}++;
- }
- }
- foreach (sort {length $b <=> length $a} keys %common) {
- if ($common{$_} == @$paths) {
- return $_;
- }
- }
- return '';
-}
-
-# this isn't funky-filename safe, but good enough for now...
-sub graft_file_copy_cmd {
- my ($grafts, $l_map, $u) = @_;
- my $paths = $l_map->{$u};
- my $pfx = common_prefix([keys %$paths]);
- $SVN_URL ||= $u.$pfx;
- my $pid = open my $fh, '-|';
- defined $pid or croak $!;
- unless ($pid) {
- my @exec = qw/svn log -v/;
- push @exec, "-r$_revision" if defined $_revision;
- exec @exec, $u.$pfx or croak $!;
- }
- my ($r, $mp) = (undef, undef);
- while (<$fh>) {
- chomp;
- if (/^\-{72}$/) {
- $mp = $r = undef;
- } elsif (/^r(\d+) \| /) {
- $r = $1 unless defined $r;
- } elsif (/^Changed paths:/) {
- $mp = 1;
- } elsif ($mp && m#^ [AR] /(\S.*?) \(from /(\S+?):(\d+)\)$#) {
- my ($p1, $p0, $r0) = ($1, $2, $3);
- my $c = find_graft_path_commit($paths, $p1, $r);
- next unless $c;
- find_graft_path_parents($grafts, $paths, $c, $p0, $r0);
- }
- }
-}
-
-sub graft_file_copy_lib {
- my ($grafts, $l_map, $u) = @_;
- my $tree_paths = $l_map->{$u};
- my $pfx = common_prefix([keys %$tree_paths]);
- my ($repo, $path) = repo_path_split($u.$pfx);
- $SVN_LOG ||= libsvn_connect($repo);
- $SVN ||= libsvn_connect($repo);
-
- my ($base, $head) = libsvn_parse_revision();
- my $inc = 1000;
- my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc);
- my $eh = $SVN::Error::handler;
- $SVN::Error::handler = \&libsvn_skip_unknown_revs;
- while (1) {
- my $pool = SVN::Pool->new;
- $SVN_LOG->get_log( "/$path", $min, $max, 0, 1, 1,
- sub {
- libsvn_graft_file_copies($grafts, $tree_paths,
- $path, @_);
- }, $pool);
- $pool->clear;
- last if ($max >= $head);
- $min = $max + 1;
- $max += $inc;
- $max = $head if ($max > $head);
- }
- $SVN::Error::handler = $eh;
-}
-
-sub process_merge_msg_matches {
- my ($grafts, $l_map, $u, $p, $c, @matches) = @_;
- my (@strong, @weak);
- foreach (@matches) {
- # merging with ourselves is not interesting
- next if $_ eq $p;
- if ($l_map->{$u}->{$_}) {
- push @strong, $_;
- } else {
- push @weak, $_;
- }
- }
- foreach my $w (@weak) {
- last if @strong;
- # no exact match, use branch name as regexp.
- my $re = qr/\Q$w\E/i;
- foreach (keys %{$l_map->{$u}}) {
- if (/$re/) {
- push @strong, $_;
- last;
- }
- }
- last if @strong;
- $w = basename($w);
- $re = qr/\Q$w\E/i;
- foreach (keys %{$l_map->{$u}}) {
- if (/$re/) {
- push @strong, $_;
- last;
- }
- }
- }
- my ($rev) = ($c->{m} =~ /^git-svn-id:\s(?:\S+?)\@(\d+)
- \s(?:[a-f\d\-]+)$/xsm);
- unless (defined $rev) {
- ($rev) = ($c->{m} =~/^git-svn-id:\s(\d+)
- \@(?:[a-f\d\-]+)/xsm);
- return unless defined $rev;
- }
- foreach my $m (@strong) {
- my ($r0, $s0) = find_rev_before($rev, $m);
- $grafts->{$c->{c}}->{$s0} = 1 if defined $s0;
- }
-}
-
-sub graft_merge_msg {
- my ($grafts, $l_map, $u, $p, @re) = @_;
-
- my $x = $l_map->{$u}->{$p};
- my $rl = rev_list_raw($x);
- while (my $c = next_rev_list_entry($rl)) {
- foreach my $re (@re) {
- my (@br) = ($c->{m} =~ /$re/g);
- next unless @br;
- process_merge_msg_matches($grafts,$l_map,$u,$p,$c,@br);
- }
- }
-}
-
-sub read_uuid {
- return if $SVN_UUID;
- if ($_use_lib) {
- my $pool = SVN::Pool->new;
- $SVN_UUID = $SVN->get_uuid($pool);
- $pool->clear;
- } else {
- my $info = shift || svn_info('.');
- $SVN_UUID = $info->{'Repository UUID'} or
- croak "Repository UUID unreadable\n";
- }
-}
-
-sub quiet_run {
- my $pid = fork;
- defined $pid or croak $!;
- if (!$pid) {
- open my $null, '>', '/dev/null' or croak $!;
- open STDERR, '>&', $null or croak $!;
- open STDOUT, '>&', $null or croak $!;
- exec @_ or croak $!;
- }
- waitpid $pid, 0;
- return $?;
-}
-
-sub repo_path_split {
- my $full_url = shift;
- $full_url =~ s#/+$##;
-
- foreach (@repo_path_split_cache) {
- if ($full_url =~ s#$_##) {
- my $u = $1;
- $full_url =~ s#^/+##;
- return ($u, $full_url);
- }
- }
-
- my ($url, $path) = ($full_url =~ m!^([a-z\+]+://[^/]*)(.*)$!i);
- $path =~ s#^/+##;
- my @paths = split(m#/+#, $path);
-
- if ($_use_lib) {
- while (1) {
- $SVN = libsvn_connect($url);
- last if (defined $SVN &&
- defined eval { $SVN->get_latest_revnum });
- my $n = shift @paths || last;
- $url .= "/$n";
- }
- } else {
- while (quiet_run(qw/svn ls --non-interactive/, $url)) {
- my $n = shift @paths || last;
- $url .= "/$n";
- }
- }
- push @repo_path_split_cache, qr/^(\Q$url\E)/;
- $path = join('/',@paths);
- return ($url, $path);
-}
-
-sub setup_git_svn {
- defined $SVN_URL or croak "SVN repository location required\n";
- unless (-d $GIT_DIR) {
- croak "GIT_DIR=$GIT_DIR does not exist!\n";
- }
- mkpath([$GIT_SVN_DIR]);
- mkpath(["$GIT_SVN_DIR/info"]);
- open my $fh, '>>',$REVDB or croak $!;
- close $fh;
- s_to_file($SVN_URL,"$GIT_SVN_DIR/info/url");
-
-}
-
-sub assert_svn_wc_clean {
- return if $_use_lib;
- my ($svn_rev) = @_;
- croak "$svn_rev is not an integer!\n" unless ($svn_rev =~ /^\d+$/);
- my $lcr = svn_info('.')->{'Last Changed Rev'};
- if ($svn_rev != $lcr) {
- print STDERR "Checking for copy-tree ... ";
- my @diff = grep(/^Index: /,(safe_qx(qw(svn diff),
- "-r$lcr:$svn_rev")));
- if (@diff) {
- croak "Nope! Expected r$svn_rev, got r$lcr\n";
- } else {
- print STDERR "OK!\n";
- }
- }
- my @status = grep(!/^Performing status on external/,(`svn status`));
- @status = grep(!/^\s*$/,@status);
- if (scalar @status) {
- print STDERR "Tree ($SVN_WC) is not clean:\n";
- print STDERR $_ foreach @status;
- croak;
- }
-}
-
-sub get_tree_from_treeish {
- my ($treeish) = @_;
- croak "Not a sha1: $treeish\n" unless $treeish =~ /^$sha1$/o;
- chomp(my $type = `git-cat-file -t $treeish`);
- my $expected;
- while ($type eq 'tag') {
- chomp(($treeish, $type) = `git-cat-file tag $treeish`);
- }
- if ($type eq 'commit') {
- $expected = (grep /^tree /,`git-cat-file commit $treeish`)[0];
- ($expected) = ($expected =~ /^tree ($sha1)$/);
- die "Unable to get tree from $treeish\n" unless $expected;
- } elsif ($type eq 'tree') {
- $expected = $treeish;
- } else {
- die "$treeish is a $type, expected tree, tag or commit\n";
- }
- return $expected;
-}
-
-sub assert_tree {
- return if $_use_lib;
- my ($treeish) = @_;
- my $expected = get_tree_from_treeish($treeish);
-
- my $tmpindex = $GIT_SVN_INDEX.'.assert-tmp';
- if (-e $tmpindex) {
- unlink $tmpindex or croak $!;
- }
- my $old_index = set_index($tmpindex);
- index_changes(1);
- chomp(my $tree = `git-write-tree`);
- restore_index($old_index);
- if ($tree ne $expected) {
- croak "Tree mismatch, Got: $tree, Expected: $expected\n";
- }
- unlink $tmpindex;
-}
-
-sub parse_diff_tree {
- my $diff_fh = shift;
- local $/ = "\0";
- my $state = 'meta';
- my @mods;
- while (<$diff_fh>) {
- chomp $_; # this gets rid of the trailing "\0"
- if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s
- $sha1\s($sha1)\s([MTCRAD])\d*$/xo) {
- push @mods, { mode_a => $1, mode_b => $2,
- sha1_b => $3, chg => $4 };
- if ($4 =~ /^(?:C|R)$/) {
- $state = 'file_a';
- } else {
- $state = 'file_b';
- }
- } elsif ($state eq 'file_a') {
- my $x = $mods[$#mods] or croak "Empty array\n";
- if ($x->{chg} !~ /^(?:C|R)$/) {
- croak "Error parsing $_, $x->{chg}\n";
- }
- $x->{file_a} = $_;
- $state = 'file_b';
- } elsif ($state eq 'file_b') {
- my $x = $mods[$#mods] or croak "Empty array\n";
- if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) {
- croak "Error parsing $_, $x->{chg}\n";
- }
- if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) {
- croak "Error parsing $_, $x->{chg}\n";
- }
- $x->{file_b} = $_;
- $state = 'meta';
- } else {
- croak "Error parsing $_\n";
- }
- }
- close $diff_fh or croak $?;
-
- return \@mods;
-}
-
-sub svn_check_prop_executable {
- my $m = shift;
- return if -l $m->{file_b};
- if ($m->{mode_b} =~ /755$/) {
- chmod((0755 &~ umask),$m->{file_b}) or croak $!;
- if ($m->{mode_a} !~ /755$/) {
- sys(qw(svn propset svn:executable 1), $m->{file_b});
- }
- -x $m->{file_b} or croak "$m->{file_b} is not executable!\n";
- } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
- sys(qw(svn propdel svn:executable), $m->{file_b});
- chmod((0644 &~ umask),$m->{file_b}) or croak $!;
- -x $m->{file_b} and croak "$m->{file_b} is executable!\n";
- }
-}
-
-sub svn_ensure_parent_path {
- my $dir_b = dirname(shift);
- svn_ensure_parent_path($dir_b) if ($dir_b ne File::Spec->curdir);
- mkpath([$dir_b]) unless (-d $dir_b);
- sys(qw(svn add -N), $dir_b) unless (-d "$dir_b/.svn");
-}
-
-sub precommit_check {
- my $mods = shift;
- my (%rm_file, %rmdir_check, %added_check);
-
- my %o = ( D => 0, R => 1, C => 2, A => 3, M => 3, T => 3 );
- foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
- if ($m->{chg} eq 'R') {
- if (-d $m->{file_b}) {
- err_dir_to_file("$m->{file_a} => $m->{file_b}");
- }
- # dir/$file => dir/file/$file
- my $dirname = dirname($m->{file_b});
- while ($dirname ne File::Spec->curdir) {
- if ($dirname ne $m->{file_a}) {
- $dirname = dirname($dirname);
- next;
- }
- err_file_to_dir("$m->{file_a} => $m->{file_b}");
- }
- # baz/zzz => baz (baz is a file)
- $dirname = dirname($m->{file_a});
- while ($dirname ne File::Spec->curdir) {
- if ($dirname ne $m->{file_b}) {
- $dirname = dirname($dirname);
- next;
- }
- err_dir_to_file("$m->{file_a} => $m->{file_b}");
- }
- }
- if ($m->{chg} =~ /^(D|R)$/) {
- my $t = $1 eq 'D' ? 'file_b' : 'file_a';
- $rm_file{ $m->{$t} } = 1;
- my $dirname = dirname( $m->{$t} );
- my $basename = basename( $m->{$t} );
- $rmdir_check{$dirname}->{$basename} = 1;
- } elsif ($m->{chg} =~ /^(?:A|C)$/) {
- if (-d $m->{file_b}) {
- err_dir_to_file($m->{file_b});
- }
- my $dirname = dirname( $m->{file_b} );
- my $basename = basename( $m->{file_b} );
- $added_check{$dirname}->{$basename} = 1;
- while ($dirname ne File::Spec->curdir) {
- if ($rm_file{$dirname}) {
- err_file_to_dir($m->{file_b});
- }
- $dirname = dirname $dirname;
- }
- }
- }
- return (\%rmdir_check, \%added_check);
-
- sub err_dir_to_file {
- my $file = shift;
- print STDERR "Node change from directory to file ",
- "is not supported by Subversion: ",$file,"\n";
- exit 1;
- }
- sub err_file_to_dir {
- my $file = shift;
- print STDERR "Node change from file to directory ",
- "is not supported by Subversion: ",$file,"\n";
- exit 1;
- }
-}
-
-
-sub get_diff {
- my ($from, $treeish) = @_;
- assert_tree($from);
- print "diff-tree $from $treeish\n";
- my $pid = open my $diff_fh, '-|';
- defined $pid or croak $!;
- if ($pid == 0) {
- my @diff_tree = qw(git-diff-tree -z -r);
- if ($_cp_similarity) {
- push @diff_tree, "-C$_cp_similarity";
- } else {
- push @diff_tree, '-C';
- }
- push @diff_tree, '--find-copies-harder' if $_find_copies_harder;
- push @diff_tree, "-l$_l" if defined $_l;
- exec(@diff_tree, $from, $treeish) or croak $!;
- }
- return parse_diff_tree($diff_fh);
-}
-
-sub svn_checkout_tree {
- my ($from, $treeish) = @_;
- my $mods = get_diff($from->{commit}, $treeish);
- return $mods unless (scalar @$mods);
- my ($rm, $add) = precommit_check($mods);
-
- my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 );
- foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
- if ($m->{chg} eq 'C') {
- svn_ensure_parent_path( $m->{file_b} );
- sys(qw(svn cp), $m->{file_a}, $m->{file_b});
- apply_mod_line_blob($m);
- svn_check_prop_executable($m);
- } elsif ($m->{chg} eq 'D') {
- sys(qw(svn rm --force), $m->{file_b});
- } elsif ($m->{chg} eq 'R') {
- svn_ensure_parent_path( $m->{file_b} );
- sys(qw(svn mv --force), $m->{file_a}, $m->{file_b});
- apply_mod_line_blob($m);
- svn_check_prop_executable($m);
- } elsif ($m->{chg} eq 'M') {
- apply_mod_line_blob($m);
- svn_check_prop_executable($m);
- } elsif ($m->{chg} eq 'T') {
- sys(qw(svn rm --force),$m->{file_b});
- apply_mod_line_blob($m);
- sys(qw(svn add), $m->{file_b});
- svn_check_prop_executable($m);
- } elsif ($m->{chg} eq 'A') {
- svn_ensure_parent_path( $m->{file_b} );
- apply_mod_line_blob($m);
- sys(qw(svn add), $m->{file_b});
- svn_check_prop_executable($m);
- } else {
- croak "Invalid chg: $m->{chg}\n";
- }
- }
-
- assert_tree($treeish);
- if ($_rmdir) { # remove empty directories
- handle_rmdir($rm, $add);
- }
- assert_tree($treeish);
- return $mods;
-}
-
-sub libsvn_checkout_tree {
- my ($from, $treeish, $ed) = @_;
- my $mods = get_diff($from, $treeish);
- return $mods unless (scalar @$mods);
- my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 );
- foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
- my $f = $m->{chg};
- if (defined $o{$f}) {
- $ed->$f($m);
- } else {
- croak "Invalid change type: $f\n";
- }
- }
- $ed->rmdirs if $_rmdir;
- return $mods;
-}
-
-# svn ls doesn't list the current working tree, only what's in the
-# repository, and there's not even an option to change that... *sigh*
-# (added files don't show up and removed files remain in the ls listing)
-sub svn_ls_current {
- my ($dir, $rm, $add) = @_;
- chomp(my @ls = safe_qx('svn','ls',$dir));
- my @ret = ();
- foreach (@ls) {
- s#/$##; # trailing slashes are evil
- push @ret, $_ unless $rm->{$dir}->{$_};
- }
- if (exists $add->{$dir}) {
- push @ret, keys %{$add->{$dir}};
- }
- return \@ret;
-}
-
-sub handle_rmdir {
- my ($rm, $add) = @_;
-
- foreach my $dir (sort {length $b <=> length $a} keys %$rm) {
- my $ls = svn_ls_current($dir, $rm, $add);
- next if (scalar @$ls);
- sys(qw(svn rm --force),$dir);
-
- my $dn = dirname $dir;
- $rm->{ $dn }->{ basename $dir } = 1;
- $ls = svn_ls_current($dn, $rm, $add);
- while (scalar @$ls == 0 && $dn ne File::Spec->curdir) {
- sys(qw(svn rm --force),$dn);
- $dir = basename $dn;
- $dn = dirname $dn;
- $rm->{ $dn }->{ $dir } = 1;
- $ls = svn_ls_current($dn, $rm, $add);
- }
- }
-}
-
-sub get_commit_message {
- my ($commit, $commit_msg) = (@_);
- my %log_msg = ( msg => '' );
- open my $msg, '>', $commit_msg or croak $!;
-
- print "commit: $commit\n";
- chomp(my $type = `git-cat-file -t $commit`);
- if ($type eq 'commit') {
- my $pid = open my $msg_fh, '-|';
- defined $pid or croak $!;
-
- if ($pid == 0) {
- exec(qw(git-cat-file commit), $commit) or croak $!;
- }
- my $in_msg = 0;
- while (<$msg_fh>) {
- if (!$in_msg) {
- $in_msg = 1 if (/^\s*$/);
- } elsif (/^git-svn-id: /) {
- # skip this, we regenerate the correct one
- # on re-fetch anyways
- } else {
- print $msg $_ or croak $!;
- }
- }
- close $msg_fh or croak $?;
- }
- close $msg or croak $!;
-
- if ($_edit || ($type eq 'tree')) {
- my $editor = $ENV{VISUAL} || $ENV{EDITOR} || 'vi';
- system($editor, $commit_msg);
- }
-
- # file_to_s removes all trailing newlines, so just use chomp() here:
- open $msg, '<', $commit_msg or croak $!;
- { local $/; chomp($log_msg{msg} = <$msg>); }
- close $msg or croak $!;
-
- return \%log_msg;
-}
-
-sub svn_commit_tree {
- my ($last, $commit) = @_;
- my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$";
- my $log_msg = get_commit_message($commit, $commit_msg);
- my ($oneline) = ($log_msg->{msg} =~ /([^\n\r]+)/);
- print "Committing $commit: $oneline\n";
-
- if (defined $LC_ALL) {
- $ENV{LC_ALL} = $LC_ALL;
- } else {
- delete $ENV{LC_ALL};
- }
- my @ci_output = safe_qx(qw(svn commit -F),$commit_msg);
- $ENV{LC_ALL} = 'C';
- unlink $commit_msg;
- my ($committed) = ($ci_output[$#ci_output] =~ /(\d+)/);
- if (!defined $committed) {
- my $out = join("\n",@ci_output);
- print STDERR "W: Trouble parsing \`svn commit' output:\n\n",
- $out, "\n\nAssuming English locale...";
-		($committed) = ($out =~ /^Committed revision (\d+)\./sm);
-		defined $committed or die " FAILED!\n",
-			"Commit output failed to parse committed revision!\n";
-		print STDERR " OK\n";
- }
-
- my @svn_up = qw(svn up);
- push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
- if ($_optimize_commits && ($committed == ($last->{revision} + 1))) {
- push @svn_up, "-r$committed";
- sys(@svn_up);
- my $info = svn_info('.');
- my $date = $info->{'Last Changed Date'} or die "Missing date\n";
- if ($info->{'Last Changed Rev'} != $committed) {
- croak "$info->{'Last Changed Rev'} != $committed\n"
- }
- my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
- /(\d{4})\-(\d\d)\-(\d\d)\s
- (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
- or croak "Failed to parse date: $date\n";
- $log_msg->{date} = "$tz $Y-$m-$d $H:$M:$S";
- $log_msg->{author} = $info->{'Last Changed Author'};
- $log_msg->{revision} = $committed;
- $log_msg->{msg} .= "\n";
- $log_msg->{parents} = [ $last->{commit} ];
- $log_msg->{commit} = git_commit($log_msg, $commit);
- return $log_msg;
- }
- # resync immediately
- push @svn_up, "-r$last->{revision}";
- sys(@svn_up);
- return fetch("$committed=$commit");
-}
-
-sub rev_list_raw {
- my (@args) = @_;
- my $pid = open my $fh, '-|';
- defined $pid or croak $!;
- if (!$pid) {
- exec(qw/git-rev-list --pretty=raw/, @args) or croak $!;
- }
- return { fh => $fh, t => { } };
-}
-
-sub next_rev_list_entry {
- my $rl = shift;
- my $fh = $rl->{fh};
- my $x = $rl->{t};
- while (<$fh>) {
- if (/^commit ($sha1)$/o) {
- if ($x->{c}) {
- $rl->{t} = { c => $1 };
- return $x;
- } else {
- $x->{c} = $1;
- }
- } elsif (/^parent ($sha1)$/o) {
- $x->{p}->{$1} = 1;
- } elsif (s/^ //) {
- $x->{m} ||= '';
- $x->{m} .= $_;
- }
- }
- return ($x != $rl->{t}) ? $x : undef;
-}
-
-# read the entire log into a temporary file (which is removed ASAP)
-# and store the file handle + parser state
-sub svn_log_raw {
- my (@log_args) = @_;
- my $log_fh = IO::File->new_tmpfile or croak $!;
- my $pid = fork;
- defined $pid or croak $!;
- if (!$pid) {
- open STDOUT, '>&', $log_fh or croak $!;
- exec (qw(svn log), @log_args) or croak $!
- }
- waitpid $pid, 0;
- croak $? if $?;
- seek $log_fh, 0, 0 or croak $!;
- return { state => 'sep', fh => $log_fh };
-}
-
-sub next_log_entry {
- my $log = shift; # retval of svn_log_raw()
- my $ret = undef;
- my $fh = $log->{fh};
-
- while (<$fh>) {
- chomp;
- if (/^\-{72}$/) {
- if ($log->{state} eq 'msg') {
- if ($ret->{lines}) {
- $ret->{msg} .= $_."\n";
- unless(--$ret->{lines}) {
- $log->{state} = 'sep';
- }
- } else {
- croak "Log parse error at: $_\n",
- $ret->{revision},
- "\n";
- }
- next;
- }
- if ($log->{state} ne 'sep') {
- croak "Log parse error at: $_\n",
- "state: $log->{state}\n",
- $ret->{revision},
- "\n";
- }
- $log->{state} = 'rev';
-
- # if we have an empty log message, put something there:
- if ($ret) {
- $ret->{msg} ||= "\n";
- delete $ret->{lines};
- return $ret;
- }
- next;
- }
- if ($log->{state} eq 'rev' && s/^r(\d+)\s*\|\s*//) {
- my $rev = $1;
- my ($author, $date, $lines) = split(/\s*\|\s*/, $_, 3);
- ($lines) = ($lines =~ /(\d+)/);
- my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
- /(\d{4})\-(\d\d)\-(\d\d)\s
- (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
- or croak "Failed to parse date: $date\n";
- $ret = { revision => $rev,
- date => "$tz $Y-$m-$d $H:$M:$S",
- author => $author,
- lines => $lines,
- msg => '' };
- if (defined $_authors && ! defined $users{$author}) {
- die "Author: $author not defined in ",
- "$_authors file\n";
- }
- $log->{state} = 'msg_start';
- next;
- }
- # skip the first blank line of the message:
- if ($log->{state} eq 'msg_start' && /^$/) {
- $log->{state} = 'msg';
- } elsif ($log->{state} eq 'msg') {
- if ($ret->{lines}) {
- $ret->{msg} .= $_."\n";
- unless (--$ret->{lines}) {
- $log->{state} = 'sep';
- }
- } else {
- croak "Log parse error at: $_\n",
- $ret->{revision},"\n";
- }
- }
- }
- return $ret;
-}
-
-sub svn_info {
- my $url = shift || $SVN_URL;
-
- my $pid = open my $info_fh, '-|';
- defined $pid or croak $!;
-
- if ($pid == 0) {
- exec(qw(svn info),$url) or croak $!;
- }
-
- my $ret = {};
- # only single-lines seem to exist in svn info output
- while (<$info_fh>) {
- chomp $_;
- if (m#^([^:]+)\s*:\s*(\S.*)$#) {
- $ret->{$1} = $2;
- push @{$ret->{-order}}, $1;
- }
- }
- close $info_fh or croak $?;
- return $ret;
-}
-
-sub sys { system(@_) == 0 or croak $? }
-
-sub eol_cp {
- my ($from, $to) = @_;
- my $es = svn_propget_base('svn:eol-style', $to);
- open my $rfd, '<', $from or croak $!;
- binmode $rfd or croak $!;
- open my $wfd, '>', $to or croak $!;
- binmode $wfd or croak $!;
- eol_cp_fd($rfd, $wfd, $es);
- close $rfd or croak $!;
- close $wfd or croak $!;
-}
-
-sub eol_cp_fd {
- my ($rfd, $wfd, $es) = @_;
- my $eol = defined $es ? $EOL{$es} : undef;
- my $buf;
- use bytes;
- while (1) {
- my ($r, $w, $t);
- defined($r = sysread($rfd, $buf, 4096)) or croak $!;
- return unless $r;
- if ($eol) {
- if ($buf =~ /\015$/) {
- my $c;
- defined($r = sysread($rfd,$c,1)) or croak $!;
- $buf .= $c if $r > 0;
- }
- $buf =~ s/(?:\015\012|\015|\012)/$eol/gs;
- $r = length($buf);
- }
- for ($w = 0; $w < $r; $w += $t) {
- $t = syswrite($wfd, $buf, $r - $w, $w) or croak $!;
- }
- }
- no bytes;
-}
-
-sub do_update_index {
- my ($z_cmd, $cmd, $no_text_base) = @_;
-
- my $z = open my $p, '-|';
- defined $z or croak $!;
- unless ($z) { exec @$z_cmd or croak $! }
-
- my $pid = open my $ui, '|-';
- defined $pid or croak $!;
- unless ($pid) {
- exec('git-update-index',"--$cmd",'-z','--stdin') or croak $!;
- }
- local $/ = "\0";
- while (my $x = <$p>) {
- chomp $x;
- if (!$no_text_base && lstat $x && ! -l _ &&
- svn_propget_base('svn:keywords', $x)) {
- my $mode = -x _ ? 0755 : 0644;
- my ($v,$d,$f) = File::Spec->splitpath($x);
- my $tb = File::Spec->catfile($d, '.svn', 'tmp',
- 'text-base',"$f.svn-base");
- $tb =~ s#^/##;
- unless (-f $tb) {
- $tb = File::Spec->catfile($d, '.svn',
- 'text-base',"$f.svn-base");
- $tb =~ s#^/##;
- }
- unlink $x or croak $!;
- eol_cp($tb, $x);
- chmod(($mode &~ umask), $x) or croak $!;
- }
- print $ui $x,"\0";
- }
- close $ui or croak $?;
-}
-
-sub index_changes {
- return if $_use_lib;
-
- if (!-f "$GIT_SVN_DIR/info/exclude") {
- open my $fd, '>>', "$GIT_SVN_DIR/info/exclude" or croak $!;
- print $fd '.svn',"\n";
- close $fd or croak $!;
- }
- my $no_text_base = shift;
- do_update_index([qw/git-diff-files --name-only -z/],
- 'remove',
- $no_text_base);
- do_update_index([qw/git-ls-files -z --others/,
- "--exclude-from=$GIT_SVN_DIR/info/exclude"],
- 'add',
- $no_text_base);
-}
-
-sub s_to_file {
- my ($str, $file, $mode) = @_;
- open my $fd,'>',$file or croak $!;
- print $fd $str,"\n" or croak $!;
- close $fd or croak $!;
- chmod ($mode &~ umask, $file) if (defined $mode);
-}
-
-sub file_to_s {
- my $file = shift;
- open my $fd,'<',$file or croak "$!: file: $file\n";
- local $/;
- my $ret = <$fd>;
- close $fd or croak $!;
- $ret =~ s/\s*$//s;
- return $ret;
-}
-
-sub assert_revision_unknown {
- my $r = shift;
- if (my $c = revdb_get($REVDB, $r)) {
- croak "$r = $c already exists! Why are we refetching it?";
- }
-}
-
-sub trees_eq {
- my ($x, $y) = @_;
- my @x = safe_qx('git-cat-file','commit',$x);
- my @y = safe_qx('git-cat-file','commit',$y);
- if (($y[0] ne $x[0]) || $x[0] !~ /^tree $sha1\n$/
- || $y[0] !~ /^tree $sha1\n$/) {
- print STDERR "Trees not equal: $y[0] != $x[0]\n";
- return 0
- }
- return 1;
-}
-
-sub git_commit {
- my ($log_msg, @parents) = @_;
- assert_revision_unknown($log_msg->{revision});
- map_tree_joins() if (@_branch_from && !%tree_map);
-
- my (@tmp_parents, @exec_parents, %seen_parent);
- if (my $lparents = $log_msg->{parents}) {
- @tmp_parents = @$lparents
- }
- # commit parents can be conditionally bound to a particular
- # svn revision via: "svn_revno=commit_sha1", filter them out here:
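-	# (e.g. a parent given as "1234=<sha1>" is only used when we are
-	# committing r1234; plain sha1 arguments are always candidates)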
- foreach my $p (@parents) {
- next unless defined $p;
- if ($p =~ /^(\d+)=($sha1_short)$/o) {
- if ($1 == $log_msg->{revision}) {
- push @tmp_parents, $2;
- }
- } else {
- push @tmp_parents, $p if $p =~ /$sha1_short/o;
- }
- }
- my $tree = $log_msg->{tree};
- if (!defined $tree) {
- my $index = set_index($GIT_SVN_INDEX);
- index_changes();
- chomp($tree = `git-write-tree`);
- croak $? if $?;
- restore_index($index);
- }
- if (exists $tree_map{$tree}) {
- push @tmp_parents, @{$tree_map{$tree}};
- }
- foreach (@tmp_parents) {
- next if $seen_parent{$_};
- $seen_parent{$_} = 1;
- push @exec_parents, $_;
- # MAXPARENT is defined to 16 in commit-tree.c:
- last if @exec_parents > 16;
- }
-
- defined(my $pid = open my $out_fh, '-|') or croak $!;
- if ($pid == 0) {
- my $msg_fh = IO::File->new_tmpfile or croak $!;
- print $msg_fh $log_msg->{msg}, "\ngit-svn-id: ",
- "$SVN_URL\@$log_msg->{revision}",
- " $SVN_UUID\n" or croak $!;
- $msg_fh->flush == 0 or croak $!;
- seek $msg_fh, 0, 0 or croak $!;
- set_commit_env($log_msg);
- my @exec = ('git-commit-tree',$tree);
- push @exec, '-p', $_ foreach @exec_parents;
- open STDIN, '<&', $msg_fh or croak $!;
- exec @exec or croak $!;
- }
- chomp(my $commit = do { local $/; <$out_fh> });
- close $out_fh or croak $?;
- if ($commit !~ /^$sha1$/o) {
- croak "Failed to commit, invalid sha1: $commit\n";
- }
- my @update_ref = ('git-update-ref',"refs/remotes/$GIT_SVN",$commit);
- if (my $primary_parent = shift @exec_parents) {
- quiet_run(qw/git-rev-parse --verify/,"refs/remotes/$GIT_SVN^0");
- push @update_ref, $primary_parent unless $?;
- }
- sys(@update_ref);
- revdb_set($REVDB, $log_msg->{revision}, $commit);
-
- # this output is read via pipe, do not change:
- print "r$log_msg->{revision} = $commit\n";
- check_repack();
- return $commit;
-}
-
-sub check_repack {
- if ($_repack && (--$_repack_nr == 0)) {
- $_repack_nr = $_repack;
- sys("git repack $_repack_flags");
- }
-}
-
-sub set_commit_env {
- my ($log_msg) = @_;
- my $author = $log_msg->{author};
- if (!defined $author || length $author == 0) {
- $author = '(no author)';
- }
- my ($name,$email) = defined $users{$author} ? @{$users{$author}}
- : ($author,"$author\@$SVN_UUID");
- $ENV{GIT_AUTHOR_NAME} = $ENV{GIT_COMMITTER_NAME} = $name;
- $ENV{GIT_AUTHOR_EMAIL} = $ENV{GIT_COMMITTER_EMAIL} = $email;
- $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_msg->{date};
-}
-
-sub apply_mod_line_blob {
- my $m = shift;
- if ($m->{mode_b} =~ /^120/) {
- blob_to_symlink($m->{sha1_b}, $m->{file_b});
- } else {
- blob_to_file($m->{sha1_b}, $m->{file_b});
- }
-}
-
-sub blob_to_symlink {
- my ($blob, $link) = @_;
- defined $link or croak "\$link not defined!\n";
- croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
- if (-l $link || -f _) {
- unlink $link or croak $!;
- }
-
- my $dest = `git-cat-file blob $blob`; # no newline, so no chomp
- symlink $dest, $link or croak $!;
-}
-
-sub blob_to_file {
- my ($blob, $file) = @_;
- defined $file or croak "\$file not defined!\n";
- croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
- if (-l $file || -f _) {
- unlink $file or croak $!;
- }
-
- open my $blob_fh, '>', $file or croak "$!: $file\n";
- my $pid = fork;
- defined $pid or croak $!;
-
- if ($pid == 0) {
- open STDOUT, '>&', $blob_fh or croak $!;
- exec('git-cat-file','blob',$blob) or croak $!;
- }
- waitpid $pid, 0;
- croak $? if $?;
-
- close $blob_fh or croak $!;
-}
-
-sub safe_qx {
- my $pid = open my $child, '-|';
- defined $pid or croak $!;
- if ($pid == 0) {
- exec(@_) or croak $!;
- }
- my @ret = (<$child>);
- close $child or croak $?;
- die $? if $?; # just in case close didn't error out
- return wantarray ? @ret : join('',@ret);
-}
-
-sub svn_compat_check {
- my @co_help = safe_qx(qw(svn co -h));
- unless (grep /ignore-externals/,@co_help) {
- print STDERR "W: Installed svn version does not support ",
- "--ignore-externals\n";
- $_no_ignore_ext = 1;
- }
- if (grep /usage: checkout URL\[\@REV\]/,@co_help) {
- $_svn_co_url_revs = 1;
- }
- if (grep /\[TARGET\[\@REV\]\.\.\.\]/, `svn propget -h`) {
- $_svn_pg_peg_revs = 1;
- }
-
- # I really, really hope nobody hits this...
- unless (grep /stop-on-copy/, (safe_qx(qw(svn log -h)))) {
- print STDERR <<'';
-W: The installed svn version does not support the --stop-on-copy flag in
- the log command.
-   Let's hope the directory you're tracking is not a branch or tag
- and was never moved within the repository...
-
- $_no_stop_copy = 1;
- }
-}
-
-# *sigh*, new versions of svn won't honor -r<rev> without URL@<rev>,
-# (and they won't honor URL@<rev> without -r<rev>, too!)
-sub svn_cmd_checkout {
- my ($url, $rev, $dir) = @_;
- my @cmd = ('svn','co', "-r$rev");
- push @cmd, '--ignore-externals' unless $_no_ignore_ext;
- $url .= "\@$rev" if $_svn_co_url_revs;
- sys(@cmd, $url, $dir);
-}
-
-sub check_upgrade_needed {
- if (!-r $REVDB) {
- -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]);
- open my $fh, '>>',$REVDB or croak $!;
- close $fh;
- }
- my $old = eval {
- my $pid = open my $child, '-|';
- defined $pid or croak $!;
- if ($pid == 0) {
- close STDERR;
- exec('git-rev-parse',"$GIT_SVN-HEAD") or croak $!;
- }
- my @ret = (<$child>);
- close $child or croak $?;
- die $? if $?; # just in case close didn't error out
- return wantarray ? @ret : join('',@ret);
- };
- return unless $old;
- my $head = eval { safe_qx('git-rev-parse',"refs/remotes/$GIT_SVN") };
- if ($@ || !$head) {
- print STDERR "Please run: $0 rebuild --upgrade\n";
- exit 1;
- }
-}
-
-# fills %tree_map with a reverse mapping of trees to commits. Useful
-# for finding parents to commit on.
-sub map_tree_joins {
- my %seen;
- foreach my $br (@_branch_from) {
- my $pid = open my $pipe, '-|';
- defined $pid or croak $!;
- if ($pid == 0) {
- exec(qw(git-rev-list --topo-order --pretty=raw), $br)
- or croak $!;
- }
- while (<$pipe>) {
- if (/^commit ($sha1)$/o) {
- my $commit = $1;
-
- # if we've seen a commit,
- # we've seen its parents
- last if $seen{$commit};
- my ($tree) = (<$pipe> =~ /^tree ($sha1)$/o);
- unless (defined $tree) {
- die "Failed to parse commit $commit\n";
- }
- push @{$tree_map{$tree}}, $commit;
- $seen{$commit} = 1;
- }
- }
- close $pipe; # we could be breaking the pipe early
- }
-}
-
-sub load_all_refs {
- if (@_branch_from) {
- print STDERR '--branch|-b parameters are ignored when ',
- "--branch-all-refs|-B is passed\n";
- }
-
- # don't worry about rev-list on non-commit objects/tags,
- # it shouldn't blow up if a ref is a blob or tree...
- chomp(@_branch_from = `git-rev-parse --symbolic --all`);
-}
-
-# '<svn username> = real-name <email address>' mapping based on git-svnimport:
-sub load_authors {
- open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
- while (<$authors>) {
- chomp;
- next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
- my ($user, $name, $email) = ($1, $2, $3);
- $users{$user} = [$name, $email];
- }
- close $authors or croak $!;
-}
-
-sub rload_authors {
- open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
- while (<$authors>) {
- chomp;
- next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
- my ($user, $name, $email) = ($1, $2, $3);
- $rusers{"$name <$email>"} = $user;
- }
- close $authors or croak $!;
-}
-
-sub svn_propget_base {
- my ($p, $f) = @_;
- $f .= '@BASE' if $_svn_pg_peg_revs;
- return safe_qx(qw/svn propget/, $p, $f);
-}
-
-sub git_svn_each {
- my $sub = shift;
- foreach (`git-rev-parse --symbolic --all`) {
- next unless s#^refs/remotes/##;
- chomp $_;
- next unless -f "$GIT_DIR/svn/$_/info/url";
- &$sub($_);
- }
-}
-
-sub migrate_revdb {
- git_svn_each(sub {
- my $id = shift;
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
- init_vars();
- exit 0 if -r $REVDB;
- print "Upgrading svn => git mapping...\n";
- -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]);
- open my $fh, '>>',$REVDB or croak $!;
- close $fh;
- rebuild();
- print "Done upgrading. You may now delete the ",
- "deprecated $GIT_SVN_DIR/revs directory\n";
- exit 0;
- }
- waitpid $pid, 0;
- croak $? if $?;
- });
-}
-
-sub migration_check {
- migrate_revdb() unless (-e $REVDB);
- return if (-d "$GIT_DIR/svn" || !-d $GIT_DIR);
- print "Upgrading repository...\n";
- unless (-d "$GIT_DIR/svn") {
- mkdir "$GIT_DIR/svn" or croak $!;
- }
- print "Data from a previous version of git-svn exists, but\n\t",
- "$GIT_SVN_DIR\n\t(required for this version ",
- "($VERSION) of git-svn) does not.\n";
-
- foreach my $x (`git-rev-parse --symbolic --all`) {
- next unless $x =~ s#^refs/remotes/##;
- chomp $x;
- next unless -f "$GIT_DIR/$x/info/url";
- my $u = eval { file_to_s("$GIT_DIR/$x/info/url") };
- next unless $u;
- my $dn = dirname("$GIT_DIR/svn/$x");
- mkpath([$dn]) unless -d $dn;
- rename "$GIT_DIR/$x", "$GIT_DIR/svn/$x" or croak "$!: $x";
- }
- migrate_revdb() if (-d $GIT_SVN_DIR && !-w $REVDB);
- print "Done upgrading.\n";
-}
-
-sub find_rev_before {
- my ($r, $id, $eq_ok) = @_;
- my $f = "$GIT_DIR/svn/$id/.rev_db";
- return (undef,undef) unless -r $f;
- --$r unless $eq_ok;
- while ($r > 0) {
- if (my $c = revdb_get($f, $r)) {
- return ($r, $c);
- }
- --$r;
- }
- return (undef, undef);
-}
-
-sub init_vars {
- $GIT_SVN ||= $ENV{GIT_SVN_ID} || 'git-svn';
- $GIT_SVN_DIR = "$GIT_DIR/svn/$GIT_SVN";
- $REVDB = "$GIT_SVN_DIR/.rev_db";
- $GIT_SVN_INDEX = "$GIT_SVN_DIR/index";
- $SVN_URL = undef;
- $SVN_WC = "$GIT_SVN_DIR/tree";
-}
-
-# convert GetOpt::Long specs for use by git-repo-config
-sub read_repo_config {
- return unless -d $GIT_DIR;
- my $opts = shift;
- foreach my $o (keys %$opts) {
- my $v = $opts->{$o};
- my ($key) = ($o =~ /^([a-z\-]+)/);
- $key =~ s/-//g;
- my $arg = 'git-repo-config';
- $arg .= ' --int' if ($o =~ /[:=]i$/);
- $arg .= ' --bool' if ($o !~ /[:=][sfi]$/);
- if (ref $v eq 'ARRAY') {
- chomp(my @tmp = `$arg --get-all svn.$key`);
- @$v = @tmp if @tmp;
- } else {
- chomp(my $tmp = `$arg --get svn.$key`);
-			if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) {
- $$v = $tmp;
- }
- }
- }
-}
-
-sub set_default_vals {
- if (defined $_repack) {
- $_repack = 1000 if ($_repack <= 0);
- $_repack_nr = $_repack;
- $_repack_flags ||= '-d';
- }
-}
-
-sub read_grafts {
- my $gr_file = shift;
- my ($grafts, $comments) = ({}, {});
- if (open my $fh, '<', $gr_file) {
- my @tmp;
- while (<$fh>) {
- if (/^($sha1)\s+/) {
- my $c = $1;
- if (@tmp) {
- @{$comments->{$c}} = @tmp;
- @tmp = ();
- }
- foreach my $p (split /\s+/, $_) {
- $grafts->{$c}->{$p} = 1;
- }
- } else {
- push @tmp, $_;
- }
- }
- close $fh or croak $!;
- @{$comments->{'END'}} = @tmp if @tmp;
- }
- return ($grafts, $comments);
-}
-
-sub write_grafts {
- my ($grafts, $comments, $gr_file) = @_;
-
- open my $fh, '>', $gr_file or croak $!;
- foreach my $c (sort keys %$grafts) {
- if ($comments->{$c}) {
- print $fh $_ foreach @{$comments->{$c}};
- }
- my $p = $grafts->{$c};
- delete $p->{$c}; # commits are not self-reproducing...
- my $pid = open my $ch, '-|';
- defined $pid or croak $!;
- if (!$pid) {
- exec(qw/git-cat-file commit/, $c) or croak $!;
- }
- while (<$ch>) {
- if (/^parent ([a-f\d]{40})/) {
- $p->{$1} = 1;
- } else {
- last unless /^\S/i;
- }
- }
- close $ch; # breaking the pipe
- print $fh $c, ' ', join(' ', sort keys %$p),"\n";
- }
- if ($comments->{'END'}) {
- print $fh $_ foreach @{$comments->{'END'}};
- }
- close $fh or croak $!;
-}
-
-sub read_url_paths {
- my $l_map = {};
- git_svn_each(sub { my $x = shift;
- my $url = file_to_s("$GIT_DIR/svn/$x/info/url");
- my ($u, $p) = repo_path_split($url);
- $l_map->{$u}->{$p} = $x;
- });
- return $l_map;
-}
-
-sub extract_metadata {
- my $id = shift;
- my ($url, $rev, $uuid) = ($id =~ /^git-svn-id:\s(\S+?)\@(\d+)
- \s([a-f\d\-]+)$/x);
- if (!$rev || !$uuid || !$url) {
- # some of the original repositories I made had
-		# identifiers like this:
- ($rev, $uuid) = ($id =~/^git-svn-id:\s(\d+)\@([a-f\d\-]+)/);
- }
- return ($url, $rev, $uuid);
-}
-
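-# convert a numeric timezone such as "0530" (the caller strips the leading
-# sign) into an offset in seconds, e.g. 5*3600 + 30*60 = 19800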
-sub tz_to_s_offset {
- my ($tz) = @_;
- $tz =~ s/(\d\d)$//;
- return ($1 * 60) + ($tz * 3600);
-}
-
-sub setup_pager { # translated to Perl from pager.c
- return unless (-t *STDOUT);
- my $pager = $ENV{PAGER};
- if (!defined $pager) {
- $pager = 'less';
- } elsif (length $pager == 0 || $pager eq 'cat') {
- return;
- }
- pipe my $rfd, my $wfd or return;
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- open STDOUT, '>&', $wfd or croak $!;
- return;
- }
- open STDIN, '<&', $rfd or croak $!;
- $ENV{LESS} ||= '-S';
-	exec $pager or croak "Can't run pager: $!\n";
-}
-
-sub get_author_info {
- my ($dest, $author, $t, $tz) = @_;
- $author =~ s/(?:^\s*|\s*$)//g;
- $dest->{a_raw} = $author;
- my $_a;
- if ($_authors) {
- $_a = $rusers{$author} || undef;
- }
- if (!$_a) {
- ($_a) = ($author =~ /<([^>]+)\@[^>]+>$/);
- }
- $dest->{t} = $t;
- $dest->{tz} = $tz;
- $dest->{a} = $_a;
- # Date::Parse isn't in the standard Perl distro :(
- if ($tz =~ s/^\+//) {
- $t += tz_to_s_offset($tz);
- } elsif ($tz =~ s/^\-//) {
- $t -= tz_to_s_offset($tz);
- }
- $dest->{t_utc} = $t;
-}
-
-sub process_commit {
- my ($c, $r_min, $r_max, $defer) = @_;
- if (defined $r_min && defined $r_max) {
- if ($r_min == $c->{r} && $r_min == $r_max) {
- show_commit($c);
- return 0;
- }
- return 1 if $r_min == $r_max;
- if ($r_min < $r_max) {
- # we need to reverse the print order
- return 0 if (defined $_limit && --$_limit < 0);
- push @$defer, $c;
- return 1;
- }
- if ($r_min != $r_max) {
- return 1 if ($r_min < $c->{r});
- return 1 if ($r_max > $c->{r});
- }
- }
- return 0 if (defined $_limit && --$_limit < 0);
- show_commit($c);
- return 1;
-}
-
-sub show_commit {
- my $c = shift;
- if ($_oneline) {
- my $x = "\n";
- if (my $l = $c->{l}) {
- while ($l->[0] =~ /^\s*$/) { shift @$l }
- $x = $l->[0];
- }
- $_l_fmt ||= 'A' . length($c->{r});
- print 'r',pack($_l_fmt, $c->{r}),' | ';
- print "$c->{c} | " if $_show_commit;
- print $x;
- } else {
- show_commit_normal($c);
- }
-}
-
-sub show_commit_normal {
- my ($c) = @_;
- print '-' x72, "\nr$c->{r} | ";
- print "$c->{c} | " if $_show_commit;
- print "$c->{a} | ", strftime("%Y-%m-%d %H:%M:%S %z (%a, %d %b %Y)",
- localtime($c->{t_utc})), ' | ';
- my $nr_line = 0;
-
- if (my $l = $c->{l}) {
- while ($l->[$#$l] eq "\n" && $l->[($#$l - 1)] eq "\n") {
- pop @$l;
- }
- $nr_line = scalar @$l;
- if (!$nr_line) {
- print "1 line\n\n\n";
- } else {
- if ($nr_line == 1) {
- $nr_line = '1 line';
- } else {
- $nr_line .= ' lines';
- }
- print $nr_line, "\n\n";
- print $_ foreach @$l;
- }
- } else {
- print "1 line\n\n";
-
- }
- foreach my $x (qw/raw diff/) {
- if ($c->{$x}) {
- print "\n";
- print $_ foreach @{$c->{$x}}
- }
- }
-}
-
-sub libsvn_load {
- return unless $_use_lib;
- $_use_lib = eval {
- require SVN::Core;
- if ($SVN::Core::VERSION lt '1.2.1') {
- die "Need SVN::Core 1.2.1 or better ",
- "(got $SVN::Core::VERSION) ",
- "Falling back to command-line svn\n";
- }
- require SVN::Ra;
- require SVN::Delta;
- push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor';
- my $kill_stupid_warnings = $SVN::Node::none.$SVN::Node::file.
- $SVN::Node::dir.$SVN::Node::unknown.
- $SVN::Node::none.$SVN::Node::file.
- $SVN::Node::dir.$SVN::Node::unknown;
- 1;
- };
-}
-
-sub libsvn_connect {
- my ($url) = @_;
- my $auth = SVN::Core::auth_open([SVN::Client::get_simple_provider(),
- SVN::Client::get_ssl_server_trust_file_provider(),
- SVN::Client::get_username_provider()]);
- my $s = eval { SVN::Ra->new(url => $url, auth => $auth) };
- return $s;
-}
-
-sub libsvn_get_file {
- my ($gui, $f, $rev) = @_;
- my $p = $f;
- return unless ($p =~ s#^\Q$SVN_PATH\E/?##);
-
- my ($hash, $pid, $in, $out);
- my $pool = SVN::Pool->new;
- defined($pid = open3($in, $out, '>&STDERR',
- qw/git-hash-object -w --stdin/)) or croak $!;
- my ($r, $props) = $SVN->get_file($f, $rev, $in, $pool);
- $in->flush == 0 or croak $!;
- close $in or croak $!;
- $pool->clear;
- chomp($hash = do { local $/; <$out> });
- close $out or croak $!;
- waitpid $pid, 0;
- $hash =~ /^$sha1$/o or die "not a sha1: $hash\n";
-
- my $mode = exists $props->{'svn:executable'} ? '100755' : '100644';
- if (exists $props->{'svn:special'}) {
- $mode = '120000';
- my $link = `git-cat-file blob $hash`;
- $link =~ s/^link // or die "svn:special file with contents: <",
- $link, "> is not understood\n";
- defined($pid = open3($in, $out, '>&STDERR',
- qw/git-hash-object -w --stdin/)) or croak $!;
- print $in $link;
- $in->flush == 0 or croak $!;
- close $in or croak $!;
- chomp($hash = do { local $/; <$out> });
- close $out or croak $!;
- waitpid $pid, 0;
- $hash =~ /^$sha1$/o or die "not a sha1: $hash\n";
- }
- print $gui $mode,' ',$hash,"\t",$p,"\0" or croak $!;
-}
-
-sub libsvn_log_entry {
- my ($rev, $author, $date, $msg, $parents) = @_;
- my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
- (\d\d)\:(\d\d)\:(\d\d).\d+Z$/x)
- or die "Unable to parse date: $date\n";
- if (defined $_authors && ! defined $users{$author}) {
- die "Author: $author not defined in $_authors file\n";
- }
- return { revision => $rev, date => "+0000 $Y-$m-$d $H:$M:$S",
- author => $author, msg => $msg."\n", parents => $parents || [] }
-}
-
-sub process_rm {
- my ($gui, $last_commit, $f) = @_;
- $f =~ s#^\Q$SVN_PATH\E/?## or return;
- # remove entire directories.
- if (safe_qx('git-ls-tree',$last_commit,'--',$f) =~ /^040000 tree/) {
- defined(my $pid = open my $ls, '-|') or croak $!;
- if (!$pid) {
- exec(qw/git-ls-tree -r --name-only -z/,
- $last_commit,'--',$f) or croak $!;
- }
- local $/ = "\0";
- while (<$ls>) {
- print $gui '0 ',0 x 40,"\t",$_ or croak $!;
- }
- close $ls or croak $?;
- } else {
- print $gui '0 ',0 x 40,"\t",$f,"\0" or croak $!;
- }
-}
-
-sub libsvn_fetch {
- my ($last_commit, $paths, $rev, $author, $date, $msg) = @_;
- open my $gui, '| git-update-index -z --index-info' or croak $!;
- my @amr;
- foreach my $f (keys %$paths) {
- my $m = $paths->{$f}->action();
- $f =~ s#^/+##;
- if ($m =~ /^[DR]$/) {
- process_rm($gui, $last_commit, $f);
- next if $m eq 'D';
- # 'R' can be file replacements, too, right?
- }
- my $pool = SVN::Pool->new;
- my $t = $SVN->check_path($f, $rev, $pool);
- if ($t == $SVN::Node::file) {
- if ($m =~ /^[AMR]$/) {
- push @amr, $f;
- } else {
- die "Unrecognized action: $m, ($f r$rev)\n";
- }
- }
- $pool->clear;
- }
- libsvn_get_file($gui, $_, $rev) foreach (@amr);
- close $gui or croak $?;
- return libsvn_log_entry($rev, $author, $date, $msg, [$last_commit]);
-}
-
-sub svn_grab_base_rev {
- defined(my $pid = open my $fh, '-|') or croak $!;
- if (!$pid) {
- open my $null, '>', '/dev/null' or croak $!;
- open STDERR, '>&', $null or croak $!;
- exec qw/git-rev-parse --verify/,"refs/remotes/$GIT_SVN^0"
- or croak $!;
- }
- chomp(my $c = do { local $/; <$fh> });
- close $fh;
- if (defined $c && length $c) {
- my ($url, $rev, $uuid) = extract_metadata((grep(/^git-svn-id: /,
- safe_qx(qw/git-cat-file commit/, $c)))[-1]);
- return ($rev, $c);
- }
- return (undef, undef);
-}
-
-sub libsvn_parse_revision {
- my $base = shift;
- my $head = $SVN->get_latest_revnum();
- if (!defined $_revision || $_revision eq 'BASE:HEAD') {
- return ($base + 1, $head) if (defined $base);
- return (0, $head);
- }
- return ($1, $2) if ($_revision =~ /^(\d+):(\d+)$/);
- return ($_revision, $_revision) if ($_revision =~ /^\d+$/);
- if ($_revision =~ /^BASE:(\d+)$/) {
- return ($base + 1, $1) if (defined $base);
- return (0, $head);
- }
- return ($1, $head) if ($_revision =~ /^(\d+):HEAD$/);
- die "revision argument: $_revision not understood by git-svn\n",
- "Try using the command-line svn client instead\n";
-}
-
-sub libsvn_traverse {
- my ($gui, $pfx, $path, $rev) = @_;
- my $cwd = "$pfx/$path";
- my $pool = SVN::Pool->new;
- $cwd =~ s#^/+##g;
- my ($dirent, $r, $props) = $SVN->get_dir($cwd, $rev, $pool);
- foreach my $d (keys %$dirent) {
- my $t = $dirent->{$d}->kind;
- if ($t == $SVN::Node::dir) {
- libsvn_traverse($gui, $cwd, $d, $rev);
- } elsif ($t == $SVN::Node::file) {
- libsvn_get_file($gui, "$cwd/$d", $rev);
- }
- }
- $pool->clear;
-}
-
-sub libsvn_traverse_ignore {
- my ($fh, $path, $r) = @_;
- $path =~ s#^/+##g;
- my $pool = SVN::Pool->new;
- my ($dirent, undef, $props) = $SVN->get_dir($path, $r, $pool);
- my $p = $path;
- $p =~ s#^\Q$SVN_PATH\E/?##;
- print $fh length $p ? "\n# $p\n" : "\n# /\n";
- if (my $s = $props->{'svn:ignore'}) {
- $s =~ s/[\r\n]+/\n/g;
- chomp $s;
- if (length $p == 0) {
- $s =~ s#\n#\n/$p#g;
- print $fh "/$s\n";
- } else {
- $s =~ s#\n#\n/$p/#g;
- print $fh "/$p/$s\n";
- }
- }
- foreach (sort keys %$dirent) {
- next if $dirent->{$_}->kind != $SVN::Node::dir;
- libsvn_traverse_ignore($fh, "$path/$_", $r);
- }
- $pool->clear;
-}
-
-sub revisions_eq {
- my ($path, $r0, $r1) = @_;
- return 1 if $r0 == $r1;
- my $nr = 0;
- if ($_use_lib) {
- # should be OK to use Pool here (r1 - r0) should be small
- my $pool = SVN::Pool->new;
- $SVN->get_log("/$path", $r0, $r1, 0, 1, 1, sub {$nr++},$pool);
- $pool->clear;
- } else {
- my ($url, undef) = repo_path_split($SVN_URL);
- my $svn_log = svn_log_raw("$url/$path","-r$r0:$r1");
- while (next_log_entry($svn_log)) { $nr++ }
- close $svn_log->{fh};
- }
- return 0 if ($nr > 1);
- return 1;
-}
-
-sub libsvn_find_parent_branch {
- my ($paths, $rev, $author, $date, $msg) = @_;
- my $svn_path = '/'.$SVN_PATH;
-
- # look for a parent from another branch:
- my $i = $paths->{$svn_path} or return;
- my $branch_from = $i->copyfrom_path or return;
- my $r = $i->copyfrom_rev;
- print STDERR "Found possible branch point: ",
- "$branch_from => $svn_path, $r\n";
- $branch_from =~ s#^/##;
- my $l_map = read_url_paths();
- my $url = $SVN->{url};
- defined $l_map->{$url} or return;
- my $id = $l_map->{$url}->{$branch_from} or return;
- my ($r0, $parent) = find_rev_before($r,$id,1);
- return unless (defined $r0 && defined $parent);
- if (revisions_eq($branch_from, $r0, $r)) {
- unlink $GIT_SVN_INDEX;
- print STDERR "Found branch parent: $parent\n";
- sys(qw/git-read-tree/, $parent);
- return libsvn_fetch($parent, $paths, $rev,
- $author, $date, $msg);
- }
- print STDERR "Nope, branch point not imported or unknown\n";
- return undef;
-}
-
-sub libsvn_new_tree {
- if (my $log_entry = libsvn_find_parent_branch(@_)) {
- return $log_entry;
- }
- my ($paths, $rev, $author, $date, $msg) = @_;
- open my $gui, '| git-update-index -z --index-info' or croak $!;
- my $pool = SVN::Pool->new;
- libsvn_traverse($gui, '', $SVN_PATH, $rev, $pool);
- $pool->clear;
- close $gui or croak $?;
- return libsvn_log_entry($rev, $author, $date, $msg);
-}
-
-sub find_graft_path_commit {
- my ($tree_paths, $p1, $r1) = @_;
- foreach my $x (keys %$tree_paths) {
- next unless ($p1 =~ /^\Q$x\E/);
- my $i = $tree_paths->{$x};
- my ($r0, $parent) = find_rev_before($r1,$i,1);
- return $parent if (defined $r0 && $r0 == $r1);
- print STDERR "r$r1 of $i not imported\n";
- next;
- }
- return undef;
-}
-
-sub find_graft_path_parents {
- my ($grafts, $tree_paths, $c, $p0, $r0) = @_;
- foreach my $x (keys %$tree_paths) {
- next unless ($p0 =~ /^\Q$x\E/);
- my $i = $tree_paths->{$x};
- my ($r, $parent) = find_rev_before($r0, $i, 1);
- if (defined $r && defined $parent && revisions_eq($x,$r,$r0)) {
- $grafts->{$c}->{$parent} = 1;
- }
- }
-}
-
-sub libsvn_graft_file_copies {
- my ($grafts, $tree_paths, $path, $paths, $rev) = @_;
- foreach (keys %$paths) {
- my $i = $paths->{$_};
- my ($m, $p0, $r0) = ($i->action, $i->copyfrom_path,
- $i->copyfrom_rev);
- next unless (defined $p0 && defined $r0);
-
- my $p1 = $_;
- $p1 =~ s#^/##;
- $p0 =~ s#^/##;
- my $c = find_graft_path_commit($tree_paths, $p1, $rev);
- next unless $c;
- find_graft_path_parents($grafts, $tree_paths, $c, $p0, $r0);
- }
-}
-
-sub set_index {
- my $old = $ENV{GIT_INDEX_FILE};
- $ENV{GIT_INDEX_FILE} = shift;
- return $old;
-}
-
-sub restore_index {
- my ($old) = @_;
- if (defined $old) {
- $ENV{GIT_INDEX_FILE} = $old;
- } else {
- delete $ENV{GIT_INDEX_FILE};
- }
-}
-
-sub libsvn_commit_cb {
- my ($rev, $date, $committer, $c, $msg, $r_last, $cmt_last) = @_;
- if ($_optimize_commits && $rev == ($r_last + 1)) {
- my $log = libsvn_log_entry($rev,$committer,$date,$msg);
- $log->{tree} = get_tree_from_treeish($c);
- my $cmt = git_commit($log, $cmt_last, $c);
- my @diff = safe_qx('git-diff-tree', $cmt, $c);
- if (@diff) {
- print STDERR "Trees differ: $cmt $c\n",
- join('',@diff),"\n";
- exit 1;
- }
- } else {
- fetch("$rev=$c");
- }
-}
-
-sub libsvn_ls_fullurl {
- my $fullurl = shift;
- my ($repo, $path) = repo_path_split($fullurl);
- $SVN ||= libsvn_connect($repo);
- my @ret;
- my $pool = SVN::Pool->new;
- my ($dirent, undef, undef) = $SVN->get_dir($path,
- $SVN->get_latest_revnum, $pool);
- foreach my $d (keys %$dirent) {
- if ($dirent->{$d}->kind == $SVN::Node::dir) {
- push @ret, "$d/"; # add '/' for compat with cli svn
- }
- }
- $pool->clear;
- return @ret;
-}
-
-
-sub libsvn_skip_unknown_revs {
- my $err = shift;
- my $errno = $err->apr_err();
- # Maybe the branch we're tracking didn't
- # exist when the repo started, so it's
- # not an error if it doesn't, just continue
- #
- # Wonderfully consistent library, eh?
- # 160013 - svn:// and file://
- # 175002 - http(s)://
- # More codes may be discovered later...
- if ($errno == 175002 || $errno == 160013) {
- return;
- }
- croak "Error from SVN, ($errno): ", $err->expanded_message,"\n";
-};
-
-# Tie::File seems to be prone to offset errors if revisions get sparse,
-# it's not that fast, either. Tie::File is also not in Perl 5.6. So
-# one of my favorite modules is out :< Next up would be one of the DBM
-# modules, but I'm not sure which is most portable... So I'll just
-# go with something that's plain-text, but still capable of
-# being randomly accessed. So here's my ultra-simple fixed-width
-# database. All records are 40 characters + "\n", so it's easy to seek
-# to a revision: (41 * rev) is the byte offset.
-# A record of 40 0s denotes an empty revision.
-# And yes, it's still pretty fast (faster than Tie::File).
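-# For example, the record for revision 3 starts at byte offset 3 * 41 = 123.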
-sub revdb_set {
- my ($file, $rev, $commit) = @_;
- length $commit == 40 or croak "arg3 must be a full SHA1 hexsum\n";
- open my $fh, '+<', $file or croak $!;
- my $offset = $rev * 41;
- # assume that append is the common case:
- seek $fh, 0, 2 or croak $!;
- my $pos = tell $fh;
- if ($pos < $offset) {
- print $fh (('0' x 40),"\n") x (($offset - $pos) / 41);
- }
- seek $fh, $offset, 0 or croak $!;
- print $fh $commit,"\n";
- close $fh or croak $!;
-}
-
-sub revdb_get {
- my ($file, $rev) = @_;
- my $ret;
- my $offset = $rev * 41;
- open my $fh, '<', $file or croak $!;
- seek $fh, $offset, 0;
- if (tell $fh == $offset) {
- $ret = readline $fh;
- if (defined $ret) {
- chomp $ret;
- $ret = undef if ($ret =~ /^0{40}$/);
- }
- }
- close $fh or croak $!;
- return $ret;
-}
-
-sub copy_remote_ref {
- my $origin = $_cp_remote ? $_cp_remote : 'origin';
- my $ref = "refs/remotes/$GIT_SVN";
- if (safe_qx('git-ls-remote', $origin, $ref)) {
- sys(qw/git fetch/, $origin, "$ref:$ref");
- } else {
- die "Unable to find remote reference: ",
- "refs/remotes/$GIT_SVN on $origin\n";
- }
-}
-
-package SVN::Git::Editor;
-use vars qw/@ISA/;
-use strict;
-use warnings;
-use Carp qw/croak/;
-use IO::File;
-
-sub new {
- my $class = shift;
- my $git_svn = shift;
- my $self = SVN::Delta::Editor->new(@_);
- bless $self, $class;
- foreach (qw/svn_path c r ra /) {
- die "$_ required!\n" unless (defined $git_svn->{$_});
- $self->{$_} = $git_svn->{$_};
- }
- $self->{pool} = SVN::Pool->new;
- $self->{bat} = { '' => $self->open_root($self->{r}, $self->{pool}) };
- $self->{rm} = { };
- require Digest::MD5;
- return $self;
-}
-
-sub split_path {
- return ($_[0] =~ m#^(.*?)/?([^/]+)$#);
-}
-
-sub repo_path {
- (defined $_[1] && length $_[1]) ? "$_[0]->{svn_path}/$_[1]"
- : $_[0]->{svn_path}
-}
-
-sub url_path {
- my ($self, $path) = @_;
- $self->{ra}->{url} . '/' . $self->repo_path($path);
-}
-
-sub rmdirs {
- my ($self) = @_;
- my $rm = $self->{rm};
- delete $rm->{''}; # we never delete the url we're tracking
- return unless %$rm;
-
- foreach (keys %$rm) {
- my @d = split m#/#, $_;
- my $c = shift @d;
- $rm->{$c} = 1;
- while (@d) {
- $c .= '/' . shift @d;
- $rm->{$c} = 1;
- }
- }
- delete $rm->{$self->{svn_path}};
- delete $rm->{''}; # we never delete the url we're tracking
- return unless %$rm;
-
- defined(my $pid = open my $fh,'-|') or croak $!;
- if (!$pid) {
- exec qw/git-ls-tree --name-only -r -z/, $self->{c} or croak $!;
- }
- local $/ = "\0";
- my @svn_path = split m#/#, $self->{svn_path};
- while (<$fh>) {
- chomp;
- my @dn = (@svn_path, (split m#/#, $_));
- while (pop @dn) {
- delete $rm->{join '/', @dn};
- }
- unless (%$rm) {
- close $fh;
- return;
- }
- }
- close $fh;
-
- my ($r, $p, $bat) = ($self->{r}, $self->{pool}, $self->{bat});
- foreach my $d (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$rm) {
- $self->close_directory($bat->{$d}, $p);
- my ($dn) = ($d =~ m#^(.*?)/?(?:[^/]+)$#);
- $self->SUPER::delete_entry($d, $r, $bat->{$dn}, $p);
- delete $bat->{$d};
- }
-}
-
-sub open_or_add_dir {
- my ($self, $full_path, $baton) = @_;
- my $p = SVN::Pool->new;
- my $t = $self->{ra}->check_path($full_path, $self->{r}, $p);
- $p->clear;
- if ($t == $SVN::Node::none) {
- return $self->add_directory($full_path, $baton,
- undef, -1, $self->{pool});
- } elsif ($t == $SVN::Node::dir) {
- return $self->open_directory($full_path, $baton,
- $self->{r}, $self->{pool});
- }
- print STDERR "$full_path already exists in repository at ",
- "r$self->{r} and it is not a directory (",
- ($t == $SVN::Node::file ? 'file' : 'unknown'),"/$t)\n";
- exit 1;
-}
-
-sub ensure_path {
- my ($self, $path) = @_;
- my $bat = $self->{bat};
- $path = $self->repo_path($path);
- return $bat->{''} unless (length $path);
- my @p = split m#/+#, $path;
- my $c = shift @p;
- $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{''});
- while (@p) {
- my $c0 = $c;
- $c .= '/' . shift @p;
- $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{$c0});
- }
- return $bat->{$c};
-}
-
-sub A {
- my ($self, $m) = @_;
- my ($dir, $file) = split_path($m->{file_b});
- my $pbat = $self->ensure_path($dir);
- my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
- undef, -1);
- $self->chg_file($fbat, $m);
- $self->close_file($fbat,undef,$self->{pool});
-}
-
-sub C {
- my ($self, $m) = @_;
- my ($dir, $file) = split_path($m->{file_b});
- my $pbat = $self->ensure_path($dir);
- my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
- $self->url_path($m->{file_a}), $self->{r});
- $self->chg_file($fbat, $m);
- $self->close_file($fbat,undef,$self->{pool});
-}
-
-sub delete_entry {
- my ($self, $path, $pbat) = @_;
- my $rpath = $self->repo_path($path);
- my ($dir, $file) = split_path($rpath);
- $self->{rm}->{$dir} = 1;
- $self->SUPER::delete_entry($rpath, $self->{r}, $pbat, $self->{pool});
-}
-
-sub R {
- my ($self, $m) = @_;
- my ($dir, $file) = split_path($m->{file_b});
- my $pbat = $self->ensure_path($dir);
- my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
- $self->url_path($m->{file_a}), $self->{r});
- $self->chg_file($fbat, $m);
- $self->close_file($fbat,undef,$self->{pool});
-
- ($dir, $file) = split_path($m->{file_a});
- $pbat = $self->ensure_path($dir);
- $self->delete_entry($m->{file_a}, $pbat);
-}
-
-sub M {
- my ($self, $m) = @_;
- my ($dir, $file) = split_path($m->{file_b});
- my $pbat = $self->ensure_path($dir);
- my $fbat = $self->open_file($self->repo_path($m->{file_b}),
- $pbat,$self->{r},$self->{pool});
- $self->chg_file($fbat, $m);
- $self->close_file($fbat,undef,$self->{pool});
-}
-
-sub T { shift->M(@_) }
-
-sub change_file_prop {
- my ($self, $fbat, $pname, $pval) = @_;
- $self->SUPER::change_file_prop($fbat, $pname, $pval, $self->{pool});
-}
-
-sub chg_file {
- my ($self, $fbat, $m) = @_;
- if ($m->{mode_b} =~ /755$/ && $m->{mode_a} !~ /755$/) {
- $self->change_file_prop($fbat,'svn:executable','*');
- } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
- $self->change_file_prop($fbat,'svn:executable',undef);
- }
- my $fh = IO::File->new_tmpfile or croak $!;
- if ($m->{mode_b} =~ /^120/) {
- print $fh 'link ' or croak $!;
- $self->change_file_prop($fbat,'svn:special','*');
- } elsif ($m->{mode_a} =~ /^120/ && $m->{mode_b} !~ /^120/) {
- $self->change_file_prop($fbat,'svn:special',undef);
- }
- defined(my $pid = fork) or croak $!;
- if (!$pid) {
- open STDOUT, '>&', $fh or croak $!;
- exec qw/git-cat-file blob/, $m->{sha1_b} or croak $!;
- }
- waitpid $pid, 0;
- croak $? if $?;
- $fh->flush == 0 or croak $!;
- seek $fh, 0, 0 or croak $!;
-
- my $md5 = Digest::MD5->new;
- $md5->addfile($fh) or croak $!;
- seek $fh, 0, 0 or croak $!;
-
- my $exp = $md5->hexdigest;
- my $atd = $self->apply_textdelta($fbat, undef, $self->{pool});
- my $got = SVN::TxDelta::send_stream($fh, @$atd, $self->{pool});
- die "Checksum mismatch\nexpected: $exp\ngot: $got\n" if ($got ne $exp);
-
- close $fh or croak $!;
-}
-
-sub D {
- my ($self, $m) = @_;
- my ($dir, $file) = split_path($m->{file_b});
- my $pbat = $self->ensure_path($dir);
- $self->delete_entry($m->{file_b}, $pbat);
-}
-
-sub close_edit {
- my ($self) = @_;
- my ($p,$bat) = ($self->{pool}, $self->{bat});
- foreach (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$bat) {
- $self->close_directory($bat->{$_}, $p);
- }
- $self->SUPER::close_edit($p);
- $p->clear;
-}
-
-sub abort_edit {
- my ($self) = @_;
- $self->SUPER::abort_edit($self->{pool});
- $self->{pool}->clear;
-}
-
-__END__
-
-Data structures:
-
-$svn_log hashref (as returned by svn_log_raw)
-{
- fh => file handle of the log file,
- state => state of the log file parser (sep/msg/rev/msg_start...)
-}
-
-$log_msg hashref as returned by next_log_entry($svn_log)
-{
- msg => 'whitespace-formatted log entry
-', # trailing newline is preserved
- revision => '8', # integer
- date => '2004-02-24T17:01:44.108345Z', # commit date
- author => 'committer name'
-};
-
-
-@mods = array of diff-index line hashes, each element represents one line
- of diff-index output
-
-diff-index line ($m hash)
-{
- mode_a => first column of diff-index output, no leading ':',
- mode_b => second column of diff-index output,
- sha1_b => sha1sum of the final blob,
- chg => change type [MCRADT],
- file_a => original file name of a file (iff chg is 'C' or 'R')
- file_b => new/current file name of a file (any chg)
-}
-;
-
-Notes:
- I don't trust the each() function unless I created %hash myself,
- because the internal iterator may not have started at the beginning.
+++ /dev/null
-git-svn(1)
-==========
-
-NAME
-----
-git-svn - bidirectional operation between a single Subversion branch and git
-
-SYNOPSIS
---------
-'git-svn' <command> [options] [arguments]
-
-DESCRIPTION
------------
-git-svn is a simple conduit for changesets between a single Subversion
-branch and git.
-
-git-svn is not to be confused with git-svnimport. They were designed
-with very different goals in mind.
-
-git-svn is designed for an individual developer who wants a
-bidirectional flow of changesets between a single branch in Subversion
-and an arbitrary number of branches in git. git-svnimport is designed
-for read-only operation on repositories that match a particular layout
-(albeit the recommended one by SVN developers).
-
-For importing svn, git-svnimport is potentially more powerful when
-operating on repositories organized under the recommended
-trunk/branch/tags structure, and should be faster, too.
-
-git-svn mostly ignores the very limited view of branching that
-Subversion has. This allows git-svn to be much easier to use,
-especially on repositories that are not organized in a manner that
-git-svnimport is designed for.
-
-COMMANDS
---------
-init::
- Creates an empty git repository with additional metadata
- directories for git-svn. The Subversion URL must be specified
- as a command-line argument.
-
-fetch::
- Fetch unfetched revisions from the Subversion URL we are
- tracking. refs/remotes/git-svn will be updated to the
- latest revision.
-
- Note: You should never attempt to modify the remotes/git-svn
- branch outside of git-svn. Instead, create a branch from
- remotes/git-svn and work on that branch. Use the 'commit'
- command (see below) to write git commits back to
- remotes/git-svn.
-
- See 'Additional Fetch Arguments' if you are interested in
- manually joining branches on commit.
-
-commit::
-	Commit the specified commit or tree objects to SVN. This relies on
-	your imported fetch data being up-to-date. It makes absolutely
-	no attempt to do patching when committing to SVN; it simply
-	overwrites files with those specified in the tree or commit.
-	All merging is assumed to have taken place independently of
-	git-svn functions.
-
-rebuild::
- Not a part of daily usage, but this is a useful command if
- you've just cloned a repository (using git-clone) that was
- tracked with git-svn. Unfortunately, git-clone does not clone
- git-svn metadata and the svn working tree that git-svn uses for
- its operations. This rebuilds the metadata so git-svn can
- resume fetch operations. A Subversion URL may be optionally
- specified at the command-line if the directory/repository you're
- tracking has moved or changed protocols.
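-
-	For example (the URL shown is purely illustrative):
-
-------------------------------------------------------------------------
- git-svn rebuild http://svn.example.com/project/trunk
-------------------------------------------------------------------------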
-
-show-ignore::
- Recursively finds and lists the svn:ignore property on
- directories. The output is suitable for appending to
- the $GIT_DIR/info/exclude file.
-
-OPTIONS
--------
--r <ARG>::
---revision <ARG>::
- Only used with the 'fetch' command.
-
-	Takes any valid -r<argument> svn would accept and passes it
-	directly to svn. -r<ARG1>:<ARG2> ranges and "{" DATE "}" syntax
-	are also supported; see the svn documentation for more details.
-
- This can allow you to make partial mirrors when running fetch.
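-
-	For example, to fetch only a range of revisions (the numbers are
-	purely illustrative):
-
-------------------------------------------------------------------------
- git-svn fetch -r1000:2000
-------------------------------------------------------------------------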
-
--::
---stdin::
- Only used with the 'commit' command.
-
- Read a list of commits from stdin and commit them in reverse
- order. Only the leading sha1 is read from each line, so
- git-rev-list --pretty=oneline output can be used.
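-
-	For example, one possible way to feed it (the branch name is only
-	an illustration):
-
-------------------------------------------------------------------------
- git-rev-list --pretty=oneline remotes/git-svn..my-branch | \
-	git-svn commit --stdin
-------------------------------------------------------------------------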
-
---rmdir::
- Only used with the 'commit' command.
-
- Remove directories from the SVN tree if there are no files left
- behind. SVN can version empty directories, and they are not
- removed by default if there are no files left in them. git
- cannot version empty directories. Enabling this flag will make
- the commit to SVN act like git.
-
- repo-config key: svn.rmdir
-
--e::
---edit::
- Only used with the 'commit' command.
-
- Edit the commit message before committing to SVN. This is off by
- default for objects that are commits, and forced on when committing
- tree objects.
-
- repo-config key: svn.edit
-
--l<num>::
---find-copies-harder::
- Both of these are only used with the 'commit' command.
-
-	They are both passed directly to git-diff-tree; see
-	git-diff-tree(1) for more information.
-
- repo-config key: svn.l
- repo-config key: svn.findcopiesharder
-
--A<filename>::
---authors-file=<filename>::
-
- Syntax is compatible with the files used by git-svnimport and
- git-cvsimport:
-
-------------------------------------------------------------------------
-loginname = Joe User <user@example.com>
-------------------------------------------------------------------------
-
- If this option is specified and git-svn encounters an SVN
- committer name that does not exist in the authors-file, git-svn
- will abort operation. The user will then have to add the
- appropriate entry. Re-running the previous git-svn command
- after the authors-file is modified should continue operation.
-
- repo-config key: svn.authors-file
-
-ADVANCED OPTIONS
-----------------
--b<refname>::
---branch <refname>::
- Used with 'fetch' or 'commit'.
-
- This can be used to join arbitrary git branches to remotes/git-svn
- on new commits where the tree object is equivalent.
-
- When used with different GIT_SVN_ID values, tags and branches in
- SVN can be tracked this way, as can some merges where the heads
- end up having completely equivalent content. This can even be
- used to track branches across multiple SVN _repositories_.
-
- This option may be specified multiple times, once for each
- branch.
-
- repo-config key: svn.branch
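-
-	For example, to consider an existing local branch when joining
-	(the branch name is only an illustration):
-
-------------------------------------------------------------------------
- git-svn fetch -b my-local-branch
-------------------------------------------------------------------------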
-
--i<GIT_SVN_ID>::
---id <GIT_SVN_ID>::
- This sets GIT_SVN_ID (instead of using the environment). See
- the section on "Tracking Multiple Repositories or Branches" for
- more information on using GIT_SVN_ID.
-
-COMPATIBILITY OPTIONS
----------------------
---upgrade::
- Only used with the 'rebuild' command.
-
- Run this if you used an old version of git-svn that used
- "git-svn-HEAD" instead of "remotes/git-svn" as the branch
- for tracking the remote.
-
---no-ignore-externals::
-	Only used with the 'fetch' and 'rebuild' commands.
-
- By default, git-svn passes --ignore-externals to svn to avoid
- fetching svn:external trees into git. Pass this flag to enable
- externals tracking directly via git.
-
-	Versions of svn that do not support --ignore-externals are
-	detected automatically, and this flag is then enabled for them.
-
- Otherwise, do not enable this flag unless you know what you're
- doing.
-
- repo-config key: svn.noignoreexternals
-
-Basic Examples
-~~~~~~~~~~~~~~
-
-Tracking and contributing to a Subversion-managed project:
-
-------------------------------------------------------------------------
-# Initialize a tree (like git init-db):
- git-svn init http://svn.foo.org/project/trunk
-# Fetch remote revisions:
- git-svn fetch
-# Create your own branch to hack on:
- git checkout -b my-branch remotes/git-svn
-# Commit only the git commits you want to SVN:
- git-svn commit <tree-ish> [<tree-ish_2> ...]
-# Commit all the git commits from my-branch that don't exist in SVN:
- git-svn commit remotes/git-svn..my-branch
-# Something is committed to SVN, pull the latest into your branch:
- git-svn fetch && git pull . remotes/git-svn
-# Append svn:ignore settings to the default git exclude file:
- git-svn show-ignore >> .git/info/exclude
-------------------------------------------------------------------------
-
-DESIGN PHILOSOPHY
------------------
-Merge tracking in Subversion is lacking and doing branched development
-with Subversion is cumbersome as a result. git-svn completely forgoes
-any automated merge/branch tracking on the Subversion side and leaves it
-entirely up to the user on the git side. It's simply not worth it to do
-a useful translation when the original signal is weak.
-
-TRACKING MULTIPLE REPOSITORIES OR BRANCHES
-------------------------------------------
-This is for advanced users, most users should ignore this section.
-
-Because git-svn does not care about relationships between different
-branches or directories in a Subversion repository, git-svn has a simple
-hack to allow it to track an arbitrary number of related _or_ unrelated
-SVN repositories via one git repository. Simply set the GIT_SVN_ID
-environment variable to a name other than "git-svn" (the default)
-and git-svn will ignore the contents of the $GIT_DIR/git-svn directory
-and instead do all of its work in $GIT_DIR/$GIT_SVN_ID for that
-invocation. The interface branch will be remotes/$GIT_SVN_ID, instead of
-remotes/git-svn. Any remotes/$GIT_SVN_ID branch should never be modified
-by the user outside of git-svn commands.
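-
-For example, a second, unrelated SVN URL could be tracked in the same
-git repository like this (the id and URL are only illustrations):
-
-------------------------------------------------------------------------
- GIT_SVN_ID=project-b git-svn init http://svn.example.com/project-b/trunk
- GIT_SVN_ID=project-b git-svn fetch
-------------------------------------------------------------------------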
-
-ADDITIONAL FETCH ARGUMENTS
---------------------------
-This is for advanced users, most users should ignore this section.
-
-Unfetched SVN revisions may be imported as children of existing commits
-by specifying additional arguments to 'fetch'. Additional parents may
-optionally be specified in the form of sha1 hex sums at the
-command-line. Unfetched SVN revisions may also be tied to particular
-git commits with the following syntax:
-
- svn_revision_number=git_commit_sha1
-
-This allows you to tie unfetched SVN revision 375 to your current HEAD::
-
- `git-svn fetch 375=$(git-rev-parse HEAD)`
-
-Advanced Example: Tracking a Reorganized Repository
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-If you're tracking a directory that has moved, or otherwise been
-branched or tagged off of another directory in the repository and you
-care about the full history of the project, then you can read this
-section.
-
-This is how Yann Dirson tracked the trunk of the ufoai directory when
-the /trunk directory of his repository was moved to /ufoai/trunk and
-he needed to continue tracking /ufoai/trunk where /trunk left off.
-
-------------------------------------------------------------------------
- # This log message shows when the repository was reorganized:
- r166 | ydirson | 2006-03-02 01:36:55 +0100 (Thu, 02 Mar 2006) | 1 line
- Changed paths:
- D /trunk
- A /ufoai/trunk (from /trunk:165)
-
- # First we start tracking the old revisions:
- GIT_SVN_ID=git-oldsvn git-svn init \
- https://svn.sourceforge.net/svnroot/ufoai/trunk
- GIT_SVN_ID=git-oldsvn git-svn fetch -r1:165
-
- # And now, we continue tracking the new revisions:
- GIT_SVN_ID=git-newsvn git-svn init \
- https://svn.sourceforge.net/svnroot/ufoai/ufoai/trunk
- GIT_SVN_ID=git-newsvn git-svn fetch \
- 166=`git-rev-parse refs/remotes/git-oldsvn`
-------------------------------------------------------------------------
-
-BUGS
-----
-If somebody commits a conflicting changeset to SVN at a bad moment
-(right before you commit) causing a conflict and your commit to fail,
-your svn working tree ($GIT_DIR/git-svn/tree) may be dirtied. The
-easiest thing to do is probably just to rm -rf $GIT_DIR/git-svn/tree and
-run 'rebuild'.
-
-We ignore all SVN properties except svn:executable. It is too difficult
-to map them, since we rely heavily on git write-tree being _exactly_ the
-same on both the SVN and git working trees, and I prefer not to clutter
-working trees with metadata files.
-
-svn:keywords can't be ignored in Subversion (at least I don't know of
-a way to ignore them).
-
-Renamed and copied directories are not detected by git and hence not
-tracked when committing to SVN. I do not plan on adding support for
-this as it's quite difficult and time-consuming to get working for all
-the possible corner cases (git doesn't do it, either). Renamed and
-copied files are fully supported if they're similar enough for git to
-detect them.
-
-Author
-------
-Written by Eric Wong <normalperson@yhbt.net>.
-
-Documentation
--------------
-Written by Eric Wong <normalperson@yhbt.net>.
+++ /dev/null
-PATH=$PWD/../:$PATH
-if test -d ../../../t
-then
- cd ../../../t
-else
- echo "Must be run in contrib/git-svn/t" >&2
- exit 1
-fi
-
-. ./test-lib.sh
-
-GIT_DIR=$PWD/.git
-GIT_SVN_DIR=$GIT_DIR/svn/git-svn
-SVN_TREE=$GIT_SVN_DIR/svn-tree
-
-svnadmin >/dev/null 2>&1
-if test $? != 1
-then
- test_expect_success 'skipping contrib/git-svn test' :
- test_done
- exit
-fi
-
-svn >/dev/null 2>&1
-if test $? != 1
-then
- test_expect_success 'skipping contrib/git-svn test' :
- test_done
- exit
-fi
-
-svnrepo=$PWD/svnrepo
-
-set -e
-
-svnadmin create $svnrepo
-svnrepo="file://$svnrepo/test-git-svn"
-
-
+++ /dev/null
-#!/bin/sh
-#
-# Copyright (c) 2006 Eric Wong
-#
-
-test_description='git-svn tests'
-GIT_SVN_LC_ALL=$LC_ALL
-. ./lib-git-svn.sh
-
-mkdir import
-cd import
-
-echo foo > foo
-if test -z "$NO_SYMLINK"
-then
- ln -s foo foo.link
-fi
-mkdir -p dir/a/b/c/d/e
-echo 'deep dir' > dir/a/b/c/d/e/file
-mkdir -p bar
-echo 'zzz' > bar/zzz
-echo '#!/bin/sh' > exec.sh
-chmod +x exec.sh
-svn import -m 'import for git-svn' . "$svnrepo" >/dev/null
-
-cd ..
-rm -rf import
-
-test_expect_success \
- 'initialize git-svn' \
- "git-svn init $svnrepo"
-
-test_expect_success \
- 'import an SVN revision into git' \
- 'git-svn fetch'
-
-test_expect_success "checkout from svn" "svn co $svnrepo $SVN_TREE"
-
-name='try a deep --rmdir with a commit'
-git checkout -f -b mybranch remotes/git-svn
-mv dir/a/b/c/d/e/file dir/file
-cp dir/file file
-git update-index --add --remove dir/a/b/c/d/e/file dir/file file
-git commit -m "$name"
-
-test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch &&
- svn up $SVN_TREE &&
- test -d $SVN_TREE/dir && test ! -d $SVN_TREE/dir/a"
-
-
-name='detect node change from file to directory #1'
-mkdir dir/new_file
-mv dir/file dir/new_file/file
-mv dir/new_file dir/file
-git update-index --remove dir/file
-git update-index --add dir/file/file
-git commit -m "$name"
-
-test_expect_failure "$name" \
- 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch' \
- || true
-
-
-name='detect node change from directory to file #1'
-rm -rf dir $GIT_DIR/index
-git checkout -f -b mybranch2 remotes/git-svn
-mv bar/zzz zzz
-rm -rf bar
-mv zzz bar
-git update-index --remove -- bar/zzz
-git update-index --add -- bar
-git commit -m "$name"
-
-test_expect_failure "$name" \
- 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch2' \
- || true
-
-
-name='detect node change from file to directory #2'
-rm -f $GIT_DIR/index
-git checkout -f -b mybranch3 remotes/git-svn
-rm bar/zzz
-git-update-index --remove bar/zzz
-mkdir bar/zzz
-echo yyy > bar/zzz/yyy
-git-update-index --add bar/zzz/yyy
-git commit -m "$name"
-
-test_expect_failure "$name" \
- 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch3' \
- || true
-
-
-name='detect node change from directory to file #2'
-rm -f $GIT_DIR/index
-git checkout -f -b mybranch4 remotes/git-svn
-rm -rf dir
-git update-index --remove -- dir/file
-touch dir
-echo asdf > dir
-git update-index --add -- dir
-git commit -m "$name"
-
-test_expect_failure "$name" \
- 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch4' \
- || true
-
-
-name='remove executable bit from a file'
-rm -f $GIT_DIR/index
-git checkout -f -b mybranch5 remotes/git-svn
-chmod -x exec.sh
-git update-index exec.sh
-git commit -m "$name"
-
-test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
- svn up $SVN_TREE &&
- test ! -x $SVN_TREE/exec.sh"
-
-
-name='add executable bit back to a file'
-chmod +x exec.sh
-git update-index exec.sh
-git commit -m "$name"
-
-test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
- svn up $SVN_TREE &&
- test -x $SVN_TREE/exec.sh"
-
-
-
-if test -z "$NO_SYMLINK"
-then
- name='executable file becomes a symlink to bar/zzz (file)'
- rm exec.sh
- ln -s bar/zzz exec.sh
- git update-index exec.sh
- git commit -m "$name"
-
- test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
- svn up $SVN_TREE &&
- test -L $SVN_TREE/exec.sh"
-
- name='new symlink is added to a file that was also just made executable'
- chmod +x bar/zzz
- ln -s bar/zzz exec-2.sh
- git update-index --add bar/zzz exec-2.sh
- git commit -m "$name"
-
- test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
- svn up $SVN_TREE &&
- test -x $SVN_TREE/bar/zzz &&
- test -L $SVN_TREE/exec-2.sh"
-
- name='modify a symlink to become a file'
- git help > help || true
- rm exec-2.sh
- cp help exec-2.sh
- git update-index exec-2.sh
- git commit -m "$name"
-
- test_expect_success "$name" \
- "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
- svn up $SVN_TREE &&
- test -f $SVN_TREE/exec-2.sh &&
- test ! -L $SVN_TREE/exec-2.sh &&
- diff -u help $SVN_TREE/exec-2.sh"
-fi
-
-
-if test -n "$GIT_SVN_LC_ALL" && echo $GIT_SVN_LC_ALL | grep -q '\.UTF-8$'
-then
- name="commit with UTF-8 message: locale: $GIT_SVN_LC_ALL"
- echo '# hello' >> exec-2.sh
- git update-index exec-2.sh
- git commit -m 'éï∏'
- export LC_ALL="$GIT_SVN_LC_ALL"
- test_expect_success "$name" "git-svn commit HEAD"
- unset LC_ALL
-else
- echo "UTF-8 locale not set, test skipped ($GIT_SVN_LC_ALL)"
-fi
-
-name='test fetch functionality (svn => git) with alternate GIT_SVN_ID'
-GIT_SVN_ID=alt
-export GIT_SVN_ID
-test_expect_success "$name" \
- "git-svn init $svnrepo && git-svn fetch &&
- git-rev-list --pretty=raw remotes/git-svn | grep ^tree | uniq > a &&
- git-rev-list --pretty=raw remotes/alt | grep ^tree | uniq > b &&
- diff -u a b"
-
-if test -n "$NO_SYMLINK"
-then
- test_done
- exit 0
-fi
-
-name='check imported tree checksums against expected tree checksums'
-rm -f expected
-if test -n "$GIT_SVN_LC_ALL" && echo $GIT_SVN_LC_ALL | grep -q '\.UTF-8$'
-then
- echo tree f735671b89a7eb30cab1d8597de35bd4271ab813 > expected
-fi
-cat >> expected <<\EOF
-tree 4b9af72bb861eaed053854ec502cf7df72618f0f
-tree 031b8d557afc6fea52894eaebb45bec52f1ba6d1
-tree 0b094cbff17168f24c302e297f55bfac65eb8bd3
-tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
-tree 56a30b966619b863674f5978696f4a3594f2fca9
-tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
-tree 8f51f74cf0163afc9ad68a4b1537288c4558b5a4
-EOF
-test_expect_success "$name" "diff -u a expected"
-
-test_done
-
+++ /dev/null
-#!/bin/sh
-#
-# Copyright (c) 2006 Eric Wong
-#
-
-test_description='git-svn property tests'
-. ./lib-git-svn.sh
-
-mkdir import
-
-a_crlf=
-a_lf=
-a_cr=
-a_ne_crlf=
-a_ne_lf=
-a_ne_cr=
-a_empty=
-a_empty_lf=
-a_empty_cr=
-a_empty_crlf=
-
-cd import
- cat >> kw.c <<\EOF
-/* Make it look like somebody copied a file from CVS into SVN: */
-/* $Id: kw.c,v 1.1.1.1 1994/03/06 00:00:00 eric Exp $ */
-EOF
-
- printf "Hello\r\nWorld\r\n" > crlf
- a_crlf=`git-hash-object -w crlf`
- printf "Hello\rWorld\r" > cr
- a_cr=`git-hash-object -w cr`
- printf "Hello\nWorld\n" > lf
- a_lf=`git-hash-object -w lf`
-
- printf "Hello\r\nWorld" > ne_crlf
- a_ne_crlf=`git-hash-object -w ne_crlf`
- printf "Hello\nWorld" > ne_lf
- a_ne_lf=`git-hash-object -w ne_lf`
- printf "Hello\rWorld" > ne_cr
- a_ne_cr=`git-hash-object -w ne_cr`
-
- touch empty
- a_empty=`git-hash-object -w empty`
- printf "\n" > empty_lf
- a_empty_lf=`git-hash-object -w empty_lf`
- printf "\r" > empty_cr
- a_empty_cr=`git-hash-object -w empty_cr`
- printf "\r\n" > empty_crlf
- a_empty_crlf=`git-hash-object -w empty_crlf`
-
- svn import -m 'import for git-svn' . "$svnrepo" >/dev/null
-cd ..
-
-rm -rf import
-test_expect_success 'checkout working copy from svn' "svn co $svnrepo test_wc"
-test_expect_success 'setup some commits to svn' \
- 'cd test_wc &&
- echo Greetings >> kw.c &&
- svn commit -m "Not yet an Id" &&
- svn up &&
- echo Hello world >> kw.c &&
- svn commit -m "Modified file, but still not yet an Id" &&
- svn up &&
- svn propset svn:keywords Id kw.c &&
- svn commit -m "Propset Id" &&
- svn up &&
- cd ..'
-
-test_expect_success 'initialize git-svn' "git-svn init $svnrepo"
-test_expect_success 'fetch revisions from svn' 'git-svn fetch'
-
-name='test svn:keywords ignoring'
-test_expect_success "$name" \
- 'git checkout -b mybranch remotes/git-svn &&
- echo Hi again >> kw.c &&
- git commit -a -m "test keywords ignoring" &&
- git-svn commit remotes/git-svn..mybranch &&
- git pull . remotes/git-svn'
-
-expect='/* $Id$ */'
-got="`sed -ne 2p kw.c`"
-test_expect_success 'raw $Id$ found in kw.c' "test '$expect' = '$got'"
-
-test_expect_success "propset CR on crlf files" \
- 'cd test_wc &&
- svn propset svn:eol-style CR empty &&
- svn propset svn:eol-style CR crlf &&
- svn propset svn:eol-style CR ne_crlf &&
- svn commit -m "propset CR on crlf files" &&
- svn up &&
- cd ..'
-
-test_expect_success 'fetch and pull latest from svn and checkout a new wc' \
- "git-svn fetch &&
- git pull . remotes/git-svn &&
- svn co $svnrepo new_wc"
-
-for i in crlf ne_crlf lf ne_lf cr ne_cr empty_cr empty_lf empty empty_crlf
-do
- test_expect_success "Comparing $i" "cmp $i new_wc/$i"
-done
-
-
-cd test_wc
- printf '$Id$\rHello\rWorld\r' > cr
- printf '$Id$\rHello\rWorld' > ne_cr
- a_cr=`printf '$Id$\r\nHello\r\nWorld\r\n' | git-hash-object --stdin`
- a_ne_cr=`printf '$Id$\r\nHello\r\nWorld' | git-hash-object --stdin`
- test_expect_success 'Set CRLF on cr files' \
- 'svn propset svn:eol-style CRLF cr &&
- svn propset svn:eol-style CRLF ne_cr &&
- svn propset svn:keywords Id cr &&
- svn propset svn:keywords Id ne_cr &&
- svn commit -m "propset CRLF on cr files" &&
- svn up'
-cd ..
-test_expect_success 'fetch and pull latest from svn' \
- 'git-svn fetch && git pull . remotes/git-svn'
-
-b_cr="`git-hash-object cr`"
-b_ne_cr="`git-hash-object ne_cr`"
-
-test_expect_success 'CRLF + $Id$' "test '$a_cr' = '$b_cr'"
-test_expect_success 'CRLF + $Id$ (no newline)' "test '$a_ne_cr' = '$b_ne_cr'"
-
-test_done
+++ /dev/null
-test_description='git-svn rmdir'
-. ./lib-git-svn.sh
-
-test_expect_success 'initialize repo' "
- mkdir import &&
- cd import &&
- mkdir -p deeply/nested/directory/number/1 &&
- mkdir -p deeply/nested/directory/number/2 &&
- echo foo > deeply/nested/directory/number/1/file &&
- echo foo > deeply/nested/directory/number/2/another &&
- svn import -m 'import for git-svn' . $svnrepo &&
- cd ..
- "
-
-test_expect_success 'mirror via git-svn' "
- git-svn init $svnrepo &&
- git-svn fetch &&
- git checkout -f -b test-rmdir remotes/git-svn
- "
-
-test_expect_success 'Try a commit on rmdir' "
- git rm -f deeply/nested/directory/number/2/another &&
- git commit -a -m 'remove another' &&
- git-svn commit --rmdir HEAD &&
- svn ls -R $svnrepo | grep ^deeply/nested/directory/number/1
- "
-
-
-test_done
#define _XOPEN_SOURCE 500 /* glibc2 and AIX 5.3L need this */
#define _XOPEN_SOURCE_EXTENDED 1 /* AIX 5.3L needs this */
+#define _GNU_SOURCE
#include <time.h>
#include "cache.h"
#include "blob.h"
{
char *new = xmalloc(size + 100);
unsigned long newlen = 0;
-
- // "tree <sha1>\n"
+
+ /* "tree <sha1>\n" */
memcpy(new + newlen, buffer, 46);
newlen += 46;
buffer = (char *) buffer + 46;
size -= 46;
- // "parent <sha1>\n"
+ /* "parent <sha1>\n" */
while (!memcmp(buffer, "parent ", 7)) {
memcpy(new + newlen, buffer, 48);
newlen += 48;
size -= 48;
}
- // "author xyz <xyz> date"
+ /* "author xyz <xyz> date" */
newlen += convert_date_line(new + newlen, &buffer, &size);
- // "committer xyz <xyz> date"
+ /* "committer xyz <xyz> date" */
newlen += convert_date_line(new + newlen, &buffer, &size);
- // Rest
+ /* Rest */
memcpy(new + newlen, buffer, size);
newlen += size;
void *out;
memset(&stream, 0, sizeof(stream));
- deflateInit(&stream, Z_DEFAULT_COMPRESSION);
+ deflateInit(&stream, zlib_compression_level);
maxsize = deflateBound(&stream, size);
out = xmalloc(maxsize);
"git-daemon [--verbose] [--syslog] [--inetd | --port=n] [--export-all]\n"
" [--timeout=n] [--init-timeout=n] [--strict-paths]\n"
" [--base-path=path] [--user-path | --user-path=path]\n"
-" [--reuseaddr] [directory...]";
+" [--reuseaddr] [--detach] [--pid-file=file] [directory...]";
/* List of acceptable pathname prefixes */
static char **ok_paths = NULL;
* after ~user/. E.g. a request to git://host/~alice/frotz would
* go to /home/alice/pub_git/frotz with --user-path=pub_git.
*/
-static char *user_path = NULL;
+static const char *user_path = NULL;
/* Timeout, and initial timeout */
static unsigned int timeout = 0;
va_end(params);
}
+static void NORETURN daemon_die(const char *err, va_list params)
+{
+ logreport(LOG_ERR, err, params);
+ exit(1);
+}
+
static int avoid_alias(char *p)
{
int sl, ndot;
children_reaped = reaped + 1;
/* XXX: Custom logging, since we don't wanna getpid() */
if (verbose) {
- char *dead = "";
+ const char *dead = "";
if (!WIFEXITED(status) || WEXITSTATUS(status) > 0)
dead = " (with error)";
if (log_syslog)
}
}
+/* if any standard file descriptor is missing open it to /dev/null */
+static void sanitize_stdfds(void)
+{
+ int fd = open("/dev/null", O_RDWR, 0);
+ while (fd != -1 && fd < 2)
+ fd = dup(fd);
+ if (fd == -1)
+ die("open /dev/null or dup failed: %s", strerror(errno));
+ if (fd > 2)
+ close(fd);
+}
+
+static void daemonize(void)
+{
+ switch (fork()) {
+ case 0:
+ break;
+ case -1:
+ die("fork failed: %s", strerror(errno));
+ default:
+ exit(0);
+ }
+ if (setsid() == -1)
+ die("setsid failed: %s", strerror(errno));
+ close(0);
+ close(1);
+ close(2);
+ sanitize_stdfds();
+}
+
+static void store_pid(const char *path)
+{
+ FILE *f = fopen(path, "w");
+ if (!f)
+ die("cannot open pid file %s: %s", path, strerror(errno));
+ fprintf(f, "%d\n", getpid());
+ fclose(f);
+}
+
static int serve(int port)
{
int socknum, *socklist;
{
int port = DEFAULT_GIT_PORT;
int inetd_mode = 0;
+ const char *pid_file = NULL;
+ int detach = 0;
int i;
/* Without this we cannot rely on waitpid() to tell
user_path = arg + 12;
continue;
}
+ if (!strncmp(arg, "--pid-file=", 11)) {
+ pid_file = arg + 11;
+ continue;
+ }
+ if (!strcmp(arg, "--detach")) {
+ detach = 1;
+ log_syslog = 1;
+ continue;
+ }
if (!strcmp(arg, "--")) {
ok_paths = &argv[i+1];
break;
usage(daemon_usage);
}
- if (log_syslog)
+ if (log_syslog) {
openlog("git-daemon", 0, LOG_DAEMON);
-
- if (strict_paths && (!ok_paths || !*ok_paths)) {
- if (!inetd_mode)
- die("git-daemon: option --strict-paths requires a whitelist");
-
- logerror("option --strict-paths requires a whitelist");
- exit (1);
+ set_die_routine(daemon_die);
}
+ if (strict_paths && (!ok_paths || !*ok_paths))
+ die("option --strict-paths requires a whitelist");
+
if (inetd_mode) {
struct sockaddr_storage ss;
struct sockaddr *peer = (struct sockaddr *)&ss;
return execute(peer);
}
+ if (detach)
+ daemonize();
+ else
+ sanitize_stdfds();
+
+ if (pid_file)
+ store_pid(pid_file);
+
return serve(port);
}
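
The new --detach and --pid-file options above work together: daemonize()
forks, calls setsid() and reattaches the standard descriptors to
/dev/null (--detach also forces syslog logging), and store_pid() records
the daemon's pid once it is running. A minimal usage sketch; the exported
path and pid-file location are placeholders:

------------------------------------------------------------------------
# Serve /srv/git in the background and remember the daemon's pid
# (paths are illustrative only).
git-daemon --detach --pid-file=/var/run/git-daemon.pid \
	--export-all --base-path=/srv/git

# Stop it later using the recorded pid.
kill `cat /var/run/git-daemon.pid`
------------------------------------------------------------------------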
* Otherwise only annotated tags are used.
*/
if (!strncmp(path, "refs/tags/", 10)) {
- if (object->type == TYPE_TAG)
+ if (object->type == OBJ_TAG)
prio = 2;
else
prio = 1;
return (a_date > b_date) ? -1 : (a_date == b_date) ? 0 : 1;
}
-static void describe(char *arg, int last_one)
+static void describe(const char *arg, int last_one)
{
unsigned char sha1[20];
struct commit *cmit;
return NULL;
/* Determine index hash size. Note that indexing skips the
- first byte to allow for optimizing the rabin polynomial
+ first byte to allow for optimizing the Rabin's polynomial
initialization in create_delta(). */
entries = (bufsize - 1) / RABIN_WINDOW;
hsize = entries / 4;
/*
* Determine a limit on the number of entries in the same hash
- * bucket. This guard us against patological data sets causing
+ * bucket. This guards us against pathological data sets causing
* really bad hash distribution with most entries in the same hash
* bucket that would bring us to O(m*n) computing costs (m and n
* corresponding to reference and target buffer sizes).
/*
* The maximum size for any opcode sequence, including the initial header
- * plus rabin window plus biggest copy.
+ * plus Rabin window plus biggest copy.
*/
#define MAX_OP_SIZE (5 + 5 + 1 + RABIN_WINDOW + 7)
static int use_size_cache;
+static int diff_detect_rename_default = 0;
static int diff_rename_limit_default = -1;
static int diff_use_color_default = 0;
-enum color_diff {
- DIFF_RESET = 0,
- DIFF_PLAIN = 1,
- DIFF_METAINFO = 2,
- DIFF_FRAGINFO = 3,
- DIFF_FILE_OLD = 4,
- DIFF_FILE_NEW = 5,
-};
-
-#define COLOR_NORMAL ""
-#define COLOR_BOLD "\033[1m"
-#define COLOR_DIM "\033[2m"
-#define COLOR_UL "\033[4m"
-#define COLOR_BLINK "\033[5m"
-#define COLOR_REVERSE "\033[7m"
-#define COLOR_RESET "\033[m"
-
-#define COLOR_BLACK "\033[30m"
-#define COLOR_RED "\033[31m"
-#define COLOR_GREEN "\033[32m"
-#define COLOR_YELLOW "\033[33m"
-#define COLOR_BLUE "\033[34m"
-#define COLOR_MAGENTA "\033[35m"
-#define COLOR_CYAN "\033[36m"
-#define COLOR_WHITE "\033[37m"
-
-static const char *diff_colors[] = {
- [DIFF_RESET] = COLOR_RESET,
- [DIFF_PLAIN] = COLOR_NORMAL,
- [DIFF_METAINFO] = COLOR_BOLD,
- [DIFF_FRAGINFO] = COLOR_CYAN,
- [DIFF_FILE_OLD] = COLOR_RED,
- [DIFF_FILE_NEW] = COLOR_GREEN,
+/* "\033[1;38;5;2xx;48;5;2xxm\0" is 23 bytes */
+static char diff_colors[][24] = {
+ "\033[m", /* reset */
+ "", /* normal */
+ "\033[1m", /* bold */
+ "\033[36m", /* cyan */
+ "\033[31m", /* red */
+ "\033[32m", /* green */
+ "\033[33m" /* yellow */
};
static int parse_diff_color_slot(const char *var, int ofs)
return DIFF_FILE_OLD;
if (!strcasecmp(var+ofs, "new"))
return DIFF_FILE_NEW;
+ if (!strcasecmp(var+ofs, "commit"))
+ return DIFF_COMMIT;
die("bad config variable '%s'", var);
}
-static const char *parse_diff_color_value(const char *value, const char *var)
-{
- if (!strcasecmp(value, "normal"))
- return COLOR_NORMAL;
- if (!strcasecmp(value, "bold"))
- return COLOR_BOLD;
- if (!strcasecmp(value, "dim"))
- return COLOR_DIM;
- if (!strcasecmp(value, "ul"))
- return COLOR_UL;
- if (!strcasecmp(value, "blink"))
- return COLOR_BLINK;
- if (!strcasecmp(value, "reverse"))
- return COLOR_REVERSE;
- if (!strcasecmp(value, "reset"))
- return COLOR_RESET;
- if (!strcasecmp(value, "black"))
- return COLOR_BLACK;
- if (!strcasecmp(value, "red"))
- return COLOR_RED;
- if (!strcasecmp(value, "green"))
- return COLOR_GREEN;
- if (!strcasecmp(value, "yellow"))
- return COLOR_YELLOW;
- if (!strcasecmp(value, "blue"))
- return COLOR_BLUE;
- if (!strcasecmp(value, "magenta"))
- return COLOR_MAGENTA;
- if (!strcasecmp(value, "cyan"))
- return COLOR_CYAN;
- if (!strcasecmp(value, "white"))
- return COLOR_WHITE;
+static int parse_color(const char *name, int len)
+{
+ static const char * const color_names[] = {
+ "normal", "black", "red", "green", "yellow",
+ "blue", "magenta", "cyan", "white"
+ };
+ char *end;
+ int i;
+ for (i = 0; i < ARRAY_SIZE(color_names); i++) {
+ const char *str = color_names[i];
+ if (!strncasecmp(name, str, len) && !str[len])
+ return i - 1;
+ }
+ i = strtol(name, &end, 10);
+ if (*name && !*end && i >= -1 && i <= 255)
+ return i;
+ return -2;
+}
+
+static int parse_attr(const char *name, int len)
+{
+ static const int attr_values[] = { 1, 2, 4, 5, 7 };
+ static const char * const attr_names[] = {
+ "bold", "dim", "ul", "blink", "reverse"
+ };
+ int i;
+ for (i = 0; i < ARRAY_SIZE(attr_names); i++) {
+ const char *str = attr_names[i];
+ if (!strncasecmp(name, str, len) && !str[len])
+ return attr_values[i];
+ }
+ return -1;
+}
+
+static void parse_diff_color_value(const char *value, const char *var, char *dst)
+{
+ const char *ptr = value;
+ int attr = -1;
+ int fg = -2;
+ int bg = -2;
+
+ if (!strcasecmp(value, "reset")) {
+ strcpy(dst, "\033[m");
+ return;
+ }
+
+ /* [fg [bg]] [attr] */
+ while (*ptr) {
+ const char *word = ptr;
+ int val, len = 0;
+
+ while (word[len] && !isspace(word[len]))
+ len++;
+
+ ptr = word + len;
+ while (*ptr && isspace(*ptr))
+ ptr++;
+
+ val = parse_color(word, len);
+ if (val >= -1) {
+ if (fg == -2) {
+ fg = val;
+ continue;
+ }
+ if (bg == -2) {
+ bg = val;
+ continue;
+ }
+ goto bad;
+ }
+ val = parse_attr(word, len);
+ if (val < 0 || attr != -1)
+ goto bad;
+ attr = val;
+ }
+
+ if (attr >= 0 || fg >= 0 || bg >= 0) {
+ int sep = 0;
+
+ *dst++ = '\033';
+ *dst++ = '[';
+ if (attr >= 0) {
+ *dst++ = '0' + attr;
+ sep++;
+ }
+ if (fg >= 0) {
+ if (sep++)
+ *dst++ = ';';
+ if (fg < 8) {
+ *dst++ = '3';
+ *dst++ = '0' + fg;
+ } else {
+ dst += sprintf(dst, "38;5;%d", fg);
+ }
+ }
+ if (bg >= 0) {
+ if (sep++)
+ *dst++ = ';';
+ if (bg < 8) {
+ *dst++ = '4';
+ *dst++ = '0' + bg;
+ } else {
+ dst += sprintf(dst, "48;5;%d", bg);
+ }
+ }
+ *dst++ = 'm';
+ }
+ *dst = 0;
+ return;
+bad:
die("bad config value '%s' for variable '%s'", value, var);
}
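
parse_diff_color_value() above turns a configuration value of the form
'[fg [bg]] [attr]' into an ANSI escape sequence stored in the fixed-size
diff_colors slot; colors may be named or given as 256-color numbers.
A few illustrative mappings, assuming the repo-config tool of this
vintage and the usual diff.color.<slot> names:

------------------------------------------------------------------------
# Config value                                  escape stored by the parser
git repo-config diff.color.new  "green"        # \033[32m
git repo-config diff.color.old  "red bold"     # \033[1;31m
git repo-config diff.color.meta "white blue"   # \033[37;44m
git repo-config diff.color.frag "214"          # \033[38;5;214m (256-color)
------------------------------------------------------------------------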
-int git_diff_config(const char *var, const char *value)
+/*
+ * These are to give UI layer defaults.
+ * The core-level commands such as git-diff-files should
+ * never be affected by the setting of diff.renames
+ * the user happens to have in the configuration file.
+ */
+int git_diff_ui_config(const char *var, const char *value)
{
if (!strcmp(var, "diff.renamelimit")) {
diff_rename_limit_default = git_config_int(var, value);
if (!strcmp(var, "diff.color")) {
if (!value)
diff_use_color_default = 1; /* bool */
- else if (!strcasecmp(value, "auto"))
- diff_use_color_default = isatty(1);
+ else if (!strcasecmp(value, "auto")) {
+ diff_use_color_default = 0;
+ if (isatty(1) || (pager_in_use && pager_use_color)) {
+ char *term = getenv("TERM");
+ if (term && strcmp(term, "dumb"))
+ diff_use_color_default = 1;
+ }
+ }
else if (!strcasecmp(value, "never"))
diff_use_color_default = 0;
else if (!strcasecmp(value, "always"))
diff_use_color_default = git_config_bool(var, value);
return 0;
}
+ if (!strcmp(var, "diff.renames")) {
+ if (!value)
+ diff_detect_rename_default = DIFF_DETECT_RENAME;
+ else if (!strcasecmp(value, "copies") ||
+ !strcasecmp(value, "copy"))
+ diff_detect_rename_default = DIFF_DETECT_COPY;
+ else if (git_config_bool(var,value))
+ diff_detect_rename_default = DIFF_DETECT_RENAME;
+ return 0;
+ }
if (!strncmp(var, "diff.color.", 11)) {
int slot = parse_diff_color_slot(var, 11);
- diff_colors[slot] = parse_diff_color_value(value, var);
+ parse_diff_color_value(value, var, diff_colors[slot]);
return 0;
}
return git_default_config(var, value);
return 0;
}
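
git_diff_ui_config() supplies porcelain-level defaults only; 'auto'
coloring now also triggers when the pager is in use (unless
pager_use_color is turned off) and never on a dumb terminal, and the new
diff.renames setting feeds diff_detect_rename_default. A sketch of
setting these defaults, again assuming repo-config:

------------------------------------------------------------------------
# Turn on rename detection for the porcelain diff commands;
# "copies" (or "copy") additionally enables copy detection.
git repo-config diff.renames copies

# Colorize when writing to a terminal or through the pager.
git repo-config diff.color auto
------------------------------------------------------------------------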
+struct diff_words_buffer {
+ mmfile_t text;
+ long alloc;
+ long current; /* output pointer */
+ int suppressed_newline;
+};
+
+static void diff_words_append(char *line, unsigned long len,
+ struct diff_words_buffer *buffer)
+{
+ if (buffer->text.size + len > buffer->alloc) {
+ buffer->alloc = (buffer->text.size + len) * 3 / 2;
+ buffer->text.ptr = xrealloc(buffer->text.ptr, buffer->alloc);
+ }
+ line++;
+ len--;
+ memcpy(buffer->text.ptr + buffer->text.size, line, len);
+ buffer->text.size += len;
+}
+
+struct diff_words_data {
+ struct xdiff_emit_state xm;
+ struct diff_words_buffer minus, plus;
+};
+
+static void print_word(struct diff_words_buffer *buffer, int len, int color,
+ int suppress_newline)
+{
+ const char *ptr;
+ int eol = 0;
+
+ if (len == 0)
+ return;
+
+ ptr = buffer->text.ptr + buffer->current;
+ buffer->current += len;
+
+ if (ptr[len - 1] == '\n') {
+ eol = 1;
+ len--;
+ }
+
+ fputs(diff_get_color(1, color), stdout);
+ fwrite(ptr, len, 1, stdout);
+ fputs(diff_get_color(1, DIFF_RESET), stdout);
+
+ if (eol) {
+ if (suppress_newline)
+ buffer->suppressed_newline = 1;
+ else
+ putchar('\n');
+ }
+}
+
+static void fn_out_diff_words_aux(void *priv, char *line, unsigned long len)
+{
+ struct diff_words_data *diff_words = priv;
+
+ if (diff_words->minus.suppressed_newline) {
+ if (line[0] != '+')
+ putchar('\n');
+ diff_words->minus.suppressed_newline = 0;
+ }
+
+ len--;
+ switch (line[0]) {
+ case '-':
+ print_word(&diff_words->minus, len, DIFF_FILE_OLD, 1);
+ break;
+ case '+':
+ print_word(&diff_words->plus, len, DIFF_FILE_NEW, 0);
+ break;
+ case ' ':
+ print_word(&diff_words->plus, len, DIFF_PLAIN, 0);
+ diff_words->minus.current += len;
+ break;
+ }
+}
+
+/* this executes the word diff on the accumulated buffers */
+static void diff_words_show(struct diff_words_data *diff_words)
+{
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
+ xdemitcb_t ecb;
+ mmfile_t minus, plus;
+ int i;
+
+ minus.size = diff_words->minus.text.size;
+ minus.ptr = xmalloc(minus.size);
+ memcpy(minus.ptr, diff_words->minus.text.ptr, minus.size);
+ for (i = 0; i < minus.size; i++)
+ if (isspace(minus.ptr[i]))
+ minus.ptr[i] = '\n';
+ diff_words->minus.current = 0;
+
+ plus.size = diff_words->plus.text.size;
+ plus.ptr = xmalloc(plus.size);
+ memcpy(plus.ptr, diff_words->plus.text.ptr, plus.size);
+ for (i = 0; i < plus.size; i++)
+ if (isspace(plus.ptr[i]))
+ plus.ptr[i] = '\n';
+ diff_words->plus.current = 0;
+
+ xpp.flags = XDF_NEED_MINIMAL;
+ xecfg.ctxlen = diff_words->minus.alloc + diff_words->plus.alloc;
+ xecfg.flags = 0;
+ ecb.outf = xdiff_outf;
+ ecb.priv = diff_words;
+ diff_words->xm.consume = fn_out_diff_words_aux;
+ xdl_diff(&minus, &plus, &xpp, &xecfg, &ecb);
+
+ free(minus.ptr);
+ free(plus.ptr);
+ diff_words->minus.text.size = diff_words->plus.text.size = 0;
+
+ if (diff_words->minus.suppressed_newline) {
+ putchar('\n');
+ diff_words->minus.suppressed_newline = 0;
+ }
+}
+
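
The word-diff buffers above accumulate the '-' and '+' lines of a hunk,
rewrite whitespace as newlines so xdl_diff() compares individual words,
and print_word() re-emits the result inline in the old/new colors. This
is reached through the new --color-words option; the revision and path
below are placeholders:

------------------------------------------------------------------------
# Highlight changed words inline instead of whole changed lines.
git diff --color-words HEAD^ HEAD -- README
------------------------------------------------------------------------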
struct emit_callback {
struct xdiff_emit_state xm;
int nparents, color_diff;
const char **label_path;
+ struct diff_words_data *diff_words;
};
-static inline const char *get_color(int diff_use_color, enum color_diff ix)
+static void free_diff_words_data(struct emit_callback *ecbdata)
+{
+ if (ecbdata->diff_words) {
+ /* flush buffers */
+ if (ecbdata->diff_words->minus.text.size ||
+ ecbdata->diff_words->plus.text.size)
+ diff_words_show(ecbdata->diff_words);
+
+ if (ecbdata->diff_words->minus.text.ptr)
+ free (ecbdata->diff_words->minus.text.ptr);
+ if (ecbdata->diff_words->plus.text.ptr)
+ free (ecbdata->diff_words->plus.text.ptr);
+ free(ecbdata->diff_words);
+ ecbdata->diff_words = NULL;
+ }
+}
+
+const char *diff_get_color(int diff_use_color, enum color_diff ix)
{
if (diff_use_color)
return diff_colors[ix];
{
int i;
struct emit_callback *ecbdata = priv;
- const char *set = get_color(ecbdata->color_diff, DIFF_METAINFO);
- const char *reset = get_color(ecbdata->color_diff, DIFF_RESET);
+ const char *set = diff_get_color(ecbdata->color_diff, DIFF_METAINFO);
+ const char *reset = diff_get_color(ecbdata->color_diff, DIFF_RESET);
if (ecbdata->label_path[0]) {
printf("%s--- %s%s\n", set, ecbdata->label_path[0], reset);
;
if (2 <= i && i < len && line[i] == ' ') {
ecbdata->nparents = i - 1;
- set = get_color(ecbdata->color_diff, DIFF_FRAGINFO);
+ set = diff_get_color(ecbdata->color_diff, DIFF_FRAGINFO);
}
else if (len < ecbdata->nparents)
set = reset;
else {
int nparents = ecbdata->nparents;
int color = DIFF_PLAIN;
- for (i = 0; i < nparents && len; i++) {
- if (line[i] == '-')
- color = DIFF_FILE_OLD;
- else if (line[i] == '+')
- color = DIFF_FILE_NEW;
- }
- set = get_color(ecbdata->color_diff, color);
+ if (ecbdata->diff_words && nparents != 1)
+ /* fall back to normal diff */
+ free_diff_words_data(ecbdata);
+ if (ecbdata->diff_words) {
+ if (line[0] == '-') {
+ diff_words_append(line, len,
+ &ecbdata->diff_words->minus);
+ return;
+ } else if (line[0] == '+') {
+ diff_words_append(line, len,
+ &ecbdata->diff_words->plus);
+ return;
+ }
+ if (ecbdata->diff_words->minus.text.size ||
+ ecbdata->diff_words->plus.text.size)
+ diff_words_show(ecbdata->diff_words);
+ line++;
+ len--;
+ } else
+ for (i = 0; i < nparents && len; i++) {
+ if (line[i] == '-')
+ color = DIFF_FILE_OLD;
+ else if (line[i] == '+')
+ color = DIFF_FILE_NEW;
+ }
+ set = diff_get_color(ecbdata->color_diff, color);
}
if (len > 0 && line[len-1] == '\n')
len--;
- printf("%s%.*s%s\n", set, (int) len, line, reset);
+ fputs (set, stdout);
+ fwrite (line, len, 1, stdout);
+ puts (reset);
}
static char *pprint_rename(const char *a, const char *b)
z_stream stream;
memset(&stream, 0, sizeof(stream));
- deflateInit(&stream, Z_BEST_COMPRESSION);
+ deflateInit(&stream, zlib_compression_level);
bound = deflateBound(&stream, size);
deflated = xmalloc(bound);
stream.next_out = deflated;
mmfile_t mf1, mf2;
const char *lbl[2];
char *a_one, *b_two;
- const char *set = get_color(o->color_diff, DIFF_METAINFO);
- const char *reset = get_color(o->color_diff, DIFF_RESET);
+ const char *set = diff_get_color(o->color_diff, DIFF_METAINFO);
+ const char *reset = diff_get_color(o->color_diff, DIFF_RESET);
a_one = quote_two("a/", name_a);
b_two = quote_two("b/", name_b);
if (fill_mmfile(&mf1, one) < 0 || fill_mmfile(&mf2, two) < 0)
die("unable to read files to diff");
- if (mmfile_is_binary(&mf1) || mmfile_is_binary(&mf2)) {
+ if (!o->text && (mmfile_is_binary(&mf1) || mmfile_is_binary(&mf2))) {
/* Quite common confusing case */
if (mf1.size == mf2.size &&
!memcmp(mf1.ptr, mf2.ptr, mf1.size))
ecb.outf = xdiff_outf;
ecb.priv = &ecbdata;
ecbdata.xm.consume = fn_out_consume;
+ if (o->color_diff_words)
+ ecbdata.diff_words =
+ xcalloc(1, sizeof(struct diff_words_data));
xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb);
+ if (o->color_diff_words)
+ free_diff_words_data(&ecbdata);
}
free_ab_and_return:
options->change = diff_change;
options->add_remove = diff_addremove;
options->color_diff = diff_use_color_default;
+ options->detect_rename = diff_detect_rename_default;
}
int diff_setup_done(struct diff_options *options)
if (count > 1)
die("--name-only, --name-status, --check and -s are mutually exclusive");
- if ((options->find_copies_harder &&
- options->detect_rename != DIFF_DETECT_COPY) ||
- (0 <= options->rename_limit && !options->detect_rename))
- return -1;
+ if (options->find_copies_harder)
+ options->detect_rename = DIFF_DETECT_COPY;
if (options->output_format & (DIFF_FORMAT_NAME |
DIFF_FORMAT_NAME_STATUS |
options->output_format |= DIFF_FORMAT_PATCH;
options->full_index = options->binary = 1;
}
+ else if (!strcmp(arg, "-a") || !strcmp(arg, "--text")) {
+ options->text = 1;
+ }
else if (!strcmp(arg, "--name-only"))
options->output_format |= DIFF_FORMAT_NAME;
else if (!strcmp(arg, "--name-status"))
}
else if (!strcmp(arg, "--color"))
options->color_diff = 1;
+ else if (!strcmp(arg, "--no-color"))
+ options->color_diff = 0;
else if (!strcmp(arg, "-w") || !strcmp(arg, "--ignore-all-space"))
options->xdl_opts |= XDF_IGNORE_WHITESPACE;
else if (!strcmp(arg, "-b") || !strcmp(arg, "--ignore-space-change"))
options->xdl_opts |= XDF_IGNORE_WHITESPACE_CHANGE;
+ else if (!strcmp(arg, "--color-words"))
+ options->color_diff = options->color_diff_words = 1;
+ else if (!strcmp(arg, "--no-renames"))
+ options->detect_rename = 0;
else
return 0;
return 1;
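
diff_opt_parse() gains -a/--text to force a textual diff for blobs that
look binary, --no-color and --no-renames to override the new
configuration defaults, and --find-copies-harder now simply implies copy
detection instead of being rejected. For instance, with placeholder
revisions and paths:

------------------------------------------------------------------------
# Diff a file that would otherwise be shown as "Binary files differ".
git diff --text HEAD -- data.bin

# Override diff.color / diff.renames for a single invocation.
git diff --no-color --no-renames HEAD^ HEAD
------------------------------------------------------------------------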
struct diff_filespec *one,
struct diff_filespec *two)
{
- struct diff_filepair *dp = xmalloc(sizeof(*dp));
+ struct diff_filepair *dp = xcalloc(1, sizeof(*dp));
dp->one = one;
dp->two = two;
- dp->score = 0;
- dp->status = 0;
- dp->source_stays = 0;
- dp->broken_pair = 0;
if (queue)
diff_q(queue, dp);
return dp;
}
}
+struct patch_id_t {
+ struct xdiff_emit_state xm;
+ SHA_CTX *ctx;
+ int patchlen;
+};
+
+static int remove_space(char *line, int len)
+{
+ int i;
+ char *dst = line;
+ unsigned char c;
+
+ for (i = 0; i < len; i++)
+ if (!isspace((c = line[i])))
+ *dst++ = c;
+
+ return dst - line;
+}
+
+static void patch_id_consume(void *priv, char *line, unsigned long len)
+{
+ struct patch_id_t *data = priv;
+ int new_len;
+
+ /* Ignore line numbers when computing the SHA1 of the patch */
+ if (!strncmp(line, "@@ -", 4))
+ return;
+
+ new_len = remove_space(line, len);
+
+ SHA1_Update(data->ctx, line, new_len);
+ data->patchlen += new_len;
+}
+
+/* returns 0 upon success, and writes result into sha1 */
+static int diff_get_patch_id(struct diff_options *options, unsigned char *sha1)
+{
+ struct diff_queue_struct *q = &diff_queued_diff;
+ int i;
+ SHA_CTX ctx;
+ struct patch_id_t data;
+ char buffer[PATH_MAX * 4 + 20];
+
+ SHA1_Init(&ctx);
+ memset(&data, 0, sizeof(struct patch_id_t));
+ data.ctx = &ctx;
+ data.xm.consume = patch_id_consume;
+
+ for (i = 0; i < q->nr; i++) {
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
+ xdemitcb_t ecb;
+ mmfile_t mf1, mf2;
+ struct diff_filepair *p = q->queue[i];
+ int len1, len2;
+
+ if (p->status == 0)
+ return error("internal diff status error");
+ if (p->status == DIFF_STATUS_UNKNOWN)
+ continue;
+ if (diff_unmodified_pair(p))
+ continue;
+ if ((DIFF_FILE_VALID(p->one) && S_ISDIR(p->one->mode)) ||
+ (DIFF_FILE_VALID(p->two) && S_ISDIR(p->two->mode)))
+ continue;
+ if (DIFF_PAIR_UNMERGED(p))
+ continue;
+
+ diff_fill_sha1_info(p->one);
+ diff_fill_sha1_info(p->two);
+ if (fill_mmfile(&mf1, p->one) < 0 ||
+ fill_mmfile(&mf2, p->two) < 0)
+ return error("unable to read files to diff");
+
+ /* Maybe hash p->two? into the patch id? */
+ if (mmfile_is_binary(&mf2))
+ continue;
+
+ len1 = remove_space(p->one->path, strlen(p->one->path));
+ len2 = remove_space(p->two->path, strlen(p->two->path));
+ if (p->one->mode == 0)
+ len1 = snprintf(buffer, sizeof(buffer),
+ "diff--gita/%.*sb/%.*s"
+ "newfilemode%06o"
+ "---/dev/null"
+ "+++b/%.*s",
+ len1, p->one->path,
+ len2, p->two->path,
+ p->two->mode,
+ len2, p->two->path);
+ else if (p->two->mode == 0)
+ len1 = snprintf(buffer, sizeof(buffer),
+ "diff--gita/%.*sb/%.*s"
+ "deletedfilemode%06o"
+ "---a/%.*s"
+ "+++/dev/null",
+ len1, p->one->path,
+ len2, p->two->path,
+ p->one->mode,
+ len1, p->one->path);
+ else
+ len1 = snprintf(buffer, sizeof(buffer),
+ "diff--gita/%.*sb/%.*s"
+ "---a/%.*s"
+ "+++b/%.*s",
+ len1, p->one->path,
+ len2, p->two->path,
+ len1, p->one->path,
+ len2, p->two->path);
+ SHA1_Update(&ctx, buffer, len1);
+
+ xpp.flags = XDF_NEED_MINIMAL;
+ xecfg.ctxlen = 3;
+ xecfg.flags = XDL_EMIT_FUNCNAMES;
+ ecb.outf = xdiff_outf;
+ ecb.priv = &data;
+ xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb);
+ }
+
+ SHA1_Final(sha1, &ctx);
+ return 0;
+}
+
+int diff_flush_patch_id(struct diff_options *options, unsigned char *sha1)
+{
+ struct diff_queue_struct *q = &diff_queued_diff;
+ int i;
+ int result = diff_get_patch_id(options, sha1);
+
+ for (i = 0; i < q->nr; i++)
+ diff_free_filepair(q->queue[i]);
+
+ free(q->queue);
+ q->queue = NULL;
+ q->nr = q->alloc = 0;
+
+ return result;
+}
+
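
diff_get_patch_id() hashes a normalized form of each filepair: a
synthetic header and every diff line with all whitespace removed,
skipping the '@@ -' hunk headers, so reformatting or shifted context
does not change the id. The long-standing git-patch-id command works in
the same spirit on a patch read from stdin; a sketch of comparing two
commits that way (commit names are placeholders):

------------------------------------------------------------------------
# Two commits carry the same change if their patch ids match, even
# when whitespace or line numbers differ.
git-diff-tree -p COMMIT_A | git-patch-id
git-diff-tree -p COMMIT_B | git-patch-id
------------------------------------------------------------------------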
static int is_summary_empty(const struct diff_queue_struct *q)
{
int i;
unsigned recursive:1,
tree_in_recursive:1,
binary:1,
+ text:1,
full_index:1,
silent_on_remove:1,
find_copies_harder:1,
- color_diff:1;
+ color_diff:1,
+ color_diff_words:1;
int context;
int break_opt;
int detect_rename;
add_remove_fn_t add_remove;
};
+enum color_diff {
+ DIFF_RESET = 0,
+ DIFF_PLAIN = 1,
+ DIFF_METAINFO = 2,
+ DIFF_FRAGINFO = 3,
+ DIFF_FILE_OLD = 4,
+ DIFF_FILE_NEW = 5,
+ DIFF_COMMIT = 6,
+};
+const char *diff_get_color(int diff_use_color, enum color_diff ix);
+
extern const char mime_boundary_leader[];
extern void diff_tree_setup_paths(const char **paths, struct diff_options *);
#define DIFF_SETUP_USE_CACHE 2
#define DIFF_SETUP_USE_SIZE_CACHE 4
-extern int git_diff_config(const char *var, const char *value);
+extern int git_diff_ui_config(const char *var, const char *value);
extern void diff_setup(struct diff_options *);
extern int diff_opt_parse(struct diff_options *, const char **, int);
extern int diff_setup_done(struct diff_options *);
" -O<file> reorder diffs according to the <file>.\n" \
" -S<string> find filepair whose only one side contains the string.\n" \
" --pickaxe-all\n" \
-" show all files diff when -S is used and hit is found.\n"
+" show all files diff when -S is used and hit is found.\n" \
+" -a --text treat all files as text.\n"
extern int diff_queue_is_empty(void);
extern void diff_flush(struct diff_options*);
extern int run_diff_index(struct rev_info *revs, int cached);
+extern int diff_flush_patch_id(struct diff_options *, unsigned char *);
+
#endif /* DIFF_H */
return &(rename_src[first]);
}
-static int is_exact_match(struct diff_filespec *src, struct diff_filespec *dst)
+static int is_exact_match(struct diff_filespec *src,
+ struct diff_filespec *dst,
+ int contents_too)
{
if (src->sha1_valid && dst->sha1_valid &&
!memcmp(src->sha1, dst->sha1, 20))
return 1;
+ if (!contents_too)
+ return 0;
if (diff_populate_filespec(src, 1) || diff_populate_filespec(dst, 1))
return 0;
if (src->size != dst->size)
fill_filespec(two, dst->sha1, dst->mode);
dp = diff_queue(NULL, one, two);
+ dp->renamed_pair = 1;
if (!strcmp(src->path, dst->path))
dp->score = rename_src[src_index].score;
else
struct diff_queue_struct *q = &diff_queued_diff;
struct diff_queue_struct outq;
struct diff_score *mx;
- int i, j, rename_count;
+ int i, j, rename_count, contents_too;
int num_create, num_src, dst_cnt;
if (!minimum_score)
/* We really want to cull the candidates list early
* with cheap tests in order to avoid doing deltas.
+ * The first round matches up the up-to-date entries,
+ * and then during the second round we try to match
+ * cache-dirty entries as well.
*/
- for (i = 0; i < rename_dst_nr; i++) {
- struct diff_filespec *two = rename_dst[i].two;
- for (j = 0; j < rename_src_nr; j++) {
- struct diff_filespec *one = rename_src[j].one;
- if (!is_exact_match(one, two))
- continue;
- record_rename_pair(i, j, MAX_SCORE);
- rename_count++;
- break; /* we are done with this entry */
+ for (contents_too = 0; contents_too < 2; contents_too++) {
+ for (i = 0; i < rename_dst_nr; i++) {
+ struct diff_filespec *two = rename_dst[i].two;
+ if (rename_dst[i].pair)
+ continue; /* dealt with an earlier round */
+ for (j = 0; j < rename_src_nr; j++) {
+ struct diff_filespec *one = rename_src[j].one;
+ if (!is_exact_match(one, two, contents_too))
+ continue;
+ record_rename_pair(i, j, MAX_SCORE);
+ rename_count++;
+ break; /* we are done with this entry */
+ }
}
}
char status; /* M C R N D U (see Documentation/diff-format.txt) */
unsigned source_stays : 1; /* all of R/C are copies */
unsigned broken_pair : 1;
+ unsigned renamed_pair : 1;
};
#define DIFF_PAIR_UNMERGED(p) \
(!DIFF_FILE_VALID((p)->one) && !DIFF_FILE_VALID((p)->two))
-#define DIFF_PAIR_RENAME(p) (strcmp((p)->one->path, (p)->two->path))
+#define DIFF_PAIR_RENAME(p) ((p)->renamed_pair)
#define DIFF_PAIR_BROKEN(p) \
( (!DIFF_FILE_VALID((p)->one) != !DIFF_FILE_VALID((p)->two)) && \
if (dir->show_other_directories &&
(subdir || !dir->hide_empty_directories) &&
!dir_exists(fullname, baselen + len)) {
- // Rewind the read subdirectory
+ /* Rewind the read subdirectory */
while (dir->nr > rewind_base)
free(dir->entries[--dir->nr]);
break;
char git_default_email[MAX_GITNAME];
char git_default_name[MAX_GITNAME];
+int use_legacy_headers = 1;
int trust_executable_bit = 1;
int assume_unchanged = 0;
int prefer_symlink_refs = 0;
char git_commit_encoding[MAX_ENCODING_LENGTH] = "utf-8";
int shared_repository = PERM_UMASK;
const char *apply_default_whitespace = NULL;
+int zlib_compression_level = Z_DEFAULT_COMPRESSION;
+int pager_in_use;
+int pager_use_color = 1;
static char *git_dir, *git_object_dir, *git_index_file, *git_refs_dir,
*git_graft_file;
#include "cache.h"
#include "exec_cmd.h"
+#include "quote.h"
#define MAX_ARGS 32
extern char **environ;
tmp = argv[0];
argv[0] = git_command;
+ if (getenv("GIT_TRACE")) {
+ const char **p = argv;
+ fputs("trace: exec:", stderr);
+ while (*p) {
+ fputc(' ', stderr);
+ sq_quote_print(stderr, *p);
+ ++p;
+ }
+ putc('\n', stderr);
+ fflush(stderr);
+ }
+
/* execve() can only ever return if it fails */
execve(git_command, (char **)argv, environ);
+ if (getenv("GIT_TRACE")) {
+ fprintf(stderr, "trace: exec failed: %s\n",
+ strerror(errno));
+ fflush(stderr);
+ }
+
argv[0] = tmp;
}
return -1;
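
With GIT_TRACE set in the environment, execv_git_cmd() now reports on
stderr the exact command it is about to exec, each argument shell-quoted
by sq_quote_print(), and reports the errno if the exec fails. For
example:

------------------------------------------------------------------------
# Prints "trace: exec:" followed by the quoted git-status command
# line on stderr before the external command runs.
GIT_TRACE=1 git status
------------------------------------------------------------------------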
len--;
switch (buf[0] & 0xFF) {
case 3:
+ safe_write(2, "remote: ", 8);
safe_write(2, buf+1, len);
- fprintf(stderr, "\n");
+ safe_write(2, "\n", 1);
exit(1);
case 2:
+ safe_write(2, "remote: ", 8);
safe_write(2, buf+1, len);
continue;
case 1:
/*
* A "binary msec" is a power-of-two-msec, aka 1/1024th of a second.
- * Keeing the time in that format means that "bytes / msecs" means
- * is the same as kB/s (modulo rounding).
+ * Keeping the time in that format means that "bytes / msecs" means
+ * the same as kB/s (modulo rounding).
*
* 1000512 is a magic number (usecs in a second, rounded up by half
* of 1024, to make "rounding" come out right ;)
{
struct object *o = deref_tag(parse_object(sha1), path, 0);
- if (o && o->type == TYPE_COMMIT)
+ if (o && o->type == OBJ_COMMIT)
rev_list_push((struct commit *)o, SEEN);
return 0;
{
struct object *o = parse_object(sha1);
- while (o && o->type == TYPE_TAG) {
+ while (o && o->type == OBJ_TAG) {
struct tag *t = (struct tag *) o;
if (!t->tagged)
break; /* broken repository */
o->flags |= COMPLETE;
o = parse_object(t->tagged->sha1);
}
- if (o && o->type == TYPE_COMMIT) {
+ if (o && o->type == OBJ_COMMIT) {
struct commit *commit = (struct commit *)o;
commit->object.flags |= COMPLETE;
insert_by_date(commit, &complete);
* in sync with the other side at some time after
* that (it is OK if we guess wrong here).
*/
- if (o->type == TYPE_COMMIT) {
+ if (o->type == OBJ_COMMIT) {
struct commit *commit = (struct commit *)o;
if (!cutoff || cutoff < commit->date)
cutoff = commit->date;
struct object *o = deref_tag(lookup_object(ref->old_sha1),
NULL, 0);
- if (!o || o->type != TYPE_COMMIT || !(o->flags & COMPLETE))
+ if (!o || o->type != OBJ_COMMIT || !(o->flags & COMPLETE))
continue;
if (!(o->flags & SEEN)) {
#include "tag.h"
#include "blob.h"
#include "refs.h"
-
-const char *write_ref = NULL;
-const char *write_ref_log_details = NULL;
+#include "strbuf.h"
int get_tree = 0;
int get_history = 0;
static int process_object(struct object *obj)
{
- if (obj->type == TYPE_COMMIT) {
+ if (obj->type == OBJ_COMMIT) {
if (process_commit((struct commit *)obj))
return -1;
return 0;
}
- if (obj->type == TYPE_TREE) {
+ if (obj->type == OBJ_TREE) {
if (process_tree((struct tree *)obj))
return -1;
return 0;
}
- if (obj->type == TYPE_BLOB) {
+ if (obj->type == OBJ_BLOB) {
return 0;
}
- if (obj->type == TYPE_TAG) {
+ if (obj->type == OBJ_TAG) {
if (process_tag((struct tag *)obj))
return -1;
return 0;
return 0;
}
-int pull(char *target)
+int pull_targets_stdin(char ***target, const char ***write_ref)
+{
+ int targets = 0, targets_alloc = 0;
+ struct strbuf buf;
+ *target = NULL; *write_ref = NULL;
+ strbuf_init(&buf);
+ while (1) {
+ char *rf_one = NULL;
+ char *tg_one;
+
+ read_line(&buf, stdin, '\n');
+ if (buf.eof)
+ break;
+ tg_one = buf.buf;
+ rf_one = strchr(tg_one, '\t');
+ if (rf_one)
+ *rf_one++ = 0;
+
+ if (targets >= targets_alloc) {
+ targets_alloc = targets_alloc ? targets_alloc * 2 : 64;
+ *target = xrealloc(*target, targets_alloc * sizeof(**target));
+ *write_ref = xrealloc(*write_ref, targets_alloc * sizeof(**write_ref));
+ }
+ (*target)[targets] = strdup(tg_one);
+ (*write_ref)[targets] = rf_one ? strdup(rf_one) : NULL;
+ targets++;
+ }
+ return targets;
+}
+
+void pull_targets_free(int targets, char **target, const char **write_ref)
{
- struct ref_lock *lock = NULL;
- unsigned char sha1[20];
+ while (targets--) {
+ free(target[targets]);
+ if (write_ref && write_ref[targets])
+ free((char *) write_ref[targets]);
+ }
+}
+
+int pull(int targets, char **target, const char **write_ref,
+ const char *write_ref_log_details)
+{
+ struct ref_lock **lock = xcalloc(targets, sizeof(struct ref_lock *));
+ unsigned char *sha1 = xmalloc(targets * 20);
char *msg;
int ret;
+ int i;
save_commit_buffer = 0;
track_object_refs = 0;
- if (write_ref) {
- lock = lock_ref_sha1(write_ref, NULL, 0);
- if (!lock) {
- error("Can't lock ref %s", write_ref);
- return -1;
+
+ for (i = 0; i < targets; i++) {
+ if (!write_ref || !write_ref[i])
+ continue;
+
+ lock[i] = lock_ref_sha1(write_ref[i], NULL, 0);
+ if (!lock[i]) {
+ error("Can't lock ref %s", write_ref[i]);
+ goto unlock_and_fail;
}
}
if (!get_recover)
for_each_ref(mark_complete);
- if (interpret_target(target, sha1)) {
- error("Could not interpret %s as something to pull", target);
- if (lock)
- unlock_ref(lock);
- return -1;
+ for (i = 0; i < targets; i++) {
+ if (interpret_target(target[i], &sha1[20 * i])) {
+ error("Could not interpret %s as something to pull", target[i]);
+ goto unlock_and_fail;
+ }
+ if (process(lookup_unknown_object(&sha1[20 * i])))
+ goto unlock_and_fail;
}
- if (process(lookup_unknown_object(sha1))) {
- if (lock)
- unlock_ref(lock);
- return -1;
+
+ if (loop())
+ goto unlock_and_fail;
+
+ if (write_ref_log_details) {
+ msg = xmalloc(strlen(write_ref_log_details) + 12);
+ sprintf(msg, "fetch from %s", write_ref_log_details);
+ } else {
+ msg = NULL;
}
- if (loop()) {
- if (lock)
- unlock_ref(lock);
- return -1;
+ for (i = 0; i < targets; i++) {
+ if (!write_ref || !write_ref[i])
+ continue;
+ ret = write_ref_sha1(lock[i], &sha1[20 * i], msg ? msg : "fetch (unknown)");
+ lock[i] = NULL;
+ if (ret)
+ goto unlock_and_fail;
}
+ if (msg)
+ free(msg);
- if (write_ref) {
- if (write_ref_log_details) {
- msg = xmalloc(strlen(write_ref_log_details) + 12);
- sprintf(msg, "fetch from %s", write_ref_log_details);
- }
- else
- msg = NULL;
- ret = write_ref_sha1(lock, sha1, msg ? msg : "fetch (unknown)");
- if (msg)
- free(msg);
- return ret;
- }
return 0;
+
+
+unlock_and_fail:
+ for (i = 0; i < targets; i++)
+ if (lock[i])
+ unlock_ref(lock[i]);
+ return -1;
}
*/
extern int fetch_ref(char *ref, unsigned char *sha1);
-/* If set, the ref filename to write the target value to. */
-extern const char *write_ref;
-
-/* If set additional text will appear in the ref log. */
-extern const char *write_ref_log_details;
-
/* Set to fetch the target tree. */
extern int get_tree;
/* Report what we got under get_verbosely */
extern void pull_say(const char *, const char *);
-extern int pull(char *target);
+/* Load pull targets from stdin */
+extern int pull_targets_stdin(char ***target, const char ***write_ref);
+
+/* Free up loaded targets */
+extern void pull_targets_free(int targets, char **target, const char **write_ref);
+
+/* If write_ref is set, the ref filename to write the target value to. */
+/* If write_ref_log_details is set, additional text will appear in the ref log. */
+extern int pull(int targets, char **target, const char **write_ref,
+ const char *write_ref_log_details);
#endif /* PULL_H */
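
pull() now takes arrays of targets and optional refs to write, and
pull_targets_stdin() fills them from standard input: one target per
line, optionally followed by a tab and the ref name to update. The
sketch below only illustrates that input format; how a particular fetch
frontend feeds it (for example via a --stdin style option) is not part
of this hunk and is assumed:

------------------------------------------------------------------------
# One "<target><TAB><ref-to-write>" pair per line; the ref column is
# optional.  The sha1s are placeholders.
printf '%s\t%s\n' \
	c1a2b3c4d5e6f708192a3b4c5d6e7f8091a2b3c4 refs/heads/topic \
	0123456789abcdef0123456789abcdef01234567 refs/heads/other
------------------------------------------------------------------------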
static void check_connectivity(void)
{
- int i;
+ int i, max;
/* Look up all the requirements, warn about missing objects.. */
- for (i = 0; i < obj_allocs; i++) {
+ max = get_max_object_index();
+ for (i = 0; i < max; i++) {
const struct object_refs *refs;
- struct object *obj = objs[i];
+ struct object *obj = get_indexed_object(i);
if (!obj)
continue;
if (obj->flags & SEEN)
return 0;
obj->flags |= SEEN;
- if (obj->type == TYPE_BLOB)
+ if (obj->type == OBJ_BLOB)
return 0;
- if (obj->type == TYPE_TREE)
+ if (obj->type == OBJ_TREE)
return fsck_tree((struct tree *) obj);
- if (obj->type == TYPE_COMMIT)
+ if (obj->type == OBJ_COMMIT)
return fsck_commit((struct commit *) obj);
- if (obj->type == TYPE_TAG)
+ if (obj->type == OBJ_TAG)
return fsck_tag((struct tag *) obj);
/* By now, parse_object() would've returned NULL instead. */
return objerror(obj, "unknown type '%d' (internal fsck error)", obj->type);
}
mark_reachable(obj, REACHABLE);
obj->used = 1;
- if (obj->type != TYPE_TREE)
+ if (obj->type != OBJ_TREE)
err |= objerror(obj, "non-tree in cache-tree");
}
for (i = 0; i < it->subtree_nr; i++)
this=$next
}
+cannot_fallback () {
+ echo "$1"
+ echo "Cannot fall back to three-way merge."
+ exit 1
+}
+
fall_back_3way () {
O_OBJECT=`cd "$GIT_OBJECT_DIRECTORY" && pwd`
mkdir "$dotest/patch-merge-tmp-dir"
# First see if the patch records the index info that we can use.
- if git-apply -z --index-info "$dotest/patch" \
- >"$dotest/patch-merge-index-info" 2>/dev/null &&
- GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
- git-update-index -z --index-info <"$dotest/patch-merge-index-info" &&
- GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
- git-write-tree >"$dotest/patch-merge-base+" &&
- # index has the base tree now.
- GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
+ git-apply -z --index-info "$dotest/patch" \
+ >"$dotest/patch-merge-index-info" &&
+ GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
+ git-update-index -z --index-info <"$dotest/patch-merge-index-info" &&
+ GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
+ git-write-tree >"$dotest/patch-merge-base+" ||
+ cannot_fallback "Patch does not record usable index information."
+
+ echo Using index info to reconstruct a base tree...
+ if GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
git-apply $binary --cached <"$dotest/patch"
then
- echo Using index info to reconstruct a base tree...
mv "$dotest/patch-merge-base+" "$dotest/patch-merge-base"
mv "$dotest/patch-merge-tmp-index" "$dotest/patch-merge-index"
+ else
+ cannot_fallback "Did you hand edit your patch?
+It does not apply to blobs recorded in its index."
fi
test -f "$dotest/patch-merge-index" &&
# This is not so wrong. Depending on which base we picked,
# orig_tree may be wildly different from ours, but his_tree
# has the same set of wildly different changes in parts the
- # patch did not touch, so resolve ends up cancelling them,
+ # patch did not touch, so resolve ends up canceling them,
# saying that we reverted all those changes.
git-merge-resolve $orig_tree -- HEAD $his_tree || {
}
prec=4
+rloga=am
dotest=.dotest sign= utf8= keep= skip= interactive= resolved= binary= ws= resolvemsg=
while case "$#" in 0) break;; esac
do
case "$1" in
-d=*|--d=*|--do=*|--dot=*|--dote=*|--dotes=*|--dotest=*)
- dotest=`expr "$1" : '-[^=]*=\(.*\)'`; shift ;;
+ dotest=`expr "z$1" : 'z-[^=]*=\(.*\)'`; shift ;;
-d|--d|--do|--dot|--dote|--dotes|--dotest)
case "$#" in 1) usage ;; esac; shift
dotest="$1"; shift;;
--resolvemsg=*)
resolvemsg=$(echo "$1" | sed -e "s/^--resolvemsg=//"); shift ;;
+ --reflog-action=*)
+ rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'`; shift ;;
+
--)
shift; break ;;
-*)
if test -d "$dotest"
then
- test ",$#," = ",0," ||
- die "previous dotest directory $dotest still exists but mbox given."
+ if test ",$#," != ",0," || ! tty -s
+ then
+ die "previous dotest directory $dotest still exists but mbox given."
+ fi
resume=yes
else
# Make sure we are not given --skip nor --resolved
parent=$(git-rev-parse --verify HEAD) &&
commit=$(git-commit-tree $tree -p $parent <"$dotest/final-commit") &&
echo Committed: $commit &&
- git-update-ref -m "am: $SUBJECT" HEAD $commit $parent ||
+ git-update-ref -m "$rloga: $SUBJECT" HEAD $commit $parent ||
stop_here $this
if test -x "$GIT_DIR"/hooks/post-applypatch
push @revqueue, $head;
init_claim( defined $starting_rev ? $head : 'dirty');
unless (defined $starting_rev) {
- my $diff = open_pipe("git","diff","-R", "HEAD", "--",$filename)
+ my $diff = open_pipe("git","diff","HEAD", "--",$filename)
or die "Failed to call git diff to check for dirty state: $!";
- _git_diff_parse($diff, $head, "dirty", (
+ _git_diff_parse($diff, [$head], "dirty", (
'author' => gitvar_name("GIT_AUTHOR_IDENT"),
'author_date' => sprintf("%s +0000",time()),
)
sub handle_rev {
- my $i = 0;
+ my $revseen = 0;
my %seen;
while (my $rev = shift @revqueue) {
next if $seen{$rev}++;
my %revinfo = git_commit_info($rev);
- foreach my $p (@{$revs{$rev}{'parents'}}) {
-
- git_diff_parse($p, $rev, %revinfo);
- push @revqueue, $p;
- }
+ if (exists $revs{$rev}{parents} &&
+ scalar @{$revs{$rev}{parents}} != 0) {
+ git_diff_parse($revs{$rev}{'parents'}, $rev, %revinfo);
+ push @revqueue, @{$revs{$rev}{'parents'}};
- if (scalar @{$revs{$rev}{parents}} == 0) {
+ } else {
# We must be at the initial rev here, so claim everything that is left.
for (my $i = 0; $i < @{$revs{$rev}{lines}}; $i++) {
if (ref ${$revs{$rev}{lines}}[$i] eq '' || ${$revs{$rev}{lines}}[$i][1] eq '') {
return $parent;
}
+sub git_find_all_parents {
+ my ($rev) = @_;
+
+ my $revparent = open_pipe("git-rev-list","--remove-empty", "--parents","--max-count=1","$rev")
+ or die "Failed to open git-rev-list to find a single parent: $!";
+
+ my $parentline = <$revparent>;
+ chomp $parentline;
+ my ($origrev, @parents) = split m/\s+/, $parentline;
+
+ close($revparent);
+
+ return @parents;
+}
+
+sub git_merge_base {
+ my ($rev1, $rev2) = @_;
+
+ my $mb = open_pipe("git-merge-base", $rev1, $rev2)
+ or die "Failed to open git-merge-base: $!";
+
+ my $base = <$mb>;
+ chomp $base;
+
+ close($mb);
+
+ return $base;
+}
+
+# Construct a set of pseudo parents that are in the same order,
+# and the same quantity as the real parents,
+# but whose SHA1s are as similar to the logical parents
+# as possible.
+sub get_pseudo_parents {
+ my ($all, $fake) = @_;
+
+ my @all = @$all;
+ my @fake = @$fake;
+
+ my @pseudo;
+
+ my %fake = map {$_ => 1} @fake;
+ my %seenfake;
+
+ my $fakeidx = 0;
+ foreach my $p (@all) {
+ if (exists $fake{$p}) {
+ if ($fake[$fakeidx] ne $p) {
+ die sprintf("parent mismatch: %s != %s\nall:%s\nfake:%s\n",
+ $fake[$fakeidx], $p,
+ join(", ", @all),
+ join(", ", @fake),
+ );
+ }
+
+ push @pseudo, $p;
+ $fakeidx++;
+ $seenfake{$p}++;
+
+ } else {
+ my $base = git_merge_base($fake[$fakeidx], $p);
+ if ($base ne $fake[$fakeidx]) {
+ die sprintf("Result of merge-base doesn't match fake: %s,%s != %s\n",
+ $fake[$fakeidx], $p, $base);
+ }
+
+ # The details of how we parse the diffs
+ # mean that we cannot have a duplicate
+ # revision in the list, so if we've already
+ # seen the revision we would normally add, just use
+ # the actual revision.
+ if ($seenfake{$base}) {
+ push @pseudo, $p;
+ } else {
+ push @pseudo, $base;
+ $seenfake{$base}++;
+ }
+ }
+ }
+
+ return @pseudo;
+}
+
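
get_pseudo_parents() lines the parents git-annotate has been following
up against the merge's real parents, substituting a merge base where
they differ. The two helpers above are thin wrappers around plumbing;
roughly (the merge commit name is a placeholder):

------------------------------------------------------------------------
# git_find_all_parents() parses: "<rev> <parent1> [<parent2> ...]"
git-rev-list --remove-empty --parents --max-count=1 MERGE_COMMIT

# git_merge_base() reads the common ancestor of two revisions.
git-merge-base MERGE_COMMIT^1 MERGE_COMMIT^2
------------------------------------------------------------------------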
# Get a diff between the current revision and a parent.
# Record the commit information that results.
sub git_diff_parse {
- my ($parent, $rev, %revinfo) = @_;
+ my ($parents, $rev, %revinfo) = @_;
+
+ my @pseudo_parents;
+ my @command = ("git-diff-tree");
+ my $revision_spec;
+
+ if (scalar @$parents == 1) {
+
+ $revision_spec = join("..", $parents->[0], $rev);
+ @pseudo_parents = @$parents;
+ } else {
+ my @all_parents = git_find_all_parents($rev);
+
+ if (@all_parents != @$parents) {
+ @pseudo_parents = get_pseudo_parents(\@all_parents, $parents);
+ } else {
+ @pseudo_parents = @$parents;
+ }
+
+ $revision_spec = $rev;
+ push @command, "-c";
+ }
+
+ my @filenames = ( $revs{$rev}{'filename'} );
+
+ foreach my $parent (@$parents) {
+ push @filenames, $revs{$parent}{'filename'};
+ }
+
+ push @command, "-p", "-M", $revision_spec, "--", @filenames;
+
- my $diff = open_pipe("git-diff-tree","-M","-p",$rev,$parent,"--",
- $revs{$rev}{'filename'}, $revs{$parent}{'filename'})
+ my $diff = open_pipe( @command )
or die "Failed to call git-diff for annotation: $!";
- _git_diff_parse($diff, $parent, $rev, %revinfo);
+ _git_diff_parse($diff, \@pseudo_parents, $rev, %revinfo);
close($diff);
}
sub _git_diff_parse {
- my ($diff, $parent, $rev, %revinfo) = @_;
+ my ($diff, $parents, $rev, %revinfo) = @_;
+
+ my $ri = 0;
- my ($ri, $pi) = (0,0);
my $slines = $revs{$rev}{'lines'};
- my @plines;
+ my (%plines, %pi);
my $gotheader = 0;
my ($remstart);
- my ($hunk_start, $hunk_index);
+ my $parent_count = @$parents;
+
+ my $diff_header_regexp = "^@";
+ $diff_header_regexp .= "@" x @$parents;
+ $diff_header_regexp .= ' -\d+,\d+' x @$parents;
+ $diff_header_regexp .= ' \+(\d+),\d+';
+ $diff_header_regexp .= " " . ("@" x @$parents);
+
+ my %claim_regexps;
+ my $allparentplus = '^' . '\\+' x @$parents . '(.*)$';
+
+ {
+ my $i = 0;
+ foreach my $parent (@$parents) {
+
+ $pi{$parent} = 0;
+ my $r = '^' . '.' x @$parents . '(.*)$';
+ my $p = $r;
+ substr($p,$i+1, 1) = '\\+';
+
+ my $m = $r;
+ substr($m,$i+1, 1) = '-';
+
+ $claim_regexps{$parent}{plus} = $p;
+ $claim_regexps{$parent}{minus} = $m;
+
+ $plines{$parent} = [];
+
+ $i++;
+ }
+ }
+
+ DIFF:
while(<$diff>) {
chomp;
- if (m/^@@ -(\d+),(\d+) \+(\d+),(\d+)/) {
- $remstart = $1;
- # Adjust for 0-based arrays
- $remstart--;
- # Reinit hunk tracking.
- $hunk_start = $remstart;
- $hunk_index = 0;
+ #printf("%d:%s:\n", $gotheader, $_);
+ if (m/$diff_header_regexp/) {
+ $remstart = $1 - 1;
+ # (0-based arrays)
+
$gotheader = 1;
- for (my $i = $ri; $i < $remstart; $i++) {
- $plines[$pi++] = $slines->[$i];
- $ri++;
+ foreach my $parent (@$parents) {
+ for (my $i = $ri; $i < $remstart; $i++) {
+ $plines{$parent}[$pi{$parent}++] = $slines->[$i];
+ }
}
- next;
- } elsif (!$gotheader) {
- next;
- }
+ $ri = $remstart;
- if (m/^\+(.*)$/) {
- my $line = $1;
- $plines[$pi++] = [ $line, '', '', '', 0 ];
- next;
+ next DIFF;
- } elsif (m/^-(.*)$/) {
- my $line = $1;
- if (get_line($slines, $ri) eq $line) {
- # Found a match, claim
- claim_line($ri, $rev, $slines, %revinfo);
- } else {
- die sprintf("Sync error: %d/%d\n|%s\n|%s\n%s => %s\n",
- $ri, $hunk_start + $hunk_index,
- $line,
- get_line($slines, $ri),
- $rev, $parent);
- }
- $ri++;
+ } elsif (!$gotheader) {
+ # Skip over the leadin.
+ next DIFF;
+ }
- } elsif (m/^\\/) {
+ if (m/^\\/) {
;
# Skip \No newline at end of file.
# But this can be internationalized, so only look
# for an initial \
} else {
- if (substr($_,1) ne get_line($slines,$ri) ) {
- die sprintf("Line %d (%d) does not match:\n|%s\n|%s\n%s => %s\n",
- $hunk_start + $hunk_index, $ri,
- substr($_,1),
- get_line($slines,$ri),
- $rev, $parent);
+ my %claims = ();
+ my $negclaim = 0;
+ my $allclaimed = 0;
+ my $line;
+
+ if (m/$allparentplus/) {
+ claim_line($ri, $rev, $slines, %revinfo);
+ $allclaimed = 1;
+
+ }
+
+ PARENT:
+ foreach my $parent (keys %claim_regexps) {
+ my $m = $claim_regexps{$parent}{minus};
+ my $p = $claim_regexps{$parent}{plus};
+
+ if (m/$m/) {
+ $line = $1;
+ $plines{$parent}[$pi{$parent}++] = [ $line, '', '', '', 0 ];
+ $negclaim++;
+
+ } elsif (m/$p/) {
+ $line = $1;
+ if (get_line($slines, $ri) eq $line) {
+ # Found a match, claim
+ $claims{$parent}++;
+
+ } else {
+ die sprintf("Sync error: %d\n|%s\n|%s\n%s => %s\n",
+ $ri, $line,
+ get_line($slines, $ri),
+ $rev, $parent);
+ }
+ }
+ }
+
+ if (%claims) {
+ foreach my $parent (@$parents) {
+ next if $claims{$parent} || $allclaimed;
+ $plines{$parent}[$pi{$parent}++] = $slines->[$ri];
+ #[ $line, '', '', '', 0 ];
+ }
+ $ri++;
+
+ } elsif ($negclaim) {
+ next DIFF;
+
+ } else {
+ if (substr($_,scalar @$parents) ne get_line($slines,$ri) ) {
+ foreach my $parent (@$parents) {
+ printf("parent %s is on line %d\n", $parent, $pi{$parent});
+ }
+
+ my @context;
+ for (my $i = -2; $i < 2; $i++) {
+ push @context, get_line($slines, $ri + $i);
+ }
+ my $context = join("\n", @context);
+
+ my $justline = substr($_, scalar @$parents);
+					die sprintf("Line %d does not match:\n|%s|\n|%s|\n",
+						    $ri,
+						    $justline,
+						    $context);
+ }
+ foreach my $parent (@$parents) {
+ $plines{$parent}[$pi{$parent}++] = $slines->[$ri];
+ }
+ $ri++;
}
- $plines[$pi++] = $slines->[$ri++];
}
- $hunk_index++;
}
+
for (my $i = $ri; $i < @{$slines} ; $i++) {
- push @plines, $slines->[$ri++];
+ foreach my $parent (@$parents) {
+ push @{$plines{$parent}}, $slines->[$ri];
+ }
+ $ri++;
+ }
+
+ foreach my $parent (@$parents) {
+ $revs{$parent}{lines} = $plines{$parent};
}
- $revs{$parent}{lines} = \@plines;
return;
}
# This is not so wrong. Depending on which base we picked,
# orig_tree may be wildly different from ours, but his_tree
# has the same set of wildly different changes in parts the
- # patch did not touch, so resolve ends up cancelling them,
+ # patch did not touch, so resolve ends up canceling them,
# saying that we reverted all those changes.
if git-merge-resolve $orig_tree -- HEAD $his_tree
Imports a project from one or more Arch repositories. It will follow branches
and repositories within the namespaces defined by the <archive/branch>
-parameters suppplied. If it cannot find the remote branch a merge comes from
+parameters supplied. If it cannot find the remote branch a merge comes from
it will just import it as a regular commit. If it can find it, it will mark it
as a merge whenever possible.
# $arch_branches:
# values associated with keys:
# =1 - Arch version / git 'branch' detected via abrowse on a limit
-# >1 - Arch version / git 'branch' of an auxilliary branch we've merged
+# >1 - Arch version / git 'branch' of an auxiliary branch we've merged
my %arch_branches = map { $_ => 1 } @ARGV;
$ENV{'TMPDIR'} = $opt_t if $opt_t; # $ENV{TMPDIR} will affect tempdir() calls:
if (`find $tmp/changeset/patches -type f -name '*.patch'`) {
# this can be sped up considerably by doing
# (find | xargs cat) | patch
- # but that cna get mucked up by patches
+ # but that can get mucked up by patches
# with missing trailing newlines or the standard
# 'missing newline' flag in the patch - possibly
# produced with an old/buggy diff.
}
-# an alterative to `command` that allows input to be passed as an array
+# an alternative to `command` that allows input to be passed as an array
# to work around shell problems with weird characters in arguments
sub safe_pipe_capture {
my @output;
. git-sh-setup
sq() {
- perl -e '
+ @@PERL@@ -e '
for (@ARGV) {
s/'\''/'\'\\\\\'\''/g;
print " '\''$_'\''";
set x "$arg" "$@"
shift
fi
+ case "$1" in
+ --)
+ shift ;;
+ esac
break
;;
esac
# Match the index to the working tree, and do a three-way.
git diff-files --name-only | git update-index --remove --stdin &&
work=`git write-tree` &&
- git read-tree --reset $new &&
- git checkout-index -f -u -q -a &&
+ git read-tree --reset -u $new &&
git read-tree -m -u --aggressive $old $new $work || exit
if result=`git write-tree 2>/dev/null`
*,--reference)
shift; reference="$1" ;;
*,--reference=*)
- reference=`expr "$1" : '--reference=\(.*\)'` ;;
+ reference=`expr "z$1" : 'z--reference=\(.*\)'` ;;
*,-o|*,--or|*,--ori|*,--orig|*,--origi|*,--origin)
case "$2" in
'')
[ -e "$dir" ] && echo "$dir already exists." && usage
mkdir -p "$dir" &&
D=$(cd "$dir" && pwd) &&
-trap 'err=$?; cd ..; rm -r "$D"; exit $err' 0
+trap 'err=$?; cd ..; rm -rf "$D"; exit $err' 0
case "$bare" in
yes)
GIT_DIR="$D" ;;
echo "$repo/objects" >> "$GIT_DIR/objects/info/alternates"
;;
esac
- git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD"
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD" || exit 1
;;
*)
case "$repo" in
done
rm -f "$GIT_DIR/TMP_ALT"
fi
- git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD"
+ git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD" || exit 1
;;
- http://*)
+ https://*|http://*)
if test -z "@@NO_CURL@@"
then
clone_dumb_http "$repo" "$D"
if test -f "$GIT_DIR/CLONE_HEAD"
then
# Read git-fetch-pack -k output and store the remote branches.
- perl -e "$copy_refs" "$GIT_DIR" "$use_separate_remote" "$origin"
+ @@PERL@@ -e "$copy_refs" "$GIT_DIR" "$use_separate_remote" "$origin" ||
+ exit
fi
cd "$D" || exit
NEXT_INDEX="$GIT_DIR/next-index$$"
rm -f "$NEXT_INDEX"
save_index () {
- cp "$THIS_INDEX" "$NEXT_INDEX"
+ cp -p "$THIS_INDEX" "$NEXT_INDEX"
}
report () {
if test -z "$untracked_files"; then
option="--directory --no-empty-directory"
fi
+ hdr_shown=
if test -f "$GIT_DIR/info/exclude"
then
- git-ls-files -z --others $option \
+ git-ls-files --others $option \
--exclude-from="$GIT_DIR/info/exclude" \
--exclude-per-directory=.gitignore
else
- git-ls-files -z --others $option \
+ git-ls-files --others $option \
--exclude-per-directory=.gitignore
fi |
- perl -e '$/ = "\0";
- my $shown = 0;
- while (<>) {
- chomp;
- s|\\|\\\\|g;
- s|\t|\\t|g;
- s|\n|\\n|g;
- s/^/# /;
- if (!$shown) {
- print "#\n# Untracked files:\n";
- print "# (use \"git add\" to add to commit)\n";
- print "#\n";
- $shown = 1;
- }
- print "$_\n";
- }
- '
+ while read line; do
+ if [ -z "$hdr_shown" ]; then
+ echo '#'
+ echo '# Untracked files:'
+ echo '# (use "git add" to add to commit)'
+ echo '#'
+ hdr_shown=1
+ fi
+ echo "# $line"
+ done
if test -n "$verbose" -a -z "$IS_INITIAL"
then
-F*|-f*)
no_edit=t
log_given=t$log_given
- logfile=`expr "$1" : '-[Ff]\(.*\)'`
+ logfile=`expr "z$1" : 'z-[Ff]\(.*\)'`
shift
;;
--F=*|--f=*|--fi=*|--fil=*|--file=*)
no_edit=t
log_given=t$log_given
- logfile=`expr "$1" : '-[^=]*=\(.*\)'`
+ logfile=`expr "z$1" : 'z-[^=]*=\(.*\)'`
shift
;;
-a|--a|--al|--all)
shift
;;
--au=*|--aut=*|--auth=*|--autho=*|--author=*)
- force_author=`expr "$1" : '-[^=]*=\(.*\)'`
+ force_author=`expr "z$1" : 'z-[^=]*=\(.*\)'`
shift
;;
--au|--aut|--auth|--autho|--author)
log_given=m$log_given
if test "$log_message" = ''
then
- log_message=`expr "$1" : '-m\(.*\)'`
+ log_message=`expr "z$1" : 'z-m\(.*\)'`
else
log_message="$log_message
-`expr "$1" : '-m\(.*\)'`"
+`expr "z$1" : 'z-m\(.*\)'`"
fi
no_edit=t
shift
log_given=m$log_given
if test "$log_message" = ''
then
- log_message=`expr "$1" : '-[^=]*=\(.*\)'`
+ log_message=`expr "z$1" : 'z-[^=]*=\(.*\)'`
else
log_message="$log_message
-`expr "$1" : '-[^=]*=\(.*\)'`"
+`expr "z$1" : 'z-[^=]*=\(.*\)'`"
fi
no_edit=t
shift
--reedit-me=*|--reedit-mes=*|--reedit-mess=*|--reedit-messa=*|\
--reedit-messag=*|--reedit-message=*)
log_given=t$log_given
- use_commit=`expr "$1" : '-[^=]*=\(.*\)'`
+ use_commit=`expr "z$1" : 'z-[^=]*=\(.*\)'`
no_edit=
shift
;;
--reuse-mes=*|--reuse-mess=*|--reuse-messa=*|--reuse-messag=*|\
--reuse-message=*)
log_given=t$log_given
- use_commit=`expr "$1" : '-[^=]*=\(.*\)'`
+ use_commit=`expr "z$1" : 'z-[^=]*=\(.*\)'`
no_edit=t
shift
;;
GIT_AUTHOR_EMAIL=`expr "z$force_author" : '.*\(<.*\)'` &&
test '' != "$GIT_AUTHOR_NAME" &&
test '' != "$GIT_AUTHOR_EMAIL" ||
- die "malformatted --author parameter"
+ die "malformed --author parameter"
export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL
elif test '' != "$use_commit"
then
PARENTS="-p HEAD"
if test -z "$initial_commit"
then
+ rloga='commit'
if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
+ rloga='commit (merge)'
PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
elif test -n "$amend"; then
+ rloga='commit (amend)'
PARENTS=$(git-cat-file commit HEAD |
sed -n -e '/^$/q' -e 's/^parent /-p /p')
fi
fi
PARENTS=""
current=
+ rloga='commit (initial)'
fi
if test -z "$no_edit"
fi &&
commit=$(cat "$GIT_DIR"/COMMIT_MSG | git-commit-tree $tree $PARENTS) &&
rlogm=$(sed -e 1q "$GIT_DIR"/COMMIT_MSG) &&
- git-update-ref -m "commit: $rlogm" HEAD $commit $current &&
+ git-update-ref -m "$rloga: $rlogm" HEAD $commit $current &&
rm -f -- "$GIT_DIR/MERGE_HEAD" &&
if test -f "$NEXT_INDEX"
then
ret = malloc(1);
if (!ret)
die("Out of memory, malloc failed");
+#ifdef XMALLOC_POISON
+ memset(ret, 0xA5, size);
+#endif
return ret;
}
}
}
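+/*
+ * Return true when "filename" ends with the suffix "ext",
+ * e.g. has_extension("pack-abc.idx", ".idx").
+ */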
+static inline int has_extension(const char *filename, const char *ext)
+{
+ size_t len = strlen(filename);
+ size_t extlen = strlen(ext);
+ return len > extlen && !memcmp(filename + len - extlen, ext, extlen);
+}
+
/* Sane ctype - no locale, and works with signed chars */
#undef isspace
#undef isdigit
die "GIT_DIR is not defined or is unreadable";
}
-our ($opt_h, $opt_p, $opt_v, $opt_c, $opt_f, $opt_m );
+our ($opt_h, $opt_p, $opt_v, $opt_c, $opt_f, $opt_a, $opt_m );
-getopts('hpvcfm:');
+getopts('hpvcfam:');
$opt_h && usage();
TMPDIR => 1,
CLEANUP => 1);
-print Dumper(@ARGV);
# resolve target commit
my $commit;
$commit = pop @ARGV;
# find parents from the commit itself
my @commit = safe_pipe_capture('git-cat-file', 'commit', $commit);
my @parents;
-foreach my $p (@commit) {
- if ($p =~ m/^$/) { # end of commit headers, we're done
- last;
+my $committer;
+my $author;
+my $stage = 'headers'; # headers, msg
+my $title;
+my $msg = '';
+
+foreach my $line (@commit) {
+ chomp $line;
+ if ($stage eq 'headers' && $line eq '') {
+ $stage = 'msg';
+ next;
}
- if ($p =~ m/^parent (\w{40})$/) { # found a parent
- push @parents, $1;
+
+ if ($stage eq 'headers') {
+ if ($line =~ m/^parent (\w{40})$/) { # found a parent
+ push @parents, $1;
+		} elsif ($line =~ m/^author (.+) \d+ [-+]\d+$/) {
+			$author = $1;
+		} elsif ($line =~ m/^committer (.+) \d+ [-+]\d+$/) {
+			$committer = $1;
+ }
+ } else {
+ $msg .= $line . "\n";
+ unless ($title) {
+ $title = $line;
+ }
}
}
if ($parent) {
+ my $found;
# double check that it's a valid parent
foreach my $p (@parents) {
- my $found;
if ($p eq $parent) {
$found = 1;
last;
}; # found it
- die "Did not find $parent in the parents for this commit!";
}
+ die "Did not find $parent in the parents for this commit!" if !$found;
} else { # we don't have a parent from the cmdline...
if (@parents == 1) { # it's safe to get it from the commit
$parent = $parents[0];
# grab the commit message
open(MSG, ">.msg") or die "Cannot open .msg for writing";
-print MSG $opt_m;
+if ($opt_m) {
+ print MSG $opt_m;
+}
+print MSG $msg;
+if ($opt_a) {
+ print MSG "\n\nAuthor: $author\n";
+ if ($author ne $committer) {
+ print MSG "Committer: $committer\n";
+ }
+}
close MSG;
-`git-cat-file commit $commit | sed -e '1,/^\$/d' >> .msg`;
-$? && die "Error extracting the commit message";
-
my (@afiles, @dfiles, @mfiles, @dirs);
my @files = safe_pipe_capture('git-diff-tree', '-r', $parent, $commit);
#print @files;
@bfiles = map { chomp } @bfiles;
foreach my $f (@bfiles) {
# check that the file in cvs matches the "old" file
- # extract the file to $tmpdir and comparre with cmp
+ # extract the file to $tmpdir and compare with cmp
my $tree = safe_pipe_capture('git-rev-parse', "$parent^{tree}");
chomp $tree;
my $blob = `git-ls-tree $tree "$f" | cut -f 1 | cut -d ' ' -f 3`;
}
print "Commit to CVS\n";
+print "Patch: $title\n";
my $commitfiles = join(' ', @afiles, @mfiles, @dfiles);
my $cmd = "cvs commit -F .msg $commitfiles";
}
}
-# An alterative to `command` that allows input to be passed as an array
+# An alternative to `command` that allows input to be passed as an array
# to work around shell problems with weird characters in arguments
# if the exec returns non-zero we die
sub safe_pipe_capture {
$orig_git_index = $ENV{GIT_INDEX_FILE} if exists $ENV{GIT_INDEX_FILE};
my %index; # holds filenames of one index per branch
-$index{$opt_o} = tmpnam();
-
-$ENV{GIT_INDEX_FILE} = $index{$opt_o};
-system("git-read-tree", $opt_o);
-die "read-tree failed: $?\n" if $?;
unless(-d $git_dir) {
system("git-init-db");
$orig_branch = $last_branch;
$tip_at_start = `git-rev-parse --verify HEAD`;
- # populate index
- unless ($index{$last_branch}) {
- $index{$last_branch} = tmpnam();
- }
- $ENV{GIT_INDEX_FILE} = $index{$last_branch};
- system('git-read-tree', $last_branch);
- die "read-tree failed: $?\n" if $?;
-
# Get the last import timestamps
opendir(D,"$git_dir/refs/heads");
while(defined(my $head = readdir(D))) {
$ignorebranch{'#CVSPS_NO_BRANCH'} = 1;
sub commit {
+	if ($branch eq $opt_o && !$index{$branch} && !get_headref($branch, $git_dir)) {
+ # looks like an initial commit
+ # use the index primed by git-init-db
+ $ENV{GIT_INDEX_FILE} = '.git/index';
+ $index{$branch} = '.git/index';
+ } else {
+ # use an index per branch to speed up
+ # imports of projects with many branches
+ unless ($index{$branch}) {
+ $index{$branch} = tmpnam();
+ $ENV{GIT_INDEX_FILE} = $index{$branch};
+ if ($ancestor) {
+ system("git-read-tree", $ancestor);
+ } else {
+ system("git-read-tree", $branch);
+ }
+ die "read-tree failed: $?\n" if $?;
+ }
+ }
+ $ENV{GIT_INDEX_FILE} = $index{$branch};
+
update_index(@old, @new);
@old = @new = ();
my $tree = write_tree();
close(H)
or die "Could not write branch $branch: $!";
}
- if(($ancestor || $branch) ne $last_branch) {
- print "Switching from $last_branch to $branch\n" if $opt_v;
- unless ($index{$branch}) {
- $index{$branch} = tmpnam();
- $ENV{GIT_INDEX_FILE} = $index{$branch};
- system("git-read-tree", $branch);
- die "read-tree failed: $?\n" if $?;
- }
- # just in case
- $ENV{GIT_INDEX_FILE} = $index{$branch};
- if ($ancestor) {
- print "have ancestor $ancestor" if $opt_v;
- system("git-read-tree", $ancestor);
- die "read-tree failed: $?\n" if $?;
- }
- } else {
- # just in case
- unless ($index{$branch}) {
- $index{$branch} = tmpnam();
- $ENV{GIT_INDEX_FILE} = $index{$branch};
- system("git-read-tree", $branch);
- die "read-tree failed: $?\n" if $?;
- }
- }
$last_branch = $branch if $branch ne $last_branch;
$state = 9;
} elsif($state == 8) {
commit() if $branch and $state != 11;
foreach my $git_index (values %index) {
- unlink($git_index);
+ if ($git_index ne '.git/index') {
+ unlink($git_index);
+ }
}
if (defined $orig_git_index) {
# $state holds all the bits of information the clients sends us that could
# potentially be useful when it comes to actually _doing_ something.
-my $state = {};
+my $state = { prependdir => '' };
$log->info("--------------- STARTING -----------------");
my $TEMP_DIR = tempdir( CLEANUP => 1 );
{
my ( $cmd, $data ) = @_;
- # TODO : Not quite sure how Argument and Argumentx differ, but I assume
- # it's for multi-line arguments ... somehow ...
+ # Argumentx means: append to last Argument (with a newline in front)
$log->debug("$cmd : $data");
- push @{$state->{arguments}}, $data;
+ if ( $cmd eq 'Argumentx') {
+ ${$state->{arguments}}[$#{$state->{arguments}}] .= "\n" . $data;
+ } else {
+ push @{$state->{arguments}}, $data;
+ }
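+    # e.g. "Argument -m" followed by "Argumentx fix typo" leaves a single
+    # argument "-m\nfix typo" on the argument list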
}
# expand-modules \n
#$log->debug("update state : " . Dumper($state));
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
$filename = filecleanup($filename);
my @committedfiles = ();
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
my $committedfile = $filename;
exit;
}
- open FILE, ">", "$ENV{GIT_DIR}refs/heads/$state->{module}";
- print FILE $commithash;
- close FILE;
+ print LOCKFILE $commithash;
$updater->update();
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @committedfiles )
{
$filename = filecleanup($filename);
}
close LOCKFILE;
- unlink($lockfile);
+ my $reffile = "$ENV{GIT_DIR}refs/heads/$state->{module}";
+ unlink($reffile);
+ rename($lockfile, $reffile);
chdir "/";
print "ok\n";
# if no files were specified, we need to work out what files we should be providing status on ...
argsfromdir($updater);
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
$filename = filecleanup($filename);
# if no files were specified, we need to work out what files we should be providing status on ...
argsfromdir($updater);
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
$filename = filecleanup($filename);
# if no files were specified, we need to work out what files we should be providing status on ...
argsfromdir($updater);
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
$filename = filecleanup($filename);
chdir $tmpdir;
- # foreach file specified on the commandline ...
+ # foreach file specified on the command line ...
foreach my $filename ( @{$state->{args}} )
{
$filename = filecleanup($filename);
# first lets get the commit list
$ENV{GIT_DIR} = $self->{git_path};
- # prepare database queries
- my $db_insert_rev = $self->{dbh}->prepare_cached("INSERT INTO revision (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
- my $db_insert_mergelog = $self->{dbh}->prepare_cached("INSERT INTO commitmsgs (key, value) VALUES (?,?)",{},1);
- my $db_delete_head = $self->{dbh}->prepare_cached("DELETE FROM head",{},1);
- my $db_insert_head = $self->{dbh}->prepare_cached("INSERT INTO head (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
-
my $commitinfo = `git-cat-file commit $self->{module} 2>&1`;
unless ( $commitinfo =~ /tree\s+[a-zA-Z0-9]{40}/ )
{
author => $commit->{author},
mode => $git_perms,
};
- $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ $self->insert_rev($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
}
elsif ( $3 eq "M" )
{
author => $commit->{author},
mode => $git_perms,
};
- $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ $self->insert_rev($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
}
elsif ( $3 eq "A" )
{
author => $commit->{author},
mode => $git_perms,
};
- $db_insert_rev->execute($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ $self->insert_rev($4, $head->{$4}{revision}, $2, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
}
else
{
};
- $db_insert_rev->execute($git_filename, $newrevision, $git_hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
+ $self->insert_rev($git_filename, $newrevision, $git_hash, $commit->{hash}, $commit->{date}, $commit->{author}, $git_perms);
}
}
close FILELIST;
$head->{$file}{modified} = $commit->{date};
$head->{$file}{author} = $commit->{author};
- $db_insert_rev->execute($file, $head->{$file}{revision}, $head->{$file}{filehash}, $commit->{hash}, $commit->{date}, $commit->{author}, $head->{$file}{mode});
+ $self->insert_rev($file, $head->{$file}{revision}, $head->{$file}{filehash}, $commit->{hash}, $commit->{date}, $commit->{author}, $head->{$file}{mode});
}
}
# END : "Detect deleted files"
if (exists $commit->{mergemsg})
{
- $db_insert_mergelog->execute($commit->{hash}, $commit->{mergemsg});
+ $self->insert_mergelog($commit->{hash}, $commit->{mergemsg});
}
$lastpicked = $commit->{hash};
$self->_set_prop("last_commit", $commit->{hash});
}
- $db_delete_head->execute();
+ $self->delete_head();
foreach my $file ( keys %$head )
{
- $db_insert_head->execute(
+ $self->insert_head(
$file,
$head->{$file}{revision},
$head->{$file}{filehash},
$self->{dbh}->commit() or die "Failed to commit changes to SQLite";
}
+sub insert_rev
+{
+ my $self = shift;
+ my $name = shift;
+ my $revision = shift;
+ my $filehash = shift;
+ my $commithash = shift;
+ my $modified = shift;
+ my $author = shift;
+ my $mode = shift;
+
+ my $insert_rev = $self->{dbh}->prepare_cached("INSERT INTO revision (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
+ $insert_rev->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode);
+}
+
+sub insert_mergelog
+{
+ my $self = shift;
+ my $key = shift;
+ my $value = shift;
+
+ my $insert_mergelog = $self->{dbh}->prepare_cached("INSERT INTO commitmsgs (key, value) VALUES (?,?)",{},1);
+ $insert_mergelog->execute($key, $value);
+}
+
+sub delete_head
+{
+ my $self = shift;
+
+ my $delete_head = $self->{dbh}->prepare_cached("DELETE FROM head",{},1);
+ $delete_head->execute();
+}
+
+sub insert_head
+{
+ my $self = shift;
+ my $name = shift;
+ my $revision = shift;
+ my $filehash = shift;
+ my $commithash = shift;
+ my $modified = shift;
+ my $author = shift;
+ my $mode = shift;
+
+ my $insert_head = $self->{dbh}->prepare_cached("INSERT INTO head (name, revision, filehash, commithash, modified, author, mode) VALUES (?,?,?,?,?,?,?)",{},1);
+ $insert_head->execute($name, $revision, $filehash, $commithash, $modified, $author, $mode);
+}
+
sub _headrev
{
my $self = shift;
'
IFS="$LF"
+rloga=fetch
no_tags=
tags=
append=
update_head_ok=
exec=
upload_pack=
+keep=--thin
while case "$#" in 0) break ;; esac
do
case "$1" in
-k|--k|--ke|--kee|--keep)
keep=--keep
;;
+ --reflog-action=*)
+ rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'`
+ ;;
-*)
usage
;;
0)
test -f "$GIT_DIR/branches/origin" ||
test -f "$GIT_DIR/remotes/origin" ||
- die "Where do you want to fetch from today?"
+ git-repo-config --get remote.origin.url >/dev/null ||
+ die "Where do you want to fetch from today?"
set origin ;;
esac
rref=
rsync_slurped_objects=
+rloga="$rloga $remote_nick"
+test "$remote_nick" = "$remote" || rloga="$rloga $remote"
+
if test "" = "$append"
then
: >"$GIT_DIR/FETCH_HEAD"
then
if now_=$(cat "$GIT_DIR/$1") && test "$now_" = "$2"
then
- [ "$verbose" ] && echo >&2 "* $1: same as $3"
+ [ "$verbose" ] && echo >&2 "* $1: same as $3" ||:
else
echo >&2 "* $1: updating with $3"
+ git-update-ref -m "$rloga: updating tag" "$1" "$2"
fi
else
echo >&2 "* $1: storing $3"
+ git-update-ref -m "$rloga: storing tag" "$1" "$2"
fi
- git-update-ref "$1" "$2"
;;
refs/heads/* | refs/remotes/*)
*,$local)
echo >&2 "* $1: fast forward to $3"
echo >&2 " from $local to $2"
- git-update-ref "$1" "$2" "$local"
+ git-update-ref -m "$rloga: fast-forward" "$1" "$2" "$local"
;;
*)
false
case ",$force,$single_force," in
*,t,*)
echo >&2 " forcing update."
- git-update-ref "$1" "$2" "$local"
+ git-update-ref -m "$rloga: forced-update" "$1" "$2" "$local"
;;
*)
echo >&2 " not updating."
}
else
echo >&2 "* $1: storing $3"
- git-update-ref "$1" "$2"
+ git-update-ref -m "$rloga: storing head" "$1" "$2"
fi
;;
esac
if test "$tags"
then
taglist=`IFS=" " &&
- git-ls-remote $upload_pack --tags "$remote" |
+ (
+ git-ls-remote $upload_pack --tags "$remote" ||
+ echo fail ouch
+ ) |
while read sha1 name
do
+ case "$sha1" in
+ fail)
+ exit 1
+ esac
case "$name" in
*^*) continue ;;
esac
else
echo >&2 "warning: tag ${name} ignored"
fi
- done`
+ done` || exit
if test "$#" -gt 1
then
# remote URL plus explicit refspecs; we need to merge them.
head="ref: $remote_name"
while (expr "z$head" : "zref:" && expr $depth \< $max_depth) >/dev/null
do
- remote_name_quoted=$(perl -e '
+ remote_name_quoted=$(@@PERL@@ -e '
my $u = $ARGV[0];
$u =~ s/^ref:\s*//;
$u =~ s{([^-a-zA-Z0-9/.])}{sprintf"%%%02x",ord($1)}eg;
( : subshell because we muck with IFS
IFS=" $LF"
(
- git-fetch-pack $exec $keep --thin "$remote" $rref || echo failed "$remote"
+ git-fetch-pack $exec $keep "$remote" $rref || echo failed "$remote"
) |
while read sha1 remote_name
do
curr_head=$(git-rev-parse --verify HEAD 2>/dev/null)
if test "$curr_head" != "$orig_head"
then
- git-update-ref HEAD "$orig_head"
+ git-update-ref \
+ -m "$rloga: Undoing incorrectly fetched HEAD." \
+ HEAD "$orig_head"
die "Cannot fetch into the current branch."
fi
;;
+++ /dev/null
-#!/usr/bin/perl -w
-#
-# Copyright (c) 2005 Junio C Hamano
-#
-# Read .git/FETCH_HEAD and make a human readable merge message
-# by grouping branches and tags together to form a single line.
-
-use strict;
-
-my @src;
-my %src;
-sub andjoin {
- my ($label, $labels, $stuff) = @_;
- my $l = scalar @$stuff;
- my $m = '';
- if ($l == 0) {
- return ();
- }
- if ($l == 1) {
- $m = "$label$stuff->[0]";
- }
- else {
- $m = ("$labels" .
- join (', ', @{$stuff}[0..$l-2]) .
- " and $stuff->[-1]");
- }
- return ($m);
-}
-
-sub repoconfig {
- my ($val) = qx{git-repo-config --get merge.summary};
- return $val;
-}
-
-sub current_branch {
- my ($bra) = qx{git-symbolic-ref HEAD};
- chomp($bra);
- $bra =~ s|^refs/heads/||;
- if ($bra ne 'master') {
- $bra = " into $bra";
- } else {
- $bra = "";
- }
- return $bra;
-}
-
-sub shortlog {
- my ($tip) = @_;
- my @result;
- foreach ( qx{git-log --no-merges --topo-order --pretty=oneline $tip ^HEAD} ) {
- s/^[0-9a-f]{40}\s+//;
- push @result, $_;
- }
- die "git-log failed\n" if $?;
- return @result;
-}
-
-my @origin = ();
-while (<>) {
- my ($bname, $tname, $gname, $src, $sha1, $origin);
- chomp;
- s/^([0-9a-f]*) //;
- $sha1 = $1;
- next if (/^not-for-merge/);
- s/^ //;
- if (s/ of (.*)$//) {
- $src = $1;
- } else {
- # Pulling HEAD
- $src = $_;
- $_ = 'HEAD';
- }
- if (! exists $src{$src}) {
- push @src, $src;
- $src{$src} = {
- BRANCH => [],
- TAG => [],
- R_BRANCH => [],
- GENERIC => [],
- # &1 == has HEAD.
- # &2 == has others.
- HEAD_STATUS => 0,
- };
- }
- if (/^branch (.*)$/) {
- $origin = $1;
- push @{$src{$src}{BRANCH}}, $1;
- $src{$src}{HEAD_STATUS} |= 2;
- }
- elsif (/^tag (.*)$/) {
- $origin = $_;
- push @{$src{$src}{TAG}}, $1;
- $src{$src}{HEAD_STATUS} |= 2;
- }
- elsif (/^remote branch (.*)$/) {
- $origin = $1;
- push @{$src{$src}{R_BRANCH}}, $1;
- $src{$src}{HEAD_STATUS} |= 2;
- }
- elsif (/^HEAD$/) {
- $origin = $src;
- $src{$src}{HEAD_STATUS} |= 1;
- }
- else {
- push @{$src{$src}{GENERIC}}, $_;
- $src{$src}{HEAD_STATUS} |= 2;
- $origin = $src;
- }
- if ($src eq '.' || $src eq $origin) {
- $origin =~ s/^'(.*)'$/$1/;
- push @origin, [$sha1, "$origin"];
- }
- else {
- push @origin, [$sha1, "$origin of $src"];
- }
-}
-
-my @msg;
-for my $src (@src) {
- if ($src{$src}{HEAD_STATUS} == 1) {
- # Only HEAD is fetched, nothing else.
- push @msg, $src;
- next;
- }
- my @this;
- if ($src{$src}{HEAD_STATUS} == 3) {
- # HEAD is fetched among others.
- push @this, andjoin('', '', ['HEAD']);
- }
- push @this, andjoin("branch ", "branches ",
- $src{$src}{BRANCH});
- push @this, andjoin("remote branch ", "remote branches ",
- $src{$src}{R_BRANCH});
- push @this, andjoin("tag ", "tags ",
- $src{$src}{TAG});
- push @this, andjoin("commit ", "commits ",
- $src{$src}{GENERIC});
- my $this = join(', ', @this);
- if ($src ne '.') {
- $this .= " of $src";
- }
- push @msg, $this;
-}
-
-my $into = current_branch();
-
-print "Merge ", join("; ", @msg), $into, "\n";
-
-if (!repoconfig) {
- exit(0);
-}
-
-# We limit the merge message to the latst 20 or so per each branch.
-my $limit = 20;
-
-for (@origin) {
- my ($sha1, $name) = @$_;
- my @log = shortlog($sha1);
- if ($limit + 1 <= @log) {
- print "\n* $name: (" . scalar(@log) . " commits)\n";
- }
- else {
- print "\n* $name:\n";
- }
- my $cnt = 0;
- for my $log (@log) {
- if ($limit < ++$cnt) {
- print " ...\n";
- last;
- }
- print " $log";
- }
-}
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+USAGE='[--start] [--stop] [--restart]
+ [--local] [--httpd=<httpd>] [--port=<port>] [--browser=<browser>]
+ [--module-path=<path> (for Apache2 only)]'
+
+. git-sh-setup
+
+case "$GIT_DIR" in
+/*)
+ fqgitdir="$GIT_DIR" ;;
+*)
+ fqgitdir="$PWD/$GIT_DIR" ;;
+esac
+
+local="`git repo-config --bool --get instaweb.local`"
+httpd="`git repo-config --get instaweb.httpd`"
+browser="`git repo-config --get instaweb.browser`"
+port=`git repo-config --get instaweb.port`
+module_path="`git repo-config --get instaweb.modulepath`"
+
+conf=$GIT_DIR/gitweb/httpd.conf
+
+# Defaults:
+
+# if installed, it doesn't need further configuration (module_path)
+test -z "$httpd" && httpd='lighttpd -f'
+
+# probably the most popular browser among gitweb users
+test -z "$browser" && browser='firefox'
+
+# any untaken local port will do...
+test -z "$port" && port=1234
+
+start_httpd () {
+ httpd_only="`echo $httpd | cut -f1 -d' '`"
+ if test "`expr index $httpd_only /`" -eq '1' || \
+ which $httpd_only >/dev/null
+ then
+ $httpd $fqgitdir/gitweb/httpd.conf
+ else
+ # many httpds are installed in /usr/sbin or /usr/local/sbin
+ # these days and those are not in most users $PATHs
+ for i in /usr/local/sbin /usr/sbin
+ do
+ if test -x "$i/$httpd_only"
+ then
+ # don't quote $httpd, there can be
+ # arguments to it (-f)
+ $i/$httpd "$fqgitdir/gitweb/httpd.conf"
+ return
+ fi
+ done
+ fi
+ if test $? != 0; then
+ echo "Could not execute http daemon $httpd."
+ exit 1
+ fi
+}
+
+stop_httpd () {
+ test -f "$fqgitdir/pid" && kill `cat "$fqgitdir/pid"`
+}
+
+while case "$#" in 0) break ;; esac
+do
+ case "$1" in
+ --stop|stop)
+ stop_httpd
+ exit 0
+ ;;
+ --start|start)
+ start_httpd
+ exit 0
+ ;;
+ --restart|restart)
+ stop_httpd
+ start_httpd
+ exit 0
+ ;;
+ --local|-l)
+ local=true
+ ;;
+ -d|--httpd|--httpd=*)
+ case "$#,$1" in
+ *,*=*)
+ httpd=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ 1,*)
+ usage ;;
+ *)
+ httpd="$2"
+ shift ;;
+ esac
+ ;;
+ -b|--browser|--browser=*)
+ case "$#,$1" in
+ *,*=*)
+ browser=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ 1,*)
+ usage ;;
+ *)
+ browser="$2"
+ shift ;;
+ esac
+ ;;
+ -p|--port|--port=*)
+ case "$#,$1" in
+ *,*=*)
+ port=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ 1,*)
+ usage ;;
+ *)
+ port="$2"
+ shift ;;
+ esac
+ ;;
+ -m|--module-path=*|--module-path)
+ case "$#,$1" in
+ *,*=*)
+ module_path=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ 1,*)
+ usage ;;
+ *)
+ module_path="$2"
+ shift ;;
+ esac
+ ;;
+ *)
+ usage
+ ;;
+ esac
+ shift
+done
+
+mkdir -p "$GIT_DIR/gitweb/tmp"
+GIT_EXEC_PATH="`git --exec-path`"
+GIT_DIR="$fqgitdir"
+export GIT_EXEC_PATH GIT_DIR
+
+
+lighttpd_conf () {
+ cat > "$conf" <<EOF
+server.document-root = "$fqgitdir/gitweb"
+server.port = $port
+server.modules = ( "mod_cgi" )
+server.indexfiles = ( "gitweb.cgi" )
+server.pid-file = "$fqgitdir/pid"
+cgi.assign = ( ".cgi" => "" )
+mimetype.assign = ( ".css" => "text/css" )
+EOF
+ test "$local" = true && echo 'server.bind = "127.0.0.1"' >> "$conf"
+}
+
+apache2_conf () {
+ test -z "$module_path" && module_path=/usr/lib/apache2/modules
+ mkdir -p "$GIT_DIR/gitweb/logs"
+ bind=
+ test "$local" = true && bind='127.0.0.1:'
+ echo 'text/css css' > $fqgitdir/mime.types
+ cat > "$conf" <<EOF
+ServerRoot "$fqgitdir/gitweb"
+DocumentRoot "$fqgitdir/gitweb"
+PidFile "$fqgitdir/pid"
+Listen $bind$port
+TypesConfig $fqgitdir/mime.types
+DirectoryIndex gitweb.cgi
+EOF
+
+ # check to see if Dennis Stosberg's mod_perl compatibility patch
+ # (<20060621130708.Gcbc6e5c@leonov.stosberg.net>) has been applied
+ if test -f "$module_path/mod_perl.so" && grep '^our $gitbin' \
+ "$GIT_DIR/gitweb/gitweb.cgi" >/dev/null
+ then
+ # favor mod_perl if available
+ cat >> "$conf" <<EOF
+LoadModule perl_module $module_path/mod_perl.so
+PerlPassEnv GIT_DIR
+PerlPassEnv GIT_EXEC_DIR
+<Location /gitweb.cgi>
+ SetHandler perl-script
+ PerlResponseHandler ModPerl::Registry
+ PerlOptions +ParseHeaders
+ Options +ExecCGI
+</Location>
+EOF
+ else
+ # plain-old CGI
+ list_mods=`echo "$httpd" | sed "s/-f$/-l/"`
+ $list_mods | grep 'mod_cgi\.c' >/dev/null 2>&1 || \
+ echo "LoadModule cgi_module $module_path/mod_cgi.so" >> "$conf"
+ cat >> "$conf" <<EOF
+AddHandler cgi-script .cgi
+<Location /gitweb.cgi>
+ Options +ExecCGI
+</Location>
+EOF
+ fi
+}
+
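+# Rewrite the configuration defaults embedded in gitweb.cgi so that the
+# generated CGI serves this repository: $projectroot becomes the directory
+# containing $GIT_DIR, $gitbin the git exec path, and $git_temp a private
+# temporary directory under $GIT_DIR/gitweb.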
+script='
+s#^\(my\|our\) $projectroot =.*#\1 $projectroot = "'`dirname $fqgitdir`'";#
+s#\(my\|our\) $gitbin =.*#\1 $gitbin = "'$GIT_EXEC_PATH'";#
+s#\(my\|our\) $projects_list =.*#\1 $projects_list = $projectroot;#
+s#\(my\|our\) $git_temp =.*#\1 $git_temp = "'$fqgitdir/gitweb/tmp'";#'
+
+gitweb_cgi () {
+ cat > "$1.tmp" <<\EOFGITWEB
+@@GITWEB_CGI@@
+EOFGITWEB
+ sed "$script" "$1.tmp" > "$1"
+ chmod +x "$1"
+ rm -f "$1.tmp"
+}
+
+gitweb_css () {
+ cat > "$1" <<\EOFGITWEB
+@@GITWEB_CSS@@
+EOFGITWEB
+}
+
+gitweb_cgi $GIT_DIR/gitweb/gitweb.cgi
+gitweb_css $GIT_DIR/gitweb/gitweb.css
+
+case "$httpd" in
+*lighttpd*)
+ lighttpd_conf
+ ;;
+*apache2*)
+ apache2_conf
+ ;;
+*)
+ echo "Unknown httpd specified: $httpd"
+ exit 1
+ ;;
+esac
+
+start_httpd
+test -z "$browser" && browser=echo
+url=http://127.0.0.1:$port
+$browser $url || echo $url
laf="$GIT_DIR/lost-found"
rm -fr "$laf" && mkdir -p "$laf/commit" "$laf/other" || exit
-git fsck-objects |
+git fsck-objects --full |
while read dangling type sha1
do
case "$dangling" in
# $2 - file in branch1 SHA1 (or empty)
# $3 - file in branch2 SHA1 (or empty)
# $4 - pathname in repository
-# $5 - orignal file mode (or empty)
+# $5 - original file mode (or empty)
# $6 - file in branch1 mode (or empty)
# $7 - file in branch2 mode (or empty)
#
def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0, ancestor=None):
'''Merge the commits h1 and h2, return the resulting virtual
- commit object and a flag indicating the cleaness of the merge.'''
+ commit object and a flag indicating the cleanness of the merge.'''
assert(isinstance(h1, Commit) and isinstance(h2, Commit))
global outputIndent
}
finish () {
- test '' = "$2" || echo "$2"
+ if test '' = "$2"
+ then
+ rlogm="$rloga"
+ else
+ echo "$2"
+ rlogm="$rloga: $2"
+ fi
case "$squash" in
t)
echo "Squash commit -- not updating HEAD"
echo "No merge message -- not updating HEAD"
;;
*)
- git-update-ref HEAD "$1" "$head" || exit 1
+ git-update-ref -m "$rlogm" HEAD "$1" "$head" || exit 1
;;
esac
;;
esac
}
+rloga=
while case "$#" in 0) break ;; esac
do
case "$1" in
-s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy)
case "$#,$1" in
*,*=*)
- strategy=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ strategy=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;;
1,*)
usage ;;
*)
die "available strategies are: $all_strategies" ;;
esac
;;
+ --reflog-action=*)
+ rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'`
+ ;;
-*) usage ;;
*) break ;;
esac
# All the rest are remote heads
test "$#" = 0 && usage ;# we need at least one remote head.
+test "$rloga" = '' && rloga="merge: $@"
remoteheads=
for remote
then
parents=$(git-show-branch --independent "$head" "$@" | sed -e 's/^/-p /')
result_commit=$(echo "$merge_msg" | git-commit-tree $result_tree $parents) || exit
- finish "$result_commit" "Merge $result_commit, made by $wt_strategy."
+ finish "$result_commit" "Merge made by $wt_strategy."
dropsave
exit 0
fi
+++ /dev/null
-#!/usr/bin/perl
-#
-# Copyright 2005, Ryan Anderson <ryan@michonline.com>
-# Josef Weidendorfer <Josef.Weidendorfer@gmx.de>
-#
-# This file is licensed under the GPL v2, or a later version
-# at the discretion of Linus Torvalds.
-
-
-use warnings;
-use strict;
-use Getopt::Std;
-
-sub usage() {
- print <<EOT;
-$0 [-f] [-n] <source> <destination>
-$0 [-f] [-n] [-k] <source> ... <destination directory>
-EOT
- exit(1);
-}
-
-our ($opt_n, $opt_f, $opt_h, $opt_k, $opt_v);
-getopts("hnfkv") || usage;
-usage() if $opt_h;
-@ARGV >= 1 or usage;
-
-my $GIT_DIR = `git rev-parse --git-dir`;
-exit 1 if $?; # rev-parse would have given "not a git dir" message.
-chomp($GIT_DIR);
-
-my (@srcArgs, @dstArgs, @srcs, @dsts);
-my ($src, $dst, $base, $dstDir);
-
-# remove any trailing slash in arguments
-for (@ARGV) { s/\/*$//; }
-
-my $argCount = scalar @ARGV;
-if (-d $ARGV[$argCount-1]) {
- $dstDir = $ARGV[$argCount-1];
- @srcArgs = @ARGV[0..$argCount-2];
-
- foreach $src (@srcArgs) {
- $base = $src;
- $base =~ s/^.*\///;
- $dst = "$dstDir/". $base;
- push @dstArgs, $dst;
- }
-}
-else {
- if ($argCount < 2) {
- print "Error: need at least two arguments\n";
- exit(1);
- }
- if ($argCount > 2) {
- print "Error: moving to directory '"
- . $ARGV[$argCount-1]
- . "' not possible; not existing\n";
- exit(1);
- }
- @srcArgs = ($ARGV[0]);
- @dstArgs = ($ARGV[1]);
- $dstDir = "";
-}
-
-my $subdir_prefix = `git rev-parse --show-prefix`;
-chomp($subdir_prefix);
-
-# run in git base directory, so that git-ls-files lists all revisioned files
-chdir "$GIT_DIR/..";
-
-# normalize paths, needed to compare against versioned files and update-index
-# also, this is nicer to end-users by doing ".//a/./b/.//./c" ==> "a/b/c"
-for (@srcArgs, @dstArgs) {
- # prepend git prefix as we run from base directory
- $_ = $subdir_prefix.$_;
- s|^\./||;
- s|/\./|/| while (m|/\./|);
- s|//+|/|g;
- # Also "a/b/../c" ==> "a/c"
- 1 while (s,(^|/)[^/]+/\.\./,$1,);
-}
-
-my (@allfiles,@srcfiles,@dstfiles);
-my $safesrc;
-my (%overwritten, %srcForDst);
-
-$/ = "\0";
-open(F, 'git-ls-files -z |')
- or die "Failed to open pipe from git-ls-files: " . $!;
-
-@allfiles = map { chomp; $_; } <F>;
-close(F);
-
-
-my ($i, $bad);
-while(scalar @srcArgs > 0) {
- $src = shift @srcArgs;
- $dst = shift @dstArgs;
- $bad = "";
-
- for ($src, $dst) {
- # Be nicer to end-users by doing ".//a/./b/.//./c" ==> "a/b/c"
- s|^\./||;
- s|/\./|/| while (m|/\./|);
- s|//+|/|g;
- # Also "a/b/../c" ==> "a/c"
- 1 while (s,(^|/)[^/]+/\.\./,$1,);
- }
-
- if ($opt_v) {
- print "Checking rename of '$src' to '$dst'\n";
- }
-
- unless (-f $src || -l $src || -d $src) {
- $bad = "bad source '$src'";
- }
-
- $safesrc = quotemeta($src);
- @srcfiles = grep /^$safesrc(\/|$)/, @allfiles;
-
- $overwritten{$dst} = 0;
- if (($bad eq "") && -e $dst) {
- $bad = "destination '$dst' already exists";
- if ($opt_f) {
- # only files can overwrite each other: check both source and destination
- if (-f $dst && (scalar @srcfiles == 1)) {
- print "Warning: $bad; will overwrite!\n";
- $bad = "";
- $overwritten{$dst} = 1;
- }
- else {
- $bad = "Can not overwrite '$src' with '$dst'";
- }
- }
- }
-
- if (($bad eq "") && ($dst =~ /^$safesrc\//)) {
- $bad = "can not move directory '$src' into itself";
- }
-
- if ($bad eq "") {
- if (scalar @srcfiles == 0) {
- $bad = "'$src' not under version control";
- }
- }
-
- if ($bad eq "") {
- if (defined $srcForDst{$dst}) {
- $bad = "can not move '$src' to '$dst'; already target of ";
- $bad .= "'".$srcForDst{$dst}."'";
- }
- else {
- $srcForDst{$dst} = $src;
- }
- }
-
- if ($bad ne "") {
- if ($opt_k) {
- print "Warning: $bad; skipping\n";
- next;
- }
- print "Error: $bad\n";
- exit(1);
- }
- push @srcs, $src;
- push @dsts, $dst;
-}
-
-# Final pass: rename/move
-my (@deletedfiles,@addedfiles,@changedfiles);
-$bad = "";
-while(scalar @srcs > 0) {
- $src = shift @srcs;
- $dst = shift @dsts;
-
- if ($opt_n || $opt_v) { print "Renaming $src to $dst\n"; }
- if (!$opt_n) {
- if (!rename($src,$dst)) {
- $bad = "renaming '$src' failed: $!";
- if ($opt_k) {
- print "Warning: skipped: $bad\n";
- $bad = "";
- next;
- }
- last;
- }
- }
-
- $safesrc = quotemeta($src);
- @srcfiles = grep /^$safesrc(\/|$)/, @allfiles;
- @dstfiles = @srcfiles;
- s/^$safesrc(\/|$)/$dst$1/ for @dstfiles;
-
- push @deletedfiles, @srcfiles;
- if (scalar @srcfiles == 1) {
- # $dst can be a directory with 1 file inside
- if ($overwritten{$dst} ==1) {
- push @changedfiles, $dstfiles[0];
-
- } else {
- push @addedfiles, $dstfiles[0];
- }
- }
- else {
- push @addedfiles, @dstfiles;
- }
-}
-
-if ($opt_n) {
- if (@changedfiles) {
- print "Changed : ". join(", ", @changedfiles) ."\n";
- }
- if (@addedfiles) {
- print "Adding : ". join(", ", @addedfiles) ."\n";
- }
- if (@deletedfiles) {
- print "Deleting : ". join(", ", @deletedfiles) ."\n";
- }
-}
-else {
- if (@changedfiles) {
- open(H, "| git-update-index -z --stdin")
- or die "git-update-index failed to update changed files with code $!\n";
- foreach my $fileName (@changedfiles) {
- print H "$fileName\0";
- }
- close(H);
- }
- if (@addedfiles) {
- open(H, "| git-update-index --add -z --stdin")
- or die "git-update-index failed to add new names with code $!\n";
- foreach my $fileName (@addedfiles) {
- print H "$fileName\0";
- }
- close(H);
- }
- if (@deletedfiles) {
- open(H, "| git-update-index --remove -z --stdin")
- or die "git-update-index failed to remove old names with code $!\n";
- foreach my $fileName (@deletedfiles) {
- print H "$fileName\0";
- }
- close(H);
- }
-}
-
-if ($bad ne "") {
- print "Error: $bad\n";
- exit(1);
-}
+++ /dev/null
-#!/bin/sh
-
-USAGE='[-n] [--] [<head>...]'
-. git-sh-setup
-
-dryrun=
-echo=
-while case "$#" in 0) break ;; esac
-do
- case "$1" in
- -n) dryrun=-n echo=echo ;;
- --) break ;;
- -*) usage ;;
- *) break ;;
- esac
- shift;
-done
-
-sync
-case "$#" in
-0) git-fsck-objects --full --cache --unreachable ;;
-*) git-fsck-objects --full --cache --unreachable $(git-rev-parse --all) "$@" ;;
-esac |
-
-sed -ne '/unreachable /{
- s/unreachable [^ ][^ ]* //
- s|\(..\)|\1/|p
-}' | {
- cd "$GIT_OBJECT_DIRECTORY" || exit
- xargs $echo rm -f
- rmdir 2>/dev/null [0-9a-f][0-9a-f]
-}
-
-git-prune-packed $dryrun
-
-if redundant=$(git-pack-redundant --all 2>/dev/null) && test "" != "$redundant"
-then
- if test "" = "$dryrun"
- then
- echo "$redundant" | xargs rm -f
- else
- echo rm -f "$redundant"
- fi
-fi
-s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy)
case "$#,$1" in
*,*=*)
- strategy=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ strategy=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;;
1,*)
usage ;;
*)
done
orig_head=$(git-rev-parse --verify HEAD) || die "Pulling into a black hole?"
-git-fetch --update-head-ok "$@" || exit 1
+git-fetch --update-head-ok --reflog-action=pull "$@" || exit 1
curr_head=$(git-rev-parse --verify HEAD)
if test "$curr_head" != "$orig_head"
esac
merge_name=$(git-fmt-merge-msg <"$GIT_DIR/FETCH_HEAD") || exit
-git-merge $no_summary $no_commit $squash $strategy_args \
+git-merge "--reflog-action=pull $*" \
+ $no_summary $no_commit $squash $strategy_args \
"$merge_name" HEAD $merge_head
+++ /dev/null
-#!/bin/sh
-
-USAGE='[--all] [--tags] [--force] <repository> [<refspec>...]'
-. git-sh-setup
-
-# Parse out parameters and then stop at remote, so that we can
-# translate it using .git/branches information
-has_all=
-has_force=
-has_exec=
-has_thin=--thin
-remote=
-do_tags=
-
-while case "$#" in 0) break ;; esac
-do
- case "$1" in
- --all)
- has_all=--all ;;
- --tags)
- do_tags=yes ;;
- --force)
- has_force=--force ;;
- --exec=*)
- has_exec="$1" ;;
- --thin)
- ;; # noop
- --no-thin)
- has_thin= ;;
- -*)
- usage ;;
- *)
- set x "$@"
- shift
- break ;;
- esac
- shift
-done
-case "$#" in
-0)
- echo "Where would you want to push today?"
- usage ;;
-esac
-
-. git-parse-remote
-remote=$(get_remote_url "$@")
-
-case "$has_all" in
---all)
- set x ;;
-'')
- case "$do_tags,$#" in
- yes,1)
- set x $(cd "$GIT_DIR/refs" && find tags -type f -print) ;;
- yes,*)
- set x $(cd "$GIT_DIR/refs" && find tags -type f -print) \
- $(get_remote_refs_for_push "$@") ;;
- ,*)
- set x $(get_remote_refs_for_push "$@") ;;
- esac
-esac
-
-shift ;# away the initial 'x'
-
-# $# is now 0 if there was no explicit refspec on the command line
-# and there was no defalt refspec to push from remotes/ file.
-# we will let git-send-pack to do its "matching refs" thing.
-
-case "$remote" in
-git://*)
- die "Cannot use READ-ONLY transport to push to $remote" ;;
-rsync://*)
- die "Pushing with rsync transport is deprecated" ;;
-esac
-
-set x "$remote" "$@"; shift
-test "$has_all" && set x "$has_all" "$@" && shift
-test "$has_force" && set x "$has_force" "$@" && shift
-test "$has_exec" && set x "$has_exec" "$@" && shift
-test "$has_thin" && set x "$has_thin" "$@" && shift
-
-case "$remote" in
-http://* | https://*)
- exec git-http-push "$@";;
-*)
- exec git-send-pack "$@";;
-esac
do
case "$1" in
--au=*|--aut=*|--auth=*|--autho=*|--author=*)
- quilt_author=$(expr "$1" : '-[^=]*\(.*\)')
+ quilt_author=$(expr "z$1" : 'z-[^=]*\(.*\)')
shift
;;
;;
--pa=*|--pat=*|--patc=*|--patch=*|--patche=*|--patches=*)
- QUILT_PATCHES=$(expr "$1" : '-[^=]*\(.*\)')
+ QUILT_PATCHES=$(expr "z$1" : 'z-[^=]*\(.*\)')
shift
;;
quilt_author_email=$(expr "z$quilt_author" : '.*<\([^>]*\)') &&
test '' != "$quilt_author_name" &&
test '' != "$quilt_author_email" ||
- die "malformatted --author parameter"
+ die "malformed --author parameter"
fi
# Quilt patch directory
git-apply --index -C1 "$tmp_patch" &&
tree=$(git-write-tree) &&
commit=$((echo "$SUBJECT"; echo; cat "$tmp_msg") | git-commit-tree $tree -p $commit) &&
- git-update-ref HEAD $commit || exit 4
+ git-update-ref -m "quiltimport: $patch_name" HEAD $commit || exit 4
fi
done
rm -rf $tmp_dir || exit 5
If you would prefer to skip this patch, instead run \"git rebase --skip\".
To restore the original branch and stop rebasing run \"git rebase --abort\".
"
-
-MRESOLVEMSG="
-When you have resolved this problem run \"git rebase --continue\".
-To restore the original branch and stop rebasing run \"git rebase --abort\".
-"
unset newbase
strategy=recursive
do_merge=
then
echo "You still have unmerged paths in your index"
echo "did you forget update-index?"
- die "$MRESOLVEMSG"
+ die "$RESOLVEMSG"
fi
if test -n "`git-diff-index HEAD`"
then
+ if ! git-commit -C "`cat $dotest/current`"
+ then
+ echo "Commit failed, please do not call \"git commit\""
+ echo "directly, but instead do one of the following: "
+ die "$RESOLVEMSG"
+ fi
printf "Committed: %0${prec}d" $msgnum
- git-commit -C "`cat $dotest/current`"
else
printf "Already applied: %0${prec}d" $msgnum
fi
;;
1)
test -d "$GIT_DIR/rr-cache" && git-rerere
- die "$MRESOLVEMSG"
+ die "$RESOLVEMSG"
;;
2)
echo "Strategy: $rv $strategy failed, try another" 1>&2
- die "$MRESOLVEMSG"
+ die "$RESOLVEMSG"
;;
*)
die "Unknown exit code ($rv) from command:" \
finish_rb_merge
exit
fi
- git am --resolved --3way --resolvemsg="$RESOLVEMSG"
+ git am --resolved --3way --resolvemsg="$RESOLVEMSG" \
+ --reflog-action=rebase
exit
;;
--skip)
finish_rb_merge
exit
fi
- git am -3 --skip --resolvemsg="$RESOLVEMSG"
+ git am -3 --skip --resolvemsg="$RESOLVEMSG" \
+ --reflog-action=rebase
exit
;;
--abort)
-s|--s|--st|--str|--stra|--strat|--strate|--strateg|--strategy)
case "$#,$1" in
*,*=*)
- strategy=`expr "$1" : '-[^=]*=\(.*\)'` ;;
+ strategy=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;;
1,*)
usage ;;
*)
# Check if we are already based on $onto, but this should be
# done only when upstream and onto are the same.
-if test "$upstream" = "$onto"
+mb=$(git-merge-base "$onto" "$branch")
+if test "$upstream" = "$onto" && test "$mb" = "$onto"
then
- mb=$(git-merge-base "$onto" "$branch")
- if test "$mb" = "$onto"
- then
- echo >&2 "Current branch $branch_name is up to date."
- exit 0
- fi
+ echo >&2 "Current branch $branch_name is up to date."
+ exit 0
fi
# Rewind the head to "$onto"; this saves our current head in ORIG_HEAD.
# If the $onto is a proper descendant of the tip of the branch, then
# we just fast forwarded.
-if test "$mb" = "$onto"
+if test "$mb" = "$branch"
then
- echo >&2 "Fast-forwarded $branch to $newbase."
+ echo >&2 "Fast-forwarded $branch_name to $onto_name."
exit 0
fi
if test -z "$do_merge"
then
git-format-patch -k --stdout --full-index "$upstream"..ORIG_HEAD |
- git am --binary -3 -k --resolvemsg="$RESOLVEMSG"
+ git am --binary -3 -k --resolvemsg="$RESOLVEMSG" \
+ --reflog-action=rebase
exit $?
fi
msgnum=0
for cmt in `git-rev-list --no-merges "$upstream"..ORIG_HEAD \
- | perl -e 'print reverse <>'`
+ | @@PERL@@ -e 'print reverse <>'`
do
msgnum=$(($msgnum + 1))
echo "$cmt" > "$dotest/cmt.$msgnum"
;;
esac
pack_objects="$pack_objects $local $quiet $no_reuse_delta$extra"
-name=$(git-rev-list --objects --all $rev_list 2>&1 |
+name=$( { git-rev-list --objects --all $rev_list ||
+ echo "git-rev-list died with exit code $?"
+ } |
git-pack-objects --non-empty $pack_objects .tmp-pack) ||
exit 1
if [ -z "$name" ]; then
fi
mkdir -p "$PACKDIR" || exit
- mv .tmp-pack-$name.pack "$PACKDIR/pack-$name.pack" &&
- mv .tmp-pack-$name.idx "$PACKDIR/pack-$name.idx" ||
- exit
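+	# Move the freshly built pack into place, preserving any existing
+	# pack of the same name as old-pack-* until the new files are
+	# verified to be there.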
+ for sfx in pack idx
+ do
+ if test -f "$PACKDIR/pack-$name.$sfx"
+ then
+ mv -f "$PACKDIR/pack-$name.$sfx" \
+ "$PACKDIR/old-pack-$name.$sfx"
+ fi
+ done &&
+ mv -f .tmp-pack-$name.pack "$PACKDIR/pack-$name.pack" &&
+ mv -f .tmp-pack-$name.idx "$PACKDIR/pack-$name.idx" &&
+ test -f "$PACKDIR/pack-$name.pack" &&
+ test -f "$PACKDIR/pack-$name.idx" || {
+ echo >&2 "Couldn't replace the existing pack with updated one."
+ echo >&2 "The original set of packs have been saved as"
+ echo >&2 "old-pack-$name.{pack,idx} in $PACKDIR."
+ exit 1
+ }
+ rm -f "$PACKDIR/old-pack-$name.pack" "$PACKDIR/old-pack-$name.idx"
fi
if test "$remove_redundant" = t
usage ;;
esac
-rev=$(git-rev-parse --verify --default HEAD "$@") || exit
+case $# in
+0) rev=HEAD ;;
+1) rev=$(git-rev-parse --verify "$1") || exit ;;
+*) usage ;;
+esac
rev=$(git-rev-parse --verify $rev^0) || exit
# We need to remember the set of paths that _could_ be left
else
rm -f "$GIT_DIR/ORIG_HEAD"
fi
-git-update-ref -m "reset $reset_type $@" HEAD "$rev"
+git-update-ref -m "reset $reset_type $*" HEAD "$rev"
+update_ref_status=$?
case "$reset_type" in
--hard )
esac
rm -f "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/rr-cache/MERGE_RR" "$GIT_DIR/SQUASH_MSG"
+
+exit $update_ref_status
head=$(git-rev-parse --verify "$1"^0) &&
merge=$(git-rev-parse --verify "$2"^0) &&
+merge_name="$2" &&
merge_msg="$3" || usage
#
"$head")
echo "Updating from $head to $merge"
git-read-tree -u -m $head $merge || exit 1
- git-update-ref HEAD "$merge" "$head"
+ git-update-ref -m "resolve $merge_name: Fast forward" \
+ HEAD "$merge" "$head"
git-diff-tree -p $head $merge | git-apply --stat
dropheads
exit 0
fi
result_commit=$(echo "$merge_msg" | git-commit-tree $result_tree -p $head -p $merge)
echo "Committed merge $result_commit"
-git-update-ref HEAD "$result_commit" "$head"
+git-update-ref -m "resolve $merge_name: In-index merge" \
+ HEAD "$result_commit" "$head"
git-diff-tree -p $head $result_commit | git-apply --stat
dropheads
s/^[^ ]* /Revert "/
s/$/"/'
echo
- echo "This reverts $commit commit."
+ echo "This reverts commit $commit."
test "$rev" = "$commit" ||
echo "(original 'git revert' arguments: $@)"
base=$commit next=$prev
use Getopt::Long;
use Data::Dumper;
+package FakeTerm;
+sub new {
+ my ($class, $reason) = @_;
+ return bless \$reason, shift;
+}
+sub readline {
+ my $self = shift;
+ die "Cannot use readline on FakeTerm: $$self";
+}
+package main;
+
# most mail servers generate the Date: header, but not all...
-$ENV{LC_ALL} = 'C';
-use POSIX qw/strftime/;
+sub format_2822_time {
+ my ($time) = @_;
+ my @localtm = localtime($time);
+ my @gmttm = gmtime($time);
+ my $localmin = $localtm[1] + $localtm[2] * 60;
+ my $gmtmin = $gmttm[1] + $gmttm[2] * 60;
+ if ($localtm[0] != $gmttm[0]) {
+ die "local zone differs from GMT by a non-minute interval\n";
+ }
+ if ((($gmttm[6] + 1) % 7) == $localtm[6]) {
+ $localmin += 1440;
+ } elsif ((($gmttm[6] - 1) % 7) == $localtm[6]) {
+ $localmin -= 1440;
+ } elsif ($gmttm[6] != $localtm[6]) {
+ die "local time offset greater than or equal to 24 hours\n";
+ }
+ my $offset = $localmin - $gmtmin;
+ my $offhour = $offset / 60;
+ my $offmin = abs($offset % 60);
+ if (abs($offhour) >= 24) {
+ die ("local time offset greater than or equal to 24 hours\n");
+ }
+
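+	# e.g. under TZ=UTC this returns "Thu,  1 Jan 1970 00:00:00 +0000"
+	# for a $time of 0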
+ return sprintf("%s, %2d %s %d %02d:%02d:%02d %s%02d%02d",
+ qw(Sun Mon Tue Wed Thu Fri Sat)[$localtm[6]],
+ $localtm[3],
+ qw(Jan Feb Mar Apr May Jun
+ Jul Aug Sep Oct Nov Dec)[$localtm[4]],
+ $localtm[5]+1900,
+ $localtm[2],
+ $localtm[1],
+ $localtm[0],
+ ($offset >= 0) ? '+' : '-',
+ abs($offhour),
+ $offmin,
+ );
+}
my $have_email_valid = eval { require Email::Valid; 1 };
my $smtp;
# Example reply to:
#$initial_reply_to = ''; #<20050203173208.GA23964@foobar.com>';
-my $term = new Term::ReadLine 'git-send-email';
+my $term = eval {
+ new Term::ReadLine 'git-send-email';
+};
+if ($@) {
+ $term = new FakeTerm "$@: going non-interactive";
+}
# Begin by accumulating all the variables (defined above), that we will end up
# needing, first, from the command line:
--smtp-server If set, specifies the outgoing SMTP server to use.
Defaults to localhost.
- --suppress-from Supress sending emails to yourself if your address
+ --suppress-from Suppress sending emails to yourself if your address
appears in a From: line.
--quiet Make git-send-email less verbose. One line per email should be
my @recipients = unique_email_list(@to);
my $to = join (",\n\t", @recipients);
@recipients = unique_email_list(@recipients,@cc,@bcclist);
- my $date = strftime('%a, %d %b %Y %H:%M:%S %z', localtime($time++));
+ my $date = format_2822_time($time++);
my $gitversion = '@@GIT_VERSION@@';
if ($gitversion =~ m/..GIT_VERSION../) {
$gitversion = `git --version`;
To: $to
Cc: $cc
Subject: $subject
-Reply-To: $from
Date: $date
Message-Id: $message_id
X-Mailer: git-send-email $gitversion
exit
esac
+# Make sure we are in a valid repository of a vintage we understand.
if [ -z "$SUBDIRECTORY_OK" ]
then
: ${GIT_DIR=.git}
- : ${GIT_OBJECT_DIRECTORY="$GIT_DIR/objects"}
-
- # Make sure we are in a valid repository of a vintage we understand.
- GIT_DIR="$GIT_DIR" git repo-config --get core.nosuch >/dev/null
- if test $? = 128
- then
- exit
- fi
+ GIT_DIR=$(GIT_DIR="$GIT_DIR" git-rev-parse --git-dir) || exit
else
GIT_DIR=$(git-rev-parse --git-dir) || exit
fi
+: ${GIT_OBJECT_DIRECTORY="$GIT_DIR/objects"}
--- /dev/null
+#!/usr/bin/env perl
+# Copyright (C) 2006, Eric Wong <normalperson@yhbt.net>
+# License: GPL v2 or later
+use warnings;
+use strict;
+use vars qw/ $AUTHOR $VERSION
+ $SVN_URL $SVN_INFO $SVN_WC $SVN_UUID
+ $GIT_SVN_INDEX $GIT_SVN
+ $GIT_DIR $GIT_SVN_DIR $REVDB/;
+$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
+$VERSION = '@@GIT_VERSION@@';
+
+use Cwd qw/abs_path/;
+$GIT_DIR = abs_path($ENV{GIT_DIR} || '.git');
+$ENV{GIT_DIR} = $GIT_DIR;
+
+my $LC_ALL = $ENV{LC_ALL};
+my $TZ = $ENV{TZ};
+# make sure the svn binary gives consistent output between locales and TZs:
+$ENV{TZ} = 'UTC';
+$ENV{LC_ALL} = 'C';
+$| = 1; # unbuffer STDOUT
+
+# If SVN:: library support is added, please make the dependencies
+# optional and preserve the capability to use the command-line client.
+# use eval { require SVN::... } to make it lazy load
+# We don't use any modules not in the standard Perl distribution:
+use Carp qw/croak/;
+use IO::File qw//;
+use File::Basename qw/dirname basename/;
+use File::Path qw/mkpath/;
+use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev pass_through/;
+use File::Spec qw//;
+use File::Copy qw/copy/;
+use POSIX qw/strftime/;
+use IPC::Open3;
+use Memoize;
+memoize('revisions_eq');
+memoize('cmt_metadata');
+memoize('get_commit_time');
+
+my ($SVN_PATH, $SVN, $SVN_LOG, $_use_lib);
+$_use_lib = 1 unless $ENV{GIT_SVN_NO_LIB};
+libsvn_load();
+my $_optimize_commits = 1 unless $ENV{GIT_SVN_NO_OPTIMIZE_COMMITS};
+my $sha1 = qr/[a-f\d]{40}/;
+my $sha1_short = qr/[a-f\d]{4,40}/;
+my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit,
+ $_find_copies_harder, $_l, $_cp_similarity, $_cp_remote,
+ $_repack, $_repack_nr, $_repack_flags, $_q,
+ $_message, $_file, $_follow_parent, $_no_metadata,
+ $_template, $_shared, $_no_default_regex, $_no_graft_copy,
+ $_limit, $_verbose, $_incremental, $_oneline, $_l_fmt, $_show_commit,
+ $_version, $_upgrade, $_authors, $_branch_all_refs, @_opt_m);
+my (@_branch_from, %tree_map, %users, %rusers, %equiv);
+my ($_svn_co_url_revs, $_svn_pg_peg_revs);
+my @repo_path_split_cache;
+
+my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext,
+ 'branch|b=s' => \@_branch_from,
+ 'follow-parent|follow' => \$_follow_parent,
+ 'branch-all-refs|B' => \$_branch_all_refs,
+ 'authors-file|A=s' => \$_authors,
+ 'repack:i' => \$_repack,
+ 'no-metadata' => \$_no_metadata,
+ 'quiet|q' => \$_q,
+ 'repack-flags|repack-args|repack-opts=s' => \$_repack_flags);
+
+my ($_trunk, $_tags, $_branches);
+my %multi_opts = ( 'trunk|T=s' => \$_trunk,
+ 'tags|t=s' => \$_tags,
+ 'branches|b=s' => \$_branches );
+my %init_opts = ( 'template=s' => \$_template, 'shared' => \$_shared );
+my %cmt_opts = ( 'edit|e' => \$_edit,
+ 'rmdir' => \$_rmdir,
+ 'find-copies-harder' => \$_find_copies_harder,
+ 'l=i' => \$_l,
+ 'copy-similarity|C=i'=> \$_cp_similarity
+);
+
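+# Command dispatch table: each key is a subcommand name, each value is
+# [ \&handler, 'help text', { Getopt::Long spec => ref, ... } ].  usage()
+# and the option parsing below both read from this table.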
+my %cmd = (
+ fetch => [ \&fetch, "Download new revisions from SVN",
+ { 'revision|r=s' => \$_revision, %fc_opts } ],
+ init => [ \&init, "Initialize a repo for tracking" .
+ " (requires URL argument)",
+ \%init_opts ],
+ commit => [ \&commit, "Commit git revisions to SVN",
+ { 'stdin|' => \$_stdin, %cmt_opts, %fc_opts, } ],
+ 'show-ignore' => [ \&show_ignore, "Show svn:ignore listings",
+ { 'revision|r=i' => \$_revision } ],
+ rebuild => [ \&rebuild, "Rebuild git-svn metadata (after git clone)",
+ { 'no-ignore-externals' => \$_no_ignore_ext,
+ 'copy-remote|remote=s' => \$_cp_remote,
+ 'upgrade' => \$_upgrade } ],
+ 'graft-branches' => [ \&graft_branches,
+ 'Detect merges/branches from already imported history',
+ { 'merge-rx|m' => \@_opt_m,
+ 'branch|b=s' => \@_branch_from,
+ 'branch-all-refs|B' => \$_branch_all_refs,
+ 'no-default-regex' => \$_no_default_regex,
+ 'no-graft-copy' => \$_no_graft_copy } ],
+ 'multi-init' => [ \&multi_init,
+ 'Initialize multiple trees (like git-svnimport)',
+ { %multi_opts, %fc_opts } ],
+ 'multi-fetch' => [ \&multi_fetch,
+ 'Fetch multiple trees (like git-svnimport)',
+ \%fc_opts ],
+ 'log' => [ \&show_log, 'Show commit logs',
+ { 'limit=i' => \$_limit,
+ 'revision|r=s' => \$_revision,
+ 'verbose|v' => \$_verbose,
+ 'incremental' => \$_incremental,
+ 'oneline' => \$_oneline,
+ 'show-commit' => \$_show_commit,
+ 'authors-file|A=s' => \$_authors,
+ } ],
+ 'commit-diff' => [ \&commit_diff, 'Commit a diff between two trees',
+ { 'message|m=s' => \$_message,
+ 'file|F=s' => \$_file,
+ %cmt_opts } ],
+);
+
+my $cmd;
+for (my $i = 0; $i < @ARGV; $i++) {
+ if (defined $cmd{$ARGV[$i]}) {
+ $cmd = $ARGV[$i];
+ splice @ARGV, $i, 1;
+ last;
+ }
+};
+
+my %opts;
+%opts = %{$cmd{$cmd}->[2]} if (defined $cmd);
+
+read_repo_config(\%opts);
+my $rv = GetOptions(%opts, 'help|H|h' => \$_help,
+ 'version|V' => \$_version,
+ 'id|i=s' => \$GIT_SVN);
+exit 1 if (!$rv && $cmd ne 'log');
+
+set_default_vals();
+usage(0) if $_help;
+version() if $_version;
+usage(1) unless defined $cmd;
+init_vars();
+load_authors() if $_authors;
+load_all_refs() if $_branch_all_refs;
+svn_compat_check() unless $_use_lib;
+migration_check() unless $cmd =~ /^(?:init|rebuild|multi-init|commit-diff)$/;
+$cmd{$cmd}->[0]->(@ARGV);
+exit 0;
+
+####################### primary functions ######################
+sub usage {
+ my $exit = shift || 0;
+ my $fd = $exit ? \*STDERR : \*STDOUT;
+ print $fd <<"";
+git-svn - bidirectional operations between a single Subversion tree and git
+Usage: $0 <command> [options] [arguments]\n
+
+ print $fd "Available commands:\n" unless $cmd;
+
+ foreach (sort keys %cmd) {
+ next if $cmd && $cmd ne $_;
+ print $fd ' ',pack('A13',$_),$cmd{$_}->[1],"\n";
+ foreach (keys %{$cmd{$_}->[2]}) {
+ # prints out arguments as they should be passed:
+ my $x = s#[:=]s$## ? '<arg>' : s#[:=]i$## ? '<num>' : '';
+ print $fd ' ' x 17, join(', ', map { length $_ > 1 ?
+ "--$_" : "-$_" }
+ split /\|/,$_)," $x\n";
+ }
+ }
+ print $fd <<"";
+\nGIT_SVN_ID may be set in the environment or via the --id/-i switch to an
+arbitrary identifier if you're tracking multiple SVN branches/repositories in
+one git repository and want to keep them separate. See git-svn(1) for more
+information.
+
+ exit $exit;
+}
+
+sub version {
+ print "git-svn version $VERSION\n";
+ exit 0;
+}
+
+sub rebuild {
+ if (quiet_run(qw/git-rev-parse --verify/,"refs/remotes/$GIT_SVN^0")) {
+ copy_remote_ref();
+ }
+ $SVN_URL = shift or undef;
+ my $newest_rev = 0;
+ if ($_upgrade) {
+ sys('git-update-ref',"refs/remotes/$GIT_SVN","$GIT_SVN-HEAD");
+ } else {
+ check_upgrade_needed();
+ }
+
+ my $pid = open(my $rev_list,'-|');
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec("git-rev-list","refs/remotes/$GIT_SVN") or croak $!;
+ }
+ my $latest;
+ while (<$rev_list>) {
+ chomp;
+ my $c = $_;
+ croak "Non-SHA1: $c\n" unless $c =~ /^$sha1$/o;
+ my @commit = grep(/^git-svn-id: /,`git-cat-file commit $c`);
+ next if (!@commit); # skip merges
+ my ($url, $rev, $uuid) = extract_metadata($commit[$#commit]);
+ if (!$rev || !$uuid) {
+ croak "Unable to extract revision or UUID from ",
+ "$c, $commit[$#commit]\n";
+ }
+
+ # if we merged or otherwise started elsewhere, this is
+ # how we break out of it
+ next if (defined $SVN_UUID && ($uuid ne $SVN_UUID));
+ next if (defined $SVN_URL && defined $url && ($url ne $SVN_URL));
+
+ unless (defined $latest) {
+ if (!$SVN_URL && !$url) {
+ croak "SVN repository location required: $url\n";
+ }
+ $SVN_URL ||= $url;
+ $SVN_UUID ||= $uuid;
+ setup_git_svn();
+ $latest = $rev;
+ }
+ revdb_set($REVDB, $rev, $c);
+ print "r$rev = $c\n";
+ $newest_rev = $rev if ($rev > $newest_rev);
+ }
+ close $rev_list or croak $?;
+
+ goto out if $_use_lib;
+ if (!chdir $SVN_WC) {
+ svn_cmd_checkout($SVN_URL, $latest, $SVN_WC);
+ chdir $SVN_WC or croak $!;
+ }
+
+ $pid = fork;
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ sys(@svn_up,"-r$newest_rev");
+ $ENV{GIT_INDEX_FILE} = $GIT_SVN_INDEX;
+ index_changes();
+ exec('git-write-tree') or croak $!;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+out:
+ if ($_upgrade) {
+ print STDERR <<"";
+Keeping deprecated refs/heads/$GIT_SVN-HEAD for now. Please remove it
+when you have upgraded your tools and habits to use refs/remotes/$GIT_SVN
+
+ }
+}
+
+sub init {
+ my $url = shift or die "SVN repository location required " .
+ "as a command-line argument\n";
+ $url =~ s!/+$!!; # strip trailing slash
+
+ if (my $repo_path = shift) {
+ unless (-d $repo_path) {
+ mkpath([$repo_path]);
+ }
+ $GIT_DIR = $ENV{GIT_DIR} = $repo_path . "/.git";
+ init_vars();
+ }
+
+ $SVN_URL = $url;
+ unless (-d $GIT_DIR) {
+ my @init_db = ('git-init-db');
+ push @init_db, "--template=$_template" if defined $_template;
+ push @init_db, "--shared" if defined $_shared;
+ sys(@init_db);
+ }
+ setup_git_svn();
+}
+
+sub fetch {
+ check_upgrade_needed();
+ $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
+ my $ret = $_use_lib ? fetch_lib(@_) : fetch_cmd(@_);
+ if ($ret->{commit} && quiet_run(qw(git-rev-parse --verify
+ refs/heads/master^0))) {
+ sys(qw(git-update-ref refs/heads/master),$ret->{commit});
+ }
+ return $ret;
+}
+
+sub fetch_cmd {
+ my (@parents) = @_;
+ my @log_args = -d $SVN_WC ? ($SVN_WC) : ($SVN_URL);
+ unless ($_revision) {
+ $_revision = -d $SVN_WC ? 'BASE:HEAD' : '0:HEAD';
+ }
+ push @log_args, "-r$_revision";
+ push @log_args, '--stop-on-copy' unless $_no_stop_copy;
+
+ my $svn_log = svn_log_raw(@log_args);
+
+ my $base = next_log_entry($svn_log) or croak "No base revision!\n";
+ # don't need last_revision from grab_base_rev() because
+ # user could've specified a different revision to skip (they
+ # didn't want to import certain revisions into git for whatever
+	# reason), so trust $base->{revision} instead.
+ my (undef, $last_commit) = svn_grab_base_rev();
+ unless (-d $SVN_WC) {
+ svn_cmd_checkout($SVN_URL,$base->{revision},$SVN_WC);
+ chdir $SVN_WC or croak $!;
+ read_uuid();
+ $last_commit = git_commit($base, @parents);
+ assert_tree($last_commit);
+ } else {
+ chdir $SVN_WC or croak $!;
+ read_uuid();
+ # looks like a user manually cp'd and svn switch'ed
+ unless ($last_commit) {
+ sys(qw/svn revert -R ./);
+ assert_svn_wc_clean($base->{revision});
+ $last_commit = git_commit($base, @parents);
+ assert_tree($last_commit);
+ }
+ }
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ my $last = $base;
+ while (my $log_msg = next_log_entry($svn_log)) {
+ if ($last->{revision} >= $log_msg->{revision}) {
+ croak "Out of order: last >= current: ",
+ "$last->{revision} >= $log_msg->{revision}\n";
+ }
+ # Revert is needed for cases like:
+ # https://svn.musicpd.org/Jamming/trunk (r166:167), but
+ # I can't seem to reproduce something like that on a test...
+ sys(qw/svn revert -R ./);
+ assert_svn_wc_clean($last->{revision});
+ sys(@svn_up,"-r$log_msg->{revision}");
+ $last_commit = git_commit($log_msg, $last_commit, @parents);
+ $last = $log_msg;
+ }
+ close $svn_log->{fh};
+ $last->{commit} = $last_commit;
+ return $last;
+}
+
+sub fetch_lib {
+ my (@parents) = @_;
+ $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
+ my $repo;
+ ($repo, $SVN_PATH) = repo_path_split($SVN_URL);
+ $SVN_LOG ||= libsvn_connect($repo);
+ $SVN ||= libsvn_connect($repo);
+ my ($last_rev, $last_commit) = svn_grab_base_rev();
+ my ($base, $head) = libsvn_parse_revision($last_rev);
+ if ($base > $head) {
+ return { revision => $last_rev, commit => $last_commit }
+ }
+ my $index = set_index($GIT_SVN_INDEX);
+
+ # limit ourselves and also fork() since get_log won't release memory
+ # after processing a revision and SVN stuff seems to leak
+ my $inc = 1000;
+ my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc);
+ read_uuid();
+ if (defined $last_commit) {
+ unless (-e $GIT_SVN_INDEX) {
+ sys(qw/git-read-tree/, $last_commit);
+ }
+ chomp (my $x = `git-write-tree`);
+ my ($y) = (`git-cat-file commit $last_commit`
+ =~ /^tree ($sha1)/m);
+ if ($y ne $x) {
+ unlink $GIT_SVN_INDEX or croak $!;
+ sys(qw/git-read-tree/, $last_commit);
+ }
+ chomp ($x = `git-write-tree`);
+ if ($y ne $x) {
+ print STDERR "trees ($last_commit) $y != $x\n",
+ "Something is seriously wrong...\n";
+ }
+ }
+ while (1) {
+ # fork, because using SVN::Pool with get_log() still doesn't
+ # seem to help enough to keep memory usage down.
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ $SVN::Error::handler = \&libsvn_skip_unknown_revs;
+
+			# Yes, I'm perfectly aware that the fourth argument
+			# below is the revision limit.  Unfortunately
+ # performance sucks with it enabled, so it's much
+ # faster to fetch revision ranges instead of relying
+ # on the limiter.
+ libsvn_get_log($SVN_LOG, '/'.$SVN_PATH,
+ $min, $max, 0, 1, 1,
+ sub {
+ my $log_msg;
+ if ($last_commit) {
+ $log_msg = libsvn_fetch(
+ $last_commit, @_);
+ $last_commit = git_commit(
+ $log_msg,
+ $last_commit,
+ @parents);
+ } else {
+ $log_msg = libsvn_new_tree(@_);
+ $last_commit = git_commit(
+ $log_msg, @parents);
+ }
+ });
+ exit 0;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+ ($last_rev, $last_commit) = svn_grab_base_rev();
+ last if ($max >= $head);
+ $min = $max + 1;
+ $max += $inc;
+ $max = $head if ($max > $head);
+ }
+ restore_index($index);
+ return { revision => $last_rev, commit => $last_commit };
+}
+
+sub commit {
+ my (@commits) = @_;
+ check_upgrade_needed();
+ if ($_stdin || !@commits) {
+ print "Reading from stdin...\n";
+ @commits = ();
+ while (<STDIN>) {
+ if (/\b($sha1_short)\b/o) {
+ unshift @commits, $1;
+ }
+ }
+ }
+ my @revs;
+ foreach my $c (@commits) {
+ chomp(my @tmp = safe_qx('git-rev-parse',$c));
+ if (scalar @tmp == 1) {
+ push @revs, $tmp[0];
+ } elsif (scalar @tmp > 1) {
+ push @revs, reverse (safe_qx('git-rev-list',@tmp));
+ } else {
+ die "Failed to rev-parse $c\n";
+ }
+ }
+ chomp @revs;
+ $_use_lib ? commit_lib(@revs) : commit_cmd(@revs);
+ print "Done committing ",scalar @revs," revisions to SVN\n";
+}
+
+sub commit_cmd {
+ my (@revs) = @_;
+
+ chdir $SVN_WC or croak "Unable to chdir $SVN_WC: $!\n";
+ my $info = svn_info('.');
+ my $fetched = fetch();
+ if ($info->{Revision} != $fetched->{revision}) {
+ print STDERR "There are new revisions that were fetched ",
+ "and need to be merged (or acknowledged) ",
+ "before committing.\n";
+ exit 1;
+ }
+ $info = svn_info('.');
+ read_uuid($info);
+ my $last = $fetched;
+ foreach my $c (@revs) {
+ my $mods = svn_checkout_tree($last, $c);
+ if (scalar @$mods == 0) {
+ print "Skipping, no changes detected\n";
+ next;
+ }
+ $last = svn_commit_tree($last, $c);
+ }
+}
+
+sub commit_lib {
+ my (@revs) = @_;
+ my ($r_last, $cmt_last) = svn_grab_base_rev();
+ defined $r_last or die "Must have an existing revision to commit\n";
+ my $fetched = fetch();
+ if ($r_last != $fetched->{revision}) {
+ print STDERR "There are new revisions that were fetched ",
+ "and need to be merged (or acknowledged) ",
+ "before committing.\n",
+ "last rev: $r_last\n",
+ " current: $fetched->{revision}\n";
+ exit 1;
+ }
+ read_uuid();
+ my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : ();
+ my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$";
+
+ set_svn_commit_env();
+ foreach my $c (@revs) {
+ my $log_msg = get_commit_message($c, $commit_msg);
+
+ # fork for each commit because there's a memory leak I
+ # can't track down... (it's probably in the SVN code)
+ defined(my $pid = open my $fh, '-|') or croak $!;
+ if (!$pid) {
+ my $ed = SVN::Git::Editor->new(
+ { r => $r_last,
+ ra => $SVN,
+ c => $c,
+ svn_path => $SVN_PATH
+ },
+ $SVN->get_commit_editor(
+ $log_msg->{msg},
+ sub {
+ libsvn_commit_cb(
+ @_, $c,
+ $log_msg->{msg},
+ $r_last,
+ $cmt_last)
+ },
+ @lock)
+ );
+ my $mods = libsvn_checkout_tree($cmt_last, $c, $ed);
+ if (@$mods == 0) {
+ print "No changes\nr$r_last = $cmt_last\n";
+ $ed->abort_edit;
+ } else {
+ $ed->close_edit;
+ }
+ exit 0;
+ }
+ my ($r_new, $cmt_new, $no);
+ while (<$fh>) {
+ print $_;
+ chomp;
+ if (/^r(\d+) = ($sha1)$/o) {
+ ($r_new, $cmt_new) = ($1, $2);
+ } elsif ($_ eq 'No changes') {
+ $no = 1;
+ }
+ }
+ close $fh or croak $?;
+ if (! defined $r_new && ! defined $cmt_new) {
+ unless ($no) {
+ die "Failed to parse revision information\n";
+ }
+ } else {
+ ($r_last, $cmt_last) = ($r_new, $cmt_new);
+ }
+ }
+ $ENV{LC_ALL} = 'C';
+ unlink $commit_msg;
+}
+
+sub show_ignore {
+ $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url");
+ $_use_lib ? show_ignore_lib() : show_ignore_cmd();
+}
+
+sub show_ignore_cmd {
+ require File::Find or die $!;
+ if (defined $_revision) {
+ die "-r/--revision option doesn't work unless the Perl SVN ",
+ "libraries are used\n";
+ }
+ chdir $SVN_WC or croak $!;
+ my %ign;
+ File::Find::find({wanted=>sub{if(lstat $_ && -d _ && -d "$_/.svn"){
+ s#^\./##;
+ @{$ign{$_}} = svn_propget_base('svn:ignore', $_);
+ }}, no_chdir=>1},'.');
+
+ print "\n# /\n";
+ foreach (@{$ign{'.'}}) { print '/',$_ if /\S/ }
+ delete $ign{'.'};
+ foreach my $i (sort keys %ign) {
+ print "\n# ",$i,"\n";
+ foreach (@{$ign{$i}}) { print '/',$i,'/',$_ if /\S/ }
+ }
+}
+
+sub show_ignore_lib {
+ my $repo;
+ ($repo, $SVN_PATH) = repo_path_split($SVN_URL);
+ $SVN ||= libsvn_connect($repo);
+ my $r = defined $_revision ? $_revision : $SVN->get_latest_revnum;
+ libsvn_traverse_ignore(\*STDOUT, $SVN_PATH, $r);
+}
+
+sub graft_branches {
+ my $gr_file = "$GIT_DIR/info/grafts";
+ my ($grafts, $comments) = read_grafts($gr_file);
+ my $gr_sha1;
+
+ if (%$grafts) {
+ # temporarily disable our grafts file to make this idempotent
+ chomp($gr_sha1 = safe_qx(qw/git-hash-object -w/,$gr_file));
+ rename $gr_file, "$gr_file~$gr_sha1" or croak $!;
+ }
+
+ my $l_map = read_url_paths();
+	my @re = map { qr/$_/is } @_opt_m;
+ unless ($_no_default_regex) {
+ push @re, (qr/\b(?:merge|merging|merged)\s+with\s+([\w\.\-]+)/i,
+ qr/\b(?:merge|merging|merged)\s+([\w\.\-]+)/i,
+ qr/\b(?:from|of)\s+([\w\.\-]+)/i );
+ }
+ foreach my $u (keys %$l_map) {
+ if (@re) {
+ foreach my $p (keys %{$l_map->{$u}}) {
+ graft_merge_msg($grafts,$l_map,$u,$p,@re);
+ }
+ }
+ unless ($_no_graft_copy) {
+ if ($_use_lib) {
+ graft_file_copy_lib($grafts,$l_map,$u);
+ } else {
+ graft_file_copy_cmd($grafts,$l_map,$u);
+ }
+ }
+ }
+ graft_tree_joins($grafts);
+
+ write_grafts($grafts, $comments, $gr_file);
+ unlink "$gr_file~$gr_sha1" if $gr_sha1;
+}
+
+sub multi_init {
+ my $url = shift;
+ $_trunk ||= 'trunk';
+ $_trunk =~ s#/+$##;
+ $url =~ s#/+$## if $url;
+ if ($_trunk !~ m#^[a-z\+]+://#) {
+ $_trunk = '/' . $_trunk if ($_trunk !~ m#^/#);
+ unless ($url) {
+ print STDERR "E: '$_trunk' is not a complete URL ",
+ "and a separate URL is not specified\n";
+ exit 1;
+ }
+ $_trunk = $url . $_trunk;
+ }
+ if ($GIT_SVN eq 'git-svn') {
+ print "GIT_SVN_ID set to 'trunk' for $_trunk\n";
+ $GIT_SVN = $ENV{GIT_SVN_ID} = 'trunk';
+ }
+ init_vars();
+ init($_trunk);
+ complete_url_ls_init($url, $_branches, '--branches/-b', '');
+ complete_url_ls_init($url, $_tags, '--tags/-t', 'tags/');
+}
+
+sub multi_fetch {
+ # try to do trunk first, since branches/tags
+ # may be descended from it.
+ if (-e "$GIT_DIR/svn/trunk/info/url") {
+ fetch_child_id('trunk', @_);
+ }
+ rec_fetch('', "$GIT_DIR/svn", @_);
+}
+
+sub show_log {
+ my (@args) = @_;
+ my ($r_min, $r_max);
+ my $r_last = -1; # prevent dupes
+ rload_authors() if $_authors;
+ if (defined $TZ) {
+ $ENV{TZ} = $TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+ if (defined $_revision) {
+ if ($_revision =~ /^(\d+):(\d+)$/) {
+ ($r_min, $r_max) = ($1, $2);
+ } elsif ($_revision =~ /^\d+$/) {
+ $r_min = $r_max = $_revision;
+ } else {
+ print STDERR "-r$_revision is not supported, use ",
+ "standard \'git log\' arguments instead\n";
+ exit 1;
+ }
+ }
+
+ my $pid = open(my $log,'-|');
+ defined $pid or croak $!;
+ if (!$pid) {
+ exec(git_svn_log_cmd($r_min,$r_max), @args) or croak $!;
+ }
+ setup_pager();
+ my (@k, $c, $d);
+
+ while (<$log>) {
+ if (/^commit ($sha1_short)/o) {
+ my $cmt = $1;
+ if ($c && cmt_showable($c) && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k) or
+ goto out;
+ }
+ $d = undef;
+ $c = { c => $cmt };
+ } elsif (/^author (.+) (\d+) ([\-\+]?\d+)$/) {
+ get_author_info($c, $1, $2, $3);
+ } elsif (/^(?:tree|parent|committer) /) {
+ # ignore
+ } elsif (/^:\d{6} \d{6} $sha1_short/o) {
+ push @{$c->{raw}}, $_;
+ } elsif (/^diff /) {
+ $d = 1;
+ push @{$c->{diff}}, $_;
+ } elsif ($d) {
+ push @{$c->{diff}}, $_;
+ } elsif (/^ (git-svn-id:.+)$/) {
+ (undef, $c->{r}, undef) = extract_metadata($1);
+ } elsif (s/^ //) {
+ push @{$c->{l}}, $_;
+ }
+ }
+ if ($c && defined $c->{r} && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k);
+ }
+ if (@k) {
+ my $swap = $r_max;
+ $r_max = $r_min;
+ $r_min = $swap;
+ process_commit($_, $r_min, $r_max) foreach reverse @k;
+ }
+out:
+ close $log;
+ print '-' x72,"\n" unless $_incremental || $_oneline;
+}
+
+sub commit_diff_usage {
+ print STDERR "Usage: $0 commit-diff <tree-ish> <tree-ish> [<URL>]\n";
+ exit 1
+}
+
+sub commit_diff {
+ if (!$_use_lib) {
+ print STDERR "commit-diff must be used with SVN libraries\n";
+ exit 1;
+ }
+ my $ta = shift or commit_diff_usage();
+ my $tb = shift or commit_diff_usage();
+ if (!eval { $SVN_URL = shift || file_to_s("$GIT_SVN_DIR/info/url") }) {
+ print STDERR "Needed URL or usable git-svn id command-line\n";
+ commit_diff_usage();
+ }
+ if (defined $_message && defined $_file) {
+ print STDERR "Both --message/-m and --file/-F specified ",
+ "for the commit message.\n",
+ "I have no idea what you mean\n";
+ exit 1;
+ }
+ if (defined $_file) {
+ $_message = file_to_s($_file);
+ } else {
+ $_message ||= get_commit_message($tb,
+ "$GIT_DIR/.svn-commit.tmp.$$")->{msg};
+ }
+ my $repo;
+ ($repo, $SVN_PATH) = repo_path_split($SVN_URL);
+ $SVN_LOG ||= libsvn_connect($repo);
+ $SVN ||= libsvn_connect($repo);
+ my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : ();
+ my $ed = SVN::Git::Editor->new({ r => $SVN->get_latest_revnum,
+ ra => $SVN, c => $tb,
+ svn_path => $SVN_PATH
+ },
+ $SVN->get_commit_editor($_message,
+ sub {print "Committed $_[0]\n"},@lock)
+ );
+ my $mods = libsvn_checkout_tree($ta, $tb, $ed);
+ if (@$mods == 0) {
+ print "No changes\n$ta == $tb\n";
+ $ed->abort_edit;
+ } else {
+ $ed->close_edit;
+ }
+}
+
+########################### utility functions #########################
+
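+# A commit is showable once we know the SVN revision it came from; if the
+# log message was abbreviated ("..."), re-read the full commit body to find
+# its git-svn-id line.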
+sub cmt_showable {
+ my ($c) = @_;
+ return 1 if defined $c->{r};
+ if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
+ $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
+ my @msg = safe_qx(qw/git-cat-file commit/, $c->{c});
+ shift @msg while ($msg[0] ne "\n");
+ shift @msg;
+ @{$c->{l}} = grep !/^git-svn-id: /, @msg;
+
+ (undef, $c->{r}, undef) = extract_metadata(
+ (grep(/^git-svn-id: /, @msg))[-1]);
+ }
+ return defined $c->{r};
+}
+
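+# Build the git-log invocation used by `git-svn log`, translating an SVN
+# revision (range) into commit arguments via the rev_db mapping.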
+sub git_svn_log_cmd {
+ my ($r_min, $r_max) = @_;
+ my @cmd = (qw/git-log --abbrev-commit --pretty=raw
+ --default/, "refs/remotes/$GIT_SVN");
+ push @cmd, '--summary' if $_verbose;
+ return @cmd unless defined $r_max;
+ if ($r_max == $r_min) {
+ push @cmd, '--max-count=1';
+ if (my $c = revdb_get($REVDB, $r_max)) {
+ push @cmd, $c;
+ }
+ } else {
+ my ($c_min, $c_max);
+ $c_max = revdb_get($REVDB, $r_max);
+ $c_min = revdb_get($REVDB, $r_min);
+ if ($c_min && $c_max) {
+			if ($r_max > $r_min) {
+ push @cmd, "$c_min..$c_max";
+ } else {
+ push @cmd, "$c_max..$c_min";
+ }
+ } elsif ($r_max > $r_min) {
+ push @cmd, $c_max;
+ } else {
+ push @cmd, $c_min;
+ }
+ }
+ return @cmd;
+}
+
+sub fetch_child_id {
+ my $id = shift;
+ print "Fetching $id\n";
+ my $ref = "$GIT_DIR/refs/remotes/$id";
+ defined(my $pid = open my $fh, '-|') or croak $!;
+ if (!$pid) {
+ $_repack = undef;
+ $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
+ init_vars();
+ fetch(@_);
+ exit 0;
+ }
+ while (<$fh>) {
+ print $_;
+ check_repack() if (/^r\d+ = $sha1/);
+ }
+ close $fh or croak $?;
+}
+
+sub rec_fetch {
+ my ($pfx, $p, @args) = @_;
+ my @dir;
+ foreach (sort <$p/*>) {
+ if (-r "$_/info/url") {
+ $pfx .= '/' if $pfx && $pfx !~ m!/$!;
+ my $id = $pfx . basename $_;
+ next if $id eq 'trunk';
+ fetch_child_id($id, @args);
+ } elsif (-d $_) {
+ push @dir, $_;
+ }
+ }
+ foreach (@dir) {
+ my $x = $_;
+ $x =~ s!^\Q$GIT_DIR\E/svn/!!;
+ rec_fetch($x, $_);
+ }
+}
+
+sub complete_url_ls_init {
+ my ($url, $var, $switch, $pfx) = @_;
+ unless ($var) {
+ print STDERR "W: $switch not specified\n";
+ return;
+ }
+ $var =~ s#/+$##;
+ if ($var !~ m#^[a-z\+]+://#) {
+ $var = '/' . $var if ($var !~ m#^/#);
+ unless ($url) {
+ print STDERR "E: '$var' is not a complete URL ",
+ "and a separate URL is not specified\n";
+ exit 1;
+ }
+ $var = $url . $var;
+ }
+ chomp(my @ls = $_use_lib ? libsvn_ls_fullurl($var)
+ : safe_qx(qw/svn ls --non-interactive/, $var));
+ my $old = $GIT_SVN;
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ foreach my $u (map { "$var/$_" } (grep m!/$!, @ls)) {
+ $u =~ s#/+$##;
+ if ($u !~ m!\Q$var\E/(.+)$!) {
+ print STDERR "W: Unrecognized URL: $u\n";
+ die "This should never happen\n";
+ }
+ my $id = $pfx.$1;
+ print "init $u => $id\n";
+ $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
+ init_vars();
+ init($u);
+ }
+ exit 0;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+}
+
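+# Return the longest leading directory prefix shared by all paths in the
+# listref, or '' if they have nothing in common.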
+sub common_prefix {
+ my $paths = shift;
+ my %common;
+ foreach (@$paths) {
+ my @tmp = split m#/#, $_;
+ my $p = '';
+ while (my $x = shift @tmp) {
+ $p .= "/$x";
+ $common{$p} ||= 0;
+ $common{$p}++;
+ }
+ }
+ foreach (sort {length $b <=> length $a} keys %common) {
+ if ($common{$_} == @$paths) {
+ return $_;
+ }
+ }
+ return '';
+}
+
+# grafts set here are 'stronger' in that they're based on actual tree
+# matches, and won't be deleted from merge-base checking in write_grafts()
+sub graft_tree_joins {
+ my $grafts = shift;
+ map_tree_joins() if (@_branch_from && !%tree_map);
+ return unless %tree_map;
+
+ git_svn_each(sub {
+ my $i = shift;
+ defined(my $pid = open my $fh, '-|') or croak $!;
+ if (!$pid) {
+ exec qw/git-rev-list --pretty=raw/,
+ "refs/remotes/$i" or croak $!;
+ }
+ while (<$fh>) {
+ next unless /^commit ($sha1)$/o;
+ my $c = $1;
+ my ($t) = (<$fh> =~ /^tree ($sha1)$/o);
+ next unless $tree_map{$t};
+
+ my $l;
+ do {
+ $l = readline $fh;
+ } until ($l =~ /^committer (?:.+) (\d+) ([\-\+]?\d+)$/);
+
+ my ($s, $tz) = ($1, $2);
+ if ($tz =~ s/^\+//) {
+ $s += tz_to_s_offset($tz);
+ } elsif ($tz =~ s/^\-//) {
+ $s -= tz_to_s_offset($tz);
+ }
+
+ my ($url_a, $r_a, $uuid_a) = cmt_metadata($c);
+
+ foreach my $p (@{$tree_map{$t}}) {
+ next if $p eq $c;
+ my $mb = eval {
+ safe_qx('git-merge-base', $c, $p)
+ };
+ next unless ($@ || $?);
+ if (defined $r_a) {
+ # see if SVN says it's a relative
+ my ($url_b, $r_b, $uuid_b) =
+ cmt_metadata($p);
+ next if (defined $url_b &&
+ defined $url_a &&
+ ($url_a eq $url_b) &&
+ ($uuid_a eq $uuid_b));
+ if ($uuid_a eq $uuid_b) {
+ if ($r_b < $r_a) {
+ $grafts->{$c}->{$p} = 2;
+ next;
+ } elsif ($r_b > $r_a) {
+ $grafts->{$p}->{$c} = 2;
+ next;
+ }
+ }
+ }
+ my $ct = get_commit_time($p);
+ if ($ct < $s) {
+ $grafts->{$c}->{$p} = 2;
+ } elsif ($ct > $s) {
+ $grafts->{$p}->{$c} = 2;
+ }
+ # what should we do when $ct == $s ?
+ }
+ }
+ close $fh or croak $?;
+ });
+}
+
+# this isn't funky-filename safe, but good enough for now...
+sub graft_file_copy_cmd {
+ my ($grafts, $l_map, $u) = @_;
+ my $paths = $l_map->{$u};
+ my $pfx = common_prefix([keys %$paths]);
+ $SVN_URL ||= $u.$pfx;
+ my $pid = open my $fh, '-|';
+ defined $pid or croak $!;
+ unless ($pid) {
+ my @exec = qw/svn log -v/;
+ push @exec, "-r$_revision" if defined $_revision;
+ exec @exec, $u.$pfx or croak $!;
+ }
+ my ($r, $mp) = (undef, undef);
+ while (<$fh>) {
+ chomp;
+ if (/^\-{72}$/) {
+ $mp = $r = undef;
+ } elsif (/^r(\d+) \| /) {
+ $r = $1 unless defined $r;
+ } elsif (/^Changed paths:/) {
+ $mp = 1;
+ } elsif ($mp && m#^ [AR] /(\S.*?) \(from /(\S+?):(\d+)\)$#) {
+ my ($p1, $p0, $r0) = ($1, $2, $3);
+ my $c = find_graft_path_commit($paths, $p1, $r);
+ next unless $c;
+ find_graft_path_parents($grafts, $paths, $c, $p0, $r0);
+ }
+ }
+}
+
+sub graft_file_copy_lib {
+ my ($grafts, $l_map, $u) = @_;
+ my $tree_paths = $l_map->{$u};
+ my $pfx = common_prefix([keys %$tree_paths]);
+ my ($repo, $path) = repo_path_split($u.$pfx);
+ $SVN_LOG ||= libsvn_connect($repo);
+ $SVN ||= libsvn_connect($repo);
+
+ my ($base, $head) = libsvn_parse_revision();
+ my $inc = 1000;
+ my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc);
+ my $eh = $SVN::Error::handler;
+ $SVN::Error::handler = \&libsvn_skip_unknown_revs;
+ while (1) {
+ my $pool = SVN::Pool->new;
+ libsvn_get_log($SVN_LOG, "/$path", $min, $max, 0, 1, 1,
+ sub {
+ libsvn_graft_file_copies($grafts, $tree_paths,
+ $path, @_);
+ }, $pool);
+ $pool->clear;
+ last if ($max >= $head);
+ $min = $max + 1;
+ $max += $inc;
+ $max = $head if ($max > $head);
+ }
+ $SVN::Error::handler = $eh;
+}
+
+sub process_merge_msg_matches {
+ my ($grafts, $l_map, $u, $p, $c, @matches) = @_;
+ my (@strong, @weak);
+ foreach (@matches) {
+ # merging with ourselves is not interesting
+ next if $_ eq $p;
+ if ($l_map->{$u}->{$_}) {
+ push @strong, $_;
+ } else {
+ push @weak, $_;
+ }
+ }
+ foreach my $w (@weak) {
+ last if @strong;
+ # no exact match, use branch name as regexp.
+ my $re = qr/\Q$w\E/i;
+ foreach (keys %{$l_map->{$u}}) {
+ if (/$re/) {
+ push @strong, $l_map->{$u}->{$_};
+ last;
+ }
+ }
+ last if @strong;
+ $w = basename($w);
+ $re = qr/\Q$w\E/i;
+ foreach (keys %{$l_map->{$u}}) {
+ if (/$re/) {
+ push @strong, $l_map->{$u}->{$_};
+ last;
+ }
+ }
+ }
+ my ($rev) = ($c->{m} =~ /^git-svn-id:\s(?:\S+?)\@(\d+)
+ \s(?:[a-f\d\-]+)$/xsm);
+ unless (defined $rev) {
+ ($rev) = ($c->{m} =~/^git-svn-id:\s(\d+)
+ \@(?:[a-f\d\-]+)/xsm);
+ return unless defined $rev;
+ }
+ foreach my $m (@strong) {
+ my ($r0, $s0) = find_rev_before($rev, $m, 1);
+ $grafts->{$c->{c}}->{$s0} = 1 if defined $s0;
+ }
+}
+
+sub graft_merge_msg {
+ my ($grafts, $l_map, $u, $p, @re) = @_;
+
+ my $x = $l_map->{$u}->{$p};
+ my $rl = rev_list_raw($x);
+ while (my $c = next_rev_list_entry($rl)) {
+ foreach my $re (@re) {
+ my (@br) = ($c->{m} =~ /$re/g);
+ next unless @br;
+ process_merge_msg_matches($grafts,$l_map,$u,$p,$c,@br);
+ }
+ }
+}
+
+sub read_uuid {
+ return if $SVN_UUID;
+ if ($_use_lib) {
+ my $pool = SVN::Pool->new;
+ $SVN_UUID = $SVN->get_uuid($pool);
+ $pool->clear;
+ } else {
+ my $info = shift || svn_info('.');
+ $SVN_UUID = $info->{'Repository UUID'} or
+ croak "Repository UUID unreadable\n";
+ }
+}
+
+sub quiet_run {
+ my $pid = fork;
+ defined $pid or croak $!;
+ if (!$pid) {
+ open my $null, '>', '/dev/null' or croak $!;
+ open STDERR, '>&', $null or croak $!;
+ open STDOUT, '>&', $null or croak $!;
+ exec @_ or croak $!;
+ }
+ waitpid $pid, 0;
+ return $?;
+}
+
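+# Split a full SVN URL into (repository root, path under the root); roots
+# already seen are cached in @repo_path_split_cache to avoid repeated
+# lookups.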
+sub repo_path_split {
+ my $full_url = shift;
+ $full_url =~ s#/+$##;
+
+ foreach (@repo_path_split_cache) {
+ if ($full_url =~ s#$_##) {
+ my $u = $1;
+ $full_url =~ s#^/+##;
+ return ($u, $full_url);
+ }
+ }
+
+ if ($_use_lib) {
+ my $tmp = libsvn_connect($full_url);
+ my $url = $tmp->get_repos_root;
+ $full_url =~ s#^\Q$url\E/*##;
+ push @repo_path_split_cache, qr/^(\Q$url\E)/;
+ return ($url, $full_url);
+ } else {
+ my ($url, $path) = ($full_url =~ m!^([a-z\+]+://[^/]*)(.*)$!i);
+ $path =~ s#^/+##;
+ my @paths = split(m#/+#, $path);
+ while (quiet_run(qw/svn ls --non-interactive/, $url)) {
+ my $n = shift @paths || last;
+ $url .= "/$n";
+ }
+ push @repo_path_split_cache, qr/^(\Q$url\E)/;
+ $path = join('/',@paths);
+ return ($url, $path);
+ }
+}
+
+sub setup_git_svn {
+ defined $SVN_URL or croak "SVN repository location required\n";
+ unless (-d $GIT_DIR) {
+ croak "GIT_DIR=$GIT_DIR does not exist!\n";
+ }
+ mkpath([$GIT_SVN_DIR]);
+ mkpath(["$GIT_SVN_DIR/info"]);
+ open my $fh, '>>',$REVDB or croak $!;
+ close $fh;
+ s_to_file($SVN_URL,"$GIT_SVN_DIR/info/url");
+
+}
+
+sub assert_svn_wc_clean {
+ return if $_use_lib;
+ my ($svn_rev) = @_;
+ croak "$svn_rev is not an integer!\n" unless ($svn_rev =~ /^\d+$/);
+ my $lcr = svn_info('.')->{'Last Changed Rev'};
+ if ($svn_rev != $lcr) {
+ print STDERR "Checking for copy-tree ... ";
+ my @diff = grep(/^Index: /,(safe_qx(qw(svn diff),
+ "-r$lcr:$svn_rev")));
+ if (@diff) {
+ croak "Nope! Expected r$svn_rev, got r$lcr\n";
+ } else {
+ print STDERR "OK!\n";
+ }
+ }
+ my @status = grep(!/^Performing status on external/,(`svn status`));
+ @status = grep(!/^\s*$/,@status);
+ if (scalar @status) {
+ print STDERR "Tree ($SVN_WC) is not clean:\n";
+ print STDERR $_ foreach @status;
+ croak;
+ }
+}
+
+sub get_tree_from_treeish {
+ my ($treeish) = @_;
+ croak "Not a sha1: $treeish\n" unless $treeish =~ /^$sha1$/o;
+ chomp(my $type = `git-cat-file -t $treeish`);
+ my $expected;
+ while ($type eq 'tag') {
+ chomp(($treeish, $type) = `git-cat-file tag $treeish`);
+ }
+ if ($type eq 'commit') {
+ $expected = (grep /^tree /,`git-cat-file commit $treeish`)[0];
+ ($expected) = ($expected =~ /^tree ($sha1)$/);
+ die "Unable to get tree from $treeish\n" unless $expected;
+ } elsif ($type eq 'tree') {
+ $expected = $treeish;
+ } else {
+ die "$treeish is a $type, expected tree, tag or commit\n";
+ }
+ return $expected;
+}
+
+sub assert_tree {
+ return if $_use_lib;
+ my ($treeish) = @_;
+ my $expected = get_tree_from_treeish($treeish);
+
+ my $tmpindex = $GIT_SVN_INDEX.'.assert-tmp';
+ if (-e $tmpindex) {
+ unlink $tmpindex or croak $!;
+ }
+ my $old_index = set_index($tmpindex);
+ index_changes(1);
+ chomp(my $tree = `git-write-tree`);
+ restore_index($old_index);
+ if ($tree ne $expected) {
+ croak "Tree mismatch, Got: $tree, Expected: $expected\n";
+ }
+ unlink $tmpindex;
+}
+
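+# Parse NUL-terminated `git-diff-tree -z -r` output into a list of hashes,
+# one per change: mode_a, mode_b, sha1_b, chg and the affected file name(s).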
+sub parse_diff_tree {
+ my $diff_fh = shift;
+ local $/ = "\0";
+ my $state = 'meta';
+ my @mods;
+ while (<$diff_fh>) {
+ chomp $_; # this gets rid of the trailing "\0"
+ if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s
+ $sha1\s($sha1)\s([MTCRAD])\d*$/xo) {
+ push @mods, { mode_a => $1, mode_b => $2,
+ sha1_b => $3, chg => $4 };
+ if ($4 =~ /^(?:C|R)$/) {
+ $state = 'file_a';
+ } else {
+ $state = 'file_b';
+ }
+ } elsif ($state eq 'file_a') {
+ my $x = $mods[$#mods] or croak "Empty array\n";
+ if ($x->{chg} !~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ $x->{file_a} = $_;
+ $state = 'file_b';
+ } elsif ($state eq 'file_b') {
+ my $x = $mods[$#mods] or croak "Empty array\n";
+ if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) {
+ croak "Error parsing $_, $x->{chg}\n";
+ }
+ $x->{file_b} = $_;
+ $state = 'meta';
+ } else {
+ croak "Error parsing $_\n";
+ }
+ }
+ close $diff_fh or croak $?;
+
+ return \@mods;
+}
+
+sub svn_check_prop_executable {
+ my $m = shift;
+ return if -l $m->{file_b};
+ if ($m->{mode_b} =~ /755$/) {
+ chmod((0755 &~ umask),$m->{file_b}) or croak $!;
+ if ($m->{mode_a} !~ /755$/) {
+ sys(qw(svn propset svn:executable 1), $m->{file_b});
+ }
+ -x $m->{file_b} or croak "$m->{file_b} is not executable!\n";
+ } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
+ sys(qw(svn propdel svn:executable), $m->{file_b});
+ chmod((0644 &~ umask),$m->{file_b}) or croak $!;
+ -x $m->{file_b} and croak "$m->{file_b} is executable!\n";
+ }
+}
+
+sub svn_ensure_parent_path {
+ my $dir_b = dirname(shift);
+ svn_ensure_parent_path($dir_b) if ($dir_b ne File::Spec->curdir);
+ mkpath([$dir_b]) unless (-d $dir_b);
+ sys(qw(svn add -N), $dir_b) unless (-d "$dir_b/.svn");
+}
+
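+# Reject changes Subversion cannot represent in a single commit: a path
+# changing between file and directory.  Also collect the information needed
+# later for --rmdir handling and added-path tracking.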
+sub precommit_check {
+ my $mods = shift;
+ my (%rm_file, %rmdir_check, %added_check);
+
+ my %o = ( D => 0, R => 1, C => 2, A => 3, M => 3, T => 3 );
+ foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
+ if ($m->{chg} eq 'R') {
+ if (-d $m->{file_b}) {
+ err_dir_to_file("$m->{file_a} => $m->{file_b}");
+ }
+ # dir/$file => dir/file/$file
+ my $dirname = dirname($m->{file_b});
+ while ($dirname ne File::Spec->curdir) {
+ if ($dirname ne $m->{file_a}) {
+ $dirname = dirname($dirname);
+ next;
+ }
+ err_file_to_dir("$m->{file_a} => $m->{file_b}");
+ }
+ # baz/zzz => baz (baz is a file)
+ $dirname = dirname($m->{file_a});
+ while ($dirname ne File::Spec->curdir) {
+ if ($dirname ne $m->{file_b}) {
+ $dirname = dirname($dirname);
+ next;
+ }
+ err_dir_to_file("$m->{file_a} => $m->{file_b}");
+ }
+ }
+ if ($m->{chg} =~ /^(D|R)$/) {
+ my $t = $1 eq 'D' ? 'file_b' : 'file_a';
+ $rm_file{ $m->{$t} } = 1;
+ my $dirname = dirname( $m->{$t} );
+ my $basename = basename( $m->{$t} );
+ $rmdir_check{$dirname}->{$basename} = 1;
+ } elsif ($m->{chg} =~ /^(?:A|C)$/) {
+ if (-d $m->{file_b}) {
+ err_dir_to_file($m->{file_b});
+ }
+ my $dirname = dirname( $m->{file_b} );
+ my $basename = basename( $m->{file_b} );
+ $added_check{$dirname}->{$basename} = 1;
+ while ($dirname ne File::Spec->curdir) {
+ if ($rm_file{$dirname}) {
+ err_file_to_dir($m->{file_b});
+ }
+ $dirname = dirname $dirname;
+ }
+ }
+ }
+ return (\%rmdir_check, \%added_check);
+
+ sub err_dir_to_file {
+ my $file = shift;
+ print STDERR "Node change from directory to file ",
+ "is not supported by Subversion: ",$file,"\n";
+ exit 1;
+ }
+ sub err_file_to_dir {
+ my $file = shift;
+ print STDERR "Node change from file to directory ",
+ "is not supported by Subversion: ",$file,"\n";
+ exit 1;
+ }
+}
+
+
+sub get_diff {
+ my ($from, $treeish) = @_;
+ assert_tree($from);
+ print "diff-tree $from $treeish\n";
+ my $pid = open my $diff_fh, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ my @diff_tree = qw(git-diff-tree -z -r);
+ if ($_cp_similarity) {
+ push @diff_tree, "-C$_cp_similarity";
+ } else {
+ push @diff_tree, '-C';
+ }
+ push @diff_tree, '--find-copies-harder' if $_find_copies_harder;
+ push @diff_tree, "-l$_l" if defined $_l;
+ exec(@diff_tree, $from, $treeish) or croak $!;
+ }
+ return parse_diff_tree($diff_fh);
+}
+
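+# Apply the modifications from get_diff() to the SVN working copy with
+# `svn cp/mv/rm/add`, ordering copies and renames ahead of deletes, adds and
+# edits so their sources are still present.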
+sub svn_checkout_tree {
+ my ($from, $treeish) = @_;
+ my $mods = get_diff($from->{commit}, $treeish);
+ return $mods unless (scalar @$mods);
+ my ($rm, $add) = precommit_check($mods);
+
+ my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 );
+ foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
+ if ($m->{chg} eq 'C') {
+ svn_ensure_parent_path( $m->{file_b} );
+ sys(qw(svn cp), $m->{file_a}, $m->{file_b});
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'D') {
+ sys(qw(svn rm --force), $m->{file_b});
+ } elsif ($m->{chg} eq 'R') {
+ svn_ensure_parent_path( $m->{file_b} );
+ sys(qw(svn mv --force), $m->{file_a}, $m->{file_b});
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'M') {
+ apply_mod_line_blob($m);
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'T') {
+ sys(qw(svn rm --force),$m->{file_b});
+ apply_mod_line_blob($m);
+ sys(qw(svn add), $m->{file_b});
+ svn_check_prop_executable($m);
+ } elsif ($m->{chg} eq 'A') {
+ svn_ensure_parent_path( $m->{file_b} );
+ apply_mod_line_blob($m);
+ sys(qw(svn add), $m->{file_b});
+ svn_check_prop_executable($m);
+ } else {
+ croak "Invalid chg: $m->{chg}\n";
+ }
+ }
+
+ assert_tree($treeish);
+ if ($_rmdir) { # remove empty directories
+ handle_rmdir($rm, $add);
+ }
+ assert_tree($treeish);
+ return $mods;
+}
+
+sub libsvn_checkout_tree {
+ my ($from, $treeish, $ed) = @_;
+ my $mods = get_diff($from, $treeish);
+ return $mods unless (scalar @$mods);
+ my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 );
+ foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
+ my $f = $m->{chg};
+ if (defined $o{$f}) {
+ $ed->$f($m, $_q);
+ } else {
+ croak "Invalid change type: $f\n";
+ }
+ }
+ $ed->rmdirs($_q) if $_rmdir;
+ return $mods;
+}
+
+# svn ls does not operate on the current working tree, only on what is
+# in the repository.  There's not even an option for it... *sigh*
+# (added files don't show up and removed files remain in the ls listing)
+sub svn_ls_current {
+ my ($dir, $rm, $add) = @_;
+ chomp(my @ls = safe_qx('svn','ls',$dir));
+ my @ret = ();
+ foreach (@ls) {
+ s#/$##; # trailing slashes are evil
+ push @ret, $_ unless $rm->{$dir}->{$_};
+ }
+ if (exists $add->{$dir}) {
+ push @ret, keys %{$add->{$dir}};
+ }
+ return \@ret;
+}
+
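+# With --rmdir, delete directories left empty by the removals above, walking
+# up parent directories until a non-empty one is reached.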
+sub handle_rmdir {
+ my ($rm, $add) = @_;
+
+ foreach my $dir (sort {length $b <=> length $a} keys %$rm) {
+ my $ls = svn_ls_current($dir, $rm, $add);
+ next if (scalar @$ls);
+ sys(qw(svn rm --force),$dir);
+
+ my $dn = dirname $dir;
+ $rm->{ $dn }->{ basename $dir } = 1;
+ $ls = svn_ls_current($dn, $rm, $add);
+ while (scalar @$ls == 0 && $dn ne File::Spec->curdir) {
+ sys(qw(svn rm --force),$dn);
+ $dir = basename $dn;
+ $dn = dirname $dn;
+ $rm->{ $dn }->{ $dir } = 1;
+ $ls = svn_ls_current($dn, $rm, $add);
+ }
+ }
+}
+
+sub get_commit_message {
+ my ($commit, $commit_msg) = (@_);
+ my %log_msg = ( msg => '' );
+ open my $msg, '>', $commit_msg or croak $!;
+
+ chomp(my $type = `git-cat-file -t $commit`);
+ if ($type eq 'commit' || $type eq 'tag') {
+ my $pid = open my $msg_fh, '-|';
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ exec('git-cat-file', $type, $commit) or croak $!;
+ }
+ my $in_msg = 0;
+ while (<$msg_fh>) {
+ if (!$in_msg) {
+ $in_msg = 1 if (/^\s*$/);
+ } elsif (/^git-svn-id: /) {
+ # skip this, we regenerate the correct one
+				# on re-fetch anyway
+ } else {
+ print $msg $_ or croak $!;
+ }
+ }
+ close $msg_fh or croak $?;
+ }
+ close $msg or croak $!;
+
+ if ($_edit || ($type eq 'tree')) {
+ my $editor = $ENV{VISUAL} || $ENV{EDITOR} || 'vi';
+ system($editor, $commit_msg);
+ }
+
+ # file_to_s removes all trailing newlines, so just use chomp() here:
+ open $msg, '<', $commit_msg or croak $!;
+ { local $/; chomp($log_msg{msg} = <$msg>); }
+ close $msg or croak $!;
+
+ return \%log_msg;
+}
+
+sub set_svn_commit_env {
+ if (defined $LC_ALL) {
+ $ENV{LC_ALL} = $LC_ALL;
+ } else {
+ delete $ENV{LC_ALL};
+ }
+}
+
+sub svn_commit_tree {
+ my ($last, $commit) = @_;
+ my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$";
+ my $log_msg = get_commit_message($commit, $commit_msg);
+ my ($oneline) = ($log_msg->{msg} =~ /([^\n\r]+)/);
+ print "Committing $commit: $oneline\n";
+
+ set_svn_commit_env();
+ my @ci_output = safe_qx(qw(svn commit -F),$commit_msg);
+ $ENV{LC_ALL} = 'C';
+ unlink $commit_msg;
+ my ($committed) = ($ci_output[$#ci_output] =~ /(\d+)/);
+ if (!defined $committed) {
+ my $out = join("\n",@ci_output);
+ print STDERR "W: Trouble parsing \`svn commit' output:\n\n",
+ $out, "\n\nAssuming English locale...";
+		($committed) = ($out =~ /^Committed revision (\d+)\./sm);
+		defined $committed or die " FAILED!\n",
+			"Commit output failed to parse committed revision!\n";
+		print STDERR " OK\n";
+ }
+
+ my @svn_up = qw(svn up);
+ push @svn_up, '--ignore-externals' unless $_no_ignore_ext;
+ if ($_optimize_commits && ($committed == ($last->{revision} + 1))) {
+ push @svn_up, "-r$committed";
+ sys(@svn_up);
+ my $info = svn_info('.');
+ my $date = $info->{'Last Changed Date'} or die "Missing date\n";
+ if ($info->{'Last Changed Rev'} != $committed) {
+ croak "$info->{'Last Changed Rev'} != $committed\n"
+ }
+ my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
+ /(\d{4})\-(\d\d)\-(\d\d)\s
+ (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
+ or croak "Failed to parse date: $date\n";
+ $log_msg->{date} = "$tz $Y-$m-$d $H:$M:$S";
+ $log_msg->{author} = $info->{'Last Changed Author'};
+ $log_msg->{revision} = $committed;
+ $log_msg->{msg} .= "\n";
+ $log_msg->{parents} = [ $last->{commit} ];
+ $log_msg->{commit} = git_commit($log_msg, $commit);
+ return $log_msg;
+ }
+ # resync immediately
+ push @svn_up, "-r$last->{revision}";
+ sys(@svn_up);
+ return fetch("$committed=$commit");
+}
+
+sub rev_list_raw {
+ my (@args) = @_;
+ my $pid = open my $fh, '-|';
+ defined $pid or croak $!;
+ if (!$pid) {
+ exec(qw/git-rev-list --pretty=raw/, @args) or croak $!;
+ }
+ return { fh => $fh, t => { } };
+}
+
+sub next_rev_list_entry {
+ my $rl = shift;
+ my $fh = $rl->{fh};
+ my $x = $rl->{t};
+ while (<$fh>) {
+ if (/^commit ($sha1)$/o) {
+ if ($x->{c}) {
+ $rl->{t} = { c => $1 };
+ return $x;
+ } else {
+ $x->{c} = $1;
+ }
+ } elsif (/^parent ($sha1)$/o) {
+ $x->{p}->{$1} = 1;
+ } elsif (s/^ //) {
+ $x->{m} ||= '';
+ $x->{m} .= $_;
+ }
+ }
+ return ($x != $rl->{t}) ? $x : undef;
+}
+
+# read the entire log into a temporary file (which is removed ASAP)
+# and store the file handle + parser state
+sub svn_log_raw {
+ my (@log_args) = @_;
+ my $log_fh = IO::File->new_tmpfile or croak $!;
+ my $pid = fork;
+ defined $pid or croak $!;
+ if (!$pid) {
+ open STDOUT, '>&', $log_fh or croak $!;
+ exec (qw(svn log), @log_args) or croak $!
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+ seek $log_fh, 0, 0 or croak $!;
+ return { state => 'sep', fh => $log_fh };
+}
+
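+# Return the next revision parsed from the `svn log` output opened by
+# svn_log_raw(); a small state machine (sep -> rev -> msg_start -> msg)
+# tracks position between the 72-dash separator lines.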
+sub next_log_entry {
+ my $log = shift; # retval of svn_log_raw()
+ my $ret = undef;
+ my $fh = $log->{fh};
+
+ while (<$fh>) {
+ chomp;
+ if (/^\-{72}$/) {
+ if ($log->{state} eq 'msg') {
+ if ($ret->{lines}) {
+ $ret->{msg} .= $_."\n";
+ unless(--$ret->{lines}) {
+ $log->{state} = 'sep';
+ }
+ } else {
+ croak "Log parse error at: $_\n",
+ $ret->{revision},
+ "\n";
+ }
+ next;
+ }
+ if ($log->{state} ne 'sep') {
+ croak "Log parse error at: $_\n",
+ "state: $log->{state}\n",
+ $ret->{revision},
+ "\n";
+ }
+ $log->{state} = 'rev';
+
+ # if we have an empty log message, put something there:
+ if ($ret) {
+ $ret->{msg} ||= "\n";
+ delete $ret->{lines};
+ return $ret;
+ }
+ next;
+ }
+ if ($log->{state} eq 'rev' && s/^r(\d+)\s*\|\s*//) {
+ my $rev = $1;
+ my ($author, $date, $lines) = split(/\s*\|\s*/, $_, 3);
+ ($lines) = ($lines =~ /(\d+)/);
+ my ($Y,$m,$d,$H,$M,$S,$tz) = ($date =~
+ /(\d{4})\-(\d\d)\-(\d\d)\s
+ (\d\d)\:(\d\d)\:(\d\d)\s([\-\+]\d+)/x)
+ or croak "Failed to parse date: $date\n";
+ $ret = { revision => $rev,
+ date => "$tz $Y-$m-$d $H:$M:$S",
+ author => $author,
+ lines => $lines,
+ msg => '' };
+ if (defined $_authors && ! defined $users{$author}) {
+ die "Author: $author not defined in ",
+ "$_authors file\n";
+ }
+ $log->{state} = 'msg_start';
+ next;
+ }
+ # skip the first blank line of the message:
+ if ($log->{state} eq 'msg_start' && /^$/) {
+ $log->{state} = 'msg';
+ } elsif ($log->{state} eq 'msg') {
+ if ($ret->{lines}) {
+ $ret->{msg} .= $_."\n";
+ unless (--$ret->{lines}) {
+ $log->{state} = 'sep';
+ }
+ } else {
+ croak "Log parse error at: $_\n",
+ $ret->{revision},"\n";
+ }
+ }
+ }
+ return $ret;
+}
+
+sub svn_info {
+ my $url = shift || $SVN_URL;
+
+ my $pid = open my $info_fh, '-|';
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ exec(qw(svn info),$url) or croak $!;
+ }
+
+ my $ret = {};
+ # only single-lines seem to exist in svn info output
+ while (<$info_fh>) {
+ chomp $_;
+ if (m#^([^:]+)\s*:\s*(\S.*)$#) {
+ $ret->{$1} = $2;
+ push @{$ret->{-order}}, $1;
+ }
+ }
+ close $info_fh or croak $?;
+ return $ret;
+}
+
+sub sys { system(@_) == 0 or croak $? }
+
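+# Pipe a NUL-separated file list into `git-update-index --add/--remove`.
+# Files with svn:keywords set are first replaced by their .svn text-base
+# copy so keyword expansion never reaches the git object database.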
+sub do_update_index {
+ my ($z_cmd, $cmd, $no_text_base) = @_;
+
+ my $z = open my $p, '-|';
+ defined $z or croak $!;
+ unless ($z) { exec @$z_cmd or croak $! }
+
+ my $pid = open my $ui, '|-';
+ defined $pid or croak $!;
+ unless ($pid) {
+ exec('git-update-index',"--$cmd",'-z','--stdin') or croak $!;
+ }
+ local $/ = "\0";
+ while (my $x = <$p>) {
+ chomp $x;
+ if (!$no_text_base && lstat $x && ! -l _ &&
+ svn_propget_base('svn:keywords', $x)) {
+ my $mode = -x _ ? 0755 : 0644;
+ my ($v,$d,$f) = File::Spec->splitpath($x);
+ my $tb = File::Spec->catfile($d, '.svn', 'tmp',
+ 'text-base',"$f.svn-base");
+ $tb =~ s#^/##;
+ unless (-f $tb) {
+ $tb = File::Spec->catfile($d, '.svn',
+ 'text-base',"$f.svn-base");
+ $tb =~ s#^/##;
+ }
+ my @s = stat($x);
+ unlink $x or croak $!;
+ copy($tb, $x);
+ chmod(($mode &~ umask), $x) or croak $!;
+ utime $s[8], $s[9], $x;
+ }
+ print $ui $x,"\0";
+ }
+ close $ui or croak $?;
+}
+
+sub index_changes {
+ return if $_use_lib;
+
+ if (!-f "$GIT_SVN_DIR/info/exclude") {
+ open my $fd, '>>', "$GIT_SVN_DIR/info/exclude" or croak $!;
+ print $fd '.svn',"\n";
+ close $fd or croak $!;
+ }
+ my $no_text_base = shift;
+ do_update_index([qw/git-diff-files --name-only -z/],
+ 'remove',
+ $no_text_base);
+ do_update_index([qw/git-ls-files -z --others/,
+ "--exclude-from=$GIT_SVN_DIR/info/exclude"],
+ 'add',
+ $no_text_base);
+}
+
+sub s_to_file {
+ my ($str, $file, $mode) = @_;
+ open my $fd,'>',$file or croak $!;
+ print $fd $str,"\n" or croak $!;
+ close $fd or croak $!;
+ chmod ($mode &~ umask, $file) if (defined $mode);
+}
+
+sub file_to_s {
+ my $file = shift;
+ open my $fd,'<',$file or croak "$!: file: $file\n";
+ local $/;
+ my $ret = <$fd>;
+ close $fd or croak $!;
+ $ret =~ s/\s*$//s;
+ return $ret;
+}
+
+sub assert_revision_unknown {
+ my $r = shift;
+ if (my $c = revdb_get($REVDB, $r)) {
+ croak "$r = $c already exists! Why are we refetching it?";
+ }
+}
+
+sub trees_eq {
+ my ($x, $y) = @_;
+ my @x = safe_qx('git-cat-file','commit',$x);
+ my @y = safe_qx('git-cat-file','commit',$y);
+ if (($y[0] ne $x[0]) || $x[0] !~ /^tree $sha1\n$/
+ || $y[0] !~ /^tree $sha1\n$/) {
+ print STDERR "Trees not equal: $y[0] != $x[0]\n";
+ return 0
+ }
+ return 1;
+}
+
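+# Turn a parsed SVN revision into a git commit: write the tree from
+# $GIT_SVN_INDEX (unless the log entry already carries one), choose parents
+# (including tree-map matches), append the git-svn-id trailer unless
+# --no-metadata is in effect, update refs/remotes/$GIT_SVN and the rev_db.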
+sub git_commit {
+ my ($log_msg, @parents) = @_;
+ assert_revision_unknown($log_msg->{revision});
+ map_tree_joins() if (@_branch_from && !%tree_map);
+
+ my (@tmp_parents, @exec_parents, %seen_parent);
+ if (my $lparents = $log_msg->{parents}) {
+ @tmp_parents = @$lparents
+ }
+ # commit parents can be conditionally bound to a particular
+ # svn revision via: "svn_revno=commit_sha1", filter them out here:
+ foreach my $p (@parents) {
+ next unless defined $p;
+ if ($p =~ /^(\d+)=($sha1_short)$/o) {
+ if ($1 == $log_msg->{revision}) {
+ push @tmp_parents, $2;
+ }
+ } else {
+ push @tmp_parents, $p if $p =~ /$sha1_short/o;
+ }
+ }
+ my $tree = $log_msg->{tree};
+ if (!defined $tree) {
+ my $index = set_index($GIT_SVN_INDEX);
+ index_changes();
+ chomp($tree = `git-write-tree`);
+ croak $? if $?;
+ restore_index($index);
+ }
+
+ # just in case we clobber the existing ref, we still want that ref
+ # as our parent:
+ if (my $cur = eval { file_to_s("$GIT_DIR/refs/remotes/$GIT_SVN") }) {
+ push @tmp_parents, $cur;
+ }
+
+ if (exists $tree_map{$tree}) {
+ foreach my $p (@{$tree_map{$tree}}) {
+ my $skip;
+ foreach (@tmp_parents) {
+ # see if a common parent is found
+ my $mb = eval {
+ safe_qx('git-merge-base', $_, $p)
+ };
+ next if ($@ || $?);
+ $skip = 1;
+ last;
+ }
+ next if $skip;
+ my ($url_p, $r_p, $uuid_p) = cmt_metadata($p);
+ next if (($SVN_UUID eq $uuid_p) &&
+ ($log_msg->{revision} > $r_p));
+ next if (defined $url_p && defined $SVN_URL &&
+ ($SVN_UUID eq $uuid_p) &&
+ ($url_p eq $SVN_URL));
+ push @tmp_parents, $p;
+ }
+ }
+ foreach (@tmp_parents) {
+ next if $seen_parent{$_};
+ $seen_parent{$_} = 1;
+ push @exec_parents, $_;
+ # MAXPARENT is defined to 16 in commit-tree.c:
+		last if @exec_parents >= 16;
+ }
+
+ set_commit_env($log_msg);
+ my @exec = ('git-commit-tree', $tree);
+ push @exec, '-p', $_ foreach @exec_parents;
+ defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
+ or croak $!;
+ print $msg_fh $log_msg->{msg} or croak $!;
+ unless ($_no_metadata) {
+ print $msg_fh "\ngit-svn-id: $SVN_URL\@$log_msg->{revision}",
+ " $SVN_UUID\n" or croak $!;
+ }
+ $msg_fh->flush == 0 or croak $!;
+ close $msg_fh or croak $!;
+ chomp(my $commit = do { local $/; <$out_fh> });
+ close $out_fh or croak $!;
+ waitpid $pid, 0;
+ croak $? if $?;
+ if ($commit !~ /^$sha1$/o) {
+ die "Failed to commit, invalid sha1: $commit\n";
+ }
+ sys('git-update-ref',"refs/remotes/$GIT_SVN",$commit);
+ revdb_set($REVDB, $log_msg->{revision}, $commit);
+
+ # this output is read via pipe, do not change:
+ print "r$log_msg->{revision} = $commit\n";
+ check_repack();
+ return $commit;
+}
+
+sub check_repack {
+ if ($_repack && (--$_repack_nr == 0)) {
+ $_repack_nr = $_repack;
+ sys("git repack $_repack_flags");
+ }
+}
+
+sub set_commit_env {
+ my ($log_msg) = @_;
+ my $author = $log_msg->{author};
+ if (!defined $author || length $author == 0) {
+ $author = '(no author)';
+ }
+ my ($name,$email) = defined $users{$author} ? @{$users{$author}}
+ : ($author,"$author\@$SVN_UUID");
+ $ENV{GIT_AUTHOR_NAME} = $ENV{GIT_COMMITTER_NAME} = $name;
+ $ENV{GIT_AUTHOR_EMAIL} = $ENV{GIT_COMMITTER_EMAIL} = $email;
+ $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_msg->{date};
+}
+
+sub apply_mod_line_blob {
+ my $m = shift;
+ if ($m->{mode_b} =~ /^120/) {
+ blob_to_symlink($m->{sha1_b}, $m->{file_b});
+ } else {
+ blob_to_file($m->{sha1_b}, $m->{file_b});
+ }
+}
+
+sub blob_to_symlink {
+ my ($blob, $link) = @_;
+ defined $link or croak "\$link not defined!\n";
+ croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
+ if (-l $link || -f _) {
+ unlink $link or croak $!;
+ }
+
+ my $dest = `git-cat-file blob $blob`; # no newline, so no chomp
+ symlink $dest, $link or croak $!;
+}
+
+sub blob_to_file {
+ my ($blob, $file) = @_;
+ defined $file or croak "\$file not defined!\n";
+ croak "Not a sha1: $blob\n" unless $blob =~ /^$sha1$/o;
+ if (-l $file || -f _) {
+ unlink $file or croak $!;
+ }
+
+ open my $blob_fh, '>', $file or croak "$!: $file\n";
+ my $pid = fork;
+ defined $pid or croak $!;
+
+ if ($pid == 0) {
+ open STDOUT, '>&', $blob_fh or croak $!;
+ exec('git-cat-file','blob',$blob) or croak $!;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+
+ close $blob_fh or croak $!;
+}
+
+sub safe_qx {
+ my $pid = open my $child, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec(@_) or croak $!;
+ }
+ my @ret = (<$child>);
+ close $child or croak $?;
+ die $? if $?; # just in case close didn't error out
+ return wantarray ? @ret : join('',@ret);
+}
+
+sub svn_compat_check {
+ if ($_follow_parent) {
+ print STDERR 'E: --follow-parent functionality is only ',
+ "available when SVN libraries are used\n";
+ exit 1;
+ }
+ my @co_help = safe_qx(qw(svn co -h));
+ unless (grep /ignore-externals/,@co_help) {
+ print STDERR "W: Installed svn version does not support ",
+ "--ignore-externals\n";
+ $_no_ignore_ext = 1;
+ }
+ if (grep /usage: checkout URL\[\@REV\]/,@co_help) {
+ $_svn_co_url_revs = 1;
+ }
+ if (grep /\[TARGET\[\@REV\]\.\.\.\]/, `svn propget -h`) {
+ $_svn_pg_peg_revs = 1;
+ }
+
+ # I really, really hope nobody hits this...
+ unless (grep /stop-on-copy/, (safe_qx(qw(svn log -h)))) {
+ print STDERR <<'';
+W: The installed svn version does not support the --stop-on-copy flag in
+ the log command.
+   Let's hope the directory you're tracking is not a branch or tag
+ and was never moved within the repository...
+
+ $_no_stop_copy = 1;
+ }
+}
+
+# *sigh*, new versions of svn won't honor -r<rev> without URL@<rev>,
+# (and they won't honor URL@<rev> without -r<rev>, too!)
+sub svn_cmd_checkout {
+ my ($url, $rev, $dir) = @_;
+ my @cmd = ('svn','co', "-r$rev");
+ push @cmd, '--ignore-externals' unless $_no_ignore_ext;
+ $url .= "\@$rev" if $_svn_co_url_revs;
+ sys(@cmd, $url, $dir);
+}
+
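+# Detect metadata from older git-svn layouts (a "$GIT_SVN-HEAD" ref without a
+# matching refs/remotes/$GIT_SVN) and tell the user to run "rebuild --upgrade".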
+sub check_upgrade_needed {
+ if (!-r $REVDB) {
+ -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]);
+ open my $fh, '>>',$REVDB or croak $!;
+ close $fh;
+ }
+ my $old = eval {
+ my $pid = open my $child, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ close STDERR;
+ exec('git-rev-parse',"$GIT_SVN-HEAD") or croak $!;
+ }
+ my @ret = (<$child>);
+ close $child or croak $?;
+ die $? if $?; # just in case close didn't error out
+ return wantarray ? @ret : join('',@ret);
+ };
+ return unless $old;
+ my $head = eval { safe_qx('git-rev-parse',"refs/remotes/$GIT_SVN") };
+ if ($@ || !$head) {
+ print STDERR "Please run: $0 rebuild --upgrade\n";
+ exit 1;
+ }
+}
+
+# fills %tree_map with a reverse mapping of trees to commits. Useful
+# for finding parents to commit on.
+sub map_tree_joins {
+ my %seen;
+ foreach my $br (@_branch_from) {
+ my $pid = open my $pipe, '-|';
+ defined $pid or croak $!;
+ if ($pid == 0) {
+ exec(qw(git-rev-list --topo-order --pretty=raw), $br)
+ or croak $!;
+ }
+ while (<$pipe>) {
+ if (/^commit ($sha1)$/o) {
+ my $commit = $1;
+
+ # if we've seen a commit,
+ # we've seen its parents
+ last if $seen{$commit};
+ my ($tree) = (<$pipe> =~ /^tree ($sha1)$/o);
+ unless (defined $tree) {
+ die "Failed to parse commit $commit\n";
+ }
+ push @{$tree_map{$tree}}, $commit;
+ $seen{$commit} = 1;
+ }
+ }
+ close $pipe; # we could be breaking the pipe early
+ }
+}
+
+sub load_all_refs {
+ if (@_branch_from) {
+ print STDERR '--branch|-b parameters are ignored when ',
+ "--branch-all-refs|-B is passed\n";
+ }
+
+ # don't worry about rev-list on non-commit objects/tags,
+ # it shouldn't blow up if a ref is a blob or tree...
+ chomp(@_branch_from = `git-rev-parse --symbolic --all`);
+}
+
+# '<svn username> = real-name <email address>' mapping based on git-svnimport:
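+# e.g. a line in the authors file might look like (hypothetical example):
+#   jrandom = J. Random Hacker <jrandom@example.com>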
+sub load_authors {
+ open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
+ while (<$authors>) {
+ chomp;
+ next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
+ my ($user, $name, $email) = ($1, $2, $3);
+ $users{$user} = [$name, $email];
+ }
+ close $authors or croak $!;
+}
+
+sub rload_authors {
+ open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
+ while (<$authors>) {
+ chomp;
+ next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/;
+ my ($user, $name, $email) = ($1, $2, $3);
+ $rusers{"$name <$email>"} = $user;
+ }
+ close $authors or croak $!;
+}
+
+sub svn_propget_base {
+ my ($p, $f) = @_;
+ $f .= '@BASE' if $_svn_pg_peg_revs;
+ return safe_qx(qw/svn propget/, $p, $f);
+}
+
+sub git_svn_each {
+ my $sub = shift;
+ foreach (`git-rev-parse --symbolic --all`) {
+ next unless s#^refs/remotes/##;
+ chomp $_;
+ next unless -f "$GIT_DIR/svn/$_/info/url";
+ &$sub($_);
+ }
+}
+
+sub migrate_revdb {
+ git_svn_each(sub {
+ my $id = shift;
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
+ init_vars();
+ exit 0 if -r $REVDB;
+ print "Upgrading svn => git mapping...\n";
+ -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]);
+ open my $fh, '>>',$REVDB or croak $!;
+ close $fh;
+ rebuild();
+ print "Done upgrading. You may now delete the ",
+ "deprecated $GIT_SVN_DIR/revs directory\n";
+ exit 0;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+ });
+}
+
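+# Upgrade metadata written by older versions of git-svn: build missing
+# .rev_db files and move per-id directories from $GIT_DIR/<id> into
+# $GIT_DIR/svn/<id>.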
+sub migration_check {
+ migrate_revdb() unless (-e $REVDB);
+ return if (-d "$GIT_DIR/svn" || !-d $GIT_DIR);
+ print "Upgrading repository...\n";
+ unless (-d "$GIT_DIR/svn") {
+ mkdir "$GIT_DIR/svn" or croak $!;
+ }
+ print "Data from a previous version of git-svn exists, but\n\t",
+ "$GIT_SVN_DIR\n\t(required for this version ",
+ "($VERSION) of git-svn) does not.\n";
+
+ foreach my $x (`git-rev-parse --symbolic --all`) {
+ next unless $x =~ s#^refs/remotes/##;
+ chomp $x;
+ next unless -f "$GIT_DIR/$x/info/url";
+ my $u = eval { file_to_s("$GIT_DIR/$x/info/url") };
+ next unless $u;
+ my $dn = dirname("$GIT_DIR/svn/$x");
+ mkpath([$dn]) unless -d $dn;
+ rename "$GIT_DIR/$x", "$GIT_DIR/svn/$x" or croak "$!: $x";
+ }
+ migrate_revdb() if (-d $GIT_SVN_DIR && !-w $REVDB);
+ print "Done upgrading.\n";
+}
+
+sub find_rev_before {
+ my ($r, $id, $eq_ok) = @_;
+ my $f = "$GIT_DIR/svn/$id/.rev_db";
+ return (undef,undef) unless -r $f;
+ --$r unless $eq_ok;
+ while ($r > 0) {
+ if (my $c = revdb_get($f, $r)) {
+ return ($r, $c);
+ }
+ --$r;
+ }
+ return (undef, undef);
+}
+
+sub init_vars {
+ $GIT_SVN ||= $ENV{GIT_SVN_ID} || 'git-svn';
+ $GIT_SVN_DIR = "$GIT_DIR/svn/$GIT_SVN";
+ $REVDB = "$GIT_SVN_DIR/.rev_db";
+ $GIT_SVN_INDEX = "$GIT_SVN_DIR/index";
+ $SVN_URL = undef;
+ $SVN_WC = "$GIT_SVN_DIR/tree";
+ %tree_map = ();
+}
+
+# convert Getopt::Long specs for use by git-repo-config
+sub read_repo_config {
+ return unless -d $GIT_DIR;
+ my $opts = shift;
+ foreach my $o (keys %$opts) {
+ my $v = $opts->{$o};
+ my ($key) = ($o =~ /^([a-z\-]+)/);
+ $key =~ s/-//g;
+ my $arg = 'git-repo-config';
+ $arg .= ' --int' if ($o =~ /[:=]i$/);
+ $arg .= ' --bool' if ($o !~ /[:=][sfi]$/);
+ if (ref $v eq 'ARRAY') {
+ chomp(my @tmp = `$arg --get-all svn.$key`);
+ @$v = @tmp if @tmp;
+ } else {
+ chomp(my $tmp = `$arg --get svn.$key`);
+			if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) {
+ $$v = $tmp;
+ }
+ }
+ }
+}
+
+sub set_default_vals {
+ if (defined $_repack) {
+ $_repack = 1000 if ($_repack <= 0);
+ $_repack_nr = $_repack;
+ $_repack_flags ||= '-d';
+ }
+}
+
+sub read_grafts {
+ my $gr_file = shift;
+ my ($grafts, $comments) = ({}, {});
+ if (open my $fh, '<', $gr_file) {
+ my @tmp;
+ while (<$fh>) {
+ if (/^($sha1)\s+/) {
+ my $c = $1;
+ if (@tmp) {
+ @{$comments->{$c}} = @tmp;
+ @tmp = ();
+ }
+ foreach my $p (split /\s+/, $_) {
+ $grafts->{$c}->{$p} = 1;
+ }
+ } else {
+ push @tmp, $_;
+ }
+ }
+ close $fh or croak $!;
+ @{$comments->{'END'}} = @tmp if @tmp;
+ }
+ return ($grafts, $comments);
+}
+
+sub write_grafts {
+ my ($grafts, $comments, $gr_file) = @_;
+
+ open my $fh, '>', $gr_file or croak $!;
+ foreach my $c (sort keys %$grafts) {
+ if ($comments->{$c}) {
+ print $fh $_ foreach @{$comments->{$c}};
+ }
+ my $p = $grafts->{$c};
+ my %x; # real parents
+ delete $p->{$c}; # commits are not self-reproducing...
+ my $pid = open my $ch, '-|';
+ defined $pid or croak $!;
+ if (!$pid) {
+ exec(qw/git-cat-file commit/, $c) or croak $!;
+ }
+ while (<$ch>) {
+ if (/^parent ($sha1)/) {
+ $x{$1} = $p->{$1} = 1;
+ } else {
+ last unless /^\S/;
+ }
+ }
+ close $ch; # breaking the pipe
+
+ # if real parents are the only ones in the grafts, drop it
+ next if join(' ',sort keys %$p) eq join(' ',sort keys %x);
+
+ my (@ip, @jp, $mb);
+ my %del = %x;
+ @ip = @jp = keys %$p;
+ foreach my $i (@ip) {
+ next if $del{$i} || $p->{$i} == 2;
+ foreach my $j (@jp) {
+ next if $i eq $j || $del{$j} || $p->{$j} == 2;
+ $mb = eval { safe_qx('git-merge-base',$i,$j) };
+ next unless $mb;
+ chomp $mb;
+ next if $x{$mb};
+ if ($mb eq $j) {
+ delete $p->{$i};
+ $del{$i} = 1;
+ } elsif ($mb eq $i) {
+ delete $p->{$j};
+ $del{$j} = 1;
+ }
+ }
+ }
+
+ # if real parents are the only ones in the grafts, drop it
+ next if join(' ',sort keys %$p) eq join(' ',sort keys %x);
+
+ print $fh $c, ' ', join(' ', sort keys %$p),"\n";
+ }
+ if ($comments->{'END'}) {
+ print $fh $_ foreach @{$comments->{'END'}};
+ }
+ close $fh or croak $!;
+}
+
+sub read_url_paths_all {
+ my ($l_map, $pfx, $p) = @_;
+ my @dir;
+ foreach (<$p/*>) {
+ if (-r "$_/info/url") {
+ $pfx .= '/' if $pfx && $pfx !~ m!/$!;
+ my $id = $pfx . basename $_;
+ my $url = file_to_s("$_/info/url");
+ my ($u, $p) = repo_path_split($url);
+ $l_map->{$u}->{$p} = $id;
+ } elsif (-d $_) {
+ push @dir, $_;
+ }
+ }
+ foreach (@dir) {
+ my $x = $_;
+ $x =~ s!^\Q$GIT_DIR\E/svn/!!o;
+ read_url_paths_all($l_map, $x, $_);
+ }
+}
+
+# this one only gets ids that have been imported, not new ones
+sub read_url_paths {
+ my $l_map = {};
+ git_svn_each(sub { my $x = shift;
+ my $url = file_to_s("$GIT_DIR/svn/$x/info/url");
+ my ($u, $p) = repo_path_split($url);
+ $l_map->{$u}->{$p} = $x;
+ });
+ return $l_map;
+}
+
+sub extract_metadata {
+ my $id = shift or return (undef, undef, undef);
+ my ($url, $rev, $uuid) = ($id =~ /^git-svn-id:\s(\S+?)\@(\d+)
+ \s([a-f\d\-]+)$/x);
+ if (!$rev || !$uuid || !$url) {
+ # some of the original repositories I made had
+ # identifiers like this:
+ ($rev, $uuid) = ($id =~/^git-svn-id:\s(\d+)\@([a-f\d\-]+)/);
+ }
+ return ($url, $rev, $uuid);
+}
+
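To make the format concrete: extract_metadata() picks apart the git-svn-id trailer that git-svn appends to commit messages. A minimal sketch, with an invented URL and UUID:

    # illustrative only -- the URL, revision and UUID are made up
    my $line = 'git-svn-id: http://example.com/svn/trunk@1234 '
             . '2bfe1b00-0000-0000-0000-000000000000';
    my ($url, $rev, $uuid) = extract_metadata($line);
    # $url  => 'http://example.com/svn/trunk'
    # $rev  => '1234'
    # $uuid => '2bfe1b00-0000-0000-0000-000000000000'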
+sub cmt_metadata {
+ return extract_metadata((grep(/^git-svn-id: /,
+ safe_qx(qw/git-cat-file commit/, shift)))[-1]);
+}
+
+sub get_commit_time {
+ my $cmt = shift;
+ defined(my $pid = open my $fh, '-|') or croak $!;
+ if (!$pid) {
+ exec qw/git-rev-list --pretty=raw -n1/, $cmt or croak $!;
+ }
+ while (<$fh>) {
+ /^committer\s(?:.+) (\d+) ([\-\+]?\d+)$/ or next;
+ my ($s, $tz) = ($1, $2);
+ if ($tz =~ s/^\+//) {
+ $s += tz_to_s_offset($tz);
+ } elsif ($tz =~ s/^\-//) {
+ $s -= tz_to_s_offset($tz);
+ }
+ close $fh;
+ return $s;
+ }
+ die "Can't get commit time for commit: $cmt\n";
+}
+
+sub tz_to_s_offset {
+ my ($tz) = @_;
+ $tz =~ s/(\d\d)$//;
+ return ($1 * 60) + ($tz * 3600);
+}
+
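Note that the callers (see get_commit_time() above) strip the leading sign before this helper runs, so it only ever sees an unsigned HHMM string. A tiny example:

    my $off = tz_to_s_offset('0530');   # 30*60 + 5*3600 = 19800 seconds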
+sub setup_pager { # translated to Perl from pager.c
+ return unless (-t *STDOUT);
+ my $pager = $ENV{PAGER};
+ if (!defined $pager) {
+ $pager = 'less';
+ } elsif (length $pager == 0 || $pager eq 'cat') {
+ return;
+ }
+ pipe my $rfd, my $wfd or return;
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ open STDOUT, '>&', $wfd or croak $!;
+ return;
+ }
+ open STDIN, '<&', $rfd or croak $!;
+ $ENV{LESS} ||= '-S';
+	exec $pager or croak "Can't run pager: $!\n";
+}
+
+sub get_author_info {
+ my ($dest, $author, $t, $tz) = @_;
+ $author =~ s/(?:^\s*|\s*$)//g;
+ $dest->{a_raw} = $author;
+ my $_a;
+ if ($_authors) {
+ $_a = $rusers{$author} || undef;
+ }
+ if (!$_a) {
+ ($_a) = ($author =~ /<([^>]+)\@[^>]+>$/);
+ }
+ $dest->{t} = $t;
+ $dest->{tz} = $tz;
+ $dest->{a} = $_a;
+ # Date::Parse isn't in the standard Perl distro :(
+ if ($tz =~ s/^\+//) {
+ $t += tz_to_s_offset($tz);
+ } elsif ($tz =~ s/^\-//) {
+ $t -= tz_to_s_offset($tz);
+ }
+ $dest->{t_utc} = $t;
+}
+
+sub process_commit {
+ my ($c, $r_min, $r_max, $defer) = @_;
+ if (defined $r_min && defined $r_max) {
+ if ($r_min == $c->{r} && $r_min == $r_max) {
+ show_commit($c);
+ return 0;
+ }
+ return 1 if $r_min == $r_max;
+ if ($r_min < $r_max) {
+ # we need to reverse the print order
+ return 0 if (defined $_limit && --$_limit < 0);
+ push @$defer, $c;
+ return 1;
+ }
+ if ($r_min != $r_max) {
+ return 1 if ($r_min < $c->{r});
+ return 1 if ($r_max > $c->{r});
+ }
+ }
+ return 0 if (defined $_limit && --$_limit < 0);
+ show_commit($c);
+ return 1;
+}
+
+sub show_commit {
+ my $c = shift;
+ if ($_oneline) {
+ my $x = "\n";
+ if (my $l = $c->{l}) {
+ while ($l->[0] =~ /^\s*$/) { shift @$l }
+ $x = $l->[0];
+ }
+ $_l_fmt ||= 'A' . length($c->{r});
+ print 'r',pack($_l_fmt, $c->{r}),' | ';
+ print "$c->{c} | " if $_show_commit;
+ print $x;
+ } else {
+ show_commit_normal($c);
+ }
+}
+
+sub show_commit_normal {
+ my ($c) = @_;
+ print '-' x72, "\nr$c->{r} | ";
+ print "$c->{c} | " if $_show_commit;
+ print "$c->{a} | ", strftime("%Y-%m-%d %H:%M:%S %z (%a, %d %b %Y)",
+ localtime($c->{t_utc})), ' | ';
+ my $nr_line = 0;
+
+ if (my $l = $c->{l}) {
+ while ($l->[$#$l] eq "\n" && $l->[($#$l - 1)] eq "\n") {
+ pop @$l;
+ }
+ $nr_line = scalar @$l;
+ if (!$nr_line) {
+ print "1 line\n\n\n";
+ } else {
+ if ($nr_line == 1) {
+ $nr_line = '1 line';
+ } else {
+ $nr_line .= ' lines';
+ }
+ print $nr_line, "\n\n";
+ print $_ foreach @$l;
+ }
+ } else {
+ print "1 line\n\n";
+
+ }
+ foreach my $x (qw/raw diff/) {
+ if ($c->{$x}) {
+ print "\n";
+ print $_ foreach @{$c->{$x}}
+ }
+ }
+}
+
+sub libsvn_load {
+ return unless $_use_lib;
+ $_use_lib = eval {
+ require SVN::Core;
+ if ($SVN::Core::VERSION lt '1.1.0') {
+ die "Need SVN::Core 1.1.0 or better ",
+ "(got $SVN::Core::VERSION) ",
+ "Falling back to command-line svn\n";
+ }
+ require SVN::Ra;
+ require SVN::Delta;
+ push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor';
+ my $kill_stupid_warnings = $SVN::Node::none.$SVN::Node::file.
+ $SVN::Node::dir.$SVN::Node::unknown.
+ $SVN::Node::none.$SVN::Node::file.
+ $SVN::Node::dir.$SVN::Node::unknown;
+ 1;
+ };
+}
+
+sub libsvn_connect {
+ my ($url) = @_;
+ my $auth = SVN::Core::auth_open([SVN::Client::get_simple_provider(),
+ SVN::Client::get_ssl_server_trust_file_provider(),
+ SVN::Client::get_username_provider()]);
+ my $s = eval { SVN::Ra->new(url => $url, auth => $auth) };
+ return $s;
+}
+
+sub libsvn_get_file {
+ my ($gui, $f, $rev) = @_;
+ my $p = $f;
+ if (length $SVN_PATH > 0) {
+ return unless ($p =~ s#^\Q$SVN_PATH\E/##);
+ }
+
+ my ($hash, $pid, $in, $out);
+ my $pool = SVN::Pool->new;
+ defined($pid = open3($in, $out, '>&STDERR',
+ qw/git-hash-object -w --stdin/)) or croak $!;
+ # redirect STDOUT for SVN 1.1.x compatibility
+ open my $stdout, '>&', \*STDOUT or croak $!;
+ open STDOUT, '>&', $in or croak $!;
+ my ($r, $props) = $SVN->get_file($f, $rev, \*STDOUT, $pool);
+ $in->flush == 0 or croak $!;
+ open STDOUT, '>&', $stdout or croak $!;
+ close $in or croak $!;
+ close $stdout or croak $!;
+ $pool->clear;
+ chomp($hash = do { local $/; <$out> });
+ close $out or croak $!;
+ waitpid $pid, 0;
+ $hash =~ /^$sha1$/o or die "not a sha1: $hash\n";
+
+ my $mode = exists $props->{'svn:executable'} ? '100755' : '100644';
+ if (exists $props->{'svn:special'}) {
+ $mode = '120000';
+ my $link = `git-cat-file blob $hash`;
+ $link =~ s/^link // or die "svn:special file with contents: <",
+ $link, "> is not understood\n";
+ defined($pid = open3($in, $out, '>&STDERR',
+ qw/git-hash-object -w --stdin/)) or croak $!;
+ print $in $link;
+ $in->flush == 0 or croak $!;
+ close $in or croak $!;
+ chomp($hash = do { local $/; <$out> });
+ close $out or croak $!;
+ waitpid $pid, 0;
+ $hash =~ /^$sha1$/o or die "not a sha1: $hash\n";
+ }
+ print $gui $mode,' ',$hash,"\t",$p,"\0" or croak $!;
+}
+
+sub libsvn_log_entry {
+ my ($rev, $author, $date, $msg, $parents) = @_;
+ my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
+ (\d\d)\:(\d\d)\:(\d\d).\d+Z$/x)
+ or die "Unable to parse date: $date\n";
+ if (defined $_authors && ! defined $users{$author}) {
+ die "Author: $author not defined in $_authors file\n";
+ }
+ $msg = '' if ($rev == 0 && !defined $msg);
+ return { revision => $rev, date => "+0000 $Y-$m-$d $H:$M:$S",
+ author => $author, msg => $msg."\n", parents => $parents || [] }
+}
+
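A sketch of the hashref returned above, fed with the sample ISO date and author string from the __END__ notes at the bottom of this file (assumes $_authors is unset, so the $users{} check does not trip):

    my $log = libsvn_log_entry(8, 'committer name',
                               '2004-02-24T17:01:44.108345Z', "log entry\n");
    # $log->{revision} => 8
    # $log->{date}     => '+0000 2004-02-24 17:01:44'
    # $log->{author}   => 'committer name'
    # $log->{msg}      => "log entry\n\n"   (a trailing newline is appended)
    # $log->{parents}  => []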
+sub process_rm {
+ my ($gui, $last_commit, $f) = @_;
+ $f =~ s#^\Q$SVN_PATH\E/?## or return;
+ # remove entire directories.
+ if (safe_qx('git-ls-tree',$last_commit,'--',$f) =~ /^040000 tree/) {
+ defined(my $pid = open my $ls, '-|') or croak $!;
+ if (!$pid) {
+ exec(qw/git-ls-tree -r --name-only -z/,
+ $last_commit,'--',$f) or croak $!;
+ }
+ local $/ = "\0";
+ while (<$ls>) {
+ print $gui '0 ',0 x 40,"\t",$_ or croak $!;
+ }
+ close $ls or croak $?;
+ } else {
+ print $gui '0 ',0 x 40,"\t",$f,"\0" or croak $!;
+ }
+}
+
+sub libsvn_fetch {
+ my ($last_commit, $paths, $rev, $author, $date, $msg) = @_;
+ open my $gui, '| git-update-index -z --index-info' or croak $!;
+ my @amr;
+ foreach my $f (keys %$paths) {
+ my $m = $paths->{$f}->action();
+ $f =~ s#^/+##;
+ if ($m =~ /^[DR]$/) {
+ print "\t$m\t$f\n" unless $_q;
+ process_rm($gui, $last_commit, $f);
+ next if $m eq 'D';
+ # 'R' can be file replacements, too, right?
+ }
+ my $pool = SVN::Pool->new;
+ my $t = $SVN->check_path($f, $rev, $pool);
+ if ($t == $SVN::Node::file) {
+ if ($m =~ /^[AMR]$/) {
+ push @amr, [ $m, $f ];
+ } else {
+ die "Unrecognized action: $m, ($f r$rev)\n";
+ }
+ } elsif ($t == $SVN::Node::dir && $m =~ /^[AR]$/) {
+ my @traversed = ();
+ libsvn_traverse($gui, '', $f, $rev, \@traversed);
+ foreach (@traversed) {
+ push @amr, [ $m, $_ ]
+ }
+ }
+ $pool->clear;
+ }
+ foreach (@amr) {
+ print "\t$_->[0]\t$_->[1]\n" unless $_q;
+ libsvn_get_file($gui, $_->[1], $rev)
+ }
+ close $gui or croak $?;
+ return libsvn_log_entry($rev, $author, $date, $msg, [$last_commit]);
+}
+
+sub svn_grab_base_rev {
+ defined(my $pid = open my $fh, '-|') or croak $!;
+ if (!$pid) {
+ open my $null, '>', '/dev/null' or croak $!;
+ open STDERR, '>&', $null or croak $!;
+ exec qw/git-rev-parse --verify/,"refs/remotes/$GIT_SVN^0"
+ or croak $!;
+ }
+ chomp(my $c = do { local $/; <$fh> });
+ close $fh;
+ if (defined $c && length $c) {
+ my ($url, $rev, $uuid) = cmt_metadata($c);
+ return ($rev, $c) if defined $rev;
+ }
+ if ($_no_metadata) {
+ my $offset = -41; # from tail
+ my $rl;
+ open my $fh, '<', $REVDB or
+ die "--no-metadata specified and $REVDB not readable\n";
+ seek $fh, $offset, 2;
+ $rl = readline $fh;
+ defined $rl or return (undef, undef);
+ chomp $rl;
+ while ($c ne $rl && tell $fh != 0) {
+ $offset -= 41;
+ seek $fh, $offset, 2;
+ $rl = readline $fh;
+ defined $rl or return (undef, undef);
+ chomp $rl;
+ }
+ my $rev = tell $fh;
+ croak $! if ($rev < -1);
+ $rev = ($rev - 41) / 41;
+ close $fh or croak $!;
+ return ($rev, $c);
+ }
+ return (undef, undef);
+}
+
+sub libsvn_parse_revision {
+ my $base = shift;
+ my $head = $SVN->get_latest_revnum();
+ if (!defined $_revision || $_revision eq 'BASE:HEAD') {
+ return ($base + 1, $head) if (defined $base);
+ return (0, $head);
+ }
+ return ($1, $2) if ($_revision =~ /^(\d+):(\d+)$/);
+ return ($_revision, $_revision) if ($_revision =~ /^\d+$/);
+ if ($_revision =~ /^BASE:(\d+)$/) {
+ return ($base + 1, $1) if (defined $base);
+ return (0, $head);
+ }
+ return ($1, $head) if ($_revision =~ /^(\d+):HEAD$/);
+ die "revision argument: $_revision not understood by git-svn\n",
+ "Try using the command-line svn client instead\n";
+}
+
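For reference, a few mappings of the accepted $_revision forms, under assumed values of an already-imported base revision of 41 and a repository head of 200:

    # assumed: $base = 41, $SVN->get_latest_revnum() returns 200
    # $_revision            => (min, max) returned by libsvn_parse_revision()
    # undef or 'BASE:HEAD'  => (42, 200)    resume after the last imported rev
    # '100:150'             => (100, 150)
    # '120'                 => (120, 120)
    # 'BASE:150'            => (42, 150)
    # '100:HEAD'            => (100, 200)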
+sub libsvn_traverse {
+ my ($gui, $pfx, $path, $rev, $files) = @_;
+ my $cwd = "$pfx/$path";
+ my $pool = SVN::Pool->new;
+ $cwd =~ s#^/+##g;
+ my ($dirent, $r, $props) = $SVN->get_dir($cwd, $rev, $pool);
+ foreach my $d (keys %$dirent) {
+ my $t = $dirent->{$d}->kind;
+ if ($t == $SVN::Node::dir) {
+ libsvn_traverse($gui, $cwd, $d, $rev, $files);
+ } elsif ($t == $SVN::Node::file) {
+ my $file = "$cwd/$d";
+ if (defined $files) {
+ push @$files, $file;
+ } else {
+ print "\tA\t$file\n" unless $_q;
+ libsvn_get_file($gui, $file, $rev);
+ }
+ }
+ }
+ $pool->clear;
+}
+
+sub libsvn_traverse_ignore {
+ my ($fh, $path, $r) = @_;
+ $path =~ s#^/+##g;
+ my $pool = SVN::Pool->new;
+ my ($dirent, undef, $props) = $SVN->get_dir($path, $r, $pool);
+ my $p = $path;
+ $p =~ s#^\Q$SVN_PATH\E/?##;
+ print $fh length $p ? "\n# $p\n" : "\n# /\n";
+ if (my $s = $props->{'svn:ignore'}) {
+ $s =~ s/[\r\n]+/\n/g;
+ chomp $s;
+ if (length $p == 0) {
+ $s =~ s#\n#\n/$p#g;
+ print $fh "/$s\n";
+ } else {
+ $s =~ s#\n#\n/$p/#g;
+ print $fh "/$p/$s\n";
+ }
+ }
+ foreach (sort keys %$dirent) {
+ next if $dirent->{$_}->kind != $SVN::Node::dir;
+ libsvn_traverse_ignore($fh, "$path/$_", $r);
+ }
+ $pool->clear;
+}
+
+sub revisions_eq {
+ my ($path, $r0, $r1) = @_;
+ return 1 if $r0 == $r1;
+ my $nr = 0;
+ if ($_use_lib) {
+		# should be OK to use Pool here; (r1 - r0) should be small
+ my $pool = SVN::Pool->new;
+ libsvn_get_log($SVN, "/$path", $r0, $r1,
+ 0, 1, 1, sub {$nr++}, $pool);
+ $pool->clear;
+ } else {
+ my ($url, undef) = repo_path_split($SVN_URL);
+ my $svn_log = svn_log_raw("$url/$path","-r$r0:$r1");
+ while (next_log_entry($svn_log)) { $nr++ }
+ close $svn_log->{fh};
+ }
+ return 0 if ($nr > 1);
+ return 1;
+}
+
+sub libsvn_find_parent_branch {
+ my ($paths, $rev, $author, $date, $msg) = @_;
+ my $svn_path = '/'.$SVN_PATH;
+
+ # look for a parent from another branch:
+ my $i = $paths->{$svn_path} or return;
+ my $branch_from = $i->copyfrom_path or return;
+ my $r = $i->copyfrom_rev;
+ print STDERR "Found possible branch point: ",
+ "$branch_from => $svn_path, $r\n";
+ $branch_from =~ s#^/##;
+ my $l_map = {};
+ read_url_paths_all($l_map, '', "$GIT_DIR/svn");
+ my $url = $SVN->{url};
+ defined $l_map->{$url} or return;
+ my $id = $l_map->{$url}->{$branch_from};
+ if (!defined $id && $_follow_parent) {
+ print STDERR "Following parent: $branch_from\@$r\n";
+ # auto create a new branch and follow it
+ $id = basename($branch_from);
+ $id .= '@'.$r if -r "$GIT_DIR/svn/$id";
+ while (-r "$GIT_DIR/svn/$id") {
+ # just grow a tail if we're not unique enough :x
+ $id .= '-';
+ }
+ }
+ return unless defined $id;
+
+ my ($r0, $parent) = find_rev_before($r,$id,1);
+ if ($_follow_parent && (!defined $r0 || !defined $parent)) {
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ $GIT_SVN = $ENV{GIT_SVN_ID} = $id;
+ init_vars();
+ $SVN_URL = "$url/$branch_from";
+ $SVN_LOG = $SVN = undef;
+ setup_git_svn();
+ # we can't assume SVN_URL exists at r+1:
+ $_revision = "0:$r";
+ fetch_lib();
+ exit 0;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+ ($r0, $parent) = find_rev_before($r,$id,1);
+ }
+ return unless (defined $r0 && defined $parent);
+ if (revisions_eq($branch_from, $r0, $r)) {
+ unlink $GIT_SVN_INDEX;
+ print STDERR "Found branch parent: ($GIT_SVN) $parent\n";
+ sys(qw/git-read-tree/, $parent);
+ return libsvn_fetch($parent, $paths, $rev,
+ $author, $date, $msg);
+ }
+ print STDERR "Nope, branch point not imported or unknown\n";
+ return undef;
+}
+
+sub libsvn_get_log {
+ my ($ra, @args) = @_;
+ if ($SVN::Core::VERSION le '1.2.0') {
+ splice(@args, 3, 1);
+ }
+ $ra->get_log(@args);
+}
+
+sub libsvn_new_tree {
+ if (my $log_entry = libsvn_find_parent_branch(@_)) {
+ return $log_entry;
+ }
+ my ($paths, $rev, $author, $date, $msg) = @_;
+ open my $gui, '| git-update-index -z --index-info' or croak $!;
+ libsvn_traverse($gui, '', $SVN_PATH, $rev);
+ close $gui or croak $?;
+ return libsvn_log_entry($rev, $author, $date, $msg);
+}
+
+sub find_graft_path_commit {
+ my ($tree_paths, $p1, $r1) = @_;
+ foreach my $x (keys %$tree_paths) {
+ next unless ($p1 =~ /^\Q$x\E/);
+ my $i = $tree_paths->{$x};
+ my ($r0, $parent) = find_rev_before($r1,$i,1);
+ return $parent if (defined $r0 && $r0 == $r1);
+ print STDERR "r$r1 of $i not imported\n";
+ next;
+ }
+ return undef;
+}
+
+sub find_graft_path_parents {
+ my ($grafts, $tree_paths, $c, $p0, $r0) = @_;
+ foreach my $x (keys %$tree_paths) {
+ next unless ($p0 =~ /^\Q$x\E/);
+ my $i = $tree_paths->{$x};
+ my ($r, $parent) = find_rev_before($r0, $i, 1);
+ if (defined $r && defined $parent && revisions_eq($x,$r,$r0)) {
+ my ($url_b, undef, $uuid_b) = cmt_metadata($c);
+ my ($url_a, undef, $uuid_a) = cmt_metadata($parent);
+ next if ($url_a && $url_b && $url_a eq $url_b &&
+ $uuid_b eq $uuid_a);
+ $grafts->{$c}->{$parent} = 1;
+ }
+ }
+}
+
+sub libsvn_graft_file_copies {
+ my ($grafts, $tree_paths, $path, $paths, $rev) = @_;
+ foreach (keys %$paths) {
+ my $i = $paths->{$_};
+ my ($m, $p0, $r0) = ($i->action, $i->copyfrom_path,
+ $i->copyfrom_rev);
+ next unless (defined $p0 && defined $r0);
+
+ my $p1 = $_;
+ $p1 =~ s#^/##;
+ $p0 =~ s#^/##;
+ my $c = find_graft_path_commit($tree_paths, $p1, $rev);
+ next unless $c;
+ find_graft_path_parents($grafts, $tree_paths, $c, $p0, $r0);
+ }
+}
+
+sub set_index {
+ my $old = $ENV{GIT_INDEX_FILE};
+ $ENV{GIT_INDEX_FILE} = shift;
+ return $old;
+}
+
+sub restore_index {
+ my ($old) = @_;
+ if (defined $old) {
+ $ENV{GIT_INDEX_FILE} = $old;
+ } else {
+ delete $ENV{GIT_INDEX_FILE};
+ }
+}
+
+sub libsvn_commit_cb {
+ my ($rev, $date, $committer, $c, $msg, $r_last, $cmt_last) = @_;
+ if ($_optimize_commits && $rev == ($r_last + 1)) {
+ my $log = libsvn_log_entry($rev,$committer,$date,$msg);
+ $log->{tree} = get_tree_from_treeish($c);
+ my $cmt = git_commit($log, $cmt_last, $c);
+ my @diff = safe_qx('git-diff-tree', $cmt, $c);
+ if (@diff) {
+ print STDERR "Trees differ: $cmt $c\n",
+ join('',@diff),"\n";
+ exit 1;
+ }
+ } else {
+ fetch("$rev=$c");
+ }
+}
+
+sub libsvn_ls_fullurl {
+ my $fullurl = shift;
+ my ($repo, $path) = repo_path_split($fullurl);
+ $SVN ||= libsvn_connect($repo);
+ my @ret;
+ my $pool = SVN::Pool->new;
+ my ($dirent, undef, undef) = $SVN->get_dir($path,
+ $SVN->get_latest_revnum, $pool);
+ foreach my $d (keys %$dirent) {
+ if ($dirent->{$d}->kind == $SVN::Node::dir) {
+ push @ret, "$d/"; # add '/' for compat with cli svn
+ }
+ }
+ $pool->clear;
+ return @ret;
+}
+
+
+sub libsvn_skip_unknown_revs {
+ my $err = shift;
+ my $errno = $err->apr_err();
+ # Maybe the branch we're tracking didn't
+ # exist when the repo started, so it's
+ # not an error if it doesn't, just continue
+ #
+ # Wonderfully consistent library, eh?
+ # 160013 - svn:// and file://
+ # 175002 - http(s)://
+ # More codes may be discovered later...
+ if ($errno == 175002 || $errno == 160013) {
+ return;
+ }
+ croak "Error from SVN, ($errno): ", $err->expanded_message,"\n";
+};
+
+# Tie::File seems to be prone to offset errors if revisions get sparse,
+# it's not that fast, either. Tie::File is also not in Perl 5.6. So
+# one of my favorite modules is out :< Next up would be one of the DBM
+# modules, but I'm not sure which is most portable... So I'll just
+# go with something that's plain-text, but still capable of
+# being randomly accessed. So here's my ultra-simple fixed-width
+# database. All records are 40 characters + "\n", so it's easy to seek
+# to a revision: (41 * rev) is the byte offset.
+# A record of 40 0s denotes an empty revision.
+# And yes, it's still pretty fast (faster than Tie::File).
+sub revdb_set {
+ my ($file, $rev, $commit) = @_;
+ length $commit == 40 or croak "arg3 must be a full SHA1 hexsum\n";
+ open my $fh, '+<', $file or croak $!;
+ my $offset = $rev * 41;
+ # assume that append is the common case:
+ seek $fh, 0, 2 or croak $!;
+ my $pos = tell $fh;
+ if ($pos < $offset) {
+ print $fh (('0' x 40),"\n") x (($offset - $pos) / 41);
+ }
+ seek $fh, $offset, 0 or croak $!;
+ print $fh $commit,"\n";
+ close $fh or croak $!;
+}
+
+sub revdb_get {
+ my ($file, $rev) = @_;
+ my $ret;
+ my $offset = $rev * 41;
+ open my $fh, '<', $file or croak $!;
+ seek $fh, $offset, 0;
+ if (tell $fh == $offset) {
+ $ret = readline $fh;
+ if (defined $ret) {
+ chomp $ret;
+ $ret = undef if ($ret =~ /^0{40}$/);
+ }
+ }
+ close $fh or croak $!;
+ return $ret;
+}
+
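A usage sketch for the two helpers above, against a throwaway file (the path is made up), spelling out the 41-byte record arithmetic from the comment:

    my $db = '/tmp/example.rev_db';           # hypothetical path
    open my $init, '>', $db or die $!;        # revdb_set() opens '+<', so the file must exist
    close $init;
    revdb_set($db, 5, 'deadbeef' x 5);        # 40-char id; record lives at byte 5*41 = 205
    print revdb_get($db, 5), "\n";            # the id written above
    print revdb_get($db, 3) ? "known\n" : "empty\n";  # revs 0..4 were zero-padded => empty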
+sub copy_remote_ref {
+ my $origin = $_cp_remote ? $_cp_remote : 'origin';
+ my $ref = "refs/remotes/$GIT_SVN";
+ if (safe_qx('git-ls-remote', $origin, $ref)) {
+ sys(qw/git fetch/, $origin, "$ref:$ref");
+ } else {
+ die "Unable to find remote reference: ",
+ "refs/remotes/$GIT_SVN on $origin\n";
+ }
+}
+
+package SVN::Git::Editor;
+use vars qw/@ISA/;
+use strict;
+use warnings;
+use Carp qw/croak/;
+use IO::File;
+
+sub new {
+ my $class = shift;
+ my $git_svn = shift;
+ my $self = SVN::Delta::Editor->new(@_);
+ bless $self, $class;
+ foreach (qw/svn_path c r ra /) {
+ die "$_ required!\n" unless (defined $git_svn->{$_});
+ $self->{$_} = $git_svn->{$_};
+ }
+ $self->{pool} = SVN::Pool->new;
+ $self->{bat} = { '' => $self->open_root($self->{r}, $self->{pool}) };
+ $self->{rm} = { };
+ require Digest::MD5;
+ return $self;
+}
+
+sub split_path {
+ return ($_[0] =~ m#^(.*?)/?([^/]+)$#);
+}
+
+sub repo_path {
+ (defined $_[1] && length $_[1]) ? "$_[0]->{svn_path}/$_[1]"
+ : $_[0]->{svn_path}
+}
+
+sub url_path {
+ my ($self, $path) = @_;
+ $self->{ra}->{url} . '/' . $self->repo_path($path);
+}
+
+sub rmdirs {
+ my ($self, $q) = @_;
+ my $rm = $self->{rm};
+ delete $rm->{''}; # we never delete the url we're tracking
+ return unless %$rm;
+
+ foreach (keys %$rm) {
+ my @d = split m#/#, $_;
+ my $c = shift @d;
+ $rm->{$c} = 1;
+ while (@d) {
+ $c .= '/' . shift @d;
+ $rm->{$c} = 1;
+ }
+ }
+ delete $rm->{$self->{svn_path}};
+ delete $rm->{''}; # we never delete the url we're tracking
+ return unless %$rm;
+
+ defined(my $pid = open my $fh,'-|') or croak $!;
+ if (!$pid) {
+ exec qw/git-ls-tree --name-only -r -z/, $self->{c} or croak $!;
+ }
+ local $/ = "\0";
+ my @svn_path = split m#/#, $self->{svn_path};
+ while (<$fh>) {
+ chomp;
+ my @dn = (@svn_path, (split m#/#, $_));
+ while (pop @dn) {
+ delete $rm->{join '/', @dn};
+ }
+ unless (%$rm) {
+ close $fh;
+ return;
+ }
+ }
+ close $fh;
+
+ my ($r, $p, $bat) = ($self->{r}, $self->{pool}, $self->{bat});
+ foreach my $d (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$rm) {
+ $self->close_directory($bat->{$d}, $p);
+ my ($dn) = ($d =~ m#^(.*?)/?(?:[^/]+)$#);
+ print "\tD+\t/$d/\n" unless $q;
+ $self->SUPER::delete_entry($d, $r, $bat->{$dn}, $p);
+ delete $bat->{$d};
+ }
+}
+
+sub open_or_add_dir {
+ my ($self, $full_path, $baton) = @_;
+ my $p = SVN::Pool->new;
+ my $t = $self->{ra}->check_path($full_path, $self->{r}, $p);
+ $p->clear;
+ if ($t == $SVN::Node::none) {
+ return $self->add_directory($full_path, $baton,
+ undef, -1, $self->{pool});
+ } elsif ($t == $SVN::Node::dir) {
+ return $self->open_directory($full_path, $baton,
+ $self->{r}, $self->{pool});
+ }
+ print STDERR "$full_path already exists in repository at ",
+ "r$self->{r} and it is not a directory (",
+ ($t == $SVN::Node::file ? 'file' : 'unknown'),"/$t)\n";
+ exit 1;
+}
+
+sub ensure_path {
+ my ($self, $path) = @_;
+ my $bat = $self->{bat};
+ $path = $self->repo_path($path);
+ return $bat->{''} unless (length $path);
+ my @p = split m#/+#, $path;
+ my $c = shift @p;
+ $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{''});
+ while (@p) {
+ my $c0 = $c;
+ $c .= '/' . shift @p;
+ $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{$c0});
+ }
+ return $bat->{$c};
+}
+
+sub A {
+ my ($self, $m, $q) = @_;
+ my ($dir, $file) = split_path($m->{file_b});
+ my $pbat = $self->ensure_path($dir);
+ my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
+ undef, -1);
+ print "\tA\t$m->{file_b}\n" unless $q;
+ $self->chg_file($fbat, $m);
+ $self->close_file($fbat,undef,$self->{pool});
+}
+
+sub C {
+ my ($self, $m, $q) = @_;
+ my ($dir, $file) = split_path($m->{file_b});
+ my $pbat = $self->ensure_path($dir);
+ my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
+ $self->url_path($m->{file_a}), $self->{r});
+ print "\tC\t$m->{file_a} => $m->{file_b}\n" unless $q;
+ $self->chg_file($fbat, $m);
+ $self->close_file($fbat,undef,$self->{pool});
+}
+
+sub delete_entry {
+ my ($self, $path, $pbat) = @_;
+ my $rpath = $self->repo_path($path);
+ my ($dir, $file) = split_path($rpath);
+ $self->{rm}->{$dir} = 1;
+ $self->SUPER::delete_entry($rpath, $self->{r}, $pbat, $self->{pool});
+}
+
+sub R {
+ my ($self, $m, $q) = @_;
+ my ($dir, $file) = split_path($m->{file_b});
+ my $pbat = $self->ensure_path($dir);
+ my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
+ $self->url_path($m->{file_a}), $self->{r});
+ print "\tR\t$m->{file_a} => $m->{file_b}\n" unless $q;
+ $self->chg_file($fbat, $m);
+ $self->close_file($fbat,undef,$self->{pool});
+
+ ($dir, $file) = split_path($m->{file_a});
+ $pbat = $self->ensure_path($dir);
+ $self->delete_entry($m->{file_a}, $pbat);
+}
+
+sub M {
+ my ($self, $m, $q) = @_;
+ my ($dir, $file) = split_path($m->{file_b});
+ my $pbat = $self->ensure_path($dir);
+ my $fbat = $self->open_file($self->repo_path($m->{file_b}),
+ $pbat,$self->{r},$self->{pool});
+ print "\t$m->{chg}\t$m->{file_b}\n" unless $q;
+ $self->chg_file($fbat, $m);
+ $self->close_file($fbat,undef,$self->{pool});
+}
+
+sub T { shift->M(@_) }
+
+sub change_file_prop {
+ my ($self, $fbat, $pname, $pval) = @_;
+ $self->SUPER::change_file_prop($fbat, $pname, $pval, $self->{pool});
+}
+
+sub chg_file {
+ my ($self, $fbat, $m) = @_;
+ if ($m->{mode_b} =~ /755$/ && $m->{mode_a} !~ /755$/) {
+ $self->change_file_prop($fbat,'svn:executable','*');
+ } elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
+ $self->change_file_prop($fbat,'svn:executable',undef);
+ }
+ my $fh = IO::File->new_tmpfile or croak $!;
+ if ($m->{mode_b} =~ /^120/) {
+ print $fh 'link ' or croak $!;
+ $self->change_file_prop($fbat,'svn:special','*');
+ } elsif ($m->{mode_a} =~ /^120/ && $m->{mode_b} !~ /^120/) {
+ $self->change_file_prop($fbat,'svn:special',undef);
+ }
+ defined(my $pid = fork) or croak $!;
+ if (!$pid) {
+ open STDOUT, '>&', $fh or croak $!;
+ exec qw/git-cat-file blob/, $m->{sha1_b} or croak $!;
+ }
+ waitpid $pid, 0;
+ croak $? if $?;
+ $fh->flush == 0 or croak $!;
+ seek $fh, 0, 0 or croak $!;
+
+ my $md5 = Digest::MD5->new;
+ $md5->addfile($fh) or croak $!;
+ seek $fh, 0, 0 or croak $!;
+
+ my $exp = $md5->hexdigest;
+ my $atd = $self->apply_textdelta($fbat, undef, $self->{pool});
+ my $got = SVN::TxDelta::send_stream($fh, @$atd, $self->{pool});
+ die "Checksum mismatch\nexpected: $exp\ngot: $got\n" if ($got ne $exp);
+
+ close $fh or croak $!;
+}
+
+sub D {
+ my ($self, $m, $q) = @_;
+ my ($dir, $file) = split_path($m->{file_b});
+ my $pbat = $self->ensure_path($dir);
+ print "\tD\t$m->{file_b}\n" unless $q;
+ $self->delete_entry($m->{file_b}, $pbat);
+}
+
+sub close_edit {
+ my ($self) = @_;
+ my ($p,$bat) = ($self->{pool}, $self->{bat});
+ foreach (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$bat) {
+ $self->close_directory($bat->{$_}, $p);
+ }
+ $self->SUPER::close_edit($p);
+ $p->clear;
+}
+
+sub abort_edit {
+ my ($self) = @_;
+ $self->SUPER::abort_edit($self->{pool});
+ $self->{pool}->clear;
+}
+
+__END__
+
+Data structures:
+
+$svn_log hashref (as returned by svn_log_raw)
+{
+ fh => file handle of the log file,
+ state => state of the log file parser (sep/msg/rev/msg_start...)
+}
+
+$log_msg hashref as returned by next_log_entry($svn_log)
+{
+ msg => 'whitespace-formatted log entry
+', # trailing newline is preserved
+ revision => '8', # integer
+ date => '2004-02-24T17:01:44.108345Z', # commit date
+ author => 'committer name'
+};
+
+
+@mods = array of diff-index line hashes; each element represents one line
+ of diff-index output
+
+diff-index line ($m hash)
+{
+ mode_a => first column of diff-index output, no leading ':',
+ mode_b => second column of diff-index output,
+ sha1_b => sha1sum of the final blob,
+ chg => change type [MCRADT],
+ file_a => original file name of a file (iff chg is 'C' or 'R')
+ file_b => new/current file name of a file (any chg)
+}
+;
+
+# retval of read_url_paths{,_all}();
+$l_map = {
+ # repository root url
+ 'https://svn.musicpd.org' => {
+ # repository path # GIT_SVN_ID
+ 'mpd/trunk' => 'trunk',
+ 'mpd/tags/0.11.5' => 'tags/0.11.5',
+ },
+}
+
+Notes:
+	I don't trust the each() function unless I created %hash myself,
+	because the internal iterator may not have started at the base.
my($author_name,$author_email,$dest);
my(@old,@new,@parents);
- if (not defined $author) {
+ if (not defined $author or $author eq "") {
$author_name = $author_email = "unknown";
} elsif (defined $users_file) {
die "User $author is not listed in $users_file\n"
#include "git-compat-util.h"
#include "exec_cmd.h"
#include "cache.h"
+#include "quote.h"
#include "builtin.h"
+const char git_usage_string[] =
+ "git [--version] [--exec-path[=GIT_EXEC_PATH]] [--help] COMMAND [ ARGS ]";
+
static void prepend_to_path(const char *dir, int len)
{
- char *path, *old_path = getenv("PATH");
+ const char *old_path = getenv("PATH");
+ char *path;
int path_len = len;
if (!old_path)
setenv("PATH", path, 1);
}
+static int handle_options(const char*** argv, int* argc)
+{
+ int handled = 0;
+
+ while (*argc > 0) {
+ const char *cmd = (*argv)[0];
+ if (cmd[0] != '-')
+ break;
+
+ /*
+ * For legacy reasons, the "version" and "help"
+ * commands can be written with "--" prepended
+ * to make them look like flags.
+ */
+ if (!strcmp(cmd, "--help") || !strcmp(cmd, "--version"))
+ break;
+
+ /*
+ * Check remaining flags.
+ */
+ if (!strncmp(cmd, "--exec-path", 11)) {
+ cmd += 11;
+ if (*cmd == '=')
+ git_set_exec_path(cmd + 1);
+ else {
+ puts(git_exec_path());
+ exit(0);
+ }
+ } else if (!strcmp(cmd, "-p") || !strcmp(cmd, "--paginate")) {
+ setup_pager();
+ } else if (!strcmp(cmd, "--git-dir")) {
+ if (*argc < 1)
+ return -1;
+ setenv("GIT_DIR", (*argv)[1], 1);
+ (*argv)++;
+ (*argc)--;
+ } else if (!strncmp(cmd, "--git-dir=", 10)) {
+ setenv("GIT_DIR", cmd + 10, 1);
+ } else if (!strcmp(cmd, "--bare")) {
+ static char git_dir[1024];
+ setenv("GIT_DIR", getcwd(git_dir, 1024), 1);
+ } else {
+ fprintf(stderr, "Unknown option: %s\n", cmd);
+ usage(git_usage_string);
+ }
+
+ (*argv)++;
+ (*argc)--;
+ handled++;
+ }
+ return handled;
+}
+
static const char *alias_command;
static char *alias_string = NULL;
static int handle_alias(int *argcp, const char ***argv)
{
- int nongit = 0, ret = 0;
+ int nongit = 0, ret = 0, saved_errno = errno;
const char *subdir;
+ int count, option_count;
+ const char** new_argv;
subdir = setup_git_directory_gently(&nongit);
- if (!nongit) {
- int count;
- const char** new_argv;
-
- alias_command = (*argv)[0];
- git_config(git_alias_config);
- if (alias_string) {
-
- count = split_cmdline(alias_string, &new_argv);
-
- if (count < 1)
- die("empty alias for %s", alias_command);
-
- if (!strcmp(alias_command, new_argv[0]))
- die("recursive alias: %s", alias_command);
- /* insert after command name */
- if (*argcp > 1) {
- new_argv = realloc(new_argv, sizeof(char*) *
- (count + *argcp));
- memcpy(new_argv + count, *argv + 1,
- sizeof(char*) * *argcp);
+ alias_command = (*argv)[0];
+ git_config(git_alias_config);
+ if (alias_string) {
+ count = split_cmdline(alias_string, &new_argv);
+ option_count = handle_options(&new_argv, &count);
+ memmove(new_argv - option_count, new_argv,
+ count * sizeof(char *));
+ new_argv -= option_count;
+
+ if (count < 1)
+ die("empty alias for %s", alias_command);
+
+ if (!strcmp(alias_command, new_argv[0]))
+ die("recursive alias: %s", alias_command);
+
+ if (getenv("GIT_TRACE")) {
+ int i;
+ fprintf(stderr, "trace: alias expansion: %s =>",
+ alias_command);
+ for (i = 0; i < count; ++i) {
+ fputc(' ', stderr);
+ sq_quote_print(stderr, new_argv[i]);
}
+ fputc('\n', stderr);
+ fflush(stderr);
+ }
- *argv = new_argv;
- *argcp += count - 1;
+ new_argv = realloc(new_argv, sizeof(char*) *
+ (count + *argcp + 1));
+ /* insert after command name */
+ memcpy(new_argv + count, *argv + 1, sizeof(char*) * *argcp);
+ new_argv[count+*argcp] = NULL;
- ret = 1;
- }
+ *argv = new_argv;
+ *argcp += count - 1;
+
+ ret = 1;
}
if (subdir)
chdir(subdir);
+ errno = saved_errno;
+
return ret;
}
const char git_version_string[] = GIT_VERSION;
+#define RUN_SETUP (1<<0)
+#define USE_PAGER (1<<1)
+
static void handle_internal_command(int argc, const char **argv, char **envp)
{
const char *cmd = argv[0];
static struct cmd_struct {
const char *cmd;
- int (*fn)(int, const char **, char **);
+ int (*fn)(int, const char **, const char *);
+ int option;
} commands[] = {
- { "version", cmd_version },
- { "help", cmd_help },
- { "log", cmd_log },
- { "whatchanged", cmd_whatchanged },
- { "show", cmd_show },
- { "push", cmd_push },
- { "format-patch", cmd_format_patch },
+ { "add", cmd_add, RUN_SETUP },
+ { "apply", cmd_apply },
+ { "cat-file", cmd_cat_file, RUN_SETUP },
+ { "checkout-index", cmd_checkout_index, RUN_SETUP },
+ { "check-ref-format", cmd_check_ref_format },
+ { "commit-tree", cmd_commit_tree, RUN_SETUP },
{ "count-objects", cmd_count_objects },
- { "diff", cmd_diff },
- { "grep", cmd_grep },
- { "rm", cmd_rm },
- { "add", cmd_add },
- { "rev-list", cmd_rev_list },
- { "init-db", cmd_init_db },
+ { "diff", cmd_diff, RUN_SETUP },
+ { "diff-files", cmd_diff_files, RUN_SETUP },
+ { "diff-index", cmd_diff_index, RUN_SETUP },
+ { "diff-stages", cmd_diff_stages, RUN_SETUP },
+ { "diff-tree", cmd_diff_tree, RUN_SETUP },
+ { "fmt-merge-msg", cmd_fmt_merge_msg, RUN_SETUP },
+ { "format-patch", cmd_format_patch, RUN_SETUP },
{ "get-tar-commit-id", cmd_get_tar_commit_id },
- { "upload-tar", cmd_upload_tar },
- { "check-ref-format", cmd_check_ref_format },
- { "ls-files", cmd_ls_files },
- { "ls-tree", cmd_ls_tree },
- { "tar-tree", cmd_tar_tree },
- { "read-tree", cmd_read_tree },
- { "commit-tree", cmd_commit_tree },
- { "apply", cmd_apply },
- { "show-branch", cmd_show_branch },
- { "diff-files", cmd_diff_files },
- { "diff-index", cmd_diff_index },
- { "diff-stages", cmd_diff_stages },
- { "diff-tree", cmd_diff_tree },
- { "cat-file", cmd_cat_file },
- { "rev-parse", cmd_rev_parse },
- { "write-tree", cmd_write_tree },
- { "mailsplit", cmd_mailsplit },
+ { "grep", cmd_grep, RUN_SETUP },
+ { "help", cmd_help },
+ { "init-db", cmd_init_db },
+ { "log", cmd_log, RUN_SETUP | USE_PAGER },
+ { "ls-files", cmd_ls_files, RUN_SETUP },
+ { "ls-tree", cmd_ls_tree, RUN_SETUP },
{ "mailinfo", cmd_mailinfo },
+ { "mailsplit", cmd_mailsplit },
+ { "mv", cmd_mv, RUN_SETUP },
+ { "name-rev", cmd_name_rev, RUN_SETUP },
+ { "pack-objects", cmd_pack_objects, RUN_SETUP },
+ { "prune", cmd_prune, RUN_SETUP },
+ { "prune-packed", cmd_prune_packed, RUN_SETUP },
+ { "push", cmd_push, RUN_SETUP },
+ { "read-tree", cmd_read_tree, RUN_SETUP },
+ { "repo-config", cmd_repo_config },
+ { "rev-list", cmd_rev_list, RUN_SETUP },
+ { "rev-parse", cmd_rev_parse, RUN_SETUP },
+ { "rm", cmd_rm, RUN_SETUP },
+ { "show-branch", cmd_show_branch, RUN_SETUP },
+ { "show", cmd_show, RUN_SETUP | USE_PAGER },
{ "stripspace", cmd_stripspace },
- { "update-index", cmd_update_index },
- { "update-ref", cmd_update_ref }
+ { "symbolic-ref", cmd_symbolic_ref, RUN_SETUP },
+ { "tar-tree", cmd_tar_tree, RUN_SETUP },
+ { "unpack-objects", cmd_unpack_objects, RUN_SETUP },
+ { "update-index", cmd_update_index, RUN_SETUP },
+ { "update-ref", cmd_update_ref, RUN_SETUP },
+ { "upload-tar", cmd_upload_tar },
+ { "version", cmd_version },
+ { "whatchanged", cmd_whatchanged, RUN_SETUP | USE_PAGER },
+ { "write-tree", cmd_write_tree, RUN_SETUP },
+ { "verify-pack", cmd_verify_pack },
};
int i;
for (i = 0; i < ARRAY_SIZE(commands); i++) {
struct cmd_struct *p = commands+i;
+ const char *prefix;
if (strcmp(p->cmd, cmd))
continue;
- exit(p->fn(argc, argv, envp));
+
+ prefix = NULL;
+ if (p->option & RUN_SETUP)
+ prefix = setup_git_directory();
+ if (p->option & USE_PAGER)
+ setup_pager();
+ if (getenv("GIT_TRACE")) {
+ int i;
+ fprintf(stderr, "trace: built-in: git");
+ for (i = 0; i < argc; ++i) {
+ fputc(' ', stderr);
+ sq_quote_print(stderr, argv[i]);
+ }
+ putc('\n', stderr);
+ fflush(stderr);
+ }
+
+ exit(p->fn(argc, argv, prefix));
}
}
{
const char *cmd = argv[0];
char *slash = strrchr(cmd, '/');
- char git_command[PATH_MAX + 1];
const char *exec_path = NULL;
int done_alias = 0;
die("cannot handle %s internally", cmd);
}
- /* Default command: "help" */
- cmd = "help";
-
/* Look for flags.. */
- while (argc > 1) {
- cmd = *++argv;
- argc--;
-
- if (strncmp(cmd, "--", 2))
- break;
-
- cmd += 2;
-
- /*
- * For legacy reasons, the "version" and "help"
- * commands can be written with "--" prepended
- * to make them look like flags.
- */
- if (!strcmp(cmd, "help"))
- break;
- if (!strcmp(cmd, "version"))
- break;
-
- /*
- * Check remaining flags (which by now must be
- * "--exec-path", but maybe we will accept
- * other arguments some day)
- */
- if (!strncmp(cmd, "exec-path", 9)) {
- cmd += 9;
- if (*cmd == '=') {
- git_set_exec_path(cmd + 1);
- continue;
- }
- puts(git_exec_path());
- exit(0);
- }
- cmd_usage(0, NULL, NULL);
+ argv++;
+ argc--;
+ handle_options(&argv, &argc);
+ if (argc > 0) {
+ if (!strncmp(argv[0], "--", 2))
+ argv[0] += 2;
+ } else {
+ /* Default command: "help" */
+ argv[0] = "help";
+ argc = 1;
}
- argv[0] = cmd;
+ cmd = argv[0];
/*
* We search for git commands in the following order:
}
if (errno == ENOENT)
- cmd_usage(0, exec_path, "'%s' is not a git-command", cmd);
+ help_unknown_cmd(cmd);
fprintf(stderr, "Failed to run command '%s': %s\n",
- git_command, strerror(errno));
+ cmd, strerror(errno));
return 1;
}
proc readrefs {} {
global tagids idtags headids idheads tagcontents
- global otherrefids idotherrefs
+ global otherrefids idotherrefs mainhead
foreach v {tagids idtags headids idheads otherrefids idotherrefs} {
catch {unset $v}
}
}
close $refd
+ set mainhead {}
+ catch {
+ set thehead [exec git symbolic-ref HEAD]
+ if {[string match "refs/heads/*" $thehead]} {
+ set mainhead [string range $thehead 11 end]
+ }
+ }
}
proc show_error {w top msg} {
global rowctxmenu mergemax wrapcomment
global highlight_files gdttype
global searchstring sstring
+ global bgcolor fgcolor bglist fglist diffcolors
menu .bar
.bar add cascade -label "File" -menu .bar.file
.ctop add .ctop.top
set canv .ctop.top.clist.canv
canvas $canv -height $geometry(canvh) -width $geometry(canv1) \
- -bg white -bd 0 \
+ -background $bgcolor -bd 0 \
-yscrollincr $linespc -yscrollcommand "scrollcanv $cscroll"
.ctop.top.clist add $canv
set canv2 .ctop.top.clist.canv2
canvas $canv2 -height $geometry(canvh) -width $geometry(canv2) \
- -bg white -bd 0 -yscrollincr $linespc
+ -background $bgcolor -bd 0 -yscrollincr $linespc
.ctop.top.clist add $canv2
set canv3 .ctop.top.clist.canv3
canvas $canv3 -height $geometry(canvh) -width $geometry(canv3) \
- -bg white -bd 0 -yscrollincr $linespc
+ -background $bgcolor -bd 0 -yscrollincr $linespc
.ctop.top.clist add $canv3
bind .ctop.top.clist <Configure> {resizeclistpanes %W %w}
+ lappend bglist $canv $canv2 $canv3
set sha1entry .ctop.top.bar.sha1
set entries $sha1entry
trace add variable searchstring write incrsearch
pack $sstring -side left -expand 1 -fill x
set ctext .ctop.cdet.left.ctext
- text $ctext -bg white -state disabled -font $textfont \
+ text $ctext -background $bgcolor -foreground $fgcolor \
+ -state disabled -font $textfont \
-width $geometry(ctextw) -height $geometry(ctexth) \
-yscrollcommand scrolltext -wrap none
scrollbar .ctop.cdet.left.sb -command "$ctext yview"
pack .ctop.cdet.left.sb -side right -fill y
pack $ctext -side left -fill both -expand 1
.ctop.cdet add .ctop.cdet.left
+ lappend bglist $ctext
+ lappend fglist $ctext
$ctext tag conf comment -wrap $wrapcomment
$ctext tag conf filesep -font [concat $textfont bold] -back "#aaaaaa"
- $ctext tag conf hunksep -fore blue
- $ctext tag conf d0 -fore red
- $ctext tag conf d1 -fore "#00a000"
+ $ctext tag conf hunksep -fore [lindex $diffcolors 2]
+ $ctext tag conf d0 -fore [lindex $diffcolors 0]
+ $ctext tag conf d1 -fore [lindex $diffcolors 1]
$ctext tag conf m0 -fore red
$ctext tag conf m1 -fore blue
$ctext tag conf m2 -fore green
pack .ctop.cdet.right.mode -side top -fill x
set cflist .ctop.cdet.right.cfiles
set indent [font measure $mainfont "nn"]
- text $cflist -width $geometry(cflistw) -background white -font $mainfont \
+ text $cflist -width $geometry(cflistw) \
+ -background $bgcolor -foreground $fgcolor \
+ -font $mainfont \
-tabs [list $indent [expr {2 * $indent}]] \
-yscrollcommand ".ctop.cdet.right.sb set" \
-cursor [. cget -cursor] \
-spacing1 1 -spacing3 1
+ lappend bglist $cflist
+ lappend fglist $cflist
scrollbar .ctop.cdet.right.sb -command "$cflist yview"
pack .ctop.cdet.right.sb -side right -fill y
pack $cflist -side left -fill both -expand 1
global maxwidth showneartags
global viewname viewfiles viewargs viewperm nextviewnum
global cmitmode wrapcomment
+ global colors bgcolor fgcolor diffcolors
if {$stuffsaved} return
if {![winfo viewable .]} return
puts $f [list set cmitmode $cmitmode]
puts $f [list set wrapcomment $wrapcomment]
puts $f [list set showneartags $showneartags]
+ puts $f [list set bgcolor $bgcolor]
+ puts $f [list set fgcolor $fgcolor]
+ puts $f [list set colors $colors]
+ puts $f [list set diffcolors $diffcolors]
puts $f "set geometry(width) [winfo width .ctop]"
puts $f "set geometry(height) [winfo height .ctop]"
puts $f "set geometry(canv1) [expr {[winfo width $canv]-2}]"
}
proc drawcmittext {id row col rmx} {
- global linespc canv canv2 canv3 canvy0
+ global linespc canv canv2 canv3 canvy0 fgcolor
global commitlisted commitinfo rowidlist
global rowtextx idpos idtags idheads idotherrefs
global linehtag linentag linedtag
- global mainfont canvxmax boldrows boldnamerows
+ global mainfont canvxmax boldrows boldnamerows fgcolor
set ofill [expr {[lindex $commitlisted $row]? "blue": "white"}]
set x [xc $row $col]
set orad [expr {$linespc / 3}]
set t [$canv create oval [expr {$x - $orad}] [expr {$y - $orad}] \
[expr {$x + $orad - 1}] [expr {$y + $orad - 1}] \
- -fill $ofill -outline black -width 1]
+ -fill $ofill -outline $fgcolor -width 1 -tags circle]
$canv raise $t
$canv bind $t <1> {selcanvline {} %x %y}
set xt [xc $row [llength [lindex $rowidlist $row]]]
lappend nfont bold
}
}
- set linehtag($row) [$canv create text $xt $y -anchor w \
- -text $headline -font $font]
+ set linehtag($row) [$canv create text $xt $y -anchor w -fill $fgcolor \
+ -text $headline -font $font -tags text]
$canv bind $linehtag($row) <Button-3> "rowmenu %X %Y $id"
- set linentag($row) [$canv2 create text 3 $y -anchor w \
- -text $name -font $nfont]
- set linedtag($row) [$canv3 create text 3 $y -anchor w \
- -text $date -font $mainfont]
+ set linentag($row) [$canv2 create text 3 $y -anchor w -fill $fgcolor \
+ -text $name -font $nfont -tags text]
+ set linedtag($row) [$canv3 create text 3 $y -anchor w -fill $fgcolor \
+ -text $date -font $mainfont -tags text]
set xr [expr {$xt + [font measure $mainfont $headline]}]
if {$xr > $canvxmax} {
set canvxmax $xr
}
proc drawtags {id x xt y1} {
- global idtags idheads idotherrefs
+ global idtags idheads idotherrefs mainhead
global linespc lthickness
- global canv mainfont commitrow rowtextx curview
+ global canv mainfont commitrow rowtextx curview fgcolor bgcolor
set marks {}
set ntags 0
set yb [expr {$yt + $linespc - 1}]
set xvals {}
set wvals {}
+ set i -1
foreach tag $marks {
- set wid [font measure $mainfont $tag]
+ incr i
+ if {$i >= $ntags && $i < $ntags + $nheads && $tag eq $mainhead} {
+ set wid [font measure [concat $mainfont bold] $tag]
+ } else {
+ set wid [font measure $mainfont $tag]
+ }
lappend xvals $xt
lappend wvals $wid
set xt [expr {$xt + $delta + $wid + $lthickness + $linespc}]
foreach tag $marks x $xvals wid $wvals {
set xl [expr {$x + $delta}]
set xr [expr {$x + $delta + $wid + $lthickness}]
+ set font $mainfont
if {[incr ntags -1] >= 0} {
# draw a tag
set t [$canv create polygon $x [expr {$yt + $delta}] $xl $yt \
# draw a head or other ref
if {[incr nheads -1] >= 0} {
set col green
+ if {$tag eq $mainhead} {
+ lappend font bold
+ }
} else {
set col "#ddddff"
}
-width 0 -fill "#ffddaa" -tags tag.$id
}
}
- set t [$canv create text $xl $y1 -anchor w -text $tag \
- -font $mainfont -tags tag.$id]
+ set t [$canv create text $xl $y1 -anchor w -text $tag -fill $fgcolor \
+ -font $font -tags [list tag.$id text]]
if {$ntags >= 0} {
$canv bind $t <1> [list showtag $tag 1]
}
}
proc show_status {msg} {
- global canv mainfont
+ global canv mainfont fgcolor
clear_display
- $canv create text 3 3 -anchor nw -text $msg -font $mainfont -tags textitems
+ $canv create text 3 3 -anchor nw -text $msg -font $mainfont \
+ -tags text -fill $fgcolor
}
proc finishcommits {} {
set t [$canv create rectangle $x0 $y0 $x1 $y1 \
-fill \#ffff80 -outline black -width 1 -tags hover]
$canv raise $t
- set t [$canv create text $x $y -anchor nw -text $text -tags hover -font $mainfont]
+ set t [$canv create text $x $y -anchor nw -text $text -tags hover \
+ -font $mainfont]
$canv raise $t
}
proc redrawtags {id} {
global canv linehtag commitrow idpos selectedline curview
- global mainfont
+ global mainfont canvxmax
if {![info exists commitrow($curview,$id)]} return
drawcmitrow $commitrow($curview,$id)
proc doprefs {} {
global maxwidth maxgraphpct diffopts
global oldprefs prefstop showneartags
+ global bgcolor fgcolor ctext diffcolors
set top .gitkprefs
set prefstop $top
-font optionfont
spinbox $top.maxpct -from 1 -to 100 -width 4 -textvariable maxgraphpct
grid x $top.maxpctl $top.maxpct -sticky w
+
label $top.ddisp -text "Diff display options"
grid $top.ddisp - -sticky w -pady 10
label $top.diffoptl -text "Options for diff program" \
checkbutton $top.ntag.b -variable showneartags
pack $top.ntag.b $top.ntag.l -side left
grid x $top.ntag -sticky w
+
+ label $top.cdisp -text "Colors: press to choose"
+ grid $top.cdisp - -sticky w -pady 10
+ label $top.bg -padx 40 -relief sunk -background $bgcolor
+ button $top.bgbut -text "Background" -font optionfont \
+ -command [list choosecolor bgcolor 0 $top.bg background setbg]
+ grid x $top.bgbut $top.bg -sticky w
+ label $top.fg -padx 40 -relief sunk -background $fgcolor
+ button $top.fgbut -text "Foreground" -font optionfont \
+ -command [list choosecolor fgcolor 0 $top.fg foreground setfg]
+ grid x $top.fgbut $top.fg -sticky w
+ label $top.diffold -padx 40 -relief sunk -background [lindex $diffcolors 0]
+ button $top.diffoldbut -text "Diff: old lines" -font optionfont \
+ -command [list choosecolor diffcolors 0 $top.diffold "diff old lines" \
+ [list $ctext tag conf d0 -foreground]]
+ grid x $top.diffoldbut $top.diffold -sticky w
+ label $top.diffnew -padx 40 -relief sunk -background [lindex $diffcolors 1]
+ button $top.diffnewbut -text "Diff: new lines" -font optionfont \
+ -command [list choosecolor diffcolors 1 $top.diffnew "diff new lines" \
+ [list $ctext tag conf d1 -foreground]]
+ grid x $top.diffnewbut $top.diffnew -sticky w
+ label $top.hunksep -padx 40 -relief sunk -background [lindex $diffcolors 2]
+ button $top.hunksepbut -text "Diff: hunk header" -font optionfont \
+ -command [list choosecolor diffcolors 2 $top.hunksep \
+ "diff hunk header" \
+ [list $ctext tag conf hunksep -foreground]]
+ grid x $top.hunksepbut $top.hunksep -sticky w
+
frame $top.buts
button $top.buts.ok -text "OK" -command prefsok
button $top.buts.can -text "Cancel" -command prefscan
grid $top.buts - - -pady 10 -sticky ew
}
+proc choosecolor {v vi w x cmd} {
+ global $v
+
+ set c [tk_chooseColor -initialcolor [lindex [set $v] $vi] \
+ -title "Gitk: choose color for $x"]
+ if {$c eq {}} return
+ $w conf -background $c
+ lset $v $vi $c
+ eval $cmd $c
+}
+
+proc setbg {c} {
+ global bglist
+
+ foreach w $bglist {
+ $w conf -background $c
+ }
+}
+
+proc setfg {c} {
+ global fglist canv
+
+ foreach w $fglist {
+ $w conf -foreground $c
+ }
+ allcanvs itemconf text -fill $c
+ $canv itemconf circle -outline $c
+}
+
proc prefscan {} {
global maxwidth maxgraphpct diffopts
global oldprefs prefstop showneartags
set showneartags 1
set colors {green red blue magenta darkgrey brown orange}
+set bgcolor white
+set fgcolor black
+set diffcolors {red "#00a000" blue}
catch {source ~/.gitk}
From the git version 1.4.0 gitweb is bundled with git.
-Any comment/question/concern to:
+
+How to configure gitweb for your local system:
+
+You can specify the following configuration variables when building GIT:
+ * GITWEB_SITENAME
+   Shown in the title of all generated pages; defaults to the server's name.
+ * GITWEB_PROJECTROOT
+   The root directory for all projects shown by gitweb.
+ * GITWEB_LIST
+   Points to a directory to scan for projects (defaults to the project root)
+   or to a file listing the projects explicitly.
+ * GITWEB_HOMETEXT
+   Points to an .html file which is included on the gitweb project
+   overview page.
+ * GITWEB_CSS
+   Points to the location where you put gitweb.css on your web server.
+ * GITWEB_LOGO
+   Points to the location where you put git-logo.png on your web server.
+ * GITWEB_CONFIG
+   Points to a file that will be loaded with 'require'. If the environment
+   variable GITWEB_CONFIG is set when gitweb.cgi is executed, the file it
+   names is loaded instead of the file specified when gitweb.cgi was
+   created (a sample file is sketched below).
+
+Originally written by:
Kay Sievers <kay.sievers@vrfy.org>
+
+Any comment/question/concern to:
+ Git mailing list <git@vger.kernel.org>
+
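Because GITWEB_CONFIG is pulled in with a plain 'require', it is just a Perl fragment that overrides settings in gitweb.cgi. A hedged sketch of such a file; the variable names and paths are illustrative and must match whatever the installed gitweb.cgi actually defines:

    # hypothetical gitweb config file, loaded via require
    $projectroot   = "/srv/git";                 # where the bare repositories live
    $projects_list = "/srv/git/projects.list";   # explicit project list (optional)
    $home_text     = "indextext.html";           # blurb included on the index page
    1;   # a require'd file must return a true value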
+++ /dev/null
-#!/usr/bin/perl
-
-# gitweb - simple web interface to track changes in git repositories
-#
-# (C) 2005-2006, Kay Sievers <kay.sievers@vrfy.org>
-# (C) 2005, Christian Gierke
-#
-# This program is licensed under the GPLv2
-
-use strict;
-use warnings;
-use CGI qw(:standard :escapeHTML -nosticky);
-use CGI::Util qw(unescape);
-use CGI::Carp qw(fatalsToBrowser);
-use Encode;
-use Fcntl ':mode';
-binmode STDOUT, ':utf8';
-
-my $cgi = new CGI;
-my $version = "267";
-my $my_url = $cgi->url();
-my $my_uri = $cgi->url(-absolute => 1);
-my $rss_link = "";
-
-# location of the git-core binaries
-my $gitbin = "/usr/bin";
-
-# absolute fs-path which will be prepended to the project path
-#my $projectroot = "/pub/scm";
-my $projectroot = "/home/kay/public_html/pub/scm";
-
-# version of the git-core binaries
-my $git_version = qx($gitbin/git --version);
-if ($git_version =~ m/git version (.*)$/) {
- $git_version = $1;
-} else {
- $git_version = "unknown";
-}
-
-# location for temporary files needed for diffs
-my $git_temp = "/tmp/gitweb";
-
-# target of the home link on top of all pages
-my $home_link = $my_uri;
-
-# html text to include at home page
-my $home_text = "indextext.html";
-
-# URI of default stylesheet
-my $stylesheet = "gitweb.css";
-
-# source of projects list
-#my $projects_list = $projectroot;
-my $projects_list = "index/index.aux";
-
-# default blob_plain mimetype and default charset for text/plain blob
-my $default_blob_plain_mimetype = 'text/plain';
-my $default_text_plain_charset = undef;
-
-# file to use for guessing MIME types before trying /etc/mime.types
-# (relative to the current git repository)
-my $mimetypes_file = undef;
-
-
-# input validation and dispatch
-my $action = $cgi->param('a');
-if (defined $action) {
- if ($action =~ m/[^0-9a-zA-Z\.\-_]/) {
- undef $action;
- die_error(undef, "Invalid action parameter.");
- }
- if ($action eq "git-logo.png") {
- git_logo();
- exit;
- } elsif ($action eq "opml") {
- git_opml();
- exit;
- }
-}
-
-my $order = $cgi->param('o');
-if (defined $order) {
- if ($order =~ m/[^0-9a-zA-Z_]/) {
- undef $order;
- die_error(undef, "Invalid order parameter.");
- }
-}
-
-my $project = ($cgi->param('p') || $ENV{'PATH_INFO'});
-if (defined $project) {
- $project =~ s|^/||; $project =~ s|/$||;
- $project = validate_input($project);
- if (!defined($project)) {
- die_error(undef, "Invalid project parameter.");
- }
- if (!(-d "$projectroot/$project")) {
- undef $project;
- die_error(undef, "No such directory.");
- }
- if (!(-e "$projectroot/$project/HEAD")) {
- undef $project;
- die_error(undef, "No such project.");
- }
- $rss_link = "<link rel=\"alternate\" title=\"" . esc_param($project) . " log\" href=\"" .
- "$my_uri?" . esc_param("p=$project;a=rss") . "\" type=\"application/rss+xml\"/>";
- $ENV{'GIT_DIR'} = "$projectroot/$project";
-} else {
- git_project_list();
- exit;
-}
-
-my $file_name = $cgi->param('f');
-if (defined $file_name) {
- $file_name = validate_input($file_name);
- if (!defined($file_name)) {
- die_error(undef, "Invalid file parameter.");
- }
-}
-
-my $hash = $cgi->param('h');
-if (defined $hash) {
- $hash = validate_input($hash);
- if (!defined($hash)) {
- die_error(undef, "Invalid hash parameter.");
- }
-}
-
-my $hash_parent = $cgi->param('hp');
-if (defined $hash_parent) {
- $hash_parent = validate_input($hash_parent);
- if (!defined($hash_parent)) {
- die_error(undef, "Invalid hash parent parameter.");
- }
-}
-
-my $hash_base = $cgi->param('hb');
-if (defined $hash_base) {
- $hash_base = validate_input($hash_base);
- if (!defined($hash_base)) {
- die_error(undef, "Invalid hash base parameter.");
- }
-}
-
-my $page = $cgi->param('pg');
-if (defined $page) {
- if ($page =~ m/[^0-9]$/) {
- undef $page;
- die_error(undef, "Invalid page parameter.");
- }
-}
-
-my $searchtext = $cgi->param('s');
-if (defined $searchtext) {
- if ($searchtext =~ m/[^a-zA-Z0-9_\.\/\-\+\:\@ ]/) {
- undef $searchtext;
- die_error(undef, "Invalid search parameter.");
- }
- $searchtext = quotemeta $searchtext;
-}
-
-sub validate_input {
- my $input = shift;
-
- if ($input =~ m/^[0-9a-fA-F]{40}$/) {
- return $input;
- }
- if ($input =~ m/(^|\/)(|\.|\.\.)($|\/)/) {
- return undef;
- }
- if ($input =~ m/[^a-zA-Z0-9_\x80-\xff\ \t\.\/\-\+\#\~\%]/) {
- return undef;
- }
- return $input;
-}
-
-if (!defined $action || $action eq "summary") {
- git_summary();
- exit;
-} elsif ($action eq "heads") {
- git_heads();
- exit;
-} elsif ($action eq "tags") {
- git_tags();
- exit;
-} elsif ($action eq "blob") {
- git_blob();
- exit;
-} elsif ($action eq "blob_plain") {
- git_blob_plain();
- exit;
-} elsif ($action eq "tree") {
- git_tree();
- exit;
-} elsif ($action eq "rss") {
- git_rss();
- exit;
-} elsif ($action eq "commit") {
- git_commit();
- exit;
-} elsif ($action eq "log") {
- git_log();
- exit;
-} elsif ($action eq "blobdiff") {
- git_blobdiff();
- exit;
-} elsif ($action eq "blobdiff_plain") {
- git_blobdiff_plain();
- exit;
-} elsif ($action eq "commitdiff") {
- git_commitdiff();
- exit;
-} elsif ($action eq "commitdiff_plain") {
- git_commitdiff_plain();
- exit;
-} elsif ($action eq "history") {
- git_history();
- exit;
-} elsif ($action eq "search") {
- git_search();
- exit;
-} elsif ($action eq "shortlog") {
- git_shortlog();
- exit;
-} elsif ($action eq "tag") {
- git_tag();
- exit;
-} elsif ($action eq "blame") {
- git_blame();
- exit;
-} else {
- undef $action;
- die_error(undef, "Unknown action.");
- exit;
-}
-
-# quote unsafe chars, but keep the slash, even when it's not
-# correct, but quoted slashes look too horrible in bookmarks
-sub esc_param {
- my $str = shift;
- $str =~ s/([^A-Za-z0-9\-_.~();\/;?:@&=])/sprintf("%%%02X", ord($1))/eg;
- $str =~ s/\+/%2B/g;
- $str =~ s/ /\+/g;
- return $str;
-}
-
-# replace invalid utf8 character with SUBSTITUTION sequence
-sub esc_html {
- my $str = shift;
- $str = decode("utf8", $str, Encode::FB_DEFAULT);
- $str = escapeHTML($str);
- return $str;
-}
-
-# git may return quoted and escaped filenames
-sub unquote {
- my $str = shift;
- if ($str =~ m/^"(.*)"$/) {
- $str = $1;
- $str =~ s/\\([0-7]{1,3})/chr(oct($1))/eg;
- }
- return $str;
-}
-
-# CSS class for given age value (in seconds)
-sub age_class {
- my $age = shift;
-
- if ($age < 60*60*2) {
- return "age0";
- } elsif ($age < 60*60*24*2) {
- return "age1";
- } else {
- return "age2";
- }
-}
-
-sub git_header_html {
- my $status = shift || "200 OK";
- my $expires = shift;
-
- my $title = "git";
- if (defined $project) {
- $title .= " - $project";
- if (defined $action) {
- $title .= "/$action";
- if (defined $file_name) {
- $title .= " - $file_name";
- if ($action eq "tree" && $file_name !~ m|/$|) {
- $title .= "/";
- }
- }
- }
- }
- print $cgi->header(-type=>'text/html', -charset => 'utf-8', -status=> $status, -expires => $expires);
- print <<EOF;
-<?xml version="1.0" encoding="utf-8"?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
-<!-- git web interface v$version, (C) 2005-2006, Kay Sievers <kay.sievers\@vrfy.org>, Christian Gierke -->
-<!-- git core binaries version $git_version -->
-<head>
-<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
-<meta name="robots" content="index, nofollow"/>
-<title>$title</title>
-<link rel="stylesheet" type="text/css" href="$stylesheet"/>
-$rss_link
-</head>
-<body>
-EOF
- print "<div class=\"page_header\">\n" .
- "<a href=\"http://www.kernel.org/pub/software/scm/git/docs/\" title=\"git documentation\">" .
- "<img src=\"$my_uri?" . esc_param("a=git-logo.png") . "\" width=\"72\" height=\"27\" alt=\"git\" style=\"float:right; border-width:0px;\"/>" .
- "</a>\n";
- print $cgi->a({-href => esc_param($home_link)}, "projects") . " / ";
- if (defined $project) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, esc_html($project));
- if (defined $action) {
- print " / $action";
- }
- print "\n";
- if (!defined $searchtext) {
- $searchtext = "";
- }
- my $search_hash;
- if (defined $hash_base) {
- $search_hash = $hash_base;
- } elsif (defined $hash) {
- $search_hash = $hash;
- } else {
- $search_hash = "HEAD";
- }
- $cgi->param("a", "search");
- $cgi->param("h", $search_hash);
- print $cgi->startform(-method => "get", -action => $my_uri) .
- "<div class=\"search\">\n" .
- $cgi->hidden(-name => "p") . "\n" .
- $cgi->hidden(-name => "a") . "\n" .
- $cgi->hidden(-name => "h") . "\n" .
- $cgi->textfield(-name => "s", -value => $searchtext) . "\n" .
- "</div>" .
- $cgi->end_form() . "\n";
- }
- print "</div>\n";
-}
-
-sub git_footer_html {
- print "<div class=\"page_footer\">\n";
- if (defined $project) {
- my $descr = git_read_description($project);
- if (defined $descr) {
- print "<div class=\"page_footer_text\">" . esc_html($descr) . "</div>\n";
- }
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=rss"), -class => "rss_logo"}, "RSS") . "\n";
- } else {
- print $cgi->a({-href => "$my_uri?" . esc_param("a=opml"), -class => "rss_logo"}, "OPML") . "\n";
- }
- print "</div>\n" .
- "</body>\n" .
- "</html>";
-}
-
-sub die_error {
- my $status = shift || "403 Forbidden";
- my $error = shift || "Malformed query, file missing or permission denied";
-
- git_header_html($status);
- print "<div class=\"page_body\">\n" .
- "<br/><br/>\n" .
- "$status - $error\n" .
- "<br/>\n" .
- "</div>\n";
- git_footer_html();
- exit;
-}
-
-sub git_get_type {
- my $hash = shift;
-
- open my $fd, "-|", "$gitbin/git-cat-file -t $hash" or return;
- my $type = <$fd>;
- close $fd or return;
- chomp $type;
- return $type;
-}
-
-sub git_read_head {
- my $project = shift;
- my $oENV = $ENV{'GIT_DIR'};
- my $retval = undef;
- $ENV{'GIT_DIR'} = "$projectroot/$project";
- if (open my $fd, "-|", "$gitbin/git-rev-parse", "--verify", "HEAD") {
- my $head = <$fd>;
- close $fd;
- if (defined $head && $head =~ /^([0-9a-fA-F]{40})$/) {
- $retval = $1;
- }
- }
- if (defined $oENV) {
- $ENV{'GIT_DIR'} = $oENV;
- }
- return $retval;
-}
-
-sub git_read_hash {
- my $path = shift;
-
- open my $fd, "$projectroot/$path" or return undef;
- my $head = <$fd>;
- close $fd;
- chomp $head;
- if ($head =~ m/^[0-9a-fA-F]{40}$/) {
- return $head;
- }
-}
-
-sub git_read_description {
- my $path = shift;
-
- open my $fd, "$projectroot/$path/description" or return undef;
- my $descr = <$fd>;
- close $fd;
- chomp $descr;
- return $descr;
-}
-
-sub git_read_tag {
- my $tag_id = shift;
- my %tag;
- my @comment;
-
- open my $fd, "-|", "$gitbin/git-cat-file tag $tag_id" or return;
- $tag{'id'} = $tag_id;
- while (my $line = <$fd>) {
- chomp $line;
- if ($line =~ m/^object ([0-9a-fA-F]{40})$/) {
- $tag{'object'} = $1;
- } elsif ($line =~ m/^type (.+)$/) {
- $tag{'type'} = $1;
- } elsif ($line =~ m/^tag (.+)$/) {
- $tag{'name'} = $1;
- } elsif ($line =~ m/^tagger (.*) ([0-9]+) (.*)$/) {
- $tag{'author'} = $1;
- $tag{'epoch'} = $2;
- $tag{'tz'} = $3;
- } elsif ($line =~ m/--BEGIN/) {
- push @comment, $line;
- last;
- } elsif ($line eq "") {
- last;
- }
- }
- push @comment, <$fd>;
- $tag{'comment'} = \@comment;
- close $fd or return;
- if (!defined $tag{'name'}) {
- return
- };
- return %tag
-}
-
-sub age_string {
- my $age = shift;
- my $age_str;
-
- if ($age > 60*60*24*365*2) {
- $age_str = (int $age/60/60/24/365);
- $age_str .= " years ago";
- } elsif ($age > 60*60*24*(365/12)*2) {
- $age_str = int $age/60/60/24/(365/12);
- $age_str .= " months ago";
- } elsif ($age > 60*60*24*7*2) {
- $age_str = int $age/60/60/24/7;
- $age_str .= " weeks ago";
- } elsif ($age > 60*60*24*2) {
- $age_str = int $age/60/60/24;
- $age_str .= " days ago";
- } elsif ($age > 60*60*2) {
- $age_str = int $age/60/60;
- $age_str .= " hours ago";
- } elsif ($age > 60*2) {
- $age_str = int $age/60;
- $age_str .= " min ago";
- } elsif ($age > 2) {
- $age_str = int $age;
- $age_str .= " sec ago";
- } else {
- $age_str .= " right now";
- }
- return $age_str;
-}
-
-sub git_read_commit {
- my $commit_id = shift;
- my $commit_text = shift;
-
- my @commit_lines;
- my %co;
-
- if (defined $commit_text) {
- @commit_lines = @$commit_text;
- } else {
- $/ = "\0";
- open my $fd, "-|", "$gitbin/git-rev-list --header --parents --max-count=1 $commit_id" or return;
- @commit_lines = split '\n', <$fd>;
- close $fd or return;
- $/ = "\n";
- pop @commit_lines;
- }
- my $header = shift @commit_lines;
- if (!($header =~ m/^[0-9a-fA-F]{40}/)) {
- return;
- }
- ($co{'id'}, my @parents) = split ' ', $header;
- $co{'parents'} = \@parents;
- $co{'parent'} = $parents[0];
- while (my $line = shift @commit_lines) {
- last if $line eq "\n";
- if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) {
- $co{'tree'} = $1;
- } elsif ($line =~ m/^author (.*) ([0-9]+) (.*)$/) {
- $co{'author'} = $1;
- $co{'author_epoch'} = $2;
- $co{'author_tz'} = $3;
- if ($co{'author'} =~ m/^([^<]+) </) {
- $co{'author_name'} = $1;
- } else {
- $co{'author_name'} = $co{'author'};
- }
- } elsif ($line =~ m/^committer (.*) ([0-9]+) (.*)$/) {
- $co{'committer'} = $1;
- $co{'committer_epoch'} = $2;
- $co{'committer_tz'} = $3;
- $co{'committer_name'} = $co{'committer'};
- $co{'committer_name'} =~ s/ <.*//;
- }
- }
- if (!defined $co{'tree'}) {
- return;
- };
-
- foreach my $title (@commit_lines) {
- $title =~ s/^ //;
- if ($title ne "") {
- $co{'title'} = chop_str($title, 80, 5);
- # remove leading stuff of merges to make the interesting part visible
- if (length($title) > 50) {
- $title =~ s/^Automatic //;
- $title =~ s/^merge (of|with) /Merge ... /i;
- if (length($title) > 50) {
- $title =~ s/(http|rsync):\/\///;
- }
- if (length($title) > 50) {
- $title =~ s/(master|www|rsync)\.//;
- }
- if (length($title) > 50) {
- $title =~ s/kernel.org:?//;
- }
- if (length($title) > 50) {
- $title =~ s/\/pub\/scm//;
- }
- }
- $co{'title_short'} = chop_str($title, 50, 5);
- last;
- }
- }
- # remove added spaces
- foreach my $line (@commit_lines) {
- $line =~ s/^ //;
- }
- $co{'comment'} = \@commit_lines;
-
- my $age = time - $co{'committer_epoch'};
- $co{'age'} = $age;
- $co{'age_string'} = age_string($age);
- my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($co{'committer_epoch'});
- if ($age > 60*60*24*7*2) {
- $co{'age_string_date'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday;
- $co{'age_string_age'} = $co{'age_string'};
- } else {
- $co{'age_string_date'} = $co{'age_string'};
- $co{'age_string_age'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday;
- }
- return %co;
-}
-
-sub git_diff_print {
- my $from = shift;
- my $from_name = shift;
- my $to = shift;
- my $to_name = shift;
- my $format = shift || "html";
-
- my $from_tmp = "/dev/null";
- my $to_tmp = "/dev/null";
- my $pid = $$;
-
- # create tmp from-file
- if (defined $from) {
- $from_tmp = "$git_temp/gitweb_" . $$ . "_from";
- open my $fd2, "> $from_tmp";
- open my $fd, "-|", "$gitbin/git-cat-file blob $from";
- my @file = <$fd>;
- print $fd2 @file;
- close $fd2;
- close $fd;
- }
-
- # create tmp to-file
- if (defined $to) {
- $to_tmp = "$git_temp/gitweb_" . $$ . "_to";
- open my $fd2, "> $to_tmp";
- open my $fd, "-|", "$gitbin/git-cat-file blob $to";
- my @file = <$fd>;
- print $fd2 @file;
- close $fd2;
- close $fd;
- }
-
- open my $fd, "-|", "/usr/bin/diff -u -p -L \'$from_name\' -L \'$to_name\' $from_tmp $to_tmp";
- if ($format eq "plain") {
- undef $/;
- print <$fd>;
- $/ = "\n";
- } else {
- while (my $line = <$fd>) {
- chomp($line);
- my $char = substr($line, 0, 1);
- my $diff_class = "";
- if ($char eq '+') {
- $diff_class = " add";
- } elsif ($char eq "-") {
- $diff_class = " rem";
- } elsif ($char eq "@") {
- $diff_class = " chunk_header";
- } elsif ($char eq "\\") {
- # skip errors
- next;
- }
- while ((my $pos = index($line, "\t")) != -1) {
- if (my $count = (8 - (($pos-1) % 8))) {
- my $spaces = ' ' x $count;
- $line =~ s/\t/$spaces/;
- }
- }
- print "<div class=\"diff$diff_class\">" . esc_html($line) . "</div>\n";
- }
- }
- close $fd;
-
- if (defined $from) {
- unlink($from_tmp);
- }
- if (defined $to) {
- unlink($to_tmp);
- }
-}
-
-sub mode_str {
- my $mode = oct shift;
-
- if (S_ISDIR($mode & S_IFMT)) {
- return 'drwxr-xr-x';
- } elsif (S_ISLNK($mode)) {
- return 'lrwxrwxrwx';
- } elsif (S_ISREG($mode)) {
- # git cares only about the executable bit
- if ($mode & S_IXUSR) {
- return '-rwxr-xr-x';
- } else {
- return '-rw-r--r--';
- };
- } else {
- return '----------';
- }
-}
-
-sub chop_str {
- my $str = shift;
- my $len = shift;
- my $add_len = shift || 10;
-
- # allow only $len chars, but don't cut a word if it would fit in $add_len
- # if it doesn't fit, cut it if it's still longer than the dots we would add
- $str =~ m/^(.{0,$len}[^ \/\-_:\.@]{0,$add_len})(.*)/;
- my $body = $1;
- my $tail = $2;
- if (length($tail) > 4) {
- $tail = " ...";
- }
- return "$body$tail";
-}
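
The truncation rule above is easiest to see on a concrete input. A minimal standalone sketch of the same regex, using the 50/5 limits passed for 'title_short' in git_read_commit (the sample title itself is made up):

    use strict;
    use warnings;

    # Illustration of the chop_str rule above; sketch only, not part of the patch.
    my ($len, $add_len) = (50, 5);   # limits used for 'title_short' above
    my $str = "Merge branch 'maint' of git://git.kernel.org/pub/scm/git/git";   # made-up title
    $str =~ m/^(.{0,$len}[^ \/\-_:\.@]{0,$add_len})(.*)/;
    my ($body, $tail) = ($1, $2);
    $tail = " ..." if length($tail) > 4;
    print "$body$tail\n";   # a word is kept whole if it fits in the extra chars, else " ..." is appended
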
-
-sub file_type {
- my $mode = oct shift;
-
- if (S_ISDIR($mode & S_IFMT)) {
- return "directory";
- } elsif (S_ISLNK($mode)) {
- return "symlink";
- } elsif (S_ISREG($mode)) {
- return "file";
- } else {
- return "unknown";
- }
-}
-
-sub format_log_line_html {
- my $line = shift;
-
- $line = esc_html($line);
- $line =~ s/ /&nbsp;/g;
- if ($line =~ m/([0-9a-fA-F]{40})/) {
- my $hash_text = $1;
- if (git_get_type($hash_text) eq "commit") {
- my $link = $cgi->a({-class => "text", -href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_text")}, $hash_text);
- $line =~ s/$hash_text/$link/;
- }
- }
- return $line;
-}
-
-sub date_str {
- my $epoch = shift;
- my $tz = shift || "-0000";
-
- my %date;
- my @months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec");
- my @days = ("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat");
- my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($epoch);
- $date{'hour'} = $hour;
- $date{'minute'} = $min;
- $date{'mday'} = $mday;
- $date{'day'} = $days[$wday];
- $date{'month'} = $months[$mon];
- $date{'rfc2822'} = sprintf "%s, %d %s %4d %02d:%02d:%02d +0000", $days[$wday], $mday, $months[$mon], 1900+$year, $hour ,$min, $sec;
- $date{'mday-time'} = sprintf "%d %s %02d:%02d", $mday, $months[$mon], $hour ,$min;
-
- $tz =~ m/^([+\-][0-9][0-9])([0-9][0-9])$/;
- my $local = $epoch + ((int $1 + ($2/60)) * 3600);
- ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($local);
- $date{'hour_local'} = $hour;
- $date{'minute_local'} = $min;
- $date{'tz_local'} = $tz;
- return %date;
-}
-
-# git-logo (cached in browser for one day)
-sub git_logo {
- binmode STDOUT, ':raw';
- print $cgi->header(-type => 'image/png', -expires => '+1d');
- # cat git-logo.png | hexdump -e '16/1 " %02x" "\n"' | sed 's/ /\\x/g'
- print "\x89\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52" .
- "\x00\x00\x00\x48\x00\x00\x00\x1b\x04\x03\x00\x00\x00\x2d\xd9\xd4" .
- "\x2d\x00\x00\x00\x18\x50\x4c\x54\x45\xff\xff\xff\x60\x60\x5d\xb0" .
- "\xaf\xaa\x00\x80\x00\xce\xcd\xc7\xc0\x00\x00\xe8\xe8\xe6\xf7\xf7" .
- "\xf6\x95\x0c\xa7\x47\x00\x00\x00\x73\x49\x44\x41\x54\x28\xcf\x63" .
- "\x48\x67\x20\x04\x4a\x5c\x18\x0a\x08\x2a\x62\x53\x61\x20\x02\x08" .
- "\x0d\x69\x45\xac\xa1\xa1\x01\x30\x0c\x93\x60\x36\x26\x52\x91\xb1" .
- "\x01\x11\xd6\xe1\x55\x64\x6c\x6c\xcc\x6c\x6c\x0c\xa2\x0c\x70\x2a" .
- "\x62\x06\x2a\xc1\x62\x1d\xb3\x01\x02\x53\xa4\x08\xe8\x00\x03\x18" .
- "\x26\x56\x11\xd4\xe1\x20\x97\x1b\xe0\xb4\x0e\x35\x24\x71\x29\x82" .
- "\x99\x30\xb8\x93\x0a\x11\xb9\x45\x88\xc1\x8d\xa0\xa2\x44\x21\x06" .
- "\x27\x41\x82\x40\x85\xc1\x45\x89\x20\x70\x01\x00\xa4\x3d\x21\xc5" .
- "\x12\x1c\x9a\xfe\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82";
-}
-
-sub get_file_owner {
- my $path = shift;
-
- my ($dev, $ino, $mode, $nlink, $st_uid, $st_gid, $rdev, $size) = stat($path);
- my ($name, $passwd, $uid, $gid, $quota, $comment, $gcos, $dir, $shell) = getpwuid($st_uid);
- if (!defined $gcos) {
- return undef;
- }
- my $owner = $gcos;
- $owner =~ s/[,;].*$//;
- return decode("utf8", $owner, Encode::FB_DEFAULT);
-}
-
-sub git_read_projects {
- my @list;
-
- if (-d $projects_list) {
- # search in directory
- my $dir = $projects_list;
- opendir my $dh, $dir or return undef;
- while (my $dir = readdir($dh)) {
- if (-e "$projectroot/$dir/HEAD") {
- my $pr = {
- path => $dir,
- };
- push @list, $pr
- }
- }
- closedir($dh);
- } elsif (-f $projects_list) {
- # read from file(url-encoded):
- # 'git%2Fgit.git Linus+Torvalds'
- # 'libs%2Fklibc%2Fklibc.git H.+Peter+Anvin'
- # 'linux%2Fhotplug%2Fudev.git Greg+Kroah-Hartman'
- open my $fd , $projects_list or return undef;
- while (my $line = <$fd>) {
- chomp $line;
- my ($path, $owner) = split ' ', $line;
- $path = unescape($path);
- $owner = unescape($owner);
- if (!defined $path) {
- next;
- }
- if (-e "$projectroot/$path/HEAD") {
- my $pr = {
- path => $path,
- owner => decode("utf8", $owner, Encode::FB_DEFAULT),
- };
- push @list, $pr
- }
- }
- close $fd;
- }
- @list = sort {$a->{'path'} cmp $b->{'path'}} @list;
- return @list;
-}
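
Decoding one line of the URL-encoded projects list shown in the comment above can be tried in isolation. A sketch using the sample entry from that comment; it assumes unescape() is the CGI::Util helper, the usual source of that function:

    use strict;
    use warnings;
    use CGI::Util qw(unescape);

    # Sketch: one projects-list line -> path and owner; not part of the patch.
    my $line = 'libs%2Fklibc%2Fklibc.git H.+Peter+Anvin';
    my ($path, $owner) = split ' ', $line;
    $path  = unescape($path);    # libs/klibc/klibc.git
    $owner = unescape($owner);   # "H. Peter Anvin" ('+' becomes a space)
    print "$path\t$owner\n";
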
-
-sub git_get_project_config {
- my $key = shift;
-
- return unless ($key);
- $key =~ s/^gitweb\.//;
- return if ($key =~ m/\W/);
-
- my $val = qx($gitbin/git-repo-config --get gitweb.$key);
- return ($val);
-}
-
-sub git_get_project_config_bool {
- my $val = git_get_project_config (@_);
- if ($val and $val =~ m/true|yes|on/) {
- return (1);
- }
- return; # implicit false
-}
-
-sub git_project_list {
- my @list = git_read_projects();
- my @projects;
- if (!@list) {
- die_error(undef, "No project found.");
- }
- foreach my $pr (@list) {
- my $head = git_read_head($pr->{'path'});
- if (!defined $head) {
- next;
- }
- $ENV{'GIT_DIR'} = "$projectroot/$pr->{'path'}";
- my %co = git_read_commit($head);
- if (!%co) {
- next;
- }
- $pr->{'commit'} = \%co;
- if (!defined $pr->{'descr'}) {
- my $descr = git_read_description($pr->{'path'}) || "";
- $pr->{'descr'} = chop_str($descr, 25, 5);
- }
- if (!defined $pr->{'owner'}) {
- $pr->{'owner'} = get_file_owner("$projectroot/$pr->{'path'}") || "";
- }
- push @projects, $pr;
- }
- git_header_html();
- if (-f $home_text) {
- print "<div class=\"index_include\">\n";
- open (my $fd, $home_text);
- print <$fd>;
- close $fd;
- print "</div>\n";
- }
- print "<table class=\"project_list\">\n" .
- "<tr>\n";
- if (!defined($order) || (defined($order) && ($order eq "project"))) {
- @projects = sort {$a->{'path'} cmp $b->{'path'}} @projects;
- print "<th>Project</th>\n";
- } else {
- print "<th>" . $cgi->a({-class => "header", -href => "$my_uri?" . esc_param("o=project")}, "Project") . "</th>\n";
- }
- if (defined($order) && ($order eq "descr")) {
- @projects = sort {$a->{'descr'} cmp $b->{'descr'}} @projects;
- print "<th>Description</th>\n";
- } else {
- print "<th>" . $cgi->a({-class => "header", -href => "$my_uri?" . esc_param("o=descr")}, "Description") . "</th>\n";
- }
- if (defined($order) && ($order eq "owner")) {
- @projects = sort {$a->{'owner'} cmp $b->{'owner'}} @projects;
- print "<th>Owner</th>\n";
- } else {
- print "<th>" . $cgi->a({-class => "header", -href => "$my_uri?" . esc_param("o=owner")}, "Owner") . "</th>\n";
- }
- if (defined($order) && ($order eq "age")) {
- @projects = sort {$a->{'commit'}{'age'} <=> $b->{'commit'}{'age'}} @projects;
- print "<th>Last Change</th>\n";
- } else {
- print "<th>" . $cgi->a({-class => "header", -href => "$my_uri?" . esc_param("o=age")}, "Last Change") . "</th>\n";
- }
- print "<th></th>\n" .
- "</tr>\n";
- my $alternate = 0;
- foreach my $pr (@projects) {
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=summary"), -class => "list"}, esc_html($pr->{'path'})) . "</td>\n" .
- "<td>$pr->{'descr'}</td>\n" .
- "<td><i>" . chop_str($pr->{'owner'}, 15) . "</i></td>\n";
- print "<td class=\"". age_class($pr->{'commit'}{'age'}) . "\">" . $pr->{'commit'}{'age_string'} . "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=log")}, "log") .
- "</td>\n" .
- "</tr>\n";
- }
- print "</table>\n";
- git_footer_html();
-}
-
-sub read_info_ref {
- my $type = shift || "";
- my %refs;
- # 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11
- # c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}
- open my $fd, "$projectroot/$project/info/refs" or return;
- while (my $line = <$fd>) {
- chomp($line);
- if ($line =~ m/^([0-9a-fA-F]{40})\t.*$type\/([^\^]+)/) {
- if (defined $refs{$1}) {
- $refs{$1} .= " / $2";
- } else {
- $refs{$1} = $2;
- }
- }
- }
- close $fd or return;
- return \%refs;
-}
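
Run against the sample info/refs line from the comment above, the regex in read_info_ref yields a map from object id to short ref name. A sketch; the "tags" filter is only an example argument, the routine is also called with no type at all:

    use strict;
    use warnings;

    # Sketch: what the read_info_ref regex extracts; not part of the patch.
    my %refs;
    my $type = "tags";   # example filter; read_info_ref() is also called without one
    my $line = "5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c\trefs/tags/v2.6.11";
    if ($line =~ m/^([0-9a-fA-F]{40})\t.*$type\/([^\^]+)/) {
        $refs{$1} = defined $refs{$1} ? "$refs{$1} / $2" : $2;
    }
    print "$_ -> $refs{$_}\n" for keys %refs;   # prints the id followed by "v2.6.11"
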
-
-sub git_read_refs {
- my $ref_dir = shift;
- my @reflist;
-
- my @refs;
- opendir my $dh, "$projectroot/$project/$ref_dir";
- while (my $dir = readdir($dh)) {
- if ($dir =~ m/^\./) {
- next;
- }
- if (-d "$projectroot/$project/$ref_dir/$dir") {
- opendir my $dh2, "$projectroot/$project/$ref_dir/$dir";
- my @subdirs = grep !m/^\./, readdir $dh2;
- closedir($dh2);
- foreach my $subdir (@subdirs) {
- push @refs, "$dir/$subdir"
- }
- next;
- }
- push @refs, $dir;
- }
- closedir($dh);
- foreach my $ref_file (@refs) {
- my $ref_id = git_read_hash("$project/$ref_dir/$ref_file");
- my $type = git_get_type($ref_id) || next;
- my %ref_item;
- my %co;
- $ref_item{'type'} = $type;
- $ref_item{'id'} = $ref_id;
- $ref_item{'epoch'} = 0;
- $ref_item{'age'} = "unknown";
- if ($type eq "tag") {
- my %tag = git_read_tag($ref_id);
- $ref_item{'comment'} = $tag{'comment'};
- if ($tag{'type'} eq "commit") {
- %co = git_read_commit($tag{'object'});
- $ref_item{'epoch'} = $co{'committer_epoch'};
- $ref_item{'age'} = $co{'age_string'};
- } elsif (defined($tag{'epoch'})) {
- my $age = time - $tag{'epoch'};
- $ref_item{'epoch'} = $tag{'epoch'};
- $ref_item{'age'} = age_string($age);
- }
- $ref_item{'reftype'} = $tag{'type'};
- $ref_item{'name'} = $tag{'name'};
- $ref_item{'refid'} = $tag{'object'};
- } elsif ($type eq "commit"){
- %co = git_read_commit($ref_id);
- $ref_item{'reftype'} = "commit";
- $ref_item{'name'} = $ref_file;
- $ref_item{'title'} = $co{'title'};
- $ref_item{'refid'} = $ref_id;
- $ref_item{'epoch'} = $co{'committer_epoch'};
- $ref_item{'age'} = $co{'age_string'};
- }
-
- push @reflist, \%ref_item;
- }
- # sort tags by age
- @reflist = sort {$b->{'epoch'} <=> $a->{'epoch'}} @reflist;
- return \@reflist;
-}
-
-sub git_summary {
- my $descr = git_read_description($project) || "none";
- my $head = git_read_head($project);
- my %co = git_read_commit($head);
- my %cd = date_str($co{'committer_epoch'}, $co{'committer_tz'});
-
- my $owner;
- if (-f $projects_list) {
- open (my $fd , $projects_list);
- while (my $line = <$fd>) {
- chomp $line;
- my ($pr, $ow) = split ' ', $line;
- $pr = unescape($pr);
- $ow = unescape($ow);
- if ($pr eq $project) {
- $owner = decode("utf8", $ow, Encode::FB_DEFAULT);
- last;
- }
- }
- close $fd;
- }
- if (!defined $owner) {
- $owner = get_file_owner("$projectroot/$project");
- }
-
- my $refs = read_info_ref();
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- "summary".
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$head")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$head")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree")}, "tree") .
- "<br/><br/>\n" .
- "</div>\n";
- print "<div class=\"title\"> </div>\n";
- print "<table cellspacing=\"0\">\n" .
- "<tr><td>description</td><td>" . esc_html($descr) . "</td></tr>\n" .
- "<tr><td>owner</td><td>$owner</td></tr>\n" .
- "<tr><td>last change</td><td>$cd{'rfc2822'}</td></tr>\n" .
- "</table>\n";
- open my $fd, "-|", "$gitbin/git-rev-list --max-count=17 " . git_read_head($project) or die_error(undef, "Open failed.");
- my (@revlist) = map { chomp; $_ } <$fd>;
- close $fd;
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog"), -class => "title"}, "shortlog") .
- "</div>\n";
- my $i = 16;
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- foreach my $commit (@revlist) {
- my %co = git_read_commit($commit);
- my %ad = date_str($co{'author_epoch'});
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- if ($i-- > 0) {
- my $ref = "";
- if (defined $refs->{$commit}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$commit}) . "</span>";
- }
- print "<td><i>$co{'age_string'}</i></td>\n" .
- "<td><i>" . esc_html(chop_str($co{'author_name'}, 10)) . "</i></td>\n" .
- "<td>";
- if (length($co{'title_short'}) < length($co{'title'})) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list", -title => "$co{'title'}"},
- "<b>" . esc_html($co{'title_short'}) . "$ref</b>");
- } else {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list"},
- "<b>" . esc_html($co{'title'}) . "$ref</b>");
- }
- print "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
- "</td>\n" .
- "</tr>";
- } else {
- print "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "...") . "</td>\n" .
- "</tr>";
- last;
- }
- }
- print "</table\n>";
-
- my $taglist = git_read_refs("refs/tags");
- if (@$taglist) {
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tags"), -class => "title"}, "tags") .
- "</div>\n";
- my $i = 16;
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- foreach my $entry (@$taglist) {
- my %tag = %$entry;
- my $comment_lines = $tag{'comment'};
- my $comment = shift @$comment_lines;
- if (defined($comment)) {
- $comment = chop_str($comment, 30, 5);
- }
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- if ($i-- > 0) {
- print "<td><i>$tag{'age'}</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}"), -class => "list"},
- "<b>" . esc_html($tag{'name'}) . "</b>") .
- "</td>\n" .
- "<td>";
- if (defined($comment)) {
- print $cgi->a({-class => "list", -href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}")}, $comment);
- }
- print "</td>\n" .
- "<td class=\"link\">";
- if ($tag{'type'} eq "tag") {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}")}, "tag") . " | ";
- }
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}")}, $tag{'reftype'});
- if ($tag{'reftype'} eq "commit") {
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'refid'}")}, "log");
- }
- print "</td>\n" .
- "</tr>";
- } else {
- print "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tags")}, "...") . "</td>\n" .
- "</tr>";
- last;
- }
- }
- print "</table\n>";
- }
-
- my $headlist = git_read_refs("refs/heads");
- if (@$headlist) {
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=heads"), -class => "title"}, "heads") .
- "</div>\n";
- my $i = 16;
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- foreach my $entry (@$headlist) {
- my %tag = %$entry;
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- if ($i-- > 0) {
- print "<td><i>$tag{'age'}</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}"), -class => "list"},
- "<b>" . esc_html($tag{'name'}) . "</b>") .
- "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'name'}")}, "log") .
- "</td>\n" .
- "</tr>";
- } else {
- print "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=heads")}, "...") . "</td>\n" .
- "</tr>";
- last;
- }
- }
- print "</table\n>";
- }
- git_footer_html();
-}
-
-sub git_tag {
- my $head = git_read_head($project);
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$head")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$head")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;hb=$head")}, "tree") . "<br/>\n" .
- "<br/>\n" .
- "</div>\n";
- my %tag = git_read_tag($hash);
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash"), -class => "title"}, esc_html($tag{'name'})) . "\n" .
- "</div>\n";
- print "<div class=\"title_text\">\n" .
- "<table cellspacing=\"0\">\n" .
- "<tr>\n" .
- "<td>object</td>\n" .
- "<td>" . $cgi->a({-class => "list", -href => "$my_uri?" . esc_param("p=$project;a=$tag{'type'};h=$tag{'object'}")}, $tag{'object'}) . "</td>\n" .
- "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'type'};h=$tag{'object'}")}, $tag{'type'}) . "</td>\n" .
- "</tr>\n";
- if (defined($tag{'author'})) {
- my %ad = date_str($tag{'epoch'}, $tag{'tz'});
- print "<tr><td>author</td><td>" . esc_html($tag{'author'}) . "</td></tr>\n";
- print "<tr><td></td><td>" . $ad{'rfc2822'} . sprintf(" (%02d:%02d %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'}) . "</td></tr>\n";
- }
- print "</table>\n\n" .
- "</div>\n";
- print "<div class=\"page_body\">";
- my $comment = $tag{'comment'};
- foreach my $line (@$comment) {
- print esc_html($line) . "<br/>\n";
- }
- print "</div>\n";
- git_footer_html();
-}
-
-sub git_blame {
- my $fd;
- die_error('403 Permission denied', "Permission denied.") if (!git_get_project_config_bool ('blame'));
- die_error('404 Not Found', "What file will it be, master?") if (!$file_name);
- $hash_base ||= git_read_head($project);
- die_error(undef, "Reading commit failed.") unless ($hash_base);
- my %co = git_read_commit($hash_base)
- or die_error(undef, "Reading commit failed.");
- if (!defined $hash) {
- $hash = git_get_hash_by_path($hash_base, $file_name, "blob")
- or die_error(undef, "Error lookup file.");
- }
- open ($fd, "-|", "$gitbin/git-annotate", '-l', '-t', '-r', $file_name, $hash_base)
- or die_error(undef, "Open failed.");
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash_base")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash_base")}, "tree") . "<br/>\n";
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash;hb=$hash_base;f=$file_name")}, "blob") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;f=$file_name")}, "head") . "<br/>\n";
- print "</div>\n".
- "<div>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base"), -class => "title"}, esc_html($co{'title'})) .
- "</div>\n";
- print "<div class=\"page_path\"><b>" . esc_html($file_name) . "</b></div>\n";
- print "<div class=\"page_body\">\n";
- print <<HTML;
-<table class="blame">
- <tr>
- <th>Commit</th>
- <th>Age</th>
- <th>Author</th>
- <th>Line</th>
- <th>Data</th>
- </tr>
-HTML
- my @line_class = (qw(light dark));
- my $line_class_len = scalar (@line_class);
- my $line_class_num = $#line_class;
- while (my $line = <$fd>) {
- my $long_rev;
- my $short_rev;
- my $author;
- my $time;
- my $lineno;
- my $data;
- my $age;
- my $age_str;
- my $age_class;
-
- chomp $line;
- $line_class_num = ($line_class_num + 1) % $line_class_len;
-
- if ($line =~ m/^([0-9a-fA-F]{40})\t\(\s*([^\t]+)\t(\d+) \+\d\d\d\d\t(\d+)\)(.*)$/) {
- $long_rev = $1;
- $author = $2;
- $time = $3;
- $lineno = $4;
- $data = $5;
- } else {
- print qq( <tr><td colspan="5" class="error">Unable to parse: $line</td></tr>\n);
- next;
- }
- $short_rev = substr ($long_rev, 0, 8);
- $age = time () - $time;
- $age_str = age_string ($age);
- $age_str =~ s/ /&nbsp;/g;
- $age_class = age_class($age);
- $author = esc_html ($author);
- $author =~ s/ /&nbsp;/g;
- # escape tabs
- while ((my $pos = index($data, "\t")) != -1) {
- if (my $count = (8 - ($pos % 8))) {
- my $spaces = ' ' x $count;
- $data =~ s/\t/$spaces/;
- }
- }
- $data = esc_html ($data);
-
- print <<HTML;
- <tr class="$line_class[$line_class_num]">
- <td class="sha1"><a href="$my_uri?${\esc_param ("p=$project;a=commit;h=$long_rev")}" class="text">$short_rev..</a></td>
- <td class="$age_class">$age_str</td>
- <td>$author</td>
- <td class="linenr"><a id="$lineno" href="#$lineno" class="linenr">$lineno</a></td>
- <td class="pre">$data</td>
- </tr>
-HTML
- } # while (my $line = <$fd>)
- print "</table>\n\n";
- close $fd or print "Reading blob failed.\n";
- print "</div>";
- git_footer_html();
-}
-
-sub git_tags {
- my $head = git_read_head($project);
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$head")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$head")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;hb=$head")}, "tree") . "<br/>\n" .
- "<br/>\n" .
- "</div>\n";
- my $taglist = git_read_refs("refs/tags");
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary"), -class => "title"}, " ") .
- "</div>\n";
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- if (@$taglist) {
- foreach my $entry (@$taglist) {
- my %tag = %$entry;
- my $comment_lines = $tag{'comment'};
- my $comment = shift @$comment_lines;
- if (defined($comment)) {
- $comment = chop_str($comment, 30, 5);
- }
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td><i>$tag{'age'}</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}"), -class => "list"},
- "<b>" . esc_html($tag{'name'}) . "</b>") .
- "</td>\n" .
- "<td>";
- if (defined($comment)) {
- print $cgi->a({-class => "list", -href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}")}, $comment);
- }
- print "</td>\n" .
- "<td class=\"link\">";
- if ($tag{'type'} eq "tag") {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}")}, "tag") . " | ";
- }
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}")}, $tag{'reftype'});
- if ($tag{'reftype'} eq "commit") {
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'refid'}")}, "log");
- }
- print "</td>\n" .
- "</tr>";
- }
- }
- print "</table\n>";
- git_footer_html();
-}
-
-sub git_heads {
- my $head = git_read_head($project);
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$head")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$head")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;hb=$head")}, "tree") . "<br/>\n" .
- "<br/>\n" .
- "</div>\n";
- my $taglist = git_read_refs("refs/heads");
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary"), -class => "title"}, " ") .
- "</div>\n";
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- if (@$taglist) {
- foreach my $entry (@$taglist) {
- my %tag = %$entry;
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td><i>$tag{'age'}</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}"), -class => "list"}, "<b>" . esc_html($tag{'name'}) . "</b>") .
- "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'name'}")}, "log") .
- "</td>\n" .
- "</tr>";
- }
- }
- print "</table\n>";
- git_footer_html();
-}
-
-sub git_get_hash_by_path {
- my $base = shift;
- my $path = shift || return undef;
-
- my $tree = $base;
- my @parts = split '/', $path;
- while (my $part = shift @parts) {
- open my $fd, "-|", "$gitbin/git-ls-tree $tree" or die_error(undef, "Open git-ls-tree failed.");
- my (@entries) = map { chomp; $_ } <$fd>;
- close $fd or return undef;
- foreach my $line (@entries) {
- #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c'
- $line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t(.+)$/;
- my $t_mode = $1;
- my $t_type = $2;
- my $t_hash = $3;
- my $t_name = validate_input(unquote($4));
- if ($t_name eq $part) {
- if (!(@parts)) {
- return $t_hash;
- }
- if ($t_type eq "tree") {
- $tree = $t_hash;
- }
- last;
- }
- }
- }
-}
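
The same kind of one-regex parsing is used on git-ls-tree output above and in git_tree below; splitting the sample line from the comment looks like this (sketch only, not part of the patch):

    use strict;
    use warnings;

    # Sketch: one git-ls-tree line -> mode, type, sha1, name; not part of the patch.
    my $line = "100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa\tpanic.c";
    if ($line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t(.+)$/) {
        my ($mode, $type, $sha1, $name) = ($1, $2, $3, $4);
        print "$name: $type $sha1 (mode $mode)\n";
    }
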
-
-sub git_blob {
- if (!defined $hash && defined $file_name) {
- my $base = $hash_base || git_read_head($project);
- $hash = git_get_hash_by_path($base, $file_name, "blob") || die_error(undef, "Error looking up file.");
- }
- my $have_blame = git_get_project_config_bool ('blame');
- open my $fd, "-|", "$gitbin/git-cat-file blob $hash" or die_error(undef, "Open failed.");
- git_header_html();
- if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash_base")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash_base")}, "tree") . "<br/>\n";
- if (defined $file_name) {
- if ($have_blame) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;h=$hash;hb=$hash_base;f=$file_name")}, "blame") . " | ";
- }
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$hash;f=$file_name")}, "plain") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;hb=HEAD;f=$file_name")}, "head") . "<br/>\n";
- } else {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$hash")}, "plain") . "<br/>\n";
- }
- print "</div>\n".
- "<div>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base"), -class => "title"}, esc_html($co{'title'})) .
- "</div>\n";
- } else {
- print "<div class=\"page_nav\">\n" .
- "<br/><br/></div>\n" .
- "<div class=\"title\">$hash</div>\n";
- }
- if (defined $file_name) {
- print "<div class=\"page_path\"><b>" . esc_html($file_name) . "</b></div>\n";
- }
- print "<div class=\"page_body\">\n";
- my $nr;
- while (my $line = <$fd>) {
- chomp $line;
- $nr++;
- while ((my $pos = index($line, "\t")) != -1) {
- if (my $count = (8 - ($pos % 8))) {
- my $spaces = ' ' x $count;
- $line =~ s/\t/$spaces/;
- }
- }
- printf "<div class=\"pre\"><a id=\"l%i\" href=\"#l%i\" class=\"linenr\">%4i</a> %s</div>\n", $nr, $nr, $nr, esc_html($line);
- }
- close $fd or print "Reading blob failed.\n";
- print "</div>";
- git_footer_html();
-}
-
-sub mimetype_guess_file {
- my $filename = shift;
- my $mimemap = shift;
- -r $mimemap or return undef;
-
- my %mimemap;
- open(MIME, $mimemap) or return undef;
- while (<MIME>) {
- my ($mime, $exts) = split(/\t+/);
- my @exts = split(/\s+/, $exts);
- foreach my $ext (@exts) {
- $mimemap{$ext} = $mime;
- }
- }
- close(MIME);
-
- $filename =~ /\.(.*?)$/;
- return $mimemap{$1};
-}
-
-sub mimetype_guess {
- my $filename = shift;
- my $mime;
- $filename =~ /\./ or return undef;
-
- if ($mimetypes_file) {
- my $file = $mimetypes_file;
- #$file =~ m#^/# or $file = "$projectroot/$path/$file";
- $mime = mimetype_guess_file($filename, $file);
- }
- $mime ||= mimetype_guess_file($filename, '/etc/mime.types');
- return $mime;
-}
-
-sub git_blob_plain_mimetype {
- my $fd = shift;
- my $filename = shift;
-
- # just in case
- return $default_blob_plain_mimetype unless $fd;
-
- if ($filename) {
- my $mime = mimetype_guess($filename);
- $mime and return $mime;
- }
-
- if (-T $fd) {
- return 'text/plain' .
- ($default_text_plain_charset ? '; charset='.$default_text_plain_charset : '');
- } elsif (! $filename) {
- return 'application/octet-stream';
- } elsif ($filename =~ m/\.png$/i) {
- return 'image/png';
- } elsif ($filename =~ m/\.gif$/i) {
- return 'image/gif';
- } elsif ($filename =~ m/\.jpe?g$/i) {
- return 'image/jpeg';
- } else {
- return 'application/octet-stream';
- }
-}
-
-sub git_blob_plain {
- open my $fd, "-|", "$gitbin/git-cat-file blob $hash" or return;
- my $type = git_blob_plain_mimetype($fd, $file_name);
-
- # save as filename, even when no $file_name is given
- my $save_as = "$hash";
- if (defined $file_name) {
- $save_as = $file_name;
- } elsif ($type =~ m/^text\//) {
- $save_as .= '.txt';
- }
-
- print $cgi->header(-type => "$type", '-content-disposition' => "inline; filename=\"$save_as\"");
- undef $/;
- binmode STDOUT, ':raw';
- print <$fd>;
- binmode STDOUT, ':utf8'; # as set at the beginning of gitweb.cgi
- $/ = "\n";
- close $fd;
-}
-
-sub git_tree {
- if (!defined $hash) {
- $hash = git_read_head($project);
- if (defined $file_name) {
- my $base = $hash_base || $hash;
- $hash = git_get_hash_by_path($base, $file_name, "tree");
- }
- if (!defined $hash_base) {
- $hash_base = $hash;
- }
- }
- $/ = "\0";
- open my $fd, "-|", "$gitbin/git-ls-tree -z $hash" or die_error(undef, "Open git-ls-tree failed.");
- chomp (my (@entries) = <$fd>);
- close $fd or die_error(undef, "Reading tree failed.");
- $/ = "\n";
-
- my $refs = read_info_ref();
- my $ref = "";
- if (defined $refs->{$hash_base}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$hash_base}) . "</span>";
- }
- git_header_html();
- my $base_key = "";
- my $base = "";
- if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
- $base_key = ";hb=$hash_base";
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash_base")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash_base")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash_base")}, "commitdiff") .
- " | tree" .
- "<br/><br/>\n" .
- "</div>\n";
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base"), -class => "title"}, esc_html($co{'title'}) . $ref) . "\n" .
- "</div>\n";
- } else {
- print "<div class=\"page_nav\">\n";
- print "<br/><br/></div>\n";
- print "<div class=\"title\">$hash</div>\n";
- }
- if (defined $file_name) {
- $base = esc_html("$file_name/");
- print "<div class=\"page_path\"><b>/" . esc_html($file_name) . "</b></div>\n";
- } else {
- print "<div class=\"page_path\"><b>/</b></div>\n";
- }
- print "<div class=\"page_body\">\n";
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- foreach my $line (@entries) {
- #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c'
- $line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t(.+)$/;
- my $t_mode = $1;
- my $t_type = $2;
- my $t_hash = $3;
- my $t_name = validate_input($4);
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td class=\"mode\">" . mode_str($t_mode) . "</td>\n";
- if ($t_type eq "blob") {
- print "<td class=\"list\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$t_hash$base_key;f=$base$t_name"), -class => "list"}, esc_html($t_name)) .
- "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$t_hash$base_key;f=$base$t_name")}, "blob") .
-# " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;h=$t_hash$base_key;f=$base$t_name")}, "blame") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;h=$hash_base;f=$base$t_name")}, "history") .
- "</td>\n";
- } elsif ($t_type eq "tree") {
- print "<td class=\"list\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$t_hash$base_key;f=$base$t_name")}, esc_html($t_name)) .
- "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$t_hash$base_key;f=$base$t_name")}, "tree") .
- "</td>\n";
- }
- print "</tr>\n";
- }
- print "</table>\n" .
- "</div>";
- git_footer_html();
-}
-
-sub git_rss {
- # http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ
- open my $fd, "-|", "$gitbin/git-rev-list --max-count=150 " . git_read_head($project) or die_error(undef, "Open failed.");
- my (@revlist) = map { chomp; $_ } <$fd>;
- close $fd or die_error(undef, "Reading rev-list failed.");
- print $cgi->header(-type => 'text/xml', -charset => 'utf-8');
- print "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n".
- "<rss version=\"2.0\" xmlns:content=\"http://purl.org/rss/1.0/modules/content/\">\n";
- print "<channel>\n";
- print "<title>$project</title>\n".
- "<link>" . esc_html("$my_url?p=$project;a=summary") . "</link>\n".
- "<description>$project log</description>\n".
- "<language>en</language>\n";
-
- for (my $i = 0; $i <= $#revlist; $i++) {
- my $commit = $revlist[$i];
- my %co = git_read_commit($commit);
- # we read 150; always show the first 20, plus any that are more recent than 48 hours
- if (($i >= 20) && ((time - $co{'committer_epoch'}) > 48*60*60)) {
- last;
- }
- my %cd = date_str($co{'committer_epoch'});
- open $fd, "-|", "$gitbin/git-diff-tree -r $co{'parent'} $co{'id'}" or next;
- my @difftree = map { chomp; $_ } <$fd>;
- close $fd or next;
- print "<item>\n" .
- "<title>" .
- sprintf("%d %s %02d:%02d", $cd{'mday'}, $cd{'month'}, $cd{'hour'}, $cd{'minute'}) . " - " . esc_html($co{'title'}) .
- "</title>\n" .
- "<author>" . esc_html($co{'author'}) . "</author>\n" .
- "<pubDate>$cd{'rfc2822'}</pubDate>\n" .
- "<guid isPermaLink=\"true\">" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</guid>\n" .
- "<link>" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</link>\n" .
- "<description>" . esc_html($co{'title'}) . "</description>\n" .
- "<content:encoded>" .
- "<![CDATA[\n";
- my $comment = $co{'comment'};
- foreach my $line (@$comment) {
- $line = decode("utf8", $line, Encode::FB_DEFAULT);
- print "$line<br/>\n";
- }
- print "<br/>\n";
- foreach my $line (@difftree) {
- if (!($line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)([0-9]{0,3})\t(.*)$/)) {
- next;
- }
- my $file = validate_input(unquote($7));
- $file = decode("utf8", $file, Encode::FB_DEFAULT);
- print "$file<br/>\n";
- }
- print "]]>\n" .
- "</content:encoded>\n" .
- "</item>\n";
- }
- print "</channel></rss>";
-}
-
-sub git_opml {
- my @list = git_read_projects();
-
- print $cgi->header(-type => 'text/xml', -charset => 'utf-8');
- print "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n".
- "<opml version=\"1.0\">\n".
- "<head>".
- " <title>Git OPML Export</title>\n".
- "</head>\n".
- "<body>\n".
- "<outline text=\"git RSS feeds\">\n";
-
- foreach my $pr (@list) {
- my %proj = %$pr;
- my $head = git_read_head($proj{'path'});
- if (!defined $head) {
- next;
- }
- $ENV{'GIT_DIR'} = "$projectroot/$proj{'path'}";
- my %co = git_read_commit($head);
- if (!%co) {
- next;
- }
-
- my $path = esc_html(chop_str($proj{'path'}, 25, 5));
- my $rss = "$my_url?p=$proj{'path'};a=rss";
- my $html = "$my_url?p=$proj{'path'};a=summary";
- print "<outline type=\"rss\" text=\"$path\" title=\"$path\" xmlUrl=\"$rss\" htmlUrl=\"$html\"/>\n";
- }
- print "</outline>\n".
- "</body>\n".
- "</opml>\n";
-}
-
-sub git_log {
- my $head = git_read_head($project);
- if (!defined $hash) {
- $hash = $head;
- }
- if (!defined $page) {
- $page = 0;
- }
- my $refs = read_info_ref();
- git_header_html();
- print "<div class=\"page_nav\">\n";
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash")}, "shortlog") .
- " | log" .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$hash;hb=$hash")}, "tree") . "<br/>\n";
-
- my $limit = sprintf("--max-count=%i", (100 * ($page+1)));
- open my $fd, "-|", "$gitbin/git-rev-list $limit $hash" or die_error(undef, "Open failed.");
- my (@revlist) = map { chomp; $_ } <$fd>;
- close $fd;
-
- if ($hash ne $head || $page) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "HEAD");
- } else {
- print "HEAD";
- }
- if ($page > 0) {
- print " ⋅ " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash;pg=" . ($page-1)), -accesskey => "p", -title => "Alt-p"}, "prev");
- } else {
- print " ⋅ prev";
- }
- if ($#revlist >= (100 * ($page+1)-1)) {
- print " ⋅ " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash;pg=" . ($page+1)), -accesskey => "n", -title => "Alt-n"}, "next");
- } else {
- print " ⋅ next";
- }
- print "<br/>\n" .
- "</div>\n";
- if (!@revlist) {
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary"), -class => "title"}, " ") .
- "</div>\n";
- my %co = git_read_commit($hash);
- print "<div class=\"page_body\"> Last change $co{'age_string'}.<br/><br/></div>\n";
- }
- for (my $i = ($page * 100); $i <= $#revlist; $i++) {
- my $commit = $revlist[$i];
- my $ref = "";
- if (defined $refs->{$commit}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$commit}) . "</span>";
- }
- my %co = git_read_commit($commit);
- next if !%co;
- my %ad = date_str($co{'author_epoch'});
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "title"},
- "<span class=\"age\">$co{'age_string'}</span>" . esc_html($co{'title'}) . $ref) . "\n";
- print "</div>\n";
- print "<div class=\"title_text\">\n" .
- "<div class=\"log_link\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
- "<br/>\n" .
- "</div>\n" .
- "<i>" . esc_html($co{'author_name'}) . " [$ad{'rfc2822'}]</i><br/>\n" .
- "</div>\n" .
- "<div class=\"log_body\">\n";
- my $comment = $co{'comment'};
- my $empty = 0;
- foreach my $line (@$comment) {
- if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
- next;
- }
- if ($line eq "") {
- if ($empty) {
- next;
- }
- $empty = 1;
- } else {
- $empty = 0;
- }
- print format_log_line_html($line) . "<br/>\n";
- }
- if (!$empty) {
- print "<br/>\n";
- }
- print "</div>\n";
- }
- git_footer_html();
-}
-
-sub git_commit {
- my %co = git_read_commit($hash);
- if (!%co) {
- die_error(undef, "Unknown commit object.");
- }
- my %ad = date_str($co{'author_epoch'}, $co{'author_tz'});
- my %cd = date_str($co{'committer_epoch'}, $co{'committer_tz'});
-
- my @difftree;
- my $root = "";
- my $parent = $co{'parent'};
- if (!defined $parent) {
- $root = " --root";
- $parent = "";
- }
- open my $fd, "-|", "$gitbin/git-diff-tree -r -M $root $parent $hash" or die_error(undef, "Open failed.");
- @difftree = map { chomp; $_ } <$fd>;
- close $fd or die_error(undef, "Reading diff-tree failed.");
-
- # non-textual hash id's can be cached
- my $expires;
- if ($hash =~ m/^[0-9a-fA-F]{40}$/) {
- $expires = "+1d";
- }
- my $refs = read_info_ref();
- my $ref = "";
- if (defined $refs->{$co{'id'}}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$co{'id'}}) . "</span>";
- }
- git_header_html(undef, $expires);
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash")}, "log") .
- " | commit";
- if (defined $co{'parent'}) {
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash")}, "commitdiff");
- }
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") . "\n" .
- "<br/><br/></div>\n";
- if (defined $co{'parent'}) {
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash"), -class => "title"}, esc_html($co{'title'}) . $ref) . "\n" .
- "</div>\n";
- } else {
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash"), -class => "title"}, esc_html($co{'title'})) . "\n" .
- "</div>\n";
- }
- print "<div class=\"title_text\">\n" .
- "<table cellspacing=\"0\">\n";
- print "<tr><td>author</td><td>" . esc_html($co{'author'}) . "</td></tr>\n".
- "<tr>" .
- "<td></td><td> $ad{'rfc2822'}";
- if ($ad{'hour_local'} < 6) {
- printf(" (<span class=\"atnight\">%02d:%02d</span> %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'});
- } else {
- printf(" (%02d:%02d %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'});
- }
- print "</td>" .
- "</tr>\n";
- print "<tr><td>committer</td><td>" . esc_html($co{'committer'}) . "</td></tr>\n";
- print "<tr><td></td><td> $cd{'rfc2822'}" . sprintf(" (%02d:%02d %s)", $cd{'hour_local'}, $cd{'minute_local'}, $cd{'tz_local'}) . "</td></tr>\n";
- print "<tr><td>commit</td><td class=\"sha1\">$co{'id'}</td></tr>\n";
- print "<tr>" .
- "<td>tree</td>" .
- "<td class=\"sha1\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash"), class => "list"}, $co{'tree'}) .
- "</td>" .
- "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") .
- "</td>" .
- "</tr>\n";
- my $parents = $co{'parents'};
- foreach my $par (@$parents) {
- print "<tr>" .
- "<td>parent</td>" .
- "<td class=\"sha1\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$par"), class => "list"}, $par) . "</td>" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$par")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash;hp=$par")}, "commitdiff") .
- "</td>" .
- "</tr>\n";
- }
- print "</table>".
- "</div>\n";
- print "<div class=\"page_body\">\n";
- my $comment = $co{'comment'};
- my $empty = 0;
- my $signed = 0;
- foreach my $line (@$comment) {
- # print only one empty line
- if ($line eq "") {
- if ($empty || $signed) {
- next;
- }
- $empty = 1;
- } else {
- $empty = 0;
- }
- if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
- $signed = 1;
- print "<span class=\"signoff\">" . esc_html($line) . "</span><br/>\n";
- } else {
- $signed = 0;
- print format_log_line_html($line) . "<br/>\n";
- }
- }
- print "</div>\n";
- print "<div class=\"list_head\">\n";
- if ($#difftree > 10) {
- print(($#difftree + 1) . " files changed:\n");
- }
- print "</div>\n";
- print "<table class=\"diff_tree\">\n";
- my $alternate = 0;
- foreach my $line (@difftree) {
- # ':100644 100644 03b218260e99b78c6df0ed378e59ed9205ccc96d 3b93d5e7cc7f7dd4ebed13a5cc1a4ad976fc94d8 M ls-files.c'
- # ':100644 100644 7f9281985086971d3877aca27704f2aaf9c448ce bc190ebc71bbd923f2b728e505408f5e54bd073a M rev-tree.c'
- if (!($line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)([0-9]{0,3})\t(.*)$/)) {
- next;
- }
- my $from_mode = $1;
- my $to_mode = $2;
- my $from_id = $3;
- my $to_id = $4;
- my $status = $5;
- my $similarity = $6;
- my $file = validate_input(unquote($7));
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- if ($status eq "A") {
- my $mode_chng = "";
- if (S_ISREG(oct $to_mode)) {
- $mode_chng = sprintf(" with mode: %04o", (oct $to_mode) & 0777);
- }
- print "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file)) . "</td>\n" .
- "<td><span class=\"file_status new\">[new " . file_type($to_mode) . "$mode_chng]</span></td>\n" .
- "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, "blob") . "</td>\n";
- } elsif ($status eq "D") {
- print "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file)) . "</td>\n" .
- "<td><span class=\"file_status deleted\">[deleted " . file_type($from_mode). "]</span></td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash;f=$file")}, "blob") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;h=$hash;f=$file")}, "history") .
- "</td>\n"
- } elsif ($status eq "M" || $status eq "T") {
- my $mode_chnge = "";
- if ($from_mode != $to_mode) {
- $mode_chnge = " <span class=\"file_status mode_chnge\">[changed";
- if (((oct $from_mode) & S_IFMT) != ((oct $to_mode) & S_IFMT)) {
- $mode_chnge .= " from " . file_type($from_mode) . " to " . file_type($to_mode);
- }
- if (((oct $from_mode) & 0777) != ((oct $to_mode) & 0777)) {
- if (S_ISREG($from_mode) && S_ISREG($to_mode)) {
- $mode_chnge .= sprintf(" mode: %04o->%04o", (oct $from_mode) & 0777, (oct $to_mode) & 0777);
- } elsif (S_ISREG($to_mode)) {
- $mode_chnge .= sprintf(" mode: %04o", (oct $to_mode) & 0777);
- }
- }
- $mode_chnge .= "]</span>\n";
- }
- print "<td>";
- if ($to_id ne $from_id) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file));
- } else {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file));
- }
- print "</td>\n" .
- "<td>$mode_chnge</td>\n" .
- "<td class=\"link\">";
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, "blob");
- if ($to_id ne $from_id) {
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$file")}, "diff");
- }
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;h=$hash;f=$file")}, "history") . "\n";
- print "</td>\n";
- } elsif ($status eq "R") {
- my ($from_file, $to_file) = split "\t", $file;
- my $mode_chng = "";
- if ($from_mode != $to_mode) {
- $mode_chng = sprintf(", mode: %04o", (oct $to_mode) & 0777);
- }
- print "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$to_file"), -class => "list"}, esc_html($to_file)) . "</td>\n" .
- "<td><span class=\"file_status moved\">[moved from " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash;f=$from_file"), -class => "list"}, esc_html($from_file)) .
- " with " . (int $similarity) . "% similarity$mode_chng]</span></td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$to_file")}, "blob");
- if ($to_id ne $from_id) {
- print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$to_file")}, "diff");
- }
- print "</td>\n";
- }
- print "</tr>\n";
- }
- print "</table>\n";
- git_footer_html();
-}
-
-sub git_blobdiff {
- mkdir($git_temp, 0700);
- git_header_html();
- if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash_base")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash_base")}, "tree") .
- "<br/>\n";
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff_plain;h=$hash;hp=$hash_parent")}, "plain") .
- "</div>\n";
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_base"), -class => "title"}, esc_html($co{'title'})) . "\n" .
- "</div>\n";
- } else {
- print "<div class=\"page_nav\">\n" .
- "<br/><br/></div>\n" .
- "<div class=\"title\">$hash vs $hash_parent</div>\n";
- }
- if (defined $file_name) {
- print "<div class=\"page_path\"><b>/" . esc_html($file_name) . "</b></div>\n";
- }
- print "<div class=\"page_body\">\n" .
- "<div class=\"diff_info\">blob:" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash_parent;hb=$hash_base;f=$file_name")}, $hash_parent) .
- " -> blob:" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash;hb=$hash_base;f=$file_name")}, $hash) .
- "</div>\n";
- git_diff_print($hash_parent, $file_name || $hash_parent, $hash, $file_name || $hash);
- print "</div>";
- git_footer_html();
-}
-
-sub git_blobdiff_plain {
- mkdir($git_temp, 0700);
- print $cgi->header(-type => "text/plain", -charset => 'utf-8');
- git_diff_print($hash_parent, $file_name || $hash_parent, $hash, $file_name || $hash, "plain");
-}
-
-sub git_commitdiff {
- mkdir($git_temp, 0700);
- my %co = git_read_commit($hash);
- if (!%co) {
- die_error(undef, "Unknown commit object.");
- }
- if (!defined $hash_parent) {
- $hash_parent = $co{'parent'};
- }
- open my $fd, "-|", "$gitbin/git-diff-tree -r $hash_parent $hash" or die_error(undef, "Open failed.");
- my (@difftree) = map { chomp; $_ } <$fd>;
- close $fd or die_error(undef, "Reading diff-tree failed.");
-
- # non-textual hash id's can be cached
- my $expires;
- if ($hash =~ m/^[0-9a-fA-F]{40}$/) {
- $expires = "+1d";
- }
- my $refs = read_info_ref();
- my $ref = "";
- if (defined $refs->{$co{'id'}}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$co{'id'}}) . "</span>";
- }
- git_header_html(undef, $expires);
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash")}, "commit") .
- " | commitdiff" .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") . "<br/>\n";
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff_plain;h=$hash;hp=$hash_parent")}, "plain") . "\n" .
- "</div>\n";
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash"), -class => "title"}, esc_html($co{'title'}) . $ref) . "\n" .
- "</div>\n";
- print "<div class=\"page_body\">\n";
- my $comment = $co{'comment'};
- my $empty = 0;
- my $signed = 0;
- my @log = @$comment;
- # remove first and empty lines after that
- shift @log;
- while (defined $log[0] && $log[0] eq "") {
- shift @log;
- }
- foreach my $line (@log) {
- if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
- next;
- }
- if ($line eq "") {
- if ($empty) {
- next;
- }
- $empty = 1;
- } else {
- $empty = 0;
- }
- print format_log_line_html($line) . "<br/>\n";
- }
- print "<br/>\n";
- foreach my $line (@difftree) {
- # ':100644 100644 03b218260e99b78c6df0ed378e59ed9205ccc96d 3b93d5e7cc7f7dd4ebed13a5cc1a4ad976fc94d8 M ls-files.c'
- # ':100644 100644 7f9281985086971d3877aca27704f2aaf9c448ce bc190ebc71bbd923f2b728e505408f5e54bd073a M rev-tree.c'
- $line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/;
- my $from_mode = $1;
- my $to_mode = $2;
- my $from_id = $3;
- my $to_id = $4;
- my $status = $5;
- my $file = validate_input(unquote($6));
- if ($status eq "A") {
- print "<div class=\"diff_info\">" . file_type($to_mode) . ":" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, $to_id) . "(new)" .
- "</div>\n";
- git_diff_print(undef, "/dev/null", $to_id, "b/$file");
- } elsif ($status eq "D") {
- print "<div class=\"diff_info\">" . file_type($from_mode) . ":" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash;f=$file")}, $from_id) . "(deleted)" .
- "</div>\n";
- git_diff_print($from_id, "a/$file", undef, "/dev/null");
- } elsif ($status eq "M") {
- if ($from_id ne $to_id) {
- print "<div class=\"diff_info\">" .
- file_type($from_mode) . ":" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash;f=$file")}, $from_id) .
- " -> " .
- file_type($to_mode) . ":" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, $to_id);
- print "</div>\n";
- git_diff_print($from_id, "a/$file", $to_id, "b/$file");
- }
- }
- }
- print "<br/>\n" .
- "</div>";
- git_footer_html();
-}
-
-sub git_commitdiff_plain {
- mkdir($git_temp, 0700);
- open my $fd, "-|", "$gitbin/git-diff-tree -r $hash_parent $hash" or die_error(undef, "Open failed.");
- my (@difftree) = map { chomp; $_ } <$fd>;
- close $fd or die_error(undef, "Reading diff-tree failed.");
-
- # try to figure out the next tag after this commit
- my $tagname;
- my $refs = read_info_ref("tags");
- open $fd, "-|", "$gitbin/git-rev-list HEAD";
- chomp (my (@commits) = <$fd>);
- close $fd;
- foreach my $commit (@commits) {
- if (defined $refs->{$commit}) {
- $tagname = $refs->{$commit}
- }
- if ($commit eq $hash) {
- last;
- }
- }
-
- print $cgi->header(-type => "text/plain", -charset => 'utf-8', '-content-disposition' => "inline; filename=\"git-$hash.patch\"");
- my %co = git_read_commit($hash);
- my %ad = date_str($co{'author_epoch'}, $co{'author_tz'});
- my $comment = $co{'comment'};
- print "From: $co{'author'}\n" .
- "Date: $ad{'rfc2822'} ($ad{'tz_local'})\n".
- "Subject: $co{'title'}\n";
- if (defined $tagname) {
- print "X-Git-Tag: $tagname\n";
- }
- print "X-Git-Url: $my_url?p=$project;a=commitdiff;h=$hash\n" .
- "\n";
-
- foreach my $line (@$comment) {;
- print "$line\n";
- }
- print "---\n\n";
-
- foreach my $line (@difftree) {
- $line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/;
- my $from_id = $3;
- my $to_id = $4;
- my $status = $5;
- my $file = $6;
- if ($status eq "A") {
- git_diff_print(undef, "/dev/null", $to_id, "b/$file", "plain");
- } elsif ($status eq "D") {
- git_diff_print($from_id, "a/$file", undef, "/dev/null", "plain");
- } elsif ($status eq "M") {
- git_diff_print($from_id, "a/$file", $to_id, "b/$file", "plain");
- }
- }
-}
-
-sub git_history {
- if (!defined $hash) {
- $hash = git_read_head($project);
- }
- my %co = git_read_commit($hash);
- if (!%co) {
- die_error(undef, "Unknown commit object.");
- }
- my $refs = read_info_ref();
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") .
- "<br/><br/>\n" .
- "</div>\n";
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash"), -class => "title"}, esc_html($co{'title'})) . "\n" .
- "</div>\n";
- print "<div class=\"page_path\"><b>/" . esc_html($file_name) . "</b><br/></div>\n";
-
- open my $fd, "-|", "$gitbin/git-rev-list $hash | $gitbin/git-diff-tree -r --stdin -- \'$file_name\'";
- my $commit;
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- while (my $line = <$fd>) {
- if ($line =~ m/^([0-9a-fA-F]{40})/){
- $commit = $1;
- next;
- }
- if ($line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/ && (defined $commit)) {
- my %co = git_read_commit($commit);
- if (!%co) {
- next;
- }
- my $ref = "";
- if (defined $refs->{$commit}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$commit}) . "</span>";
- }
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
- "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 3)) . "</i></td>\n" .
- "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list"}, "<b>" .
- esc_html(chop_str($co{'title'}, 50)) . "$ref</b>") . "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;hb=$commit;f=$file_name")}, "blob");
- my $blob = git_get_hash_by_path($hash, $file_name);
- my $blob_parent = git_get_hash_by_path($commit, $file_name);
- if (defined $blob && defined $blob_parent && $blob ne $blob_parent) {
- print " | " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$blob;hp=$blob_parent;hb=$commit;f=$file_name")},
- "diff to current");
- }
- print "</td>\n" .
- "</tr>\n";
- undef $commit;
- }
- }
- print "</table>\n";
- close $fd;
- git_footer_html();
-}
-
-sub git_search {
- if (!defined $searchtext) {
- die_error("", "Text field empty.");
- }
- if (!defined $hash) {
- $hash = git_read_head($project);
- }
- my %co = git_read_commit($hash);
- if (!%co) {
- die_error(undef, "Unknown commit object.");
- }
- # pickaxe may take all resources of your box and run for several minutes
- # with every query - so decide by yourself how public you make this feature :)
- my $commit_search = 1;
- my $author_search = 0;
- my $committer_search = 0;
- my $pickaxe_search = 0;
- if ($searchtext =~ s/^author\\://i) {
- $author_search = 1;
- } elsif ($searchtext =~ s/^committer\\://i) {
- $committer_search = 1;
- } elsif ($searchtext =~ s/^pickaxe\\://i) {
- $commit_search = 0;
- $pickaxe_search = 1;
- }
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary;h=$hash")}, "summary") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "shortlog") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") .
- "<br/><br/>\n" .
- "</div>\n";
-
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash"), -class => "title"}, esc_html($co{'title'})) . "\n" .
- "</div>\n";
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- if ($commit_search) {
- $/ = "\0";
- open my $fd, "-|", "$gitbin/git-rev-list --header --parents $hash" or next;
- while (my $commit_text = <$fd>) {
- if (!grep m/$searchtext/i, $commit_text) {
- next;
- }
- if ($author_search && !grep m/\nauthor .*$searchtext/i, $commit_text) {
- next;
- }
- if ($committer_search && !grep m/\ncommitter .*$searchtext/i, $commit_text) {
- next;
- }
- my @commit_lines = split "\n", $commit_text;
- my %co = git_read_commit(undef, \@commit_lines);
- if (!%co) {
- next;
- }
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
- "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}"), -class => "list"}, "<b>" . esc_html(chop_str($co{'title'}, 50)) . "</b><br/>");
- my $comment = $co{'comment'};
- foreach my $line (@$comment) {
- if ($line =~ m/^(.*)($searchtext)(.*)$/i) {
- my $lead = esc_html($1) || "";
- $lead = chop_str($lead, 30, 10);
- my $match = esc_html($2) || "";
- my $trail = esc_html($3) || "";
- $trail = chop_str($trail, 30, 10);
- my $text = "$lead<span class=\"match\">$match</span>$trail";
- print chop_str($text, 80, 5) . "<br/>\n";
- }
- }
- print "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$co{'id'}")}, "tree");
- print "</td>\n" .
- "</tr>\n";
- }
- close $fd;
- }
-
- if ($pickaxe_search) {
- $/ = "\n";
- open my $fd, "-|", "$gitbin/git-rev-list $hash | $gitbin/git-diff-tree -r --stdin -S\'$searchtext\'";
- undef %co;
- my @files;
- while (my $line = <$fd>) {
- if (%co && $line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/) {
- my %set;
- $set{'file'} = $6;
- $set{'from_id'} = $3;
- $set{'to_id'} = $4;
- $set{'id'} = $set{'to_id'};
- if ($set{'id'} =~ m/0{40}/) {
- $set{'id'} = $set{'from_id'};
- }
- if ($set{'id'} =~ m/0{40}/) {
- next;
- }
- push @files, \%set;
- } elsif ($line =~ m/^([0-9a-fA-F]{40})$/){
- if (%co) {
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
- "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}"), -class => "list"}, "<b>" .
- esc_html(chop_str($co{'title'}, 50)) . "</b><br/>");
- while (my $setref = shift @files) {
- my %set = %$setref;
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$set{'id'};hb=$co{'id'};f=$set{'file'}"), class => "list"},
- "<span class=\"match\">" . esc_html($set{'file'}) . "</span>") .
- "<br/>\n";
- }
- print "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$co{'id'}")}, "tree");
- print "</td>\n" .
- "</tr>\n";
- }
- %co = git_read_commit($1);
- }
- }
- close $fd;
- }
- print "</table>\n";
- git_footer_html();
-}
-
-sub git_shortlog {
- my $head = git_read_head($project);
- if (!defined $hash) {
- $hash = $head;
- }
- if (!defined $page) {
- $page = 0;
- }
- my $refs = read_info_ref();
- git_header_html();
- print "<div class=\"page_nav\">\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, "summary") .
- " | shortlog" .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$hash")}, "log") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash")}, "commitdiff") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$hash;hb=$hash")}, "tree") . "<br/>\n";
-
- my $limit = sprintf("--max-count=%i", (100 * ($page+1)));
- open my $fd, "-|", "$gitbin/git-rev-list $limit $hash" or die_error(undef, "Open failed.");
- my (@revlist) = map { chomp; $_ } <$fd>;
- close $fd;
-
- if ($hash ne $head || $page) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "HEAD");
- } else {
- print "HEAD";
- }
- if ($page > 0) {
- print " ⋅ " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash;pg=" . ($page-1)), -accesskey => "p", -title => "Alt-p"}, "prev");
- } else {
- print " ⋅ prev";
- }
- if ($#revlist >= (100 * ($page+1)-1)) {
- print " ⋅ " .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash;pg=" . ($page+1)), -accesskey => "n", -title => "Alt-n"}, "next");
- } else {
- print " ⋅ next";
- }
- print "<br/>\n" .
- "</div>\n";
- print "<div>\n" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary"), -class => "title"}, " ") .
- "</div>\n";
- print "<table cellspacing=\"0\">\n";
- my $alternate = 0;
- for (my $i = ($page * 100); $i <= $#revlist; $i++) {
- my $commit = $revlist[$i];
- my $ref = "";
- if (defined $refs->{$commit}) {
- $ref = " <span class=\"tag\">" . esc_html($refs->{$commit}) . "</span>";
- }
- my %co = git_read_commit($commit);
- my %ad = date_str($co{'author_epoch'});
- if ($alternate) {
- print "<tr class=\"dark\">\n";
- } else {
- print "<tr class=\"light\">\n";
- }
- $alternate ^= 1;
- print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
- "<td><i>" . esc_html(chop_str($co{'author_name'}, 10)) . "</i></td>\n" .
- "<td>";
- if (length($co{'title_short'}) < length($co{'title'})) {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list", -title => "$co{'title'}"},
- "<b>" . esc_html($co{'title_short'}) . "$ref</b>");
- } else {
- print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list"},
- "<b>" . esc_html($co{'title_short'}) . "$ref</b>");
- }
- print "</td>\n" .
- "<td class=\"link\">" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
- " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
- "</td>\n" .
- "</tr>";
- }
- if ($#revlist >= (100 * ($page+1)-1)) {
- print "<tr>\n" .
- "<td>" .
- $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash;pg=" . ($page+1)), -title => "Alt-n"}, "next") .
- "</td>\n" .
- "</tr>\n";
- }
- print "</table\n>";
- git_footer_html();
-}
div.page_body {
padding: 8px;
+ font-family: monospace;
}
div.title, a.title {
padding: 6px 0px;
border: solid #d9d8d1;
border-width: 0px 0px 1px;
+ font-family: monospace;
}
div.log_body {
padding: 8px 4px;
}
-table.project_list, table.diff_tree {
+table.project_list {
border-spacing: 0;
}
+table.diff_tree {
+ border-spacing: 0;
+ font-family: monospace;
+}
+
table.blame {
border-collapse: collapse;
}
background-color: #f6f6f0;
}
+tr.dark2 {
+ background-color: #f6f6f0;
+}
+
tr.dark:hover {
background-color: #edece6;
}
vertical-align: top;
}
-td.link {
+td.link, td.selflink {
padding: 2px 5px;
font-family: sans-serif;
font-size: 10px;
}
+td.selflink {
+ padding-right: 0px;
+}
+
td.sha1 {
font-family: monospace;
}
background-color: yellow;
}
+td.current_head {
+ text-decoration: underline;
+}
+
table.diff_tree span.file_status.new {
color: #008000;
}
--- /dev/null
+#!/usr/bin/perl
+
+# gitweb - simple web interface to track changes in git repositories
+#
+# (C) 2005-2006, Kay Sievers <kay.sievers@vrfy.org>
+# (C) 2005, Christian Gierke
+#
+# This program is licensed under the GPLv2
+
+use strict;
+use warnings;
+use CGI qw(:standard :escapeHTML -nosticky);
+use CGI::Util qw(unescape);
+use CGI::Carp qw(fatalsToBrowser);
+use Encode;
+use Fcntl ':mode';
+use File::Find qw();
+binmode STDOUT, ':utf8';
+
+our $cgi = new CGI;
+our $version = "++GIT_VERSION++";
+our $my_url = $cgi->url();
+our $my_uri = $cgi->url(-absolute => 1);
+
+# core git executable to use
+# this can just be "git" if your webserver has a sensible PATH
+our $GIT = "++GIT_BINDIR++/git";
+
+# absolute fs-path which will be prepended to the project path
+#our $projectroot = "/pub/scm";
+our $projectroot = "++GITWEB_PROJECTROOT++";
+
+# location for temporary files needed for diffs
+our $git_temp = "/tmp/gitweb";
+
+# target of the home link on top of all pages
+our $home_link = $my_uri;
+
+# name of your site or organization to appear in page titles
+# replace this with something more descriptive for clearer bookmarks
+our $site_name = "++GITWEB_SITENAME++" || $ENV{'SERVER_NAME'} || "Untitled";
+
+# html text to include at home page
+our $home_text = "++GITWEB_HOMETEXT++";
+
+# URI of default stylesheet
+our $stylesheet = "++GITWEB_CSS++";
+# URI of GIT logo
+our $logo = "++GITWEB_LOGO++";
+
+# source of projects list
+our $projects_list = "++GITWEB_LIST++";
+
+# default blob_plain mimetype and default charset for text/plain blob
+our $default_blob_plain_mimetype = 'text/plain';
+our $default_text_plain_charset = undef;
+
+# file to use for guessing MIME types before trying /etc/mime.types
+# (relative to the current git repository)
+our $mimetypes_file = undef;
+
+our $GITWEB_CONFIG = $ENV{'GITWEB_CONFIG'} || "++GITWEB_CONFIG++";
+require $GITWEB_CONFIG if -e $GITWEB_CONFIG;
+
+# version of the core git binary
+our $git_version = qx($GIT --version) =~ m/git version (.*)$/ ? $1 : "unknown";
+
+$projects_list ||= $projectroot;
+if (! -d $git_temp) {
+ mkdir($git_temp, 0700) || die_error(undef, "Couldn't mkdir $git_temp");
+}
+
+# ======================================================================
+# input validation and dispatch
+our $action = $cgi->param('a');
+if (defined $action) {
+ if ($action =~ m/[^0-9a-zA-Z\.\-_]/) {
+ die_error(undef, "Invalid action parameter");
+ }
+ # action which does not check rest of parameters
+ if ($action eq "opml") {
+ git_opml();
+ exit;
+ }
+}
+
+our $project = ($cgi->param('p') || $ENV{'PATH_INFO'});
+if (defined $project) {
+ $project =~ s|^/||;
+ $project =~ s|/$||;
+}
+if (defined $project && $project) {
+ if (!validate_input($project)) {
+ die_error(undef, "Invalid project parameter");
+ }
+ if (!(-d "$projectroot/$project")) {
+ die_error(undef, "No such directory");
+ }
+ if (!(-e "$projectroot/$project/HEAD")) {
+ die_error(undef, "No such project");
+ }
+ $ENV{'GIT_DIR'} = "$projectroot/$project";
+} else {
+ git_project_list();
+ exit;
+}
+
+our $file_name = $cgi->param('f');
+if (defined $file_name) {
+ if (!validate_input($file_name)) {
+ die_error(undef, "Invalid file parameter");
+ }
+}
+
+our $hash = $cgi->param('h');
+if (defined $hash) {
+ if (!validate_input($hash)) {
+ die_error(undef, "Invalid hash parameter");
+ }
+}
+
+our $hash_parent = $cgi->param('hp');
+if (defined $hash_parent) {
+ if (!validate_input($hash_parent)) {
+ die_error(undef, "Invalid hash parent parameter");
+ }
+}
+
+our $hash_base = $cgi->param('hb');
+if (defined $hash_base) {
+ if (!validate_input($hash_base)) {
+ die_error(undef, "Invalid hash base parameter");
+ }
+}
+
+our $page = $cgi->param('pg');
+if (defined $page) {
+ if ($page =~ m/[^0-9]$/) {
+ die_error(undef, "Invalid page parameter");
+ }
+}
+
+our $searchtext = $cgi->param('s');
+if (defined $searchtext) {
+ if ($searchtext =~ m/[^a-zA-Z0-9_\.\/\-\+\:\@ ]/) {
+ die_error(undef, "Invalid search parameter");
+ }
+ $searchtext = quotemeta $searchtext;
+}
+
+# dispatch
+my %actions = (
+ "blame" => \&git_blame2,
+ "blobdiff" => \&git_blobdiff,
+ "blobdiff_plain" => \&git_blobdiff_plain,
+ "blob" => \&git_blob,
+ "blob_plain" => \&git_blob_plain,
+ "commitdiff" => \&git_commitdiff,
+ "commitdiff_plain" => \&git_commitdiff_plain,
+ "commit" => \&git_commit,
+ "heads" => \&git_heads,
+ "history" => \&git_history,
+ "log" => \&git_log,
+ "rss" => \&git_rss,
+ "search" => \&git_search,
+ "shortlog" => \&git_shortlog,
+ "summary" => \&git_summary,
+ "tag" => \&git_tag,
+ "tags" => \&git_tags,
+ "tree" => \&git_tree,
+);
+
+$action = 'summary' if (!defined($action));
+if (!defined($actions{$action})) {
+ die_error(undef, "Unknown action");
+}
+$actions{$action}->();
+exit;
+
+## ======================================================================
+## validation, quoting/unquoting and escaping
+
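+# check an input parameter for sanity: a full sha1 is passed through;
+# anything with empty, "." or ".." path components, or with characters
+# outside a conservative whitelist, is rejected (returns undef)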
+sub validate_input {
+ my $input = shift;
+
+ if ($input =~ m/^[0-9a-fA-F]{40}$/) {
+ return $input;
+ }
+ if ($input =~ m/(^|\/)(|\.|\.\.)($|\/)/) {
+ return undef;
+ }
+ if ($input =~ m/[^a-zA-Z0-9_\x80-\xff\ \t\.\/\-\+\#\~\%]/) {
+ return undef;
+ }
+ return $input;
+}
+
+# quote unsafe chars, but keep the slash, even when it's not
+# correct, but quoted slashes look too horrible in bookmarks
+sub esc_param {
+ my $str = shift;
+ $str =~ s/([^A-Za-z0-9\-_.~();\/;?:@&=])/sprintf("%%%02X", ord($1))/eg;
+ $str =~ s/\+/%2B/g;
+ $str =~ s/ /\+/g;
+ return $str;
+}
+
+# replace invalid utf8 character with SUBSTITUTION sequence
+sub esc_html {
+ my $str = shift;
+ $str = decode("utf8", $str, Encode::FB_DEFAULT);
+ $str = escapeHTML($str);
+ $str =~ s/\014/^L/g; # escape FORM FEED (FF) character (e.g. in COPYING file)
+ return $str;
+}
+
+# git may return quoted and escaped filenames
+sub unquote {
+ my $str = shift;
+ if ($str =~ m/^"(.*)"$/) {
+ $str = $1;
+ $str =~ s/\\([0-7]{1,3})/chr(oct($1))/eg;
+ }
+ return $str;
+}
+
+# escape tabs (convert tabs to spaces)
+sub untabify {
+ my $line = shift;
+
+ while ((my $pos = index($line, "\t")) != -1) {
+ if (my $count = (8 - ($pos % 8))) {
+ my $spaces = ' ' x $count;
+ $line =~ s/\t/$spaces/;
+ }
+ }
+
+ return $line;
+}
+
+## ----------------------------------------------------------------------
+## HTML aware string manipulation
+
+sub chop_str {
+ my $str = shift;
+ my $len = shift;
+ my $add_len = shift || 10;
+
+ # allow only $len chars, but don't cut a word if it would fit in $add_len
+ # if it doesn't fit, cut it if it's still longer than the dots we would add
+ $str =~ m/^(.{0,$len}[^ \/\-_:\.@]{0,$add_len})(.*)/;
+ my $body = $1;
+ my $tail = $2;
+ if (length($tail) > 4) {
+ $tail = " ...";
+ $body =~ s/&[^;]*$//; # remove chopped character entities
+ }
+ return "$body$tail";
+}
+
+## ----------------------------------------------------------------------
+## functions returning short strings
+
+# CSS class for given age value (in seconds)
+sub age_class {
+ my $age = shift;
+
+ if ($age < 60*60*2) {
+ return "age0";
+ } elsif ($age < 60*60*24*2) {
+ return "age1";
+ } else {
+ return "age2";
+ }
+}
+
+# convert age in seconds to "nn units ago" string
+sub age_string {
+ my $age = shift;
+ my $age_str;
+
+ if ($age > 60*60*24*365*2) {
+ $age_str = (int $age/60/60/24/365);
+ $age_str .= " years ago";
+ } elsif ($age > 60*60*24*(365/12)*2) {
+ $age_str = int $age/60/60/24/(365/12);
+ $age_str .= " months ago";
+ } elsif ($age > 60*60*24*7*2) {
+ $age_str = int $age/60/60/24/7;
+ $age_str .= " weeks ago";
+ } elsif ($age > 60*60*24*2) {
+ $age_str = int $age/60/60/24;
+ $age_str .= " days ago";
+ } elsif ($age > 60*60*2) {
+ $age_str = int $age/60/60;
+ $age_str .= " hours ago";
+ } elsif ($age > 60*2) {
+ $age_str = int $age/60;
+ $age_str .= " min ago";
+ } elsif ($age > 2) {
+ $age_str = int $age;
+ $age_str .= " sec ago";
+ } else {
+ $age_str .= " right now";
+ }
+ return $age_str;
+}
+
+# convert file mode in octal to symbolic file mode string
+sub mode_str {
+ my $mode = oct shift;
+
+ if (S_ISDIR($mode & S_IFMT)) {
+ return 'drwxr-xr-x';
+ } elsif (S_ISLNK($mode)) {
+ return 'lrwxrwxrwx';
+ } elsif (S_ISREG($mode)) {
+ # git cares only about the executable bit
+ if ($mode & S_IXUSR) {
+ return '-rwxr-xr-x';
+ } else {
+ return '-rw-r--r--';
+ };
+ } else {
+ return '----------';
+ }
+}
+
+# convert file mode in octal to file type string
+sub file_type {
+ my $mode = oct shift;
+
+ if (S_ISDIR($mode & S_IFMT)) {
+ return "directory";
+ } elsif (S_ISLNK($mode)) {
+ return "symlink";
+ } elsif (S_ISREG($mode)) {
+ return "file";
+ } else {
+ return "unknown";
+ }
+}
+
+## ----------------------------------------------------------------------
+## functions returning short HTML fragments, or transforming HTML fragments
+## which don't belong to other sections
+
+# format line of commit message or tag comment
+sub format_log_line_html {
+ my $line = shift;
+
+ $line = esc_html($line);
+ $line =~ s/ /&nbsp;/g;
+ if ($line =~ m/([0-9a-fA-F]{40})/) {
+ my $hash_text = $1;
+ if (git_get_type($hash_text) eq "commit") {
+ my $link = $cgi->a({-class => "text", -href => "$my_uri?" . esc_param("p=$project;a=commit;h=$hash_text")}, $hash_text);
+ $line =~ s/$hash_text/$link/;
+ }
+ }
+ return $line;
+}
+
+# format marker of refs pointing to given object
+sub git_get_referencing {
+ my ($refs, $id) = @_;
+
+ if (defined $refs->{$id}) {
+ return ' <span class="tag">' . esc_html($refs->{$id}) . '</span>';
+ } else {
+ return "";
+ }
+}
+
+## ----------------------------------------------------------------------
+## git utility subroutines, invoking git commands
+
+# get HEAD ref of given project as hash
+sub git_read_head {
+ my $project = shift;
+ my $oENV = $ENV{'GIT_DIR'};
+ my $retval = undef;
+ $ENV{'GIT_DIR'} = "$projectroot/$project";
+ if (open my $fd, "-|", $GIT, "rev-parse", "--verify", "HEAD") {
+ my $head = <$fd>;
+ close $fd;
+ if (defined $head && $head =~ /^([0-9a-fA-F]{40})$/) {
+ $retval = $1;
+ }
+ }
+ if (defined $oENV) {
+ $ENV{'GIT_DIR'} = $oENV;
+ }
+ return $retval;
+}
+
+# get type of given object
+sub git_get_type {
+ my $hash = shift;
+
+ open my $fd, "-|", $GIT, "cat-file", '-t', $hash or return;
+ my $type = <$fd>;
+ close $fd or return;
+ chomp $type;
+ return $type;
+}
+
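+# read a single gitweb.* value for the current project via git-repo-config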
+sub git_get_project_config {
+ my $key = shift;
+
+ return unless ($key);
+ $key =~ s/^gitweb\.//;
+ return if ($key =~ m/\W/);
+
+ my $val = qx($GIT repo-config --get gitweb.$key);
+ return ($val);
+}
+
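+# same, but interpret the value as a boolean (true/yes/on)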
+sub git_get_project_config_bool {
+ my $val = git_get_project_config (@_);
+ if ($val and $val =~ m/true|yes|on/) {
+ return (1);
+ }
+ return; # implicit false
+}
+
+# get hash of given path at given ref
+sub git_get_hash_by_path {
+ my $base = shift;
+ my $path = shift || return undef;
+
+ my $tree = $base;
+
+ open my $fd, "-|", $GIT, "ls-tree", $base, "--", $path
+ or die_error(undef, "Open git-ls-tree failed");
+ my $line = <$fd>;
+ close $fd or return undef;
+
+ #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c'
+ $line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t(.+)$/;
+ return $3;
+}
+
+## ......................................................................
+## git utility functions, directly accessing git repository
+
+# assumes that PATH is not symref
+sub git_read_hash {
+ my $path = shift;
+
+ open my $fd, "$projectroot/$path" or return undef;
+ my $head = <$fd>;
+ close $fd;
+ chomp $head;
+ if ($head =~ m/^[0-9a-fA-F]{40}$/) {
+ return $head;
+ }
+}
+
+sub git_read_description {
+ my $path = shift;
+
+ open my $fd, "$projectroot/$path/description" or return undef;
+ my $descr = <$fd>;
+ close $fd;
+ chomp $descr;
+ return $descr;
+}
+
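+# build the list of projects: scan $projects_list as a directory for
+# repositories with a HEAD, or parse it as an url-encoded index file
+# of "path owner" lines; returns a list of hashes sorted by path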
+sub git_read_projects {
+ my @list;
+
+ if (-d $projects_list) {
+ # search in directory
+ my $dir = $projects_list;
+ opendir my ($dh), $dir or return undef;
+ while (my $dir = readdir($dh)) {
+ if (-e "$projectroot/$dir/HEAD") {
+ my $pr = {
+ path => $dir,
+ };
+ push @list, $pr
+ }
+ }
+ closedir($dh);
+ } elsif (-f $projects_list) {
+ # read from file(url-encoded):
+ # 'git%2Fgit.git Linus+Torvalds'
+ # 'libs%2Fklibc%2Fklibc.git H.+Peter+Anvin'
+ # 'linux%2Fhotplug%2Fudev.git Greg+Kroah-Hartman'
+ open my ($fd), $projects_list or return undef;
+ while (my $line = <$fd>) {
+ chomp $line;
+ my ($path, $owner) = split ' ', $line;
+ $path = unescape($path);
+ $owner = unescape($owner);
+ if (!defined $path) {
+ next;
+ }
+ if (-e "$projectroot/$path/HEAD") {
+ my $pr = {
+ path => $path,
+ owner => decode("utf8", $owner, Encode::FB_DEFAULT),
+ };
+ push @list, $pr
+ }
+ }
+ close $fd;
+ }
+ @list = sort {$a->{'path'} cmp $b->{'path'}} @list;
+ return @list;
+}
+
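+# read info/refs and return a hashref mapping object ids to the names
+# of the refs (optionally limited to $type, e.g. "tags") pointing at them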
+sub read_info_ref {
+ my $type = shift || "";
+ my %refs;
+ # 5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c refs/tags/v2.6.11
+ # c39ae07f393806ccf406ef966e9a15afc43cc36a refs/tags/v2.6.11^{}
+ open my $fd, "$projectroot/$project/info/refs" or return;
+ while (my $line = <$fd>) {
+ chomp $line;
+ # attention: for $type == "" it saves only last path part of ref name
+ # e.g. from 'refs/heads/jn/gitweb' it would leave only 'gitweb'
+ if ($line =~ m/^([0-9a-fA-F]{40})\t.*$type\/([^\^]+)/) {
+ if (defined $refs{$1}) {
+ $refs{$1} .= " / $2";
+ } else {
+ $refs{$1} = $2;
+ }
+ }
+ }
+ close $fd or return;
+ return \%refs;
+}
+
+## ----------------------------------------------------------------------
+## parse to hash functions
+
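+# expand an epoch (and optional timezone like "-0800") into a hash of
+# date parts, including an rfc2822 string and the local hour/minute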
+sub date_str {
+ my $epoch = shift;
+ my $tz = shift || "-0000";
+
+ my %date;
+ my @months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec");
+ my @days = ("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat");
+ my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($epoch);
+ $date{'hour'} = $hour;
+ $date{'minute'} = $min;
+ $date{'mday'} = $mday;
+ $date{'day'} = $days[$wday];
+ $date{'month'} = $months[$mon];
+ $date{'rfc2822'} = sprintf "%s, %d %s %4d %02d:%02d:%02d +0000", $days[$wday], $mday, $months[$mon], 1900+$year, $hour ,$min, $sec;
+ $date{'mday-time'} = sprintf "%d %s %02d:%02d", $mday, $months[$mon], $hour ,$min;
+
+ $tz =~ m/^([+\-][0-9][0-9])([0-9][0-9])$/;
+ my $local = $epoch + ((int $1 + ($2/60)) * 3600);
+ ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($local);
+ $date{'hour_local'} = $hour;
+ $date{'minute_local'} = $min;
+ $date{'tz_local'} = $tz;
+ return %date;
+}
+
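+# parse an annotated tag object into a hash: object, type, name,
+# author, epoch, tz and comment lines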
+sub git_read_tag {
+ my $tag_id = shift;
+ my %tag;
+ my @comment;
+
+ open my $fd, "-|", $GIT, "cat-file", "tag", $tag_id or return;
+ $tag{'id'} = $tag_id;
+ while (my $line = <$fd>) {
+ chomp $line;
+ if ($line =~ m/^object ([0-9a-fA-F]{40})$/) {
+ $tag{'object'} = $1;
+ } elsif ($line =~ m/^type (.+)$/) {
+ $tag{'type'} = $1;
+ } elsif ($line =~ m/^tag (.+)$/) {
+ $tag{'name'} = $1;
+ } elsif ($line =~ m/^tagger (.*) ([0-9]+) (.*)$/) {
+ $tag{'author'} = $1;
+ $tag{'epoch'} = $2;
+ $tag{'tz'} = $3;
+ } elsif ($line =~ m/--BEGIN/) {
+ push @comment, $line;
+ last;
+ } elsif ($line eq "") {
+ last;
+ }
+ }
+ push @comment, <$fd>;
+ $tag{'comment'} = \@comment;
+ close $fd or return;
+ if (!defined $tag{'name'}) {
+ return
+ };
+ return %tag
+}
+
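+# parse a commit, given either by id or as pre-split header lines, into
+# a hash: id, parents, tree, author/committer data, title, shortened
+# title, comment lines and age strings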
+sub git_read_commit {
+ my $commit_id = shift;
+ my $commit_text = shift;
+
+ my @commit_lines;
+ my %co;
+
+ if (defined $commit_text) {
+ @commit_lines = @$commit_text;
+ } else {
+ $/ = "\0";
+ open my $fd, "-|", $GIT, "rev-list", "--header", "--parents", "--max-count=1", $commit_id or return;
+ @commit_lines = split '\n', <$fd>;
+ close $fd or return;
+ $/ = "\n";
+ pop @commit_lines;
+ }
+ my $header = shift @commit_lines;
+ if (!($header =~ m/^[0-9a-fA-F]{40}/)) {
+ return;
+ }
+ ($co{'id'}, my @parents) = split ' ', $header;
+ $co{'parents'} = \@parents;
+ $co{'parent'} = $parents[0];
+ while (my $line = shift @commit_lines) {
+ last if $line eq "\n";
+ if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) {
+ $co{'tree'} = $1;
+ } elsif ($line =~ m/^author (.*) ([0-9]+) (.*)$/) {
+ $co{'author'} = $1;
+ $co{'author_epoch'} = $2;
+ $co{'author_tz'} = $3;
+ if ($co{'author'} =~ m/^([^<]+) </) {
+ $co{'author_name'} = $1;
+ } else {
+ $co{'author_name'} = $co{'author'};
+ }
+ } elsif ($line =~ m/^committer (.*) ([0-9]+) (.*)$/) {
+ $co{'committer'} = $1;
+ $co{'committer_epoch'} = $2;
+ $co{'committer_tz'} = $3;
+ $co{'committer_name'} = $co{'committer'};
+ $co{'committer_name'} =~ s/ <.*//;
+ }
+ }
+ if (!defined $co{'tree'}) {
+ return;
+ };
+
+ foreach my $title (@commit_lines) {
+ $title =~ s/^ //;
+ if ($title ne "") {
+ $co{'title'} = chop_str($title, 80, 5);
+ # remove leading stuff of merges to make the interesting part visible
+ if (length($title) > 50) {
+ $title =~ s/^Automatic //;
+ $title =~ s/^merge (of|with) /Merge ... /i;
+ if (length($title) > 50) {
+ $title =~ s/(http|rsync):\/\///;
+ }
+ if (length($title) > 50) {
+ $title =~ s/(master|www|rsync)\.//;
+ }
+ if (length($title) > 50) {
+ $title =~ s/kernel.org:?//;
+ }
+ if (length($title) > 50) {
+ $title =~ s/\/pub\/scm//;
+ }
+ }
+ $co{'title_short'} = chop_str($title, 50, 5);
+ last;
+ }
+ }
+ # remove added spaces
+ foreach my $line (@commit_lines) {
+ $line =~ s/^ //;
+ }
+ $co{'comment'} = \@commit_lines;
+
+ my $age = time - $co{'committer_epoch'};
+ $co{'age'} = $age;
+ $co{'age_string'} = age_string($age);
+ my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime($co{'committer_epoch'});
+ if ($age > 60*60*24*7*2) {
+ $co{'age_string_date'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday;
+ $co{'age_string_age'} = $co{'age_string'};
+ } else {
+ $co{'age_string_date'} = $co{'age_string'};
+ $co{'age_string_age'} = sprintf "%4i-%02u-%02i", 1900 + $year, $mon+1, $mday;
+ }
+ return %co;
+}
+
+## ......................................................................
+## parse to array of hashes functions
+
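+# collect the refs under $ref_dir into a list of hashes (id, type,
+# name, epoch, age, ...), sorted by age with the newest first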
+sub git_read_refs {
+ my $ref_dir = shift;
+ my @reflist;
+
+ my @refs;
+ my $pfxlen = length("$projectroot/$project/$ref_dir");
+ File::Find::find(sub {
+ return if (/^\./);
+ if (-f $_) {
+ push @refs, substr($File::Find::name, $pfxlen + 1);
+ }
+ }, "$projectroot/$project/$ref_dir");
+
+ foreach my $ref_file (@refs) {
+ my $ref_id = git_read_hash("$project/$ref_dir/$ref_file");
+ my $type = git_get_type($ref_id) || next;
+ my %ref_item;
+ my %co;
+ $ref_item{'type'} = $type;
+ $ref_item{'id'} = $ref_id;
+ $ref_item{'epoch'} = 0;
+ $ref_item{'age'} = "unknown";
+ if ($type eq "tag") {
+ my %tag = git_read_tag($ref_id);
+ $ref_item{'comment'} = $tag{'comment'};
+ if ($tag{'type'} eq "commit") {
+ %co = git_read_commit($tag{'object'});
+ $ref_item{'epoch'} = $co{'committer_epoch'};
+ $ref_item{'age'} = $co{'age_string'};
+ } elsif (defined($tag{'epoch'})) {
+ my $age = time - $tag{'epoch'};
+ $ref_item{'epoch'} = $tag{'epoch'};
+ $ref_item{'age'} = age_string($age);
+ }
+ $ref_item{'reftype'} = $tag{'type'};
+ $ref_item{'name'} = $tag{'name'};
+ $ref_item{'refid'} = $tag{'object'};
+ } elsif ($type eq "commit"){
+ %co = git_read_commit($ref_id);
+ $ref_item{'reftype'} = "commit";
+ $ref_item{'name'} = $ref_file;
+ $ref_item{'title'} = $co{'title'};
+ $ref_item{'refid'} = $ref_id;
+ $ref_item{'epoch'} = $co{'committer_epoch'};
+ $ref_item{'age'} = $co{'age_string'};
+ } else {
+ $ref_item{'reftype'} = $type;
+ $ref_item{'name'} = $ref_file;
+ $ref_item{'refid'} = $ref_id;
+ }
+
+ push @reflist, \%ref_item;
+ }
+ # sort tags by age
+ @reflist = sort {$b->{'epoch'} <=> $a->{'epoch'}} @reflist;
+ return \@reflist;
+}
+
+## ----------------------------------------------------------------------
+## filesystem-related functions
+
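+# return the owner of a path, taken from the gecos field of the
+# owning user (office/phone parts stripped)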
+sub get_file_owner {
+ my $path = shift;
+
+ my ($dev, $ino, $mode, $nlink, $st_uid, $st_gid, $rdev, $size) = stat($path);
+ my ($name, $passwd, $uid, $gid, $quota, $comment, $gcos, $dir, $shell) = getpwuid($st_uid);
+ if (!defined $gcos) {
+ return undef;
+ }
+ my $owner = $gcos;
+ $owner =~ s/[,;].*$//;
+ return decode("utf8", $owner, Encode::FB_DEFAULT);
+}
+
+## ......................................................................
+## mimetype related functions
+
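+# look up the filename's extension in a mime.types style map file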
+sub mimetype_guess_file {
+ my $filename = shift;
+ my $mimemap = shift;
+ -r $mimemap or return undef;
+
+ my %mimemap;
+ open(MIME, $mimemap) or return undef;
+ while (<MIME>) {
+ my ($mime, $exts) = split(/\t+/);
+ if (defined $exts) {
+ my @exts = split(/\s+/, $exts);
+ foreach my $ext (@exts) {
+ $mimemap{$ext} = $mime;
+ }
+ }
+ }
+ close(MIME);
+
+ $filename =~ /\.(.*?)$/;
+ return $mimemap{$1};
+}
+
+sub mimetype_guess {
+ my $filename = shift;
+ my $mime;
+ $filename =~ /\./ or return undef;
+
+ if ($mimetypes_file) {
+ my $file = $mimetypes_file;
+ #$file =~ m#^/# or $file = "$projectroot/$path/$file";
+ $mime = mimetype_guess_file($filename, $file);
+ }
+ $mime ||= mimetype_guess_file($filename, '/etc/mime.types');
+ return $mime;
+}
+
+sub git_blob_plain_mimetype {
+ my $fd = shift;
+ my $filename = shift;
+
+ if ($filename) {
+ my $mime = mimetype_guess($filename);
+ $mime and return $mime;
+ }
+
+ # just in case
+ return $default_blob_plain_mimetype unless $fd;
+
+ if (-T $fd) {
+ return 'text/plain' .
+ ($default_text_plain_charset ? '; charset='.$default_text_plain_charset : '');
+ } elsif (! $filename) {
+ return 'application/octet-stream';
+ } elsif ($filename =~ m/\.png$/i) {
+ return 'image/png';
+ } elsif ($filename =~ m/\.gif$/i) {
+ return 'image/gif';
+ } elsif ($filename =~ m/\.jpe?g$/i) {
+ return 'image/jpeg';
+ } else {
+ return 'application/octet-stream';
+ }
+}
+
+## ======================================================================
+## functions printing HTML: header, footer, error page
+
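+# print the HTTP header and common page top: doctype, title, stylesheet,
+# optional RSS link, project navigation and the search form; the page is
+# sent as application/xhtml+xml only if the UA explicitly accepts it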
+sub git_header_html {
+ my $status = shift || "200 OK";
+ my $expires = shift;
+
+ my $title = "$site_name git";
+ if (defined $project) {
+ $title .= " - $project";
+ if (defined $action) {
+ $title .= "/$action";
+ if (defined $file_name) {
+ $title .= " - $file_name";
+ if ($action eq "tree" && $file_name !~ m|/$|) {
+ $title .= "/";
+ }
+ }
+ }
+ }
+ my $content_type;
+ # require explicit support from the UA if we are to send the page as
+ # 'application/xhtml+xml', otherwise send it as plain old 'text/html'.
+ # we have to do this because MSIE sometimes globs '*/*', pretending to
+ # support xhtml+xml but choking when it gets what it asked for.
+ if (defined $cgi->http('HTTP_ACCEPT') && $cgi->http('HTTP_ACCEPT') =~ m/(,|;|\s|^)application\/xhtml\+xml(,|;|\s|$)/ && $cgi->Accept('application/xhtml+xml') != 0) {
+ $content_type = 'application/xhtml+xml';
+ } else {
+ $content_type = 'text/html';
+ }
+ print $cgi->header(-type=>$content_type, -charset => 'utf-8', -status=> $status, -expires => $expires);
+ print <<EOF;
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
+<!-- git web interface v$version, (C) 2005-2006, Kay Sievers <kay.sievers\@vrfy.org>, Christian Gierke -->
+<!-- git core binaries version $git_version -->
+<head>
+<meta http-equiv="content-type" content="$content_type; charset=utf-8"/>
+<meta name="robots" content="index, nofollow"/>
+<title>$title</title>
+<link rel="stylesheet" type="text/css" href="$stylesheet"/>
+EOF
+ if (defined $project) {
+ printf('<link rel="alternate" title="%s log" '.
+ 'href="%s" type="application/rss+xml"/>'."\n",
+ esc_param($project),
+ esc_param("$my_uri?p=$project;a=rss"));
+ }
+
+ print "</head>\n" .
+ "<body>\n" .
+ "<div class=\"page_header\">\n" .
+ "<a href=\"http://www.kernel.org/pub/software/scm/git/docs/\" title=\"git documentation\">" .
+ "<img src=\"$logo\" width=\"72\" height=\"27\" alt=\"git\" style=\"float:right; border-width:0px;\"/>" .
+ "</a>\n";
+ print $cgi->a({-href => esc_param($home_link)}, "projects") . " / ";
+ if (defined $project) {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=summary")}, esc_html($project));
+ if (defined $action) {
+ print " / $action";
+ }
+ print "\n";
+ if (!defined $searchtext) {
+ $searchtext = "";
+ }
+ my $search_hash;
+ if (defined $hash_base) {
+ $search_hash = $hash_base;
+ } elsif (defined $hash) {
+ $search_hash = $hash;
+ } else {
+ $search_hash = "HEAD";
+ }
+ $cgi->param("a", "search");
+ $cgi->param("h", $search_hash);
+ print $cgi->startform(-method => "get", -action => $my_uri) .
+ "<div class=\"search\">\n" .
+ $cgi->hidden(-name => "p") . "\n" .
+ $cgi->hidden(-name => "a") . "\n" .
+ $cgi->hidden(-name => "h") . "\n" .
+ $cgi->textfield(-name => "s", -value => $searchtext) . "\n" .
+ "</div>" .
+ $cgi->end_form() . "\n";
+ }
+ print "</div>\n";
+}
+
+sub git_footer_html {
+ print "<div class=\"page_footer\">\n";
+ if (defined $project) {
+ my $descr = git_read_description($project);
+ if (defined $descr) {
+ print "<div class=\"page_footer_text\">" . esc_html($descr) . "</div>\n";
+ }
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=rss"), -class => "rss_logo"}, "RSS") . "\n";
+ } else {
+ print $cgi->a({-href => "$my_uri?" . esc_param("a=opml"), -class => "rss_logo"}, "OPML") . "\n";
+ }
+ print "</div>\n" .
+ "</body>\n" .
+ "</html>";
+}
+
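+# print an error page with the given HTTP status and message, then exit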
+sub die_error {
+ my $status = shift || "403 Forbidden";
+ my $error = shift || "Malformed query, file missing or permission denied";
+
+ git_header_html($status);
+ print "<div class=\"page_body\">\n" .
+ "<br/><br/>\n" .
+ "$status - $error\n" .
+ "<br/>\n" .
+ "</div>\n";
+ git_footer_html();
+ exit;
+}
+
+## ----------------------------------------------------------------------
+## functions printing or outputting HTML: navigation
+
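+# print the "summary | shortlog | log | ..." navigation bar; the current
+# action is shown as plain text, $suppress is left out, and $extra
+# (pager or format links) goes on the second line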
+sub git_page_nav {
+ my ($current, $suppress, $head, $treehead, $treebase, $extra) = @_;
+ $extra = '' if !defined $extra; # pager or formats
+
+ my @navs = qw(summary shortlog log commit commitdiff tree);
+ if ($suppress) {
+ @navs = grep { $_ ne $suppress } @navs;
+ }
+
+ my %arg = map { $_, ''} @navs;
+ if (defined $head) {
+ for (qw(commit commitdiff)) {
+ $arg{$_} = ";h=$head";
+ }
+ if ($current =~ m/^(tree | log | shortlog | commit | commitdiff | search)$/x) {
+ for (qw(shortlog log)) {
+ $arg{$_} = ";h=$head";
+ }
+ }
+ }
+ $arg{tree} .= ";h=$treehead" if defined $treehead;
+ $arg{tree} .= ";hb=$treebase" if defined $treebase;
+
+ print "<div class=\"page_nav\">\n" .
+ (join " | ",
+ map { $_ eq $current
+ ? $_
+ : $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$_$arg{$_}")}, "$_")
+ }
+ @navs);
+ print "<br/>\n$extra<br/>\n" .
+ "</div>\n";
+}
+
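+# return the "HEAD / prev / next" paging links for log-like views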
+sub git_get_paging_nav {
+ my ($action, $hash, $head, $page, $nrevs) = @_;
+ my $paging_nav;
+
+ if ($hash ne $head || $page) {
+ $paging_nav .= $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$action")}, "HEAD");
+ } else {
+ $paging_nav .= "HEAD";
+ }
+
+ if ($page > 0) {
+ $paging_nav .= " ⋅ " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$action;h=$hash;pg=" . ($page-1)),
+ -accesskey => "p", -title => "Alt-p"}, "prev");
+ } else {
+ $paging_nav .= " ⋅ prev";
+ }
+
+ if ($nrevs >= (100 * ($page+1)-1)) {
+ $paging_nav .= " ⋅ " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$action;h=$hash;pg=" . ($page+1)),
+ -accesskey => "n", -title => "Alt-n"}, "next");
+ } else {
+ $paging_nav .= " ⋅ next";
+ }
+
+ return $paging_nav;
+}
+
+## ......................................................................
+## functions printing or outputting HTML: div
+
+sub git_header_div {
+ my ($action, $title, $hash, $hash_base) = @_;
+ my $rest = '';
+
+ $rest .= ";h=$hash" if $hash;
+ $rest .= ";hb=$hash_base" if $hash_base;
+
+ print "<div class=\"header\">\n" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$action$rest"),
+ -class => "title"}, $title ? $title : $action) . "\n" .
+ "</div>\n";
+}
+
+sub git_print_page_path {
+ my $name = shift;
+ my $type = shift;
+
+ if (!defined $name) {
+ print "<div class=\"page_path\"><b>/</b></div>\n";
+ } elsif (defined $type && $type eq 'blob') {
+ print "<div class=\"page_path\"><b>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;f=$file_name")}, esc_html($name)) . "</b><br/></div>\n";
+ } else {
+ print "<div class=\"page_path\"><b>" . esc_html($name) . "</b><br/></div>\n";
+ }
+}
+
+## ......................................................................
+## functions printing large fragments of HTML
+
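+# print a table of commits: age, author, title with ref markers, and
+# commit/commitdiff links, with an optional extra row at the end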
+sub git_shortlog_body {
+ # uses global variable $project
+ my ($revlist, $from, $to, $refs, $extra) = @_;
+ $from = 0 unless defined $from;
+ $to = $#{$revlist} if (!defined $to || $#{$revlist} < $to);
+
+ print "<table class=\"shortlog\" cellspacing=\"0\">\n";
+ my $alternate = 0;
+ for (my $i = $from; $i <= $to; $i++) {
+ my $commit = $revlist->[$i];
+ #my $ref = defined $refs ? git_get_referencing($refs, $commit) : '';
+ my $ref = git_get_referencing($refs, $commit);
+ my %co = git_read_commit($commit);
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ # git_summary() used print "<td><i>$co{'age_string'}</i></td>\n" .
+ print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
+ "<td><i>" . esc_html(chop_str($co{'author_name'}, 10)) . "</i></td>\n" .
+ "<td>";
+ if (length($co{'title_short'}) < length($co{'title'})) {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"),
+ -class => "list", -title => "$co{'title'}"},
+ "<b>" . esc_html($co{'title_short'}) . "$ref</b>");
+ } else {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"),
+ -class => "list"},
+ "<b>" . esc_html($co{'title'}) . "$ref</b>");
+ }
+ print "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") . " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
+ "</td>\n" .
+ "</tr>\n";
+ }
+ if (defined $extra) {
+ print "<tr>\n" .
+ "<td colspan=\"4\">$extra</td>\n" .
+ "</tr>\n";
+ }
+ print "</table>\n";
+}
+
+sub git_tags_body {
+ # uses global variable $project
+ my ($taglist, $from, $to, $extra) = @_;
+ $from = 0 unless defined $from;
+ $to = $#{$taglist} if (!defined $to || $#{$taglist} < $to);
+
+ print "<table class=\"tags\" cellspacing=\"0\">\n";
+ my $alternate = 0;
+ for (my $i = $from; $i <= $to; $i++) {
+ my $entry = $taglist->[$i];
+ my %tag = %$entry;
+ my $comment_lines = $tag{'comment'};
+ my $comment = shift @$comment_lines;
+ my $comment_short;
+ if (defined $comment) {
+ $comment_short = chop_str($comment, 30, 5);
+ }
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td><i>$tag{'age'}</i></td>\n" .
+ "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}"),
+ -class => "list"}, "<b>" . esc_html($tag{'name'}) . "</b>") .
+ "</td>\n" .
+ "<td>";
+ if (defined $comment) {
+ if (length($comment_short) < length($comment)) {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}"),
+ -class => "list", -title => $comment}, $comment_short);
+ } else {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}"),
+ -class => "list"}, $comment);
+ }
+ }
+ print "</td>\n" .
+ "<td class=\"selflink\">";
+ if ($tag{'type'} eq "tag") {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tag;h=$tag{'id'}")}, "tag");
+ } else {
+ print " ";
+ }
+ print "</td>\n" .
+ "<td class=\"link\">" . " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'reftype'};h=$tag{'refid'}")}, $tag{'reftype'});
+ if ($tag{'reftype'} eq "commit") {
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'refid'}")}, "log");
+ } elsif ($tag{'reftype'} eq "blob") {
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$tag{'refid'}")}, "raw");
+ }
+ print "</td>\n" .
+ "</tr>";
+ }
+ if (defined $extra) {
+ print "<tr>\n" .
+ "<td colspan=\"5\">$extra</td>\n" .
+ "</tr>\n";
+ }
+ print "</table>\n";
+}
+
+sub git_heads_body {
+ # uses global variable $project
+ my ($taglist, $head, $from, $to, $extra) = @_;
+ $from = 0 unless defined $from;
+ $to = $#{$taglist} if (!defined $to || $#{$taglist} < $to);
+
+ print "<table class=\"heads\" cellspacing=\"0\">\n";
+ my $alternate = 0;
+ for (my $i = $from; $i <= $to; $i++) {
+ my $entry = $taglist->[$i];
+ my %tag = %$entry;
+ my $curr = $tag{'id'} eq $head;
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td><i>$tag{'age'}</i></td>\n" .
+ ($tag{'id'} eq $head ? "<td class=\"current_head\">" : "<td>") .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}"),
+ -class => "list"}, "<b>" . esc_html($tag{'name'}) . "</b>") .
+ "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$tag{'name'}")}, "shortlog") . " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=log;h=$tag{'name'}")}, "log") .
+ "</td>\n" .
+ "</tr>";
+ }
+ if (defined $extra) {
+ print "<tr>\n" .
+ "<td colspan=\"3\">$extra</td>\n" .
+ "</tr>\n";
+ }
+ print "</table>\n";
+}
+
+## ----------------------------------------------------------------------
+## functions printing large fragments, format as one of arguments
+
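+# write both blobs to temporary files and run diff(1) on them; "plain"
+# format passes the output through, otherwise each line is wrapped in
+# a div classed by its leading character (+, -, @)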
+sub git_diff_print {
+ my $from = shift;
+ my $from_name = shift;
+ my $to = shift;
+ my $to_name = shift;
+ my $format = shift || "html";
+
+ my $from_tmp = "/dev/null";
+ my $to_tmp = "/dev/null";
+ my $pid = $$;
+
+ # create tmp from-file
+ if (defined $from) {
+ $from_tmp = "$git_temp/gitweb_" . $$ . "_from";
+ open my $fd2, "> $from_tmp";
+ open my $fd, "-|", $GIT, "cat-file", "blob", $from;
+ my @file = <$fd>;
+ print $fd2 @file;
+ close $fd2;
+ close $fd;
+ }
+
+ # create tmp to-file
+ if (defined $to) {
+ $to_tmp = "$git_temp/gitweb_" . $$ . "_to";
+ open my $fd2, "> $to_tmp";
+ open my $fd, "-|", $GIT, "cat-file", "blob", $to;
+ my @file = <$fd>;
+ print $fd2 @file;
+ close $fd2;
+ close $fd;
+ }
+
+ open my $fd, "-|", "/usr/bin/diff -u -p -L \'$from_name\' -L \'$to_name\' $from_tmp $to_tmp";
+ if ($format eq "plain") {
+ undef $/;
+ print <$fd>;
+ $/ = "\n";
+ } else {
+ while (my $line = <$fd>) {
+ chomp $line;
+ my $char = substr($line, 0, 1);
+ my $diff_class = "";
+ if ($char eq '+') {
+ $diff_class = " add";
+ } elsif ($char eq "-") {
+ $diff_class = " rem";
+ } elsif ($char eq "@") {
+ $diff_class = " chunk_header";
+ } elsif ($char eq "\\") {
+ # skip errors
+ next;
+ }
+ $line = untabify($line);
+ print "<div class=\"diff$diff_class\">" . esc_html($line) . "</div>\n";
+ }
+ }
+ close $fd;
+
+ if (defined $from) {
+ unlink($from_tmp);
+ }
+ if (defined $to) {
+ unlink($to_tmp);
+ }
+}
+
+
+## ======================================================================
+## ======================================================================
+## actions
+
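+# the projects index page: list every readable project with description,
+# owner and age, sortable via the "o" parameter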
+sub git_project_list {
+ my $order = $cgi->param('o');
+ if (defined $order && $order !~ m/project|descr|owner|age/) {
+ die_error(undef, "Unknown order parameter");
+ }
+
+ my @list = git_read_projects();
+ my @projects;
+ if (!@list) {
+ die_error(undef, "No projects found");
+ }
+ foreach my $pr (@list) {
+ my $head = git_read_head($pr->{'path'});
+ if (!defined $head) {
+ next;
+ }
+ $ENV{'GIT_DIR'} = "$projectroot/$pr->{'path'}";
+ my %co = git_read_commit($head);
+ if (!%co) {
+ next;
+ }
+ $pr->{'commit'} = \%co;
+ if (!defined $pr->{'descr'}) {
+ my $descr = git_read_description($pr->{'path'}) || "";
+ $pr->{'descr'} = chop_str($descr, 25, 5);
+ }
+ if (!defined $pr->{'owner'}) {
+ $pr->{'owner'} = get_file_owner("$projectroot/$pr->{'path'}") || "";
+ }
+ push @projects, $pr;
+ }
+
+ git_header_html();
+ if (-f $home_text) {
+ print "<div class=\"index_include\">\n";
+ open (my $fd, $home_text);
+ print <$fd>;
+ close $fd;
+ print "</div>\n";
+ }
+ print "<table class=\"project_list\">\n" .
+ "<tr>\n";
+ $order ||= "project";
+ if ($order eq "project") {
+ @projects = sort {$a->{'path'} cmp $b->{'path'}} @projects;
+ print "<th>Project</th>\n";
+ } else {
+ print "<th>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("o=project"),
+ -class => "header"}, "Project") .
+ "</th>\n";
+ }
+ if ($order eq "descr") {
+ @projects = sort {$a->{'descr'} cmp $b->{'descr'}} @projects;
+ print "<th>Description</th>\n";
+ } else {
+ print "<th>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("o=descr"),
+ -class => "header"}, "Description") .
+ "</th>\n";
+ }
+ if ($order eq "owner") {
+ @projects = sort {$a->{'owner'} cmp $b->{'owner'}} @projects;
+ print "<th>Owner</th>\n";
+ } else {
+ print "<th>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("o=owner"),
+ -class => "header"}, "Owner") .
+ "</th>\n";
+ }
+ if ($order eq "age") {
+ @projects = sort {$a->{'commit'}{'age'} <=> $b->{'commit'}{'age'}} @projects;
+ print "<th>Last Change</th>\n";
+ } else {
+ print "<th>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("o=age"),
+ -class => "header"}, "Last Change") .
+ "</th>\n";
+ }
+ print "<th></th>\n" .
+ "</tr>\n";
+ my $alternate = 0;
+ foreach my $pr (@projects) {
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=summary"),
+ -class => "list"}, esc_html($pr->{'path'})) . "</td>\n" .
+ "<td>" . esc_html($pr->{'descr'}) . "</td>\n" .
+ "<td><i>" . chop_str($pr->{'owner'}, 15) . "</i></td>\n";
+ print "<td class=\"". age_class($pr->{'commit'}{'age'}) . "\">" .
+ $pr->{'commit'}{'age_string'} . "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=summary")}, "summary") . " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=shortlog")}, "shortlog") . " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$pr->{'path'};a=log")}, "log") .
+ "</td>\n" .
+ "</tr>\n";
+ }
+ print "</table>\n";
+ git_footer_html();
+}
+
+sub git_summary {
+ my $descr = git_read_description($project) || "none";
+ my $head = git_read_head($project);
+ my %co = git_read_commit($head);
+ my %cd = date_str($co{'committer_epoch'}, $co{'committer_tz'});
+
+ my $owner;
+ if (-f $projects_list) {
+ open (my $fd , $projects_list);
+ while (my $line = <$fd>) {
+ chomp $line;
+ my ($pr, $ow) = split ' ', $line;
+ $pr = unescape($pr);
+ $ow = unescape($ow);
+ if ($pr eq $project) {
+ $owner = decode("utf8", $ow, Encode::FB_DEFAULT);
+ last;
+ }
+ }
+ close $fd;
+ }
+ if (!defined $owner) {
+ $owner = get_file_owner("$projectroot/$project");
+ }
+
+ my $refs = read_info_ref();
+ git_header_html();
+ git_page_nav('summary','', $head);
+
+	print "<div class=\"title\">&nbsp;</div>\n";
+ print "<table cellspacing=\"0\">\n" .
+ "<tr><td>description</td><td>" . esc_html($descr) . "</td></tr>\n" .
+ "<tr><td>owner</td><td>$owner</td></tr>\n" .
+ "<tr><td>last change</td><td>$cd{'rfc2822'}</td></tr>\n" .
+ "</table>\n";
+
+ open my $fd, "-|", $GIT, "rev-list", "--max-count=17", git_read_head($project)
+ or die_error(undef, "Open git-rev-list failed");
+ my @revlist = map { chomp; $_ } <$fd>;
+ close $fd;
+ git_header_div('shortlog');
+ git_shortlog_body(\@revlist, 0, 15, $refs,
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog")}, "..."));
+
+ my $taglist = git_read_refs("refs/tags");
+ if (defined @$taglist) {
+ git_header_div('tags');
+ git_tags_body($taglist, 0, 15,
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tags")}, "..."));
+ }
+
+ my $headlist = git_read_refs("refs/heads");
+ if (defined @$headlist) {
+ git_header_div('heads');
+ git_heads_body($headlist, $head, 0, 15,
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=heads")}, "..."));
+ }
+
+ git_footer_html();
+}
+
+sub git_tag {
+ my $head = git_read_head($project);
+ git_header_html();
+ git_page_nav('','', $head,undef,$head);
+ my %tag = git_read_tag($hash);
+ git_header_div('commit', esc_html($tag{'name'}), $hash);
+ print "<div class=\"title_text\">\n" .
+ "<table cellspacing=\"0\">\n" .
+ "<tr>\n" .
+ "<td>object</td>\n" .
+ "<td>" . $cgi->a({-class => "list", -href => "$my_uri?" . esc_param("p=$project;a=$tag{'type'};h=$tag{'object'}")}, $tag{'object'}) . "</td>\n" .
+ "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$tag{'type'};h=$tag{'object'}")}, $tag{'type'}) . "</td>\n" .
+ "</tr>\n";
+ if (defined($tag{'author'})) {
+ my %ad = date_str($tag{'epoch'}, $tag{'tz'});
+ print "<tr><td>author</td><td>" . esc_html($tag{'author'}) . "</td></tr>\n";
+ print "<tr><td></td><td>" . $ad{'rfc2822'} . sprintf(" (%02d:%02d %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'}) . "</td></tr>\n";
+ }
+ print "</table>\n\n" .
+ "</div>\n";
+ print "<div class=\"page_body\">";
+ my $comment = $tag{'comment'};
+ foreach my $line (@$comment) {
+ print esc_html($line) . "<br/>\n";
+ }
+ print "</div>\n";
+ git_footer_html();
+}
+
+sub git_blame2 {
+ my $fd;
+ my $ftype;
+ die_error(undef, "Permission denied") if (!git_get_project_config_bool ('blame'));
+ die_error('404 Not Found', "File name not defined") if (!$file_name);
+ $hash_base ||= git_read_head($project);
+ die_error(undef, "Couldn't find base commit") unless ($hash_base);
+ my %co = git_read_commit($hash_base)
+ or die_error(undef, "Reading commit failed");
+ if (!defined $hash) {
+ $hash = git_get_hash_by_path($hash_base, $file_name, "blob")
+ or die_error(undef, "Error looking up file");
+ }
+ $ftype = git_get_type($hash);
+ if ($ftype !~ "blob") {
+ die_error("400 Bad Request", "Object is not a blob");
+ }
+ open ($fd, "-|", $GIT, "blame", '-l', $file_name, $hash_base)
+ or die_error(undef, "Open git-blame failed");
+ git_header_html();
+ my $formats_nav =
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash;hb=$hash_base;f=$file_name")}, "blob") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;f=$file_name")}, "head");
+ git_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav);
+ git_header_div('commit', esc_html($co{'title'}), $hash_base);
+ git_print_page_path($file_name, $ftype);
+ my @rev_color = (qw(light2 dark2));
+ my $num_colors = scalar(@rev_color);
+ my $current_color = 0;
+ my $last_rev;
+ print "<div class=\"page_body\">\n";
+ print "<table class=\"blame\">\n";
+ print "<tr><th>Commit</th><th>Line</th><th>Data</th></tr>\n";
+ while (<$fd>) {
+ /^([0-9a-fA-F]{40}).*?(\d+)\)\s{1}(\s*.*)/;
+ my $full_rev = $1;
+ my $rev = substr($full_rev, 0, 8);
+ my $lineno = $2;
+ my $data = $3;
+
+ if (!defined $last_rev) {
+ $last_rev = $full_rev;
+ } elsif ($last_rev ne $full_rev) {
+ $last_rev = $full_rev;
+ $current_color = ++$current_color % $num_colors;
+ }
+ print "<tr class=\"$rev_color[$current_color]\">\n";
+ print "<td class=\"sha1\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$full_rev;f=$file_name")}, esc_html($rev)) . "</td>\n";
+ print "<td class=\"linenr\"><a id=\"l$lineno\" href=\"#l$lineno\" class=\"linenr\">" . esc_html($lineno) . "</a></td>\n";
+ print "<td class=\"pre\">" . esc_html($data) . "</td>\n";
+ print "</tr>\n";
+ }
+ print "</table>\n";
+ print "</div>";
+ close $fd or print "Reading blob failed\n";
+ git_footer_html();
+}
+
+sub git_blame {
+ my $fd;
+ die_error('403 Permission denied', "Permission denied") if (!git_get_project_config_bool ('blame'));
+ die_error('404 Not Found', "File name not defined") if (!$file_name);
+ $hash_base ||= git_read_head($project);
+ die_error(undef, "Couldn't find base commit") unless ($hash_base);
+ my %co = git_read_commit($hash_base)
+ or die_error(undef, "Reading commit failed");
+ if (!defined $hash) {
+ $hash = git_get_hash_by_path($hash_base, $file_name, "blob")
+			or die_error(undef, "Error looking up file");
+ }
+ open ($fd, "-|", $GIT, "annotate", '-l', '-t', '-r', $file_name, $hash_base)
+ or die_error(undef, "Open git-annotate failed");
+ git_header_html();
+ my $formats_nav =
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash;hb=$hash_base;f=$file_name")}, "blob") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;f=$file_name")}, "head");
+ git_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav);
+ git_header_div('commit', esc_html($co{'title'}), $hash_base);
+ git_print_page_path($file_name, 'blob');
+ print "<div class=\"page_body\">\n";
+ print <<HTML;
+<table class="blame">
+ <tr>
+ <th>Commit</th>
+ <th>Age</th>
+ <th>Author</th>
+ <th>Line</th>
+ <th>Data</th>
+ </tr>
+HTML
+ my @line_class = (qw(light dark));
+ my $line_class_len = scalar (@line_class);
+ my $line_class_num = $#line_class;
+ while (my $line = <$fd>) {
+ my $long_rev;
+ my $short_rev;
+ my $author;
+ my $time;
+ my $lineno;
+ my $data;
+ my $age;
+ my $age_str;
+ my $age_class;
+
+ chomp $line;
+ $line_class_num = ($line_class_num + 1) % $line_class_len;
+
+ if ($line =~ m/^([0-9a-fA-F]{40})\t\(\s*([^\t]+)\t(\d+) \+\d\d\d\d\t(\d+)\)(.*)$/) {
+ $long_rev = $1;
+ $author = $2;
+ $time = $3;
+ $lineno = $4;
+ $data = $5;
+ } else {
+ print qq( <tr><td colspan="5" class="error">Unable to parse: $line</td></tr>\n);
+ next;
+ }
+ $short_rev = substr ($long_rev, 0, 8);
+ $age = time () - $time;
+ $age_str = age_string ($age);
+		$age_str =~ s/ /&nbsp;/g;
+ $age_class = age_class($age);
+ $author = esc_html ($author);
+		$author =~ s/ /&nbsp;/g;
+
+ $data = untabify($data);
+ $data = esc_html ($data);
+
+ print <<HTML;
+ <tr class="$line_class[$line_class_num]">
+ <td class="sha1"><a href="$my_uri?${\esc_param ("p=$project;a=commit;h=$long_rev")}" class="text">$short_rev..</a></td>
+ <td class="$age_class">$age_str</td>
+ <td>$author</td>
+ <td class="linenr"><a id="$lineno" href="#$lineno" class="linenr">$lineno</a></td>
+ <td class="pre">$data</td>
+ </tr>
+HTML
+ } # while (my $line = <$fd>)
+ print "</table>\n\n";
+ close $fd or print "Reading blob failed.\n";
+ print "</div>";
+ git_footer_html();
+}
+
+sub git_tags {
+ my $head = git_read_head($project);
+ git_header_html();
+ git_page_nav('','', $head,undef,$head);
+ git_header_div('summary', $project);
+
+ my $taglist = git_read_refs("refs/tags");
+ if (defined @$taglist) {
+ git_tags_body($taglist);
+ }
+ git_footer_html();
+}
+
+sub git_heads {
+ my $head = git_read_head($project);
+ git_header_html();
+ git_page_nav('','', $head,undef,$head);
+ git_header_div('summary', $project);
+
+ my $taglist = git_read_refs("refs/heads");
+ if (defined @$taglist) {
+ git_heads_body($taglist, $head);
+ }
+ git_footer_html();
+}
+
+sub git_blob_plain {
+ if (!defined $hash) {
+ if (defined $file_name) {
+ my $base = $hash_base || git_read_head($project);
+ $hash = git_get_hash_by_path($base, $file_name, "blob")
+				or die_error(undef, "Error looking up file");
+ } else {
+ die_error(undef, "No file name defined");
+ }
+ }
+ my $type = shift;
+ open my $fd, "-|", $GIT, "cat-file", "blob", $hash
+ or die_error(undef, "Couldn't cat $file_name, $hash");
+
+ $type ||= git_blob_plain_mimetype($fd, $file_name);
+
+ # save as filename, even when no $file_name is given
+ my $save_as = "$hash";
+ if (defined $file_name) {
+ $save_as = $file_name;
+ } elsif ($type =~ m/^text\//) {
+ $save_as .= '.txt';
+ }
+
+ print $cgi->header(-type => "$type", '-content-disposition' => "inline; filename=\"$save_as\"");
+ undef $/;
+ binmode STDOUT, ':raw';
+ print <$fd>;
+ binmode STDOUT, ':utf8'; # as set at the beginning of gitweb.cgi
+ $/ = "\n";
+ close $fd;
+}
+
+sub git_blob {
+ if (!defined $hash) {
+ if (defined $file_name) {
+ my $base = $hash_base || git_read_head($project);
+ $hash = git_get_hash_by_path($base, $file_name, "blob")
+				or die_error(undef, "Error looking up file");
+ } else {
+ die_error(undef, "No file name defined");
+ }
+ }
+ my $have_blame = git_get_project_config_bool ('blame');
+ open my $fd, "-|", $GIT, "cat-file", "blob", $hash
+ or die_error(undef, "Couldn't cat $file_name, $hash");
+ my $mimetype = git_blob_plain_mimetype($fd, $file_name);
+ if ($mimetype !~ m/^text\//) {
+ close $fd;
+ return git_blob_plain($mimetype);
+ }
+ git_header_html();
+ my $formats_nav = '';
+ if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
+ if (defined $file_name) {
+ if ($have_blame) {
+ $formats_nav .= $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;h=$hash;hb=$hash_base;f=$file_name")}, "blame") . " | ";
+ }
+ $formats_nav .=
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$hash;f=$file_name")}, "plain") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;hb=HEAD;f=$file_name")}, "head");
+ } else {
+ $formats_nav .= $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$hash")}, "plain");
+ }
+ git_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav);
+ git_header_div('commit', esc_html($co{'title'}), $hash_base);
+ } else {
+ print "<div class=\"page_nav\">\n" .
+ "<br/><br/></div>\n" .
+ "<div class=\"title\">$hash</div>\n";
+ }
+ git_print_page_path($file_name, "blob");
+ print "<div class=\"page_body\">\n";
+ my $nr;
+ while (my $line = <$fd>) {
+ chomp $line;
+ $nr++;
+ $line = untabify($line);
+ printf "<div class=\"pre\"><a id=\"l%i\" href=\"#l%i\" class=\"linenr\">%4i</a> %s</div>\n", $nr, $nr, $nr, esc_html($line);
+ }
+ close $fd or print "Reading blob failed.\n";
+ print "</div>";
+ git_footer_html();
+}
+
+sub git_tree {
+ if (!defined $hash) {
+ $hash = git_read_head($project);
+ if (defined $file_name) {
+ my $base = $hash_base || $hash;
+ $hash = git_get_hash_by_path($base, $file_name, "tree");
+ }
+ if (!defined $hash_base) {
+ $hash_base = $hash;
+ }
+ }
+ $/ = "\0";
+ open my $fd, "-|", $GIT, "ls-tree", '-z', $hash
+ or die_error(undef, "Open git-ls-tree failed");
+ my @entries = map { chomp; $_ } <$fd>;
+ close $fd or die_error(undef, "Reading tree failed");
+ $/ = "\n";
+
+ my $refs = read_info_ref();
+ my $ref = git_get_referencing($refs, $hash_base);
+ git_header_html();
+ my $base_key = "";
+ my $base = "";
+ my $have_blame = git_get_project_config_bool ('blame');
+ if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
+ $base_key = ";hb=$hash_base";
+ git_page_nav('tree','', $hash_base);
+ git_header_div('commit', esc_html($co{'title'}) . $ref, $hash_base);
+ } else {
+ print "<div class=\"page_nav\">\n";
+ print "<br/><br/></div>\n";
+ print "<div class=\"title\">$hash</div>\n";
+ }
+ if (defined $file_name) {
+ $base = esc_html("$file_name/");
+ }
+ git_print_page_path($file_name, 'tree');
+ print "<div class=\"page_body\">\n";
+ print "<table cellspacing=\"0\">\n";
+ my $alternate = 0;
+ foreach my $line (@entries) {
+ #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c'
+ $line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t(.+)$/;
+ my $t_mode = $1;
+ my $t_type = $2;
+ my $t_hash = $3;
+ my $t_name = validate_input($4);
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td class=\"mode\">" . mode_str($t_mode) . "</td>\n";
+ if ($t_type eq "blob") {
+ print "<td class=\"list\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$t_hash$base_key;f=$base$t_name"), -class => "list"}, esc_html($t_name)) .
+ "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$t_hash$base_key;f=$base$t_name")}, "blob");
+ if ($have_blame) {
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;h=$t_hash$base_key;f=$base$t_name")}, "blame");
+ }
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;h=$t_hash;hb=$hash_base;f=$base$t_name")}, "history") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob_plain;h=$t_hash;f=$base$t_name")}, "raw") .
+ "</td>\n";
+ } elsif ($t_type eq "tree") {
+ print "<td class=\"list\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$t_hash$base_key;f=$base$t_name")}, esc_html($t_name)) .
+ "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$t_hash$base_key;f=$base$t_name")}, "tree") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;hb=$hash_base;f=$base$t_name")}, "history") .
+ "</td>\n";
+ }
+ print "</tr>\n";
+ }
+ print "</table>\n" .
+ "</div>";
+ git_footer_html();
+}
+
+sub git_log {
+ my $head = git_read_head($project);
+ if (!defined $hash) {
+ $hash = $head;
+ }
+ if (!defined $page) {
+ $page = 0;
+ }
+ my $refs = read_info_ref();
+
+ my $limit = sprintf("--max-count=%i", (100 * ($page+1)));
+ open my $fd, "-|", $GIT, "rev-list", $limit, $hash
+ or die_error(undef, "Open git-rev-list failed");
+ my @revlist = map { chomp; $_ } <$fd>;
+ close $fd;
+
+ my $paging_nav = git_get_paging_nav('log', $hash, $head, $page, $#revlist);
+
+ git_header_html();
+ git_page_nav('log','', $hash,undef,undef, $paging_nav);
+
+ if (!@revlist) {
+ my %co = git_read_commit($hash);
+
+ git_header_div('summary', $project);
+ print "<div class=\"page_body\"> Last change $co{'age_string'}.<br/><br/></div>\n";
+ }
+ for (my $i = ($page * 100); $i <= $#revlist; $i++) {
+ my $commit = $revlist[$i];
+ my $ref = git_get_referencing($refs, $commit);
+ my %co = git_read_commit($commit);
+ next if !%co;
+ my %ad = date_str($co{'author_epoch'});
+ git_header_div('commit',
+ "<span class=\"age\">$co{'age_string'}</span>" .
+ esc_html($co{'title'}) . $ref,
+ $commit);
+ print "<div class=\"title_text\">\n" .
+ "<div class=\"log_link\">\n" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
+ "<br/>\n" .
+ "</div>\n" .
+ "<i>" . esc_html($co{'author_name'}) . " [$ad{'rfc2822'}]</i><br/>\n" .
+ "</div>\n" .
+ "<div class=\"log_body\">\n";
+ my $comment = $co{'comment'};
+ my $empty = 0;
+ foreach my $line (@$comment) {
+ if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
+ next;
+ }
+ if ($line eq "") {
+ if ($empty) {
+ next;
+ }
+ $empty = 1;
+ } else {
+ $empty = 0;
+ }
+ print format_log_line_html($line) . "<br/>\n";
+ }
+ if (!$empty) {
+ print "<br/>\n";
+ }
+ print "</div>\n";
+ }
+ git_footer_html();
+}
+
+sub git_commit {
+ my %co = git_read_commit($hash);
+ if (!%co) {
+ die_error(undef, "Unknown commit object");
+ }
+ my %ad = date_str($co{'author_epoch'}, $co{'author_tz'});
+ my %cd = date_str($co{'committer_epoch'}, $co{'committer_tz'});
+
+ my $parent = $co{'parent'};
+ if (!defined $parent) {
+ $parent = "--root";
+ }
+ open my $fd, "-|", $GIT, "diff-tree", '-r', '-M', $parent, $hash
+ or die_error(undef, "Open git-diff-tree failed");
+ my @difftree = map { chomp; $_ } <$fd>;
+ close $fd or die_error(undef, "Reading git-diff-tree failed");
+
+ # non-textual hash id's can be cached
+ my $expires;
+ if ($hash =~ m/^[0-9a-fA-F]{40}$/) {
+ $expires = "+1d";
+ }
+ my $refs = read_info_ref();
+ my $ref = git_get_referencing($refs, $co{'id'});
+ my $formats_nav = '';
+ if (defined $file_name && defined $co{'parent'}) {
+ my $parent = $co{'parent'};
+ $formats_nav .= $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blame;hb=$parent;f=$file_name")}, "blame");
+ }
+ git_header_html(undef, $expires);
+ git_page_nav('commit', defined $co{'parent'} ? '' : 'commitdiff',
+ $hash, $co{'tree'}, $hash,
+ $formats_nav);
+
+ if (defined $co{'parent'}) {
+ git_header_div('commitdiff', esc_html($co{'title'}) . $ref, $hash);
+ } else {
+ git_header_div('tree', esc_html($co{'title'}) . $ref, $co{'tree'}, $hash);
+ }
+ print "<div class=\"title_text\">\n" .
+ "<table cellspacing=\"0\">\n";
+ print "<tr><td>author</td><td>" . esc_html($co{'author'}) . "</td></tr>\n".
+ "<tr>" .
+ "<td></td><td> $ad{'rfc2822'}";
+ if ($ad{'hour_local'} < 6) {
+ printf(" (<span class=\"atnight\">%02d:%02d</span> %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'});
+ } else {
+ printf(" (%02d:%02d %s)", $ad{'hour_local'}, $ad{'minute_local'}, $ad{'tz_local'});
+ }
+ print "</td>" .
+ "</tr>\n";
+ print "<tr><td>committer</td><td>" . esc_html($co{'committer'}) . "</td></tr>\n";
+ print "<tr><td></td><td> $cd{'rfc2822'}" . sprintf(" (%02d:%02d %s)", $cd{'hour_local'}, $cd{'minute_local'}, $cd{'tz_local'}) . "</td></tr>\n";
+ print "<tr><td>commit</td><td class=\"sha1\">$co{'id'}</td></tr>\n";
+ print "<tr>" .
+ "<td>tree</td>" .
+ "<td class=\"sha1\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash"), class => "list"}, $co{'tree'}) .
+ "</td>" .
+ "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$hash")}, "tree") .
+ "</td>" .
+ "</tr>\n";
+ my $parents = $co{'parents'};
+ foreach my $par (@$parents) {
+ print "<tr>" .
+ "<td>parent</td>" .
+ "<td class=\"sha1\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$par"), class => "list"}, $par) . "</td>" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$par")}, "commit") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$hash;hp=$par")}, "commitdiff") .
+ "</td>" .
+ "</tr>\n";
+ }
+ print "</table>".
+ "</div>\n";
+ print "<div class=\"page_body\">\n";
+ my $comment = $co{'comment'};
+ my $empty = 0;
+ my $signed = 0;
+ foreach my $line (@$comment) {
+ # print only one empty line
+ if ($line eq "") {
+ if ($empty || $signed) {
+ next;
+ }
+ $empty = 1;
+ } else {
+ $empty = 0;
+ }
+ if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
+ $signed = 1;
+ print "<span class=\"signoff\">" . esc_html($line) . "</span><br/>\n";
+ } else {
+ $signed = 0;
+ print format_log_line_html($line) . "<br/>\n";
+ }
+ }
+ print "</div>\n";
+ print "<div class=\"list_head\">\n";
+ if ($#difftree > 10) {
+ print(($#difftree + 1) . " files changed:\n");
+ }
+ print "</div>\n";
+ print "<table class=\"diff_tree\">\n";
+ my $alternate = 0;
+ foreach my $line (@difftree) {
+ # ':100644 100644 03b218260e99b78c6df0ed378e59ed9205ccc96d 3b93d5e7cc7f7dd4ebed13a5cc1a4ad976fc94d8 M ls-files.c'
+ # ':100644 100644 7f9281985086971d3877aca27704f2aaf9c448ce bc190ebc71bbd923f2b728e505408f5e54bd073a M rev-tree.c'
+ if ($line !~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)([0-9]{0,3})\t(.*)$/) {
+ next;
+ }
+ my $from_mode = $1;
+ my $to_mode = $2;
+ my $from_id = $3;
+ my $to_id = $4;
+ my $status = $5;
+ my $similarity = $6;
+ my $file = validate_input(unquote($7));
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ if ($status eq "A") {
+ my $mode_chng = "";
+ if (S_ISREG(oct $to_mode)) {
+ $mode_chng = sprintf(" with mode: %04o", (oct $to_mode) & 0777);
+ }
+ print "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file)) . "</td>\n" .
+ "<td><span class=\"file_status new\">[new " . file_type($to_mode) . "$mode_chng]</span></td>\n" .
+ "<td class=\"link\">" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, "blob") . "</td>\n";
+ } elsif ($status eq "D") {
+ print "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$parent;f=$file"), -class => "list"}, esc_html($file)) . "</td>\n" .
+ "<td><span class=\"file_status deleted\">[deleted " . file_type($from_mode). "]</span></td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$parent;f=$file")}, "blob") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;hb=$parent;f=$file")}, "history") .
+ "</td>\n"
+ } elsif ($status eq "M" || $status eq "T") {
+ my $mode_chnge = "";
+ if ($from_mode != $to_mode) {
+ $mode_chnge = " <span class=\"file_status mode_chnge\">[changed";
+ if (((oct $from_mode) & S_IFMT) != ((oct $to_mode) & S_IFMT)) {
+ $mode_chnge .= " from " . file_type($from_mode) . " to " . file_type($to_mode);
+ }
+ if (((oct $from_mode) & 0777) != ((oct $to_mode) & 0777)) {
+ if (S_ISREG($from_mode) && S_ISREG($to_mode)) {
+ $mode_chnge .= sprintf(" mode: %04o->%04o", (oct $from_mode) & 0777, (oct $to_mode) & 0777);
+ } elsif (S_ISREG($to_mode)) {
+ $mode_chnge .= sprintf(" mode: %04o", (oct $to_mode) & 0777);
+ }
+ }
+ $mode_chnge .= "]</span>\n";
+ }
+ print "<td>";
+ if ($to_id ne $from_id) {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file));
+ } else {
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file"), -class => "list"}, esc_html($file));
+ }
+ print "</td>\n" .
+ "<td>$mode_chnge</td>\n" .
+ "<td class=\"link\">";
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, "blob");
+ if ($to_id ne $from_id) {
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$file")}, "diff");
+ }
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=history;hb=$hash;f=$file")}, "history") . "\n";
+ print "</td>\n";
+ } elsif ($status eq "R") {
+ my ($from_file, $to_file) = split "\t", $file;
+ my $mode_chng = "";
+ if ($from_mode != $to_mode) {
+ $mode_chng = sprintf(", mode: %04o", (oct $to_mode) & 0777);
+ }
+ print "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$to_file"), -class => "list"}, esc_html($to_file)) . "</td>\n" .
+ "<td><span class=\"file_status moved\">[moved from " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$parent;f=$from_file"), -class => "list"}, esc_html($from_file)) .
+ " with " . (int $similarity) . "% similarity$mode_chng]</span></td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$to_file")}, "blob");
+ if ($to_id ne $from_id) {
+ print " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$to_id;hp=$from_id;hb=$hash;f=$to_file")}, "diff");
+ }
+ print "</td>\n";
+ }
+ print "</tr>\n";
+ }
+ print "</table>\n";
+ git_footer_html();
+}
+
+sub git_blobdiff {
+ mkdir($git_temp, 0700);
+ git_header_html();
+ if (defined $hash_base && (my %co = git_read_commit($hash_base))) {
+ my $formats_nav =
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff_plain;h=$hash;hp=$hash_parent")}, "plain");
+ git_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav);
+ git_header_div('commit', esc_html($co{'title'}), $hash_base);
+ } else {
+ print "<div class=\"page_nav\">\n" .
+ "<br/><br/></div>\n" .
+ "<div class=\"title\">$hash vs $hash_parent</div>\n";
+ }
+ git_print_page_path($file_name, "blob");
+ print "<div class=\"page_body\">\n" .
+ "<div class=\"diff_info\">blob:" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash_parent;hb=$hash_base;f=$file_name")}, $hash_parent) .
+ " -> blob:" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$hash;hb=$hash_base;f=$file_name")}, $hash) .
+ "</div>\n";
+ git_diff_print($hash_parent, $file_name || $hash_parent, $hash, $file_name || $hash);
+ print "</div>";
+ git_footer_html();
+}
+
+sub git_blobdiff_plain {
+ mkdir($git_temp, 0700);
+ print $cgi->header(-type => "text/plain", -charset => 'utf-8');
+ git_diff_print($hash_parent, $file_name || $hash_parent, $hash, $file_name || $hash, "plain");
+}
+
+sub git_commitdiff {
+ mkdir($git_temp, 0700);
+ my %co = git_read_commit($hash);
+ if (!%co) {
+ die_error(undef, "Unknown commit object");
+ }
+ if (!defined $hash_parent) {
+ $hash_parent = $co{'parent'} || '--root';
+ }
+ open my $fd, "-|", $GIT, "diff-tree", '-r', $hash_parent, $hash
+ or die_error(undef, "Open git-diff-tree failed");
+ my @difftree = map { chomp; $_ } <$fd>;
+ close $fd or die_error(undef, "Reading git-diff-tree failed");
+
+ # non-textual hash id's can be cached
+ my $expires;
+ if ($hash =~ m/^[0-9a-fA-F]{40}$/) {
+ $expires = "+1d";
+ }
+ my $refs = read_info_ref();
+ my $ref = git_get_referencing($refs, $co{'id'});
+ my $formats_nav =
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff_plain;h=$hash;hp=$hash_parent")}, "plain");
+ git_header_html(undef, $expires);
+ git_page_nav('commitdiff','', $hash,$co{'tree'},$hash, $formats_nav);
+ git_header_div('commit', esc_html($co{'title'}) . $ref, $hash);
+ print "<div class=\"page_body\">\n";
+ my $comment = $co{'comment'};
+ my $empty = 0;
+ my $signed = 0;
+ my @log = @$comment;
+ # remove first and empty lines after that
+ shift @log;
+ while (defined $log[0] && $log[0] eq "") {
+ shift @log;
+ }
+ foreach my $line (@log) {
+ if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
+ next;
+ }
+ if ($line eq "") {
+ if ($empty) {
+ next;
+ }
+ $empty = 1;
+ } else {
+ $empty = 0;
+ }
+ print format_log_line_html($line) . "<br/>\n";
+ }
+ print "<br/>\n";
+ foreach my $line (@difftree) {
+ # ':100644 100644 03b218260e99b78c6df0ed378e59ed9205ccc96d 3b93d5e7cc7f7dd4ebed13a5cc1a4ad976fc94d8 M ls-files.c'
+ # ':100644 100644 7f9281985086971d3877aca27704f2aaf9c448ce bc190ebc71bbd923f2b728e505408f5e54bd073a M rev-tree.c'
+ if ($line !~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/) {
+ next;
+ }
+ my $from_mode = $1;
+ my $to_mode = $2;
+ my $from_id = $3;
+ my $to_id = $4;
+ my $status = $5;
+ my $file = validate_input(unquote($6));
+ if ($status eq "A") {
+ print "<div class=\"diff_info\">" . file_type($to_mode) . ":" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, $to_id) . "(new)" .
+ "</div>\n";
+ git_diff_print(undef, "/dev/null", $to_id, "b/$file");
+ } elsif ($status eq "D") {
+ print "<div class=\"diff_info\">" . file_type($from_mode) . ":" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash_parent;f=$file")}, $from_id) . "(deleted)" .
+ "</div>\n";
+ git_diff_print($from_id, "a/$file", undef, "/dev/null");
+ } elsif ($status eq "M") {
+ if ($from_id ne $to_id) {
+ print "<div class=\"diff_info\">" .
+ file_type($from_mode) . ":" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$from_id;hb=$hash_parent;f=$file")}, $from_id) .
+ " -> " .
+ file_type($to_mode) . ":" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$to_id;hb=$hash;f=$file")}, $to_id);
+ print "</div>\n";
+ git_diff_print($from_id, "a/$file", $to_id, "b/$file");
+ }
+ }
+ }
+ print "<br/>\n" .
+ "</div>";
+ git_footer_html();
+}
+
+sub git_commitdiff_plain {
+ mkdir($git_temp, 0700);
+ my %co = git_read_commit($hash);
+ if (!%co) {
+ die_error(undef, "Unknown commit object");
+ }
+ if (!defined $hash_parent) {
+ $hash_parent = $co{'parent'} || '--root';
+ }
+ open my $fd, "-|", $GIT, "diff-tree", '-r', $hash_parent, $hash
+ or die_error(undef, "Open git-diff-tree failed");
+ my @difftree = map { chomp; $_ } <$fd>;
+ close $fd or die_error(undef, "Reading diff-tree failed");
+
+ # try to figure out the next tag after this commit
+ my $tagname;
+ my $refs = read_info_ref("tags");
+ open $fd, "-|", $GIT, "rev-list", "HEAD";
+ my @commits = map { chomp; $_ } <$fd>;
+ close $fd;
+ foreach my $commit (@commits) {
+ if (defined $refs->{$commit}) {
+ $tagname = $refs->{$commit}
+ }
+ if ($commit eq $hash) {
+ last;
+ }
+ }
+
+ print $cgi->header(-type => "text/plain", -charset => 'utf-8', '-content-disposition' => "inline; filename=\"git-$hash.patch\"");
+ my %ad = date_str($co{'author_epoch'}, $co{'author_tz'});
+ my $comment = $co{'comment'};
+ print "From: $co{'author'}\n" .
+ "Date: $ad{'rfc2822'} ($ad{'tz_local'})\n".
+ "Subject: $co{'title'}\n";
+ if (defined $tagname) {
+ print "X-Git-Tag: $tagname\n";
+ }
+ print "X-Git-Url: $my_url?p=$project;a=commitdiff;h=$hash\n" .
+ "\n";
+
+	foreach my $line (@$comment) {
+ print "$line\n";
+ }
+ print "---\n\n";
+
+ foreach my $line (@difftree) {
+ if ($line !~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/) {
+ next;
+ }
+ my $from_id = $3;
+ my $to_id = $4;
+ my $status = $5;
+ my $file = $6;
+ if ($status eq "A") {
+ git_diff_print(undef, "/dev/null", $to_id, "b/$file", "plain");
+ } elsif ($status eq "D") {
+ git_diff_print($from_id, "a/$file", undef, "/dev/null", "plain");
+ } elsif ($status eq "M") {
+ git_diff_print($from_id, "a/$file", $to_id, "b/$file", "plain");
+ }
+ }
+}
+
+sub git_history {
+ if (!defined $hash_base) {
+ $hash_base = git_read_head($project);
+ }
+ my $ftype;
+ my %co = git_read_commit($hash_base);
+ if (!%co) {
+ die_error(undef, "Unknown commit object");
+ }
+ my $refs = read_info_ref();
+ git_header_html();
+ git_page_nav('','', $hash_base,$co{'tree'},$hash_base);
+ git_header_div('commit', esc_html($co{'title'}), $hash_base);
+ if (!defined $hash && defined $file_name) {
+ $hash = git_get_hash_by_path($hash_base, $file_name);
+ }
+ if (defined $hash) {
+ $ftype = git_get_type($hash);
+ }
+ git_print_page_path($file_name, $ftype);
+
+ open my $fd, "-|",
+ $GIT, "rev-list", "--full-history", $hash_base, "--", $file_name;
+ print "<table cellspacing=\"0\">\n";
+ my $alternate = 0;
+ while (my $line = <$fd>) {
+ if ($line =~ m/^([0-9a-fA-F]{40})/){
+ my $commit = $1;
+ my %co = git_read_commit($commit);
+ if (!%co) {
+ next;
+ }
+ my $ref = git_get_referencing($refs, $commit);
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
+ "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 3)) . "</i></td>\n" .
+ "<td>" . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit"), -class => "list"}, "<b>" .
+ esc_html(chop_str($co{'title'}, 50)) . "$ref</b>") . "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$commit")}, "commit") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commitdiff;h=$commit")}, "commitdiff") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=$ftype;hb=$commit;f=$file_name")}, $ftype);
+ my $blob = git_get_hash_by_path($hash_base, $file_name);
+ my $blob_parent = git_get_hash_by_path($commit, $file_name);
+ if (defined $blob && defined $blob_parent && $blob ne $blob_parent) {
+ print " | " .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blobdiff;h=$blob;hp=$blob_parent;hb=$commit;f=$file_name")},
+ "diff to current");
+ }
+ print "</td>\n" .
+ "</tr>\n";
+ }
+ }
+ print "</table>\n";
+ close $fd;
+ git_footer_html();
+}
+
+sub git_search {
+ if (!defined $searchtext) {
+ die_error(undef, "Text field empty");
+ }
+ if (!defined $hash) {
+ $hash = git_read_head($project);
+ }
+ my %co = git_read_commit($hash);
+ if (!%co) {
+ die_error(undef, "Unknown commit object");
+ }
+ # pickaxe may take all resources of your box and run for several minutes
+ # with every query - so decide by yourself how public you make this feature :)
+ my $commit_search = 1;
+ my $author_search = 0;
+ my $committer_search = 0;
+ my $pickaxe_search = 0;
+ if ($searchtext =~ s/^author\\://i) {
+ $author_search = 1;
+ } elsif ($searchtext =~ s/^committer\\://i) {
+ $committer_search = 1;
+ } elsif ($searchtext =~ s/^pickaxe\\://i) {
+ $commit_search = 0;
+ $pickaxe_search = 1;
+ }
+ git_header_html();
+ git_page_nav('','', $hash,$co{'tree'},$hash);
+ git_header_div('commit', esc_html($co{'title'}), $hash);
+
+ print "<table cellspacing=\"0\">\n";
+ my $alternate = 0;
+ if ($commit_search) {
+ $/ = "\0";
+ open my $fd, "-|", $GIT, "rev-list", "--header", "--parents", $hash or next;
+ while (my $commit_text = <$fd>) {
+ if (!grep m/$searchtext/i, $commit_text) {
+ next;
+ }
+ if ($author_search && !grep m/\nauthor .*$searchtext/i, $commit_text) {
+ next;
+ }
+ if ($committer_search && !grep m/\ncommitter .*$searchtext/i, $commit_text) {
+ next;
+ }
+ my @commit_lines = split "\n", $commit_text;
+ my %co = git_read_commit(undef, \@commit_lines);
+ if (!%co) {
+ next;
+ }
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
+ "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" .
+ "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}"), -class => "list"}, "<b>" . esc_html(chop_str($co{'title'}, 50)) . "</b><br/>");
+ my $comment = $co{'comment'};
+ foreach my $line (@$comment) {
+ if ($line =~ m/^(.*)($searchtext)(.*)$/i) {
+ my $lead = esc_html($1) || "";
+ $lead = chop_str($lead, 30, 10);
+ my $match = esc_html($2) || "";
+ my $trail = esc_html($3) || "";
+ $trail = chop_str($trail, 30, 10);
+ my $text = "$lead<span class=\"match\">$match</span>$trail";
+ print chop_str($text, 80, 5) . "<br/>\n";
+ }
+ }
+ print "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}")}, "commit") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$co{'id'}")}, "tree");
+ print "</td>\n" .
+ "</tr>\n";
+ }
+ close $fd;
+ }
+
+ if ($pickaxe_search) {
+ $/ = "\n";
+ open my $fd, "-|", "$GIT rev-list $hash | $GIT diff-tree -r --stdin -S\'$searchtext\'";
+ undef %co;
+ my @files;
+ while (my $line = <$fd>) {
+ if (%co && $line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)\t(.*)$/) {
+ my %set;
+ $set{'file'} = $6;
+ $set{'from_id'} = $3;
+ $set{'to_id'} = $4;
+ $set{'id'} = $set{'to_id'};
+ if ($set{'id'} =~ m/0{40}/) {
+ $set{'id'} = $set{'from_id'};
+ }
+ if ($set{'id'} =~ m/0{40}/) {
+ next;
+ }
+ push @files, \%set;
+ } elsif ($line =~ m/^([0-9a-fA-F]{40})$/){
+ if (%co) {
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td title=\"$co{'age_string_age'}\"><i>$co{'age_string_date'}</i></td>\n" .
+ "<td><i>" . esc_html(chop_str($co{'author_name'}, 15, 5)) . "</i></td>\n" .
+ "<td>" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}"), -class => "list"}, "<b>" .
+ esc_html(chop_str($co{'title'}, 50)) . "</b><br/>");
+ while (my $setref = shift @files) {
+ my %set = %$setref;
+ print $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=blob;h=$set{'id'};hb=$co{'id'};f=$set{'file'}"), class => "list"},
+ "<span class=\"match\">" . esc_html($set{'file'}) . "</span>") .
+ "<br/>\n";
+ }
+ print "</td>\n" .
+ "<td class=\"link\">" .
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=commit;h=$co{'id'}")}, "commit") .
+ " | " . $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=tree;h=$co{'tree'};hb=$co{'id'}")}, "tree");
+ print "</td>\n" .
+ "</tr>\n";
+ }
+ %co = git_read_commit($1);
+ }
+ }
+ close $fd;
+ }
+ print "</table>\n";
+ git_footer_html();
+}
+
+sub git_shortlog {
+ my $head = git_read_head($project);
+ if (!defined $hash) {
+ $hash = $head;
+ }
+ if (!defined $page) {
+ $page = 0;
+ }
+ my $refs = read_info_ref();
+
+ my $limit = sprintf("--max-count=%i", (100 * ($page+1)));
+ open my $fd, "-|", $GIT, "rev-list", $limit, $hash
+ or die_error(undef, "Open git-rev-list failed");
+ my @revlist = map { chomp; $_ } <$fd>;
+ close $fd;
+
+ my $paging_nav = git_get_paging_nav('shortlog', $hash, $head, $page, $#revlist);
+ my $next_link = '';
+ if ($#revlist >= (100 * ($page+1)-1)) {
+ $next_link =
+ $cgi->a({-href => "$my_uri?" . esc_param("p=$project;a=shortlog;h=$hash;pg=" . ($page+1)),
+ -title => "Alt-n"}, "next");
+ }
+
+
+ git_header_html();
+ git_page_nav('shortlog','', $hash,$hash,$hash, $paging_nav);
+ git_header_div('summary', $project);
+
+ git_shortlog_body(\@revlist, ($page * 100), $#revlist, $refs, $next_link);
+
+ git_footer_html();
+}
+
+## ......................................................................
+## feeds (RSS, OPML)
+
+sub git_rss {
+ # http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ
+ open my $fd, "-|", $GIT, "rev-list", "--max-count=150", git_read_head($project)
+ or die_error(undef, "Open git-rev-list failed");
+ my @revlist = map { chomp; $_ } <$fd>;
+ close $fd or die_error(undef, "Reading git-rev-list failed");
+ print $cgi->header(-type => 'text/xml', -charset => 'utf-8');
+ print "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n".
+ "<rss version=\"2.0\" xmlns:content=\"http://purl.org/rss/1.0/modules/content/\">\n";
+ print "<channel>\n";
+ print "<title>$project</title>\n".
+ "<link>" . esc_html("$my_url?p=$project;a=summary") . "</link>\n".
+ "<description>$project log</description>\n".
+ "<language>en</language>\n";
+
+ for (my $i = 0; $i <= $#revlist; $i++) {
+ my $commit = $revlist[$i];
+ my %co = git_read_commit($commit);
+		# we read 150, we always show 20 and the ones more recent than 48 hours
+ if (($i >= 20) && ((time - $co{'committer_epoch'}) > 48*60*60)) {
+ last;
+ }
+ my %cd = date_str($co{'committer_epoch'});
+ open $fd, "-|", $GIT, "diff-tree", '-r', $co{'parent'}, $co{'id'} or next;
+ my @difftree = map { chomp; $_ } <$fd>;
+ close $fd or next;
+ print "<item>\n" .
+ "<title>" .
+ sprintf("%d %s %02d:%02d", $cd{'mday'}, $cd{'month'}, $cd{'hour'}, $cd{'minute'}) . " - " . esc_html($co{'title'}) .
+ "</title>\n" .
+ "<author>" . esc_html($co{'author'}) . "</author>\n" .
+ "<pubDate>$cd{'rfc2822'}</pubDate>\n" .
+ "<guid isPermaLink=\"true\">" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</guid>\n" .
+ "<link>" . esc_html("$my_url?p=$project;a=commit;h=$commit") . "</link>\n" .
+ "<description>" . esc_html($co{'title'}) . "</description>\n" .
+ "<content:encoded>" .
+ "<![CDATA[\n";
+ my $comment = $co{'comment'};
+ foreach my $line (@$comment) {
+ $line = decode("utf8", $line, Encode::FB_DEFAULT);
+ print "$line<br/>\n";
+ }
+ print "<br/>\n";
+ foreach my $line (@difftree) {
+ if (!($line =~ m/^:([0-7]{6}) ([0-7]{6}) ([0-9a-fA-F]{40}) ([0-9a-fA-F]{40}) (.)([0-9]{0,3})\t(.*)$/)) {
+ next;
+ }
+ my $file = validate_input(unquote($7));
+ $file = decode("utf8", $file, Encode::FB_DEFAULT);
+ print "$file<br/>\n";
+ }
+ print "]]>\n" .
+ "</content:encoded>\n" .
+ "</item>\n";
+ }
+ print "</channel></rss>";
+}
+
+sub git_opml {
+ my @list = git_read_projects();
+
+ print $cgi->header(-type => 'text/xml', -charset => 'utf-8');
+ print "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n".
+ "<opml version=\"1.0\">\n".
+ "<head>".
+ " <title>$site_name Git OPML Export</title>\n".
+ "</head>\n".
+ "<body>\n".
+ "<outline text=\"git RSS feeds\">\n";
+
+ foreach my $pr (@list) {
+ my %proj = %$pr;
+ my $head = git_read_head($proj{'path'});
+ if (!defined $head) {
+ next;
+ }
+ $ENV{'GIT_DIR'} = "$projectroot/$proj{'path'}";
+ my %co = git_read_commit($head);
+ if (!%co) {
+ next;
+ }
+
+ my $path = esc_html(chop_str($proj{'path'}, 25, 5));
+ my $rss = "$my_url?p=$proj{'path'};a=rss";
+ my $html = "$my_url?p=$proj{'path'};a=summary";
+ print "<outline type=\"rss\" text=\"$path\" title=\"$path\" xmlUrl=\"$rss\" htmlUrl=\"$html\"/>\n";
+ }
+ print "</outline>\n".
+ "</body>\n".
+ "</opml>\n";
+}
if (!no_more_flags && argv[i][0] == '-') {
if (!strcmp(argv[i], "-t")) {
if (argc <= ++i)
- die(hash_object_usage);
+ usage(hash_object_usage);
type = argv[i];
}
else if (!strcmp(argv[i], "-w")) {
hash_stdin(type, write_object);
}
else
- die(hash_object_usage);
- }
+ usage(hash_object_usage);
+ }
else {
const char *arg = argv[i];
if (0 <= prefix_length)
--- /dev/null
+/*
+ * builtin-help.c
+ *
+ * Builtin help-related commands (help, usage, version)
+ */
+#include <sys/ioctl.h>
+#include "cache.h"
+#include "builtin.h"
+#include "exec_cmd.h"
+#include "common-cmds.h"
+
+
+/* most GUI terminals set COLUMNS (although some don't export it) */
+static int term_columns(void)
+{
+ char *col_string = getenv("COLUMNS");
+ int n_cols = 0;
+
+ if (col_string && (n_cols = atoi(col_string)) > 0)
+ return n_cols;
+
+#ifdef TIOCGWINSZ
+ {
+ struct winsize ws;
+ if (!ioctl(1, TIOCGWINSZ, &ws)) {
+ if (ws.ws_col)
+ return ws.ws_col;
+ }
+ }
+#endif
+
+ return 80;
+}
+
+static void oom(void)
+{
+ fprintf(stderr, "git: out of memory\n");
+ exit(1);
+}
+
+static inline void mput_char(char c, unsigned int num)
+{
+ while(num--)
+ putchar(c);
+}
+
+static struct cmdname {
+ size_t len;
+ char name[1];
+} **cmdname;
+static int cmdname_alloc, cmdname_cnt;
+
+static void add_cmdname(const char *name, int len)
+{
+ struct cmdname *ent;
+ if (cmdname_alloc <= cmdname_cnt) {
+ cmdname_alloc = cmdname_alloc + 200;
+ cmdname = realloc(cmdname, cmdname_alloc * sizeof(*cmdname));
+ if (!cmdname)
+ oom();
+ }
+ ent = malloc(sizeof(*ent) + len);
+ if (!ent)
+ oom();
+ ent->len = len;
+ memcpy(ent->name, name, len);
+ ent->name[len] = 0;
+ cmdname[cmdname_cnt++] = ent;
+}
+
+static int cmdname_compare(const void *a_, const void *b_)
+{
+ struct cmdname *a = *(struct cmdname **)a_;
+ struct cmdname *b = *(struct cmdname **)b_;
+ return strcmp(a->name, b->name);
+}
+
+static void pretty_print_string_list(struct cmdname **cmdname, int longest)
+{
+ int cols = 1, rows;
+ int space = longest + 1; /* min 1 SP between words */
+ int max_cols = term_columns() - 1; /* don't print *on* the edge */
+ int i, j;
+
+ if (space < max_cols)
+ cols = max_cols / space;
+ rows = (cmdname_cnt + cols - 1) / cols;
+
+ qsort(cmdname, cmdname_cnt, sizeof(*cmdname), cmdname_compare);
+
+ for (i = 0; i < rows; i++) {
+ printf(" ");
+
+ for (j = 0; j < cols; j++) {
+ int n = j * rows + i;
+ int size = space;
+ if (n >= cmdname_cnt)
+ break;
+ if (j == cols-1 || n + rows >= cmdname_cnt)
+ size = 1;
+ printf("%-*s", size, cmdname[n]->name);
+ }
+ putchar('\n');
+ }
+}
+
+static void list_commands(const char *exec_path, const char *pattern)
+{
+ unsigned int longest = 0;
+ char path[PATH_MAX];
+ int dirlen;
+ DIR *dir = opendir(exec_path);
+ struct dirent *de;
+
+ if (!dir) {
+ fprintf(stderr, "git: '%s': %s\n", exec_path, strerror(errno));
+ exit(1);
+ }
+
+ dirlen = strlen(exec_path);
+ if (PATH_MAX - 20 < dirlen) {
+ fprintf(stderr, "git: insanely long exec-path '%s'\n",
+ exec_path);
+ exit(1);
+ }
+
+ memcpy(path, exec_path, dirlen);
+ path[dirlen++] = '/';
+
+ while ((de = readdir(dir)) != NULL) {
+ struct stat st;
+ int entlen;
+
+ if (strncmp(de->d_name, "git-", 4))
+ continue;
+ strcpy(path+dirlen, de->d_name);
+ if (stat(path, &st) || /* stat, not lstat */
+ !S_ISREG(st.st_mode) ||
+ !(st.st_mode & S_IXUSR))
+ continue;
+
+ entlen = strlen(de->d_name);
+ if (has_extension(de->d_name, ".exe"))
+ entlen -= 4;
+
+ if (longest < entlen)
+ longest = entlen;
+
+ add_cmdname(de->d_name + 4, entlen-4);
+ }
+ closedir(dir);
+
+ printf("git commands available in '%s'\n", exec_path);
+ printf("----------------------------");
+ mput_char('-', strlen(exec_path));
+ putchar('\n');
+ pretty_print_string_list(cmdname, longest - 4);
+ putchar('\n');
+}
+
+static void list_common_cmds_help(void)
+{
+ int i, longest = 0;
+
+ for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
+ if (longest < strlen(common_cmds[i].name))
+ longest = strlen(common_cmds[i].name);
+ }
+
+ puts("The most commonly used git commands are:");
+ for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
+ printf(" %s", common_cmds[i].name);
+ mput_char(' ', longest - strlen(common_cmds[i].name) + 4);
+ puts(common_cmds[i].help);
+ }
+ puts("(use 'git help -a' to get a list of all installed git commands)");
+}
+
+static void show_man_page(const char *git_cmd)
+{
+ const char *page;
+
+ if (!strncmp(git_cmd, "git", 3))
+ page = git_cmd;
+ else {
+ int page_len = strlen(git_cmd) + 4;
+ char *p = malloc(page_len + 1);
+ strcpy(p, "git-");
+ strcpy(p + 4, git_cmd);
+ p[page_len] = 0;
+ page = p;
+ }
+
+ execlp("man", "man", page, NULL);
+}
+
+void help_unknown_cmd(const char *cmd)
+{
+ printf("git: '%s' is not a git-command\n\n", cmd);
+ list_common_cmds_help();
+ exit(1);
+}
+
+int cmd_version(int argc, const char **argv, const char *prefix)
+{
+ printf("git version %s\n", git_version_string);
+ return 0;
+}
+
+int cmd_help(int argc, const char **argv, const char *prefix)
+{
+ const char *help_cmd = argc > 1 ? argv[1] : NULL;
+ const char *exec_path = git_exec_path();
+
+ if (!help_cmd) {
+ printf("usage: %s\n\n", git_usage_string);
+ list_common_cmds_help();
+ exit(1);
+ }
+
+ else if (!strcmp(help_cmd, "--all") || !strcmp(help_cmd, "-a")) {
+ printf("usage: %s\n\n", git_usage_string);
+		if (exec_path)
+ list_commands(exec_path, "git-*");
+ exit(1);
+ }
+
+ else
+ show_man_page(help_cmd);
+
+ return 0;
+}
+
+
#define PREV_BUF_SIZE 4096
#define RANGE_HEADER_SIZE 30
+static int commits_on_stdin = 0;
+
static int got_alternates = -1;
static int corrupt_object_found = 0;
struct alt_base
{
- char *base;
+ const char *base;
int path_len;
int got_indices;
struct packed_git *packs;
};
struct alternates_request {
- char *base;
+ const char *base;
char *url;
struct buffer *buffer;
struct active_request_slot *slot;
return size;
}
-static void fetch_alternates(char *base);
+static void fetch_alternates(const char *base);
static void process_object_response(void *callback_data);
{
struct packed_git *new_pack;
if (has_pack_file(sha1))
- return 0; // don't list this as something we can get
+ return 0; /* don't list this as something we can get */
if (fetch_index(repo, sha1))
return -1;
(struct alternates_request *)callback_data;
struct active_request_slot *slot = alt_req->slot;
struct alt_base *tail = alt;
- char *base = alt_req->base;
+ const char *base = alt_req->base;
static const char null_byte = '\0';
char *data;
int i = 0;
base[serverlen - 1] != '/');
i += 3;
}
- // If the server got removed, give up.
+ /* If the server got removed, give up. */
okay = strchr(base, ':') - base + 3 <
serverlen;
} else if (alt_req->http_specific) {
okay = 1;
}
}
- // skip 'objects' at end
+ /* skip 'objects' at end */
if (okay) {
target = xmalloc(serverlen + posn - i - 6);
strlcpy(target, base, serverlen);
got_alternates = 1;
}
-static void fetch_alternates(char *base)
+static void fetch_alternates(const char *base)
{
struct buffer buffer;
char *url;
if (strlen(ls->dentry_name) == 63 &&
!strncmp(ls->dentry_name, "objects/pack/pack-", 18) &&
- !strncmp(ls->dentry_name+58, ".pack", 5)) {
+ has_extension(ls->dentry_name, ".pack")) {
get_sha1_hex(ls->dentry_name + 18, sha1);
setup_index(ls->repo, sha1);
}
char *url;
char hex[42];
struct buffer buffer;
- char *base = alt->base;
+ const char *base = alt->base;
struct active_request_slot *slot;
struct slot_results results;
buffer.size = 41;
return 0;
}
-int main(int argc, char **argv)
+int main(int argc, const char **argv)
{
- char *commit_id;
- char *url;
+ int commits;
+ const char **write_ref = NULL;
+ char **commit_id;
+ const char *url;
char *path;
int arg = 1;
int rc = 0;
+ setup_ident();
setup_git_directory();
git_config(git_default_config);
} else if (argv[arg][1] == 'v') {
get_verbosely = 1;
} else if (argv[arg][1] == 'w') {
- write_ref = argv[arg + 1];
+ write_ref = &argv[arg + 1];
arg++;
} else if (!strcmp(argv[arg], "--recover")) {
get_recover = 1;
+ } else if (!strcmp(argv[arg], "--stdin")) {
+ commits_on_stdin = 1;
}
arg++;
}
- if (argc < arg + 2) {
- usage("git-http-fetch [-c] [-t] [-a] [-d] [-v] [--recover] [-w ref] commit-id url");
+ if (argc < arg + 2 - commits_on_stdin) {
+ usage("git-http-fetch [-c] [-t] [-a] [-v] [--recover] [-w ref] [--stdin] commit-id url");
return 1;
}
- commit_id = argv[arg];
- url = argv[arg + 1];
- write_ref_log_details = url;
+ if (commits_on_stdin) {
+ commits = pull_targets_stdin(&commit_id, &write_ref);
+ } else {
+ commit_id = (char **) &argv[arg++];
+ commits = 1;
+ }
+ url = argv[arg];
http_init();
alt->path_len = strlen(path);
}
- if (pull(commit_id))
+ if (pull(commits, commit_id, write_ref, url))
rc = 1;
http_cleanup();
curl_slist_free_all(no_pragma_header);
+ if (commits_on_stdin)
+ pull_targets_free(commits, commit_id, write_ref);
+
if (corrupt_object_found) {
fprintf(stderr,
"Some loose object were found to be corrupt, but they might be just\n"
/* Set it up */
memset(&stream, 0, sizeof(stream));
- deflateInit(&stream, Z_BEST_COMPRESSION);
+ deflateInit(&stream, zlib_compression_level);
size = deflateBound(&stream, len + hdrlen);
request->buffer.buffer = xmalloc(size);
request->dest = xmalloc(strlen(request->url) + 14);
sprintf(request->dest, "Destination: %s", request->url);
posn += 38;
- *(posn++) = '.';
+ *(posn++) = '_';
strcpy(posn, request->lock->token);
slot = get_active_slot();
strlcpy(ctx->cdata, s, len + 1);
}
-static struct remote_lock *lock_remote(char *path, long timeout)
+static struct remote_lock *lock_remote(const char *path, long timeout)
{
struct active_request_slot *slot;
struct slot_results results;
if (obj->flags & (UNINTERESTING | SEEN))
continue;
- if (obj->type == TYPE_TAG) {
+ if (obj->type == OBJ_TAG) {
obj->flags |= SEEN;
p = add_one_object(obj, p);
continue;
}
- if (obj->type == TYPE_TREE) {
+ if (obj->type == OBJ_TREE) {
p = process_tree((struct tree *)obj, p, NULL, name);
continue;
}
- if (obj->type == TYPE_BLOB) {
+ if (obj->type == OBJ_BLOB) {
p = process_blob((struct blob *)obj, p, NULL, name);
continue;
}
* old. Otherwise we require --force.
*/
o = deref_tag(parse_object(old_sha1), NULL, 0);
- if (!o || o->type != TYPE_COMMIT)
+ if (!o || o->type != OBJ_COMMIT)
return 0;
old = (struct commit *) o;
o = deref_tag(parse_object(new_sha1), NULL, 0);
- if (!o || o->type != TYPE_COMMIT)
+ if (!o || o->type != OBJ_COMMIT)
return 0;
new = (struct commit *) o;
fwrite_buffer(ref_info, 1, len, buf);
free(ref_info);
- if (o->type == TYPE_TAG) {
+ if (o->type == OBJ_TAG) {
o = deref_tag(o, ls->dentry_name, 0);
if (o) {
len = strlen(ls->dentry_name) + 45;
return -1;
}
-static void fetch_symref(char *path, char **symref, unsigned char *sha1)
+static void fetch_symref(const char *path, char **symref, unsigned char *sha1)
{
char *url;
struct buffer buffer;
static int verify_merge_base(unsigned char *head_sha1, unsigned char *branch_sha1)
{
- int pipe_fd[2];
- pid_t merge_base_pid;
- char line[PATH_MAX + 20];
- unsigned char merge_sha1[20];
- int verified = 0;
-
- if (pipe(pipe_fd) < 0)
- die("Verify merge base: pipe failed");
-
- merge_base_pid = fork();
- if (!merge_base_pid) {
- static const char *args[] = {
- "merge-base",
- "-a",
- NULL,
- NULL,
- NULL
- };
- args[2] = strdup(sha1_to_hex(head_sha1));
- args[3] = sha1_to_hex(branch_sha1);
-
- dup2(pipe_fd[1], 1);
- close(pipe_fd[0]);
- close(pipe_fd[1]);
- execv_git_cmd(args);
- die("merge-base setup failed");
- }
- if (merge_base_pid < 0)
- die("merge-base fork failed");
-
- dup2(pipe_fd[0], 0);
- close(pipe_fd[0]);
- close(pipe_fd[1]);
- while (fgets(line, sizeof(line), stdin) != NULL) {
- if (get_sha1_hex(line, merge_sha1))
- die("expected sha1, got garbage:\n %s", line);
- if (!memcmp(branch_sha1, merge_sha1, 20)) {
- verified = 1;
- break;
- }
- }
+ struct commit *head = lookup_commit(head_sha1);
+ struct commit *branch = lookup_commit(branch_sha1);
+ struct commit_list *merge_bases = get_merge_bases(head, branch, 1);
- return verified;
+ if (merge_bases && !merge_bases->next && merge_bases->item == branch)
+ return 1;
+
+ return 0;
}
static int delete_remote_branch(char *pattern, int force)
commit_argv[3] = old_sha1_hex;
commit_argc++;
}
- init_revisions(&revs);
+ init_revisions(&revs, setup_git_directory());
setup_revisions(commit_argc, commit_argv, &revs, NULL);
free(new_sha1_hex);
if (old_sha1_hex) {
}
static int
-socket_write( Socket_t *sock, char *buf, int len )
+socket_write( Socket_t *sock, const char *buf, int len )
{
int n = write( sock->fd, buf, len );
if (n != len) {
usage(index_pack_usage);
if (!index_name) {
int len = strlen(pack_name);
- if (len < 5 || strcmp(pack_name + len - 5, ".pack"))
+ if (!has_extension(pack_name, ".pack"))
die("packfile name '%s' does not end with '.pack'",
pack_name);
index_name_buf = xmalloc(len);
static int use_link = 0;
static int use_symlink = 0;
static int use_filecopy = 1;
+static int commits_on_stdin = 0;
-static char *path; /* "Remote" git repository */
+static const char *path; /* "Remote" git repository */
void prefetch(unsigned char *sha1)
{
return -1;
while ((de = readdir(dir)) != NULL) {
int namelen = strlen(de->d_name);
- if (namelen != 50 ||
- strcmp(de->d_name + namelen - 5, ".pack"))
+ if (namelen != 50 ||
+ !has_extension(de->d_name, ".pack"))
continue;
get_sha1_hex(de->d_name + 5, sha1);
setup_index(sha1);
}
static const char local_pull_usage[] =
-"git-local-fetch [-c] [-t] [-a] [-d] [-v] [-w filename] [--recover] [-l] [-s] [-n] commit-id path";
+"git-local-fetch [-c] [-t] [-a] [-v] [-w filename] [--recover] [-l] [-s] [-n] [--stdin] commit-id path";
-/*
+/*
* By default we only use file copy.
* If -l is specified, a hard link is attempted.
* If -s is specified, then a symlink is attempted.
* If -n is _not_ specified, then a regular file-to-file copy is done.
*/
-int main(int argc, char **argv)
+int main(int argc, const char **argv)
{
- char *commit_id;
+ int commits;
+ const char **write_ref = NULL;
+ char **commit_id;
int arg = 1;
+ setup_ident();
setup_git_directory();
git_config(git_default_config);
else if (argv[arg][1] == 'v')
get_verbosely = 1;
else if (argv[arg][1] == 'w')
- write_ref = argv[++arg];
+ write_ref = &argv[++arg];
else if (!strcmp(argv[arg], "--recover"))
get_recover = 1;
+ else if (!strcmp(argv[arg], "--stdin"))
+ commits_on_stdin = 1;
else
usage(local_pull_usage);
arg++;
}
- if (argc < arg + 2)
+ if (argc < arg + 2 - commits_on_stdin)
usage(local_pull_usage);
- commit_id = argv[arg];
- path = argv[arg + 1];
- write_ref_log_details = path;
+ if (commits_on_stdin) {
+ commits = pull_targets_stdin(&commit_id, &write_ref);
+ } else {
+ commit_id = (char **) &argv[arg++];
+ commits = 1;
+ }
+ path = argv[arg];
- if (pull(commit_id))
+ if (pull(commits, commit_id, write_ref, path))
return 1;
+ if (commits_on_stdin)
+ pull_targets_free(commits, commit_id, write_ref);
+
return 0;
}
raise(signo);
}
-int hold_lock_file_for_update(struct lock_file *lk, const char *path)
+static int lock_file(struct lock_file *lk, const char *path)
{
int fd;
sprintf(lk->filename, "%s.lock", path);
return fd;
}
+int hold_lock_file_for_update(struct lock_file *lk, const char *path, int die_on_error)
+{
+ int fd = lock_file(lk, path);
+ if (fd < 0 && die_on_error)
+ die("unable to create '%s': %s", path, strerror(errno));
+ return fd;
+}
+
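With the split, lock_file() becomes the internal worker and hold_lock_file_for_update() gains a die_on_error flag, so callers no longer need their own error message for the common case. A hedged sketch of the intended calling pattern (rewrite_file() and its contents are hypothetical; it assumes git's internal cache.h declarations):

static struct lock_file lock;   /* static lifetime: cleanup may run from atexit/signal handlers */

static void rewrite_file(const char *path, const char *buf, size_t len)
{
        int fd = hold_lock_file_for_update(&lock, path, 1);     /* dies if "<path>.lock" cannot be taken */
        if (write(fd, buf, len) != (ssize_t) len)
                die("unable to write %s.lock", path);
        close(fd);
        if (commit_lock_file(&lock))    /* renames "<path>.lock" onto "<path>" */
                die("unable to commit %s", path);
}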
int commit_lock_file(struct lock_file *lk)
{
char result_file[PATH_MAX];
fputs(diff_unique_abbrev(commit->object.sha1, abbrev_commit), stdout);
if (opt->parents)
show_parents(commit, abbrev_commit);
- putchar('\n');
+ putchar(opt->diffopt.line_termination);
return;
}
subject = "Subject: ";
printf("From %s Mon Sep 17 00:00:00 2001\n", sha1);
+ if (opt->message_id)
+ printf("Message-Id: <%s>\n", opt->message_id);
+ if (opt->ref_message_id)
+ printf("In-Reply-To: <%s>\nReferences: <%s>\n",
+ opt->ref_message_id, opt->ref_message_id);
if (opt->mime_boundary) {
static char subject_buffer[1024];
static char buffer[1024];
opt->diffopt.stat_sep = buffer;
}
} else {
- printf("%s%s",
+ printf("%s%s%s",
+ diff_get_color(opt->diffopt.color_diff, DIFF_COMMIT),
opt->commit_format == CMIT_FMT_ONELINE ? "" : "commit ",
diff_unique_abbrev(commit->object.sha1, abbrev_commit));
if (opt->parents)
printf(" (from %s)",
diff_unique_abbrev(parent->object.sha1,
abbrev_commit));
+ printf("%s",
+ diff_get_color(opt->diffopt.color_diff, DIFF_RESET));
putchar(opt->commit_format == CMIT_FMT_ONELINE ? ' ' : '\n');
}
#include "cache.h"
#include "commit.h"
-#define PARENT1 1
-#define PARENT2 2
-#define UNINTERESTING 4
-
-static struct commit *interesting(struct commit_list *list)
-{
- while (list) {
- struct commit *commit = list->item;
- list = list->next;
- if (commit->object.flags & UNINTERESTING)
- continue;
- return commit;
- }
- return NULL;
-}
-
-/*
- * A pathological example of how this thing works.
- *
- * Suppose we had this commit graph, where chronologically
- * the timestamps on the commits are A <= B <= C <= D <= E <= F
- * and we are trying to figure out the merge base for E and F
- * commits.
- *
- * F
- * / \
- * E A D
- * \ / /
- * B /
- * \ /
- * C
- *
- * First we push E and F to list to be processed. E gets bit 1
- * and F gets bit 2. The list becomes:
- *
- * list=F(2) E(1), result=empty
- *
- * Then we pop F, the newest commit, from the list. Its flag is 2.
- * We scan its parents, mark them reachable from the side that F is
- * reachable from, and push them to the list:
- *
- * list=E(1) D(2) A(2), result=empty
- *
- * Next pop E and do the same.
- *
- * list=D(2) B(1) A(2), result=empty
- *
- * Next pop D and do the same.
- *
- * list=C(2) B(1) A(2), result=empty
- *
- * Next pop C and do the same.
- *
- * list=B(1) A(2), result=empty
- *
- * Now it is B's turn. We mark its parent, C, reachable from B's side,
- * and push it to the list:
- *
- * list=C(3) A(2), result=empty
- *
- * Now pop C and notice it has flags==3. It is placed on the result list,
- * and the list now contains:
- *
- * list=A(2), result=C(3)
- *
- * We pop A and do the same.
- *
- * list=B(3), result=C(3)
- *
- * Next, we pop B and something very interesting happens. It has flags==3
- * so it is also placed on the result list, and its parents are marked
- * uninteresting, retroactively, and placed back on the list:
- *
- * list=C(7), result=C(7) B(3)
- *
- * Now, list does not have any interesting commit. So we find the newest
- * commit from the result list that is not marked uninteresting. Which is
- * commit B.
- *
- *
- * Another pathological example of how this thing used to fail to mark an
- * ancestor of a merge base as UNINTERESTING before we introduced the
- * postprocessing phase (mark_reachable_commits).
- *
- * 2
- * H
- * 1 / \
- * G A \
- * |\ / \
- * | B \
- * | \ \
- * \ C F
- * \ \ /
- * \ D /
- * \ | /
- * \| /
- * E
- *
- * list A B C D E F G H
- * G1 H2 - - - - - - 1 2
- * H2 E1 B1 - 1 - - 1 - 1 2
- * F2 E1 B1 A2 2 1 - - 1 2 1 2
- * E3 B1 A2 2 1 - - 3 2 1 2
- * B1 A2 2 1 - - 3 2 1 2
- * C1 A2 2 1 1 - 3 2 1 2
- * D1 A2 2 1 1 1 3 2 1 2
- * A2 2 1 1 1 3 2 1 2
- * B3 2 3 1 1 3 2 1 2
- * C7 2 3 7 1 3 2 1 2
- *
- * At this point, unfortunately, everybody in the list is
- * uninteresting, so we fail to complete the following two
- * steps to fully marking uninteresting commits.
- *
- * D7 2 3 7 7 3 2 1 2
- * E7 2 3 7 7 7 2 1 2
- *
- * and we ended up showing E as an interesting merge base.
- * The postprocessing phase re-injects C and continues traversal
- * to contaminate D and E.
- */
-
static int show_all = 0;
-static void mark_reachable_commits(struct commit_list *result,
- struct commit_list *list)
-{
- struct commit_list *tmp;
-
- /*
- * Postprocess to fully contaminate the well.
- */
- for (tmp = result; tmp; tmp = tmp->next) {
- struct commit *c = tmp->item;
- /* Reinject uninteresting ones to list,
- * so we can scan their parents.
- */
- if (c->object.flags & UNINTERESTING)
- commit_list_insert(c, &list);
- }
- while (list) {
- struct commit *c = list->item;
- struct commit_list *parents;
-
- tmp = list;
- list = list->next;
- free(tmp);
-
- /* Anything taken out of the list is uninteresting, so
- * mark all its parents uninteresting. We do not
- * parse new ones (we already parsed all the relevant
- * ones).
- */
- parents = c->parents;
- while (parents) {
- struct commit *p = parents->item;
- parents = parents->next;
- if (!(p->object.flags & UNINTERESTING)) {
- p->object.flags |= UNINTERESTING;
- commit_list_insert(p, &list);
- }
- }
- }
-}
-
static int merge_base(struct commit *rev1, struct commit *rev2)
{
- struct commit_list *list = NULL;
- struct commit_list *result = NULL;
- struct commit_list *tmp = NULL;
-
- if (rev1 == rev2) {
- printf("%s\n", sha1_to_hex(rev1->object.sha1));
- return 0;
- }
-
- parse_commit(rev1);
- parse_commit(rev2);
-
- rev1->object.flags |= 1;
- rev2->object.flags |= 2;
- insert_by_date(rev1, &list);
- insert_by_date(rev2, &list);
-
- while (interesting(list)) {
- struct commit *commit = list->item;
- struct commit_list *parents;
- int flags = commit->object.flags & 7;
-
- tmp = list;
- list = list->next;
- free(tmp);
- if (flags == 3) {
- insert_by_date(commit, &result);
-
- /* Mark parents of a found merge uninteresting */
- flags |= UNINTERESTING;
- }
- parents = commit->parents;
- while (parents) {
- struct commit *p = parents->item;
- parents = parents->next;
- if ((p->object.flags & flags) == flags)
- continue;
- parse_commit(p);
- p->object.flags |= flags;
- insert_by_date(p, &list);
- }
- }
+ struct commit_list *result = get_merge_bases(rev1, rev2, 0);
if (!result)
return 1;
- if (result->next && list)
- mark_reachable_commits(result, list);
-
while (result) {
- struct commit *commit = result->item;
- result = result->next;
- if (commit->object.flags & UNINTERESTING)
- continue;
- printf("%s\n", sha1_to_hex(commit->object.sha1));
+ printf("%s\n", sha1_to_hex(result->item->object.sha1));
if (!show_all)
return 0;
- commit->object.flags |= UNINTERESTING;
+ result = result->next;
}
+
return 0;
}
--- /dev/null
+#include "cache.h"
+#include "run-command.h"
+#include "xdiff-interface.h"
+#include "blob.h"
+
+static void rm_temp_file(const char *filename)
+{
+ unlink(filename);
+ free((void *)filename);
+}
+
+static const char *write_temp_file(mmfile_t *f)
+{
+ int fd;
+ const char *tmp = getenv("TMPDIR");
+ char *filename;
+
+ if (!tmp)
+ tmp = "/tmp";
+ filename = mkpath("%s/%s", tmp, "git-tmp-XXXXXX");
+ fd = mkstemp(filename);
+ if (fd < 0)
+ return NULL;
+ filename = strdup(filename);
+ if (f->size != xwrite(fd, f->ptr, f->size)) {
+ rm_temp_file(filename);
+ return NULL;
+ }
+ close(fd);
+ return filename;
+}
+
+static void *read_temp_file(const char *filename, unsigned long *size)
+{
+ struct stat st;
+ char *buf = NULL;
+ int fd = open(filename, O_RDONLY);
+ if (fd < 0)
+ return NULL;
+ if (!fstat(fd, &st)) {
+ *size = st.st_size;
+ buf = xmalloc(st.st_size);
+ if (st.st_size != xread(fd, buf, st.st_size)) {
+ free(buf);
+ buf = NULL;
+ }
+ }
+ close(fd);
+ return buf;
+}
+
+static int fill_mmfile_blob(mmfile_t *f, struct blob *obj)
+{
+ void *buf;
+ unsigned long size;
+ char type[20];
+
+ buf = read_sha1_file(obj->object.sha1, type, &size);
+ if (!buf)
+ return -1;
+ if (strcmp(type, blob_type))
+ return -1;
+ f->ptr = buf;
+ f->size = size;
+ return 0;
+}
+
+static void free_mmfile(mmfile_t *f)
+{
+ free(f->ptr);
+}
+
+static void *three_way_filemerge(mmfile_t *base, mmfile_t *our, mmfile_t *their, unsigned long *size)
+{
+ void *res;
+ const char *t1, *t2, *t3;
+
+ t1 = write_temp_file(base);
+ t2 = write_temp_file(our);
+ t3 = write_temp_file(their);
+ res = NULL;
+ if (t1 && t2 && t3) {
+ int code = run_command("merge", t2, t1, t3, NULL);
+ if (!code || code == -1)
+ res = read_temp_file(t2, size);
+ }
+ rm_temp_file(t1);
+ rm_temp_file(t2);
+ rm_temp_file(t3);
+ return res;
+}
+
+static int common_outf(void *priv_, mmbuffer_t *mb, int nbuf)
+{
+ int i;
+ mmfile_t *dst = priv_;
+
+ for (i = 0; i < nbuf; i++) {
+ memcpy(dst->ptr + dst->size, mb[i].ptr, mb[i].size);
+ dst->size += mb[i].size;
+ }
+ return 0;
+}
+
+static int generate_common_file(mmfile_t *res, mmfile_t *f1, mmfile_t *f2)
+{
+ unsigned long size = f1->size < f2->size ? f1->size : f2->size;
+ void *ptr = xmalloc(size);
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
+ xdemitcb_t ecb;
+
+ xpp.flags = XDF_NEED_MINIMAL;
+ xecfg.ctxlen = 3;
+ xecfg.flags = XDL_EMIT_COMMON;
+ ecb.outf = common_outf;
+
+ res->ptr = ptr;
+ res->size = 0;
+
+ ecb.priv = res;
+ return xdl_diff(f1, f2, &xpp, &xecfg, &ecb);
+}
+
+void *merge_file(struct blob *base, struct blob *our, struct blob *their, unsigned long *size)
+{
+ void *res = NULL;
+ mmfile_t f1, f2, common;
+
+ /*
+ * Removed in either branch?
+ *
+ * NOTE! This depends on the caller having done the
+ * proper warning about removing a file that got
+ * modified in the other branch!
+ */
+ if (!our || !their) {
+ char type[20];
+ if (base)
+ return NULL;
+ if (!our)
+ our = their;
+ return read_sha1_file(our->object.sha1, type, size);
+ }
+
+ if (fill_mmfile_blob(&f1, our) < 0)
+ goto out_no_mmfile;
+ if (fill_mmfile_blob(&f2, their) < 0)
+ goto out_free_f1;
+
+ if (base) {
+ if (fill_mmfile_blob(&common, base) < 0)
+ goto out_free_f2_f1;
+ } else {
+ if (generate_common_file(&common, &f1, &f2) < 0)
+ goto out_free_f2_f1;
+ }
+ res = three_way_filemerge(&common, &f1, &f2, size);
+ free_mmfile(&common);
+out_free_f2_f1:
+ free_mmfile(&f2);
+out_free_f1:
+ free_mmfile(&f1);
+out_no_mmfile:
+ return res;
+}
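The new merge-file.c exposes a single entry point, merge_file(); the returned buffer is heap-allocated and owned by the caller, and NULL means either a deletion or a merge that could not be produced. A hedged sketch of how a caller might drive it (merge_one_path() is hypothetical):

void *merge_one_path(const unsigned char *base_sha1,
                     const unsigned char *our_sha1,
                     const unsigned char *their_sha1,
                     unsigned long *size)
{
        /* Any of the three stages may be missing; pass NULL for those. */
        struct blob *base = base_sha1 ? lookup_blob(base_sha1) : NULL;
        struct blob *our = our_sha1 ? lookup_blob(our_sha1) : NULL;
        struct blob *their = their_sha1 ? lookup_blob(their_sha1) : NULL;

        return merge_file(base, our, their, size);
}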
#include "cache.h"
#include "tree-walk.h"
+#include "xdiff-interface.h"
+#include "blob.h"
static const char merge_tree_usage[] = "git-merge-tree <base-tree> <branch1> <branch2>";
static int resolve_directories = 1;
+struct merge_list {
+ struct merge_list *next;
+ struct merge_list *link; /* other stages for this object */
+
+ unsigned int stage : 2,
+ flags : 30;
+ unsigned int mode;
+ const char *path;
+ struct blob *blob;
+};
+
+static struct merge_list *merge_result, **merge_result_end = &merge_result;
+
+static void add_merge_entry(struct merge_list *entry)
+{
+ *merge_result_end = entry;
+ merge_result_end = &entry->next;
+}
+
static void merge_trees(struct tree_desc t[3], const char *base);
+static const char *explanation(struct merge_list *entry)
+{
+ switch (entry->stage) {
+ case 0:
+ return "merged";
+ case 3:
+ return "added in remote";
+ case 2:
+ if (entry->link)
+ return "added in both";
+ return "added in local";
+ }
+
+ /* Existed in base */
+ entry = entry->link;
+ if (!entry)
+ return "removed in both";
+
+ if (entry->link)
+ return "changed in both";
+
+ if (entry->stage == 3)
+ return "removed in local";
+ return "removed in remote";
+}
+
+extern void *merge_file(struct blob *, struct blob *, struct blob *, unsigned long *);
+
+static void *result(struct merge_list *entry, unsigned long *size)
+{
+ char type[20];
+ struct blob *base, *our, *their;
+
+ if (!entry->stage)
+ return read_sha1_file(entry->blob->object.sha1, type, size);
+ base = NULL;
+ if (entry->stage == 1) {
+ base = entry->blob;
+ entry = entry->link;
+ }
+ our = NULL;
+ if (entry && entry->stage == 2) {
+ our = entry->blob;
+ entry = entry->link;
+ }
+ their = NULL;
+ if (entry)
+ their = entry->blob;
+ return merge_file(base, our, their, size);
+}
+
+static void *origin(struct merge_list *entry, unsigned long *size)
+{
+ char type[20];
+ while (entry) {
+ if (entry->stage == 2)
+ return read_sha1_file(entry->blob->object.sha1, type, size);
+ entry = entry->link;
+ }
+ return NULL;
+}
+
+static int show_outf(void *priv_, mmbuffer_t *mb, int nbuf)
+{
+ int i;
+ for (i = 0; i < nbuf; i++)
+ printf("%.*s", (int) mb[i].size, mb[i].ptr);
+ return 0;
+}
+
+static void show_diff(struct merge_list *entry)
+{
+ unsigned long size;
+ mmfile_t src, dst;
+ xpparam_t xpp;
+ xdemitconf_t xecfg;
+ xdemitcb_t ecb;
+
+ xpp.flags = XDF_NEED_MINIMAL;
+ xecfg.ctxlen = 3;
+ xecfg.flags = 0;
+ ecb.outf = show_outf;
+ ecb.priv = NULL;
+
+ src.ptr = origin(entry, &size);
+ if (!src.ptr)
+ size = 0;
+ src.size = size;
+ dst.ptr = result(entry, &size);
+ if (!dst.ptr)
+ size = 0;
+ dst.size = size;
+ xdl_diff(&src, &dst, &xpp, &xecfg, &ecb);
+ free(src.ptr);
+ free(dst.ptr);
+}
+
+static void show_result_list(struct merge_list *entry)
+{
+ printf("%s\n", explanation(entry));
+ do {
+ struct merge_list *link = entry->link;
+ static const char *desc[4] = { "result", "base", "our", "their" };
+ printf(" %-6s %o %s %s\n", desc[entry->stage], entry->mode, sha1_to_hex(entry->blob->object.sha1), entry->path);
+ entry = link;
+ } while (entry);
+}
+
+static void show_result(void)
+{
+ struct merge_list *walk;
+
+ walk = merge_result;
+ while (walk) {
+ show_result_list(walk);
+ show_diff(walk);
+ walk = walk->next;
+ }
+}
+
/* An empty entry never compares same, not even to another empty entry */
static int same_entry(struct name_entry *a, struct name_entry *b)
{
a->mode == b->mode;
}
-static const char *sha1_to_hex_zero(const unsigned char *sha1)
+static struct merge_list *create_entry(unsigned stage, unsigned mode, const unsigned char *sha1, const char *path)
{
- if (sha1)
- return sha1_to_hex(sha1);
- return "0000000000000000000000000000000000000000";
+ struct merge_list *res = xmalloc(sizeof(*res));
+
+ memset(res, 0, sizeof(*res));
+ res->stage = stage;
+ res->path = path;
+ res->mode = mode;
+ res->blob = lookup_blob(sha1);
+ return res;
}
static void resolve(const char *base, struct name_entry *branch1, struct name_entry *result)
{
+ struct merge_list *orig, *final;
+ const char *path;
+
/* If it's already branch1, don't bother showing it */
if (!branch1)
return;
- printf("0 %06o->%06o %s->%s %s%s\n",
- branch1->mode, result->mode,
- sha1_to_hex_zero(branch1->sha1),
- sha1_to_hex_zero(result->sha1),
- base, result->path);
+ path = strdup(mkpath("%s%s", base, result->path));
+ orig = create_entry(2, branch1->mode, branch1->sha1, path);
+ final = create_entry(0, result->mode, result->sha1, path);
+
+ final->link = orig;
+
+ add_merge_entry(final);
}
static int unresolved_directory(const char *base, struct name_entry n[3])
return 1;
}
+
+static struct merge_list *link_entry(unsigned stage, const char *base, struct name_entry *n, struct merge_list *entry)
+{
+ const char *path;
+ struct merge_list *link;
+
+ if (!n->mode)
+ return entry;
+ if (entry)
+ path = entry->path;
+ else
+ path = strdup(mkpath("%s%s", base, n->path));
+ link = create_entry(stage, n->mode, n->sha1, path);
+ link->link = entry;
+ return link;
+}
+
static void unresolved(const char *base, struct name_entry n[3])
{
+ struct merge_list *entry = NULL;
+
if (unresolved_directory(base, n))
return;
- if (n[0].sha1)
- printf("1 %06o %s %s%s\n", n[0].mode, sha1_to_hex(n[0].sha1), base, n[0].path);
- if (n[1].sha1)
- printf("2 %06o %s %s%s\n", n[1].mode, sha1_to_hex(n[1].sha1), base, n[1].path);
- if (n[2].sha1)
- printf("3 %06o %s %s%s\n", n[2].mode, sha1_to_hex(n[2].sha1), base, n[2].path);
+
+ /*
+ * Do them in reverse order so that the resulting link
+ * list has the stages in order - link_entry adds new
+ * links at the front.
+ */
+ entry = link_entry(3, base, n + 2, entry);
+ entry = link_entry(2, base, n + 1, entry);
+ entry = link_entry(1, base, n + 0, entry);
+
+ add_merge_entry(entry);
}
/*
free(buf1);
free(buf2);
free(buf3);
+
+ show_result();
return 0;
}
#include "tag.h"
/*
- * A signature file has a very simple fixed format: three lines
- * of "object <sha1>" + "type <typename>" + "tag <tagname>",
- * followed by some free-form signature that git itself doesn't
- * care about, but that can be verified with gpg or similar.
+ * A signature file has a very simple fixed format: four lines
+ * of "object <sha1>" + "type <typename>" + "tag <tagname>" +
+ * "tagger <committer>", followed by a blank line, a free-form tag
+ * message and a signature block that git itself doesn't care about,
+ * but that can be verified with gpg or similar.
*
* The first three lines are guaranteed to be at least 63 bytes:
* "object <sha1>\n" is 48 bytes, "type tag\n" at 9 bytes is the
* in that size, you're doing something wrong.
*/
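For illustration, a well-formed input has the following shape (all values hypothetical); it is written here as a C string literal so the fixed line layout the verifier checks is visible:

static const char example_tag[] =
        "object 0123456789abcdef0123456789abcdef01234567\n"    /* exactly 48 bytes */
        "type commit\n"
        "tag v1.0.0\n"
        "tagger A U Thor <author@example.com> 1162145279 -0800\n"
        "\n"
        "Annotated tag message goes here.\n";
        /* an optional PGP signature block may follow the message */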
-// Some random size
+/* Some random size */
#define MAXSIZE (8192)
/*
return ret;
}
+#ifdef NO_C99_FORMAT
+#define PD_FMT "%d"
+#else
+#define PD_FMT "%td"
+#endif
+
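The pointer differences passed to error() below are ptrdiff_t values; C99 printf spells that "%td", which pre-C99 libcs reject, so the macro picks the format at compile time. A hedged standalone illustration:

#include <stdio.h>

#ifdef NO_C99_FORMAT
#define PD_FMT "%d"     /* assumes ptrdiff_t fits in int on such platforms, as the code below does */
#else
#define PD_FMT "%td"
#endif

int main(void)
{
        const char buffer[] = "object 0123";
        const char *type_line = buffer + 7;

        /* string concatenation builds "char%td: ..." (or "char%d: ...") */
        printf("char" PD_FMT ": example offset\n", type_line - buffer);
        return 0;
}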
static int verify_tag(char *buffer, unsigned long size)
{
int typelen;
const char *object, *type_line, *tag_line, *tagger_line;
if (size < 64)
- return error("wanna fool me ? you obviously got the size wrong !\n");
+ return error("wanna fool me ? you obviously got the size wrong !");
buffer[size] = 0;
/* Verify object line */
object = buffer;
if (memcmp(object, "object ", 7))
- return error("char%d: does not start with \"object \"\n", 0);
+ return error("char%d: does not start with \"object \"", 0);
if (get_sha1_hex(object + 7, sha1))
- return error("char%d: could not get SHA1 hash\n", 7);
+ return error("char%d: could not get SHA1 hash", 7);
/* Verify type line */
type_line = object + 48;
if (memcmp(type_line - 1, "\ntype ", 6))
- return error("char%d: could not find \"\\ntype \"\n", 47);
+ return error("char%d: could not find \"\\ntype \"", 47);
/* Verify tag-line */
tag_line = strchr(type_line, '\n');
if (!tag_line)
- return error("char%td: could not find next \"\\n\"\n", type_line - buffer);
+ return error("char" PD_FMT ": could not find next \"\\n\"", type_line - buffer);
tag_line++;
if (memcmp(tag_line, "tag ", 4) || tag_line[4] == '\n')
- return error("char%td: no \"tag \" found\n", tag_line - buffer);
+ return error("char" PD_FMT ": no \"tag \" found", tag_line - buffer);
/* Get the actual type */
typelen = tag_line - type_line - strlen("type \n");
if (typelen >= sizeof(type))
- return error("char%td: type too long\n", type_line+5 - buffer);
+ return error("char" PD_FMT ": type too long", type_line+5 - buffer);
memcpy(type, type_line+5, typelen);
type[typelen] = 0;
/* Verify that the object matches */
- if (get_sha1_hex(object + 7, sha1))
- return error("char%d: could not get SHA1 hash but this is really odd since i got it before !\n", 7);
-
if (verify_object(sha1, type))
- return error("char%d: could not verify object %s\n", 7, sha1);
+ return error("char%d: could not verify object %s", 7, sha1_to_hex(sha1));
/* Verify the tag-name: we don't allow control characters or spaces in it */
tag_line += 4;
break;
if (c > ' ')
continue;
- return error("char%td: could not verify tag name\n", tag_line - buffer);
+ return error("char" PD_FMT ": could not verify tag name", tag_line - buffer);
}
/* Verify the tagger line */
tagger_line = tag_line;
if (memcmp(tagger_line, "tagger", 6) || (tagger_line[6] == '\n'))
- return error("char%td: could not find \"tagger\"\n", tagger_line - buffer);
+ return error("char" PD_FMT ": could not find \"tagger\"", tagger_line - buffer);
+
+ /* TODO: check for committer info + blank line? */
+ /* Also, the minimum length is probably + "tagger .", or 63+8=71 */
/* The actual stuff afterwards we don't care about.. */
return 0;
}
+#undef PD_FMT
+
int main(int argc, char **argv)
{
unsigned long size = 4096;
unsigned char result_sha1[20];
if (argc != 1)
- usage("cat <signaturefile> | git-mktag");
+ usage("git-mktag < signaturefile");
setup_git_directory();
die("could not read from stdin");
}
- // Verify it for some basic sanity: it needs to start with "object <sha1>\ntype\ntagger "
+ /* Verify it for some basic sanity: it needs to start with
+ "object <sha1>\ntype\ntagger " */
if (verify_tag(buffer, size) < 0)
die("invalid tag signature file");
write_sha1_file(buffer, offset, tree_type, sha1);
}
-static const char mktree_usage[] = "mktree [-z]";
+static const char mktree_usage[] = "git-mktree [-z]";
int main(int ac, char **av)
{
+++ /dev/null
-#include <stdlib.h>
-#include "cache.h"
-#include "commit.h"
-#include "tag.h"
-#include "refs.h"
-
-static const char name_rev_usage[] =
- "git-name-rev [--tags] ( --all | --stdin | commitish [commitish...] )\n";
-
-typedef struct rev_name {
- const char *tip_name;
- int merge_traversals;
- int generation;
-} rev_name;
-
-static long cutoff = LONG_MAX;
-
-static void name_rev(struct commit *commit,
- const char *tip_name, int merge_traversals, int generation,
- int deref)
-{
- struct rev_name *name = (struct rev_name *)commit->util;
- struct commit_list *parents;
- int parent_number = 1;
-
- if (!commit->object.parsed)
- parse_commit(commit);
-
- if (commit->date < cutoff)
- return;
-
- if (deref) {
- char *new_name = xmalloc(strlen(tip_name)+3);
- strcpy(new_name, tip_name);
- strcat(new_name, "^0");
- tip_name = new_name;
-
- if (generation)
- die("generation: %d, but deref?", generation);
- }
-
- if (name == NULL) {
- name = xmalloc(sizeof(rev_name));
- commit->util = name;
- goto copy_data;
- } else if (name->merge_traversals > merge_traversals ||
- (name->merge_traversals == merge_traversals &&
- name->generation > generation)) {
-copy_data:
- name->tip_name = tip_name;
- name->merge_traversals = merge_traversals;
- name->generation = generation;
- } else
- return;
-
- for (parents = commit->parents;
- parents;
- parents = parents->next, parent_number++) {
- if (parent_number > 1) {
- char *new_name = xmalloc(strlen(tip_name)+8);
-
- if (generation > 0)
- sprintf(new_name, "%s~%d^%d", tip_name,
- generation, parent_number);
- else
- sprintf(new_name, "%s^%d", tip_name, parent_number);
-
- name_rev(parents->item, new_name,
- merge_traversals + 1 , 0, 0);
- } else {
- name_rev(parents->item, tip_name, merge_traversals,
- generation + 1, 0);
- }
- }
-}
-
-static int tags_only = 0;
-
-static int name_ref(const char *path, const unsigned char *sha1)
-{
- struct object *o = parse_object(sha1);
- int deref = 0;
-
- if (tags_only && strncmp(path, "refs/tags/", 10))
- return 0;
-
- while (o && o->type == TYPE_TAG) {
- struct tag *t = (struct tag *) o;
- if (!t->tagged)
- break; /* broken repository */
- o = parse_object(t->tagged->sha1);
- deref = 1;
- }
- if (o && o->type == TYPE_COMMIT) {
- struct commit *commit = (struct commit *)o;
-
- if (!strncmp(path, "refs/heads/", 11))
- path = path + 11;
- else if (!strncmp(path, "refs/", 5))
- path = path + 5;
-
- name_rev(commit, strdup(path), 0, 0, deref);
- }
- return 0;
-}
-
-/* returns a static buffer */
-static const char* get_rev_name(struct object *o)
-{
- static char buffer[1024];
- struct rev_name *n;
- struct commit *c;
-
- if (o->type != TYPE_COMMIT)
- return "undefined";
- c = (struct commit *) o;
- n = c->util;
- if (!n)
- return "undefined";
-
- if (!n->generation)
- return n->tip_name;
-
- snprintf(buffer, sizeof(buffer), "%s~%d", n->tip_name, n->generation);
-
- return buffer;
-}
-
-int main(int argc, char **argv)
-{
- struct object_array revs = { 0, 0, NULL };
- int as_is = 0, all = 0, transform_stdin = 0;
-
- setup_git_directory();
- git_config(git_default_config);
-
- if (argc < 2)
- usage(name_rev_usage);
-
- for (--argc, ++argv; argc; --argc, ++argv) {
- unsigned char sha1[20];
- struct object *o;
- struct commit *commit;
-
- if (!as_is && (*argv)[0] == '-') {
- if (!strcmp(*argv, "--")) {
- as_is = 1;
- continue;
- } else if (!strcmp(*argv, "--tags")) {
- tags_only = 1;
- continue;
- } else if (!strcmp(*argv, "--all")) {
- if (argc > 1)
- die("Specify either a list, or --all, not both!");
- all = 1;
- cutoff = 0;
- continue;
- } else if (!strcmp(*argv, "--stdin")) {
- if (argc > 1)
- die("Specify either a list, or --stdin, not both!");
- transform_stdin = 1;
- cutoff = 0;
- continue;
- }
- usage(name_rev_usage);
- }
-
- if (get_sha1(*argv, sha1)) {
- fprintf(stderr, "Could not get sha1 for %s. Skipping.\n",
- *argv);
- continue;
- }
-
- o = deref_tag(parse_object(sha1), *argv, 0);
- if (!o || o->type != TYPE_COMMIT) {
- fprintf(stderr, "Could not get commit for %s. Skipping.\n",
- *argv);
- continue;
- }
-
- commit = (struct commit *)o;
-
- if (cutoff > commit->date)
- cutoff = commit->date;
-
- add_object_array((struct object *)commit, *argv, &revs);
- }
-
- for_each_ref(name_ref);
-
- if (transform_stdin) {
- char buffer[2048];
- char *p, *p_start;
-
- while (!feof(stdin)) {
- int forty = 0;
- p = fgets(buffer, sizeof(buffer), stdin);
- if (!p)
- break;
-
- for (p_start = p; *p; p++) {
-#define ishex(x) (isdigit((x)) || ((x) >= 'a' && (x) <= 'f'))
- if (!ishex(*p))
- forty = 0;
- else if (++forty == 40 &&
- !ishex(*(p+1))) {
- unsigned char sha1[40];
- const char *name = "undefined";
- char c = *(p+1);
-
- forty = 0;
-
- *(p+1) = 0;
- if (!get_sha1(p - 39, sha1)) {
- struct object *o =
- lookup_object(sha1);
- if (o)
- name = get_rev_name(o);
- }
- *(p+1) = c;
-
- if (!strcmp(name, "undefined"))
- continue;
-
- fwrite(p_start, p - p_start + 1, 1,
- stdout);
- printf(" (%s)", name);
- p_start = p + 1;
- }
- }
-
- /* flush */
- if (p_start != p)
- fwrite(p_start, p - p_start, 1, stdout);
- }
- } else if (all) {
- int i;
-
- for (i = 0; i < obj_allocs; i++)
- if (objs[i])
- printf("%s %s\n", sha1_to_hex(objs[i]->sha1),
- get_rev_name(objs[i]));
- } else {
- int i;
- for (i = 0; i < revs.nr; i++)
- printf("%s %s\n",
- revs.objects[i].name,
- get_rev_name(revs.objects[i].item));
- }
-
- return 0;
-}
-
#include "commit.h"
#include "tag.h"
-struct object **objs;
-static int nr_objs;
-int obj_allocs;
+static struct object **obj_hash;
+static int nr_objs, obj_hash_size;
+
+unsigned int get_max_object_index(void)
+{
+ return obj_hash_size;
+}
+
+struct object *get_indexed_object(unsigned int idx)
+{
+ return obj_hash[idx];
+}
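Code that used to iterate over the exported objs[]/obj_allocs pair directly now has to go through these two accessors. A hedged sketch of the replacement idiom:

/* Sketch: visit every object the hash table currently holds. */
static void for_each_known_object(void (*fn)(struct object *))
{
        unsigned int i, max = get_max_object_index();

        for (i = 0; i < max; i++) {
                struct object *obj = get_indexed_object(i);
                if (!obj)
                        continue;       /* open-addressed table: many slots are empty */
                fn(obj);
        }
}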
const char *type_names[] = {
- "none", "blob", "tree", "commit", "bad"
+ "none", "commit", "tree", "blob", "tag",
+ "bad type 5", "bad type 6", "delta", "bad",
};
+static unsigned int hash_obj(struct object *obj, unsigned int n)
+{
+ unsigned int hash = *(unsigned int *)obj->sha1;
+ return hash % n;
+}
+
+static void insert_obj_hash(struct object *obj, struct object **hash, unsigned int size)
+{
+ int j = hash_obj(obj, size);
+
+ while (hash[j]) {
+ j++;
+ if (j >= size)
+ j = 0;
+ }
+ hash[j] = obj;
+}
+
static int hashtable_index(const unsigned char *sha1)
{
unsigned int i;
memcpy(&i, sha1, sizeof(unsigned int));
- return (int)(i % obj_allocs);
+ return (int)(i % obj_hash_size);
}
-static int find_object(const unsigned char *sha1)
+struct object *lookup_object(const unsigned char *sha1)
{
int i;
+ struct object *obj;
- if (!objs)
- return -1;
+ if (!obj_hash)
+ return NULL;
i = hashtable_index(sha1);
- while (objs[i]) {
- if (memcmp(sha1, objs[i]->sha1, 20) == 0)
- return i;
+ while ((obj = obj_hash[i]) != NULL) {
+ if (!memcmp(sha1, obj->sha1, 20))
+ break;
i++;
- if (i == obj_allocs)
+ if (i == obj_hash_size)
i = 0;
}
- return -1 - i;
+ return obj;
}
-struct object *lookup_object(const unsigned char *sha1)
+static void grow_object_hash(void)
{
- int pos = find_object(sha1);
- if (pos >= 0)
- return objs[pos];
- return NULL;
+ int i;
+ int new_hash_size = obj_hash_size < 32 ? 32 : 2 * obj_hash_size;
+ struct object **new_hash;
+
+ new_hash = calloc(new_hash_size, sizeof(struct object *));
+ for (i = 0; i < obj_hash_size; i++) {
+ struct object *obj = obj_hash[i];
+ if (!obj)
+ continue;
+ insert_obj_hash(obj, new_hash, new_hash_size);
+ }
+ free(obj_hash);
+ obj_hash = new_hash;
+ obj_hash_size = new_hash_size;
}
void created_object(const unsigned char *sha1, struct object *obj)
{
- int pos;
-
obj->parsed = 0;
- memcpy(obj->sha1, sha1, 20);
- obj->type = TYPE_NONE;
obj->used = 0;
+ obj->type = OBJ_NONE;
+ obj->flags = 0;
+ memcpy(obj->sha1, sha1, 20);
- if (obj_allocs - 1 <= nr_objs * 2) {
- int i, count = obj_allocs;
- obj_allocs = (obj_allocs < 32 ? 32 : 2 * obj_allocs);
- objs = xrealloc(objs, obj_allocs * sizeof(struct object *));
- memset(objs + count, 0, (obj_allocs - count)
- * sizeof(struct object *));
- for (i = 0; i < obj_allocs; i++)
- if (objs[i]) {
- int j = find_object(objs[i]->sha1);
- if (j != i) {
- j = -1 - j;
- objs[j] = objs[i];
- objs[i] = NULL;
- }
- }
- }
-
- pos = find_object(sha1);
- if (pos >= 0)
- die("Inserting %s twice\n", sha1_to_hex(sha1));
- pos = -pos-1;
+ if (obj_hash_size - 1 <= nr_objs * 2)
+ grow_object_hash();
- objs[pos] = obj;
+ insert_obj_hash(obj, obj_hash, obj_hash_size);
nr_objs++;
}
if (!obj) {
union any_object *ret = xcalloc(1, sizeof(*ret));
created_object(sha1, &ret->object);
- ret->object.type = TYPE_NONE;
+ ret->object.type = OBJ_NONE;
return &ret->object;
}
return obj;
#define TYPE_BITS 3
#define FLAG_BITS 27
-#define TYPE_NONE 0
-#define TYPE_BLOB 1
-#define TYPE_TREE 2
-#define TYPE_COMMIT 3
-#define TYPE_TAG 4
-#define TYPE_BAD 5
+/*
+ * The object type is stored in 3 bits.
+ */
+enum object_type {
+ OBJ_NONE = 0,
+ OBJ_COMMIT = 1,
+ OBJ_TREE = 2,
+ OBJ_BLOB = 3,
+ OBJ_TAG = 4,
+ /* 5/6 for future expansion */
+ OBJ_DELTA = 7,
+ OBJ_BAD,
+};
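These values are not arbitrary: they are the 3-bit type codes stored in pack entry headers, and they double as indices into the type_names[] array touched above. A hedged sanity-check sketch (assumes <assert.h> and <string.h>):

static void check_type_names(void)
{
        assert(!strcmp(type_names[OBJ_NONE],   "none"));
        assert(!strcmp(type_names[OBJ_COMMIT], "commit"));
        assert(!strcmp(type_names[OBJ_TREE],   "tree"));
        assert(!strcmp(type_names[OBJ_BLOB],   "blob"));
        assert(!strcmp(type_names[OBJ_TAG],    "tag"));
        assert(!strcmp(type_names[OBJ_DELTA],  "delta"));
}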
struct object {
unsigned parsed : 1;
};
extern int track_object_refs;
-extern int obj_allocs;
-extern struct object **objs;
-extern const char *type_names[];
+extern const char *type_names[9];
+
+extern unsigned int get_max_object_index(void);
+extern struct object *get_indexed_object(unsigned int);
static inline const char *typename(unsigned int type)
{
- return type_names[type > TYPE_TAG ? TYPE_BAD : type];
+ return type_names[type > OBJ_BAD ? OBJ_BAD : type];
}
extern struct object_refs *lookup_object_refs(struct object *);
+++ /dev/null
-#include "cache.h"
-#include "object.h"
-#include "blob.h"
-#include "commit.h"
-#include "tag.h"
-#include "tree.h"
-#include "delta.h"
-#include "pack.h"
-#include "csum-file.h"
-#include "tree-walk.h"
-#include <sys/time.h>
-#include <signal.h>
-
-static const char pack_usage[] = "git-pack-objects [-q] [--no-reuse-delta] [--non-empty] [--local] [--incremental] [--window=N] [--depth=N] {--stdout | base-name} < object-list";
-
-struct object_entry {
- unsigned char sha1[20];
- unsigned long size; /* uncompressed size */
- unsigned long offset; /* offset into the final pack file;
- * nonzero if already written.
- */
- unsigned int depth; /* delta depth */
- unsigned int delta_limit; /* base adjustment for in-pack delta */
- unsigned int hash; /* name hint hash */
- enum object_type type;
- enum object_type in_pack_type; /* could be delta */
- unsigned long delta_size; /* delta data size (uncompressed) */
- struct object_entry *delta; /* delta base object */
- struct packed_git *in_pack; /* already in pack */
- unsigned int in_pack_offset;
- struct object_entry *delta_child; /* deltified objects that use me as their base */
- struct object_entry *delta_sibling; /* other deltified objects that
- * use the same base as me
- */
- int preferred_base; /* we do not pack this, but it is encouraged to
- * be used as the base object to delta huge
- * objects against.
- */
-};
-
-/*
- * Objects we are going to pack are collected in the objects array (dynamically
- * expanded). nr_objects & nr_alloc controls this array. They are stored
- * in the order we see -- typically rev-list --objects order that gives us
- * nice "minimum seek" order.
- *
- * sorted-by-sha and sorted-by-type are arrays of pointers that point at
- * elements in the objects array. The former is used to build the pack
- * index (lists object names in the ascending order to help offset lookup),
- * and the latter is used to group similar things together by try_delta()
- * heuristics.
- */
-
-static unsigned char object_list_sha1[20];
-static int non_empty = 0;
-static int no_reuse_delta = 0;
-static int local = 0;
-static int incremental = 0;
-static struct object_entry **sorted_by_sha, **sorted_by_type;
-static struct object_entry *objects = NULL;
-static int nr_objects = 0, nr_alloc = 0, nr_result = 0;
-static const char *base_name;
-static unsigned char pack_file_sha1[20];
-static int progress = 1;
-static volatile sig_atomic_t progress_update = 0;
-
-/*
- * The object names in objects array are hashed with this hashtable,
- * to help looking up the entry by object name. Binary search from
- * sorted_by_sha is also possible but this was easier to code and faster.
- * This hashtable is built after all the objects are seen.
- */
-static int *object_ix = NULL;
-static int object_ix_hashsz = 0;
-
-/*
- * Pack index for existing packs give us easy access to the offsets into
- * corresponding pack file where each object's data starts, but the entries
- * do not store the size of the compressed representation (uncompressed
- * size is easily available by examining the pack entry header). We build
- * a hashtable of existing packs (pack_revindex), and keep reverse index
- * here -- pack index file is sorted by object name mapping to offset; this
- * pack_revindex[].revindex array is an ordered list of offsets, so if you
- * know the offset of an object, next offset is where its packed
- * representation ends.
- */
-struct pack_revindex {
- struct packed_git *p;
- unsigned long *revindex;
-} *pack_revindex = NULL;
-static int pack_revindex_hashsz = 0;
-
-/*
- * stats
- */
-static int written = 0;
-static int written_delta = 0;
-static int reused = 0;
-static int reused_delta = 0;
-
-static int pack_revindex_ix(struct packed_git *p)
-{
- unsigned long ui = (unsigned long)p;
- int i;
-
- ui = ui ^ (ui >> 16); /* defeat structure alignment */
- i = (int)(ui % pack_revindex_hashsz);
- while (pack_revindex[i].p) {
- if (pack_revindex[i].p == p)
- return i;
- if (++i == pack_revindex_hashsz)
- i = 0;
- }
- return -1 - i;
-}
-
-static void prepare_pack_ix(void)
-{
- int num;
- struct packed_git *p;
- for (num = 0, p = packed_git; p; p = p->next)
- num++;
- if (!num)
- return;
- pack_revindex_hashsz = num * 11;
- pack_revindex = xcalloc(sizeof(*pack_revindex), pack_revindex_hashsz);
- for (p = packed_git; p; p = p->next) {
- num = pack_revindex_ix(p);
- num = - 1 - num;
- pack_revindex[num].p = p;
- }
- /* revindex elements are lazily initialized */
-}
-
-static int cmp_offset(const void *a_, const void *b_)
-{
- unsigned long a = *(unsigned long *) a_;
- unsigned long b = *(unsigned long *) b_;
- if (a < b)
- return -1;
- else if (a == b)
- return 0;
- else
- return 1;
-}
-
-/*
- * Ordered list of offsets of objects in the pack.
- */
-static void prepare_pack_revindex(struct pack_revindex *rix)
-{
- struct packed_git *p = rix->p;
- int num_ent = num_packed_objects(p);
- int i;
- void *index = p->index_base + 256;
-
- rix->revindex = xmalloc(sizeof(unsigned long) * (num_ent + 1));
- for (i = 0; i < num_ent; i++) {
- unsigned int hl = *((unsigned int *)((char *) index + 24*i));
- rix->revindex[i] = ntohl(hl);
- }
- /* This knows the pack format -- the 20-byte trailer
- * follows immediately after the last object data.
- */
- rix->revindex[num_ent] = p->pack_size - 20;
- qsort(rix->revindex, num_ent, sizeof(unsigned long), cmp_offset);
-}
-
-static unsigned long find_packed_object_size(struct packed_git *p,
- unsigned long ofs)
-{
- int num;
- int lo, hi;
- struct pack_revindex *rix;
- unsigned long *revindex;
- num = pack_revindex_ix(p);
- if (num < 0)
- die("internal error: pack revindex uninitialized");
- rix = &pack_revindex[num];
- if (!rix->revindex)
- prepare_pack_revindex(rix);
- revindex = rix->revindex;
- lo = 0;
- hi = num_packed_objects(p) + 1;
- do {
- int mi = (lo + hi) / 2;
- if (revindex[mi] == ofs) {
- return revindex[mi+1] - ofs;
- }
- else if (ofs < revindex[mi])
- hi = mi;
- else
- lo = mi + 1;
- } while (lo < hi);
- die("internal error: pack revindex corrupt");
-}
-
-static void *delta_against(void *buf, unsigned long size, struct object_entry *entry)
-{
- unsigned long othersize, delta_size;
- char type[10];
- void *otherbuf = read_sha1_file(entry->delta->sha1, type, &othersize);
- void *delta_buf;
-
- if (!otherbuf)
- die("unable to read %s", sha1_to_hex(entry->delta->sha1));
- delta_buf = diff_delta(otherbuf, othersize,
- buf, size, &delta_size, 0);
- if (!delta_buf || delta_size != entry->delta_size)
- die("delta size changed");
- free(buf);
- free(otherbuf);
- return delta_buf;
-}
-
-/*
- * The per-object header is a pretty dense thing, which is
- * - first byte: low four bits are "size", then three bits of "type",
- * and the high bit is "size continues".
- * - each byte afterwards: low seven bits are size continuation,
- * with the high bit being "size continues"
- */
-static int encode_header(enum object_type type, unsigned long size, unsigned char *hdr)
-{
- int n = 1;
- unsigned char c;
-
- if (type < OBJ_COMMIT || type > OBJ_DELTA)
- die("bad type %d", type);
-
- c = (type << 4) | (size & 15);
- size >>= 4;
- while (size) {
- *hdr++ = c | 0x80;
- c = size & 0x7f;
- size >>= 7;
- n++;
- }
- *hdr = c;
- return n;
-}
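Even though this implementation is being removed from the file, the header format it describes is worth a worked example: encoding a 1000-byte blob (type code 3) yields the two bytes 0xb8 0x3e. A hedged standalone sketch of the same arithmetic:

#include <stdio.h>

int main(void)
{
        unsigned char hdr[10];
        unsigned long size = 1000;                      /* 0x3e8 */
        int type = 3, n = 1, i;                         /* 3 == blob */
        unsigned char c = (type << 4) | (size & 15);    /* 0x38 */

        size >>= 4;                                     /* 62 == 0x3e left */
        while (size) {
                hdr[n - 1] = c | 0x80;                  /* set the "size continues" bit */
                c = size & 0x7f;
                size >>= 7;
                n++;
        }
        hdr[n - 1] = c;

        printf("%d header bytes:", n);                  /* prints: 2 header bytes: b8 3e */
        for (i = 0; i < n; i++)
                printf(" %02x", hdr[i]);
        printf("\n");
        return 0;
}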
-
-static unsigned long write_object(struct sha1file *f,
- struct object_entry *entry)
-{
- unsigned long size;
- char type[10];
- void *buf;
- unsigned char header[10];
- unsigned hdrlen, datalen;
- enum object_type obj_type;
- int to_reuse = 0;
-
- if (entry->preferred_base)
- return 0;
-
- obj_type = entry->type;
- if (! entry->in_pack)
- to_reuse = 0; /* can't reuse what we don't have */
- else if (obj_type == OBJ_DELTA)
- to_reuse = 1; /* check_object() decided it for us */
- else if (obj_type != entry->in_pack_type)
- to_reuse = 0; /* pack has delta which is unusable */
- else if (entry->delta)
- to_reuse = 0; /* we want to pack afresh */
- else
- to_reuse = 1; /* we have it in-pack undeltified,
- * and we do not need to deltify it.
- */
-
- if (! to_reuse) {
- buf = read_sha1_file(entry->sha1, type, &size);
- if (!buf)
- die("unable to read %s", sha1_to_hex(entry->sha1));
- if (size != entry->size)
- die("object %s size inconsistency (%lu vs %lu)",
- sha1_to_hex(entry->sha1), size, entry->size);
- if (entry->delta) {
- buf = delta_against(buf, size, entry);
- size = entry->delta_size;
- obj_type = OBJ_DELTA;
- }
- /*
- * The object header is a byte of 'type' followed by zero or
- * more bytes of length. For deltas, the 20 bytes of delta
- * sha1 follows that.
- */
- hdrlen = encode_header(obj_type, size, header);
- sha1write(f, header, hdrlen);
-
- if (entry->delta) {
- sha1write(f, entry->delta, 20);
- hdrlen += 20;
- }
- datalen = sha1write_compressed(f, buf, size);
- free(buf);
- }
- else {
- struct packed_git *p = entry->in_pack;
- use_packed_git(p);
-
- datalen = find_packed_object_size(p, entry->in_pack_offset);
- buf = (char *) p->pack_base + entry->in_pack_offset;
- sha1write(f, buf, datalen);
- unuse_packed_git(p);
- hdrlen = 0; /* not really */
- if (obj_type == OBJ_DELTA)
- reused_delta++;
- reused++;
- }
- if (obj_type == OBJ_DELTA)
- written_delta++;
- written++;
- return hdrlen + datalen;
-}
-
-static unsigned long write_one(struct sha1file *f,
- struct object_entry *e,
- unsigned long offset)
-{
- if (e->offset)
- /* offset starts from header size and cannot be zero
- * if it is written already.
- */
- return offset;
- e->offset = offset;
- offset += write_object(f, e);
- /* if we are deltified, write out its base object. */
- if (e->delta)
- offset = write_one(f, e->delta, offset);
- return offset;
-}
-
-static void write_pack_file(void)
-{
- int i;
- struct sha1file *f;
- unsigned long offset;
- struct pack_header hdr;
- unsigned last_percent = 999;
- int do_progress = 0;
-
- if (!base_name)
- f = sha1fd(1, "<stdout>");
- else {
- f = sha1create("%s-%s.%s", base_name,
- sha1_to_hex(object_list_sha1), "pack");
- do_progress = progress;
- }
- if (do_progress)
- fprintf(stderr, "Writing %d objects.\n", nr_result);
-
- hdr.hdr_signature = htonl(PACK_SIGNATURE);
- hdr.hdr_version = htonl(PACK_VERSION);
- hdr.hdr_entries = htonl(nr_result);
- sha1write(f, &hdr, sizeof(hdr));
- offset = sizeof(hdr);
- if (!nr_result)
- goto done;
- for (i = 0; i < nr_objects; i++) {
- offset = write_one(f, objects + i, offset);
- if (do_progress) {
- unsigned percent = written * 100 / nr_result;
- if (progress_update || percent != last_percent) {
- fprintf(stderr, "%4u%% (%u/%u) done\r",
- percent, written, nr_result);
- progress_update = 0;
- last_percent = percent;
- }
- }
- }
- if (do_progress)
- fputc('\n', stderr);
- done:
- sha1close(f, pack_file_sha1, 1);
-}
-
-static void write_index_file(void)
-{
- int i;
- struct sha1file *f = sha1create("%s-%s.%s", base_name,
- sha1_to_hex(object_list_sha1), "idx");
- struct object_entry **list = sorted_by_sha;
- struct object_entry **last = list + nr_result;
- unsigned int array[256];
-
- /*
- * Write the first-level table (the list is sorted,
- * but we use a 256-entry lookup to be able to avoid
- * having to do eight extra binary search iterations).
- */
- for (i = 0; i < 256; i++) {
- struct object_entry **next = list;
- while (next < last) {
- struct object_entry *entry = *next;
- if (entry->sha1[0] != i)
- break;
- next++;
- }
- array[i] = htonl(next - sorted_by_sha);
- list = next;
- }
- sha1write(f, array, 256 * sizeof(int));
-
- /*
- * Write the actual SHA1 entries..
- */
- list = sorted_by_sha;
- for (i = 0; i < nr_result; i++) {
- struct object_entry *entry = *list++;
- unsigned int offset = htonl(entry->offset);
- sha1write(f, &offset, 4);
- sha1write(f, entry->sha1, 20);
- }
- sha1write(f, pack_file_sha1, 20);
- sha1close(f, NULL, 1);
-}
-
-static int locate_object_entry_hash(const unsigned char *sha1)
-{
- int i;
- unsigned int ui;
- memcpy(&ui, sha1, sizeof(unsigned int));
- i = ui % object_ix_hashsz;
- while (0 < object_ix[i]) {
- if (!memcmp(sha1, objects[object_ix[i]-1].sha1, 20))
- return i;
- if (++i == object_ix_hashsz)
- i = 0;
- }
- return -1 - i;
-}
-
-static struct object_entry *locate_object_entry(const unsigned char *sha1)
-{
- int i;
-
- if (!object_ix_hashsz)
- return NULL;
-
- i = locate_object_entry_hash(sha1);
- if (0 <= i)
- return &objects[object_ix[i]-1];
- return NULL;
-}
-
-static void rehash_objects(void)
-{
- int i;
- struct object_entry *oe;
-
- object_ix_hashsz = nr_objects * 3;
- if (object_ix_hashsz < 1024)
- object_ix_hashsz = 1024;
- object_ix = xrealloc(object_ix, sizeof(int) * object_ix_hashsz);
- memset(object_ix, 0, sizeof(int) * object_ix_hashsz);
- for (i = 0, oe = objects; i < nr_objects; i++, oe++) {
- int ix = locate_object_entry_hash(oe->sha1);
- if (0 <= ix)
- continue;
- ix = -1 - ix;
- object_ix[ix] = i + 1;
- }
-}
-
-static unsigned name_hash(const char *name)
-{
- unsigned char c;
- unsigned hash = 0;
-
- /*
- * This effectively just creates a sortable number from the
- * last sixteen non-whitespace characters. Last characters
- * count "most", so things that end in ".c" sort together.
- */
- while ((c = *name++) != 0) {
- if (isspace(c))
- continue;
- hash = (hash >> 2) + (c << 24);
- }
- return hash;
-}
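The comment is easier to see with numbers: each new character pushes the earlier ones down two bits, so after sixteen non-whitespace characters the oldest have left the 32-bit hash entirely, and names sharing a long suffix end up with the same high bits. A hedged standalone sketch using the same arithmetic:

#include <stdio.h>
#include <ctype.h>

static unsigned name_hash_sketch(const char *name)
{
        unsigned char c;
        unsigned hash = 0;

        while ((c = *name++) != 0) {
                if (isspace(c))
                        continue;
                hash = (hash >> 2) + (c << 24);
        }
        return hash;
}

int main(void)
{
        /* the two ".c" paths share their high bits, so they sort next to each other */
        printf("%08x %08x %08x\n",
               name_hash_sketch("diff.c"),
               name_hash_sketch("combine-diff.c"),
               name_hash_sketch("Makefile"));
        return 0;
}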
-
-static int add_object_entry(const unsigned char *sha1, unsigned hash, int exclude)
-{
- unsigned int idx = nr_objects;
- struct object_entry *entry;
- struct packed_git *p;
- unsigned int found_offset = 0;
- struct packed_git *found_pack = NULL;
- int ix, status = 0;
-
- if (!exclude) {
- for (p = packed_git; p; p = p->next) {
- struct pack_entry e;
- if (find_pack_entry_one(sha1, &e, p)) {
- if (incremental)
- return 0;
- if (local && !p->pack_local)
- return 0;
- if (!found_pack) {
- found_offset = e.offset;
- found_pack = e.p;
- }
- }
- }
- }
- if ((entry = locate_object_entry(sha1)) != NULL)
- goto already_added;
-
- if (idx >= nr_alloc) {
- unsigned int needed = (idx + 1024) * 3 / 2;
- objects = xrealloc(objects, needed * sizeof(*entry));
- nr_alloc = needed;
- }
- entry = objects + idx;
- nr_objects = idx + 1;
- memset(entry, 0, sizeof(*entry));
- memcpy(entry->sha1, sha1, 20);
- entry->hash = hash;
-
- if (object_ix_hashsz * 3 <= nr_objects * 4)
- rehash_objects();
- else {
- ix = locate_object_entry_hash(entry->sha1);
- if (0 <= ix)
- die("internal error in object hashing.");
- object_ix[-1 - ix] = idx + 1;
- }
- status = 1;
-
- already_added:
- if (progress_update) {
- fprintf(stderr, "Counting objects...%d\r", nr_objects);
- progress_update = 0;
- }
- if (exclude)
- entry->preferred_base = 1;
- else {
- if (found_pack) {
- entry->in_pack = found_pack;
- entry->in_pack_offset = found_offset;
- }
- }
- return status;
-}
-
-struct pbase_tree_cache {
- unsigned char sha1[20];
- int ref;
- int temporary;
- void *tree_data;
- unsigned long tree_size;
-};
-
-static struct pbase_tree_cache *(pbase_tree_cache[256]);
-static int pbase_tree_cache_ix(const unsigned char *sha1)
-{
- return sha1[0] % ARRAY_SIZE(pbase_tree_cache);
-}
-static int pbase_tree_cache_ix_incr(int ix)
-{
- return (ix+1) % ARRAY_SIZE(pbase_tree_cache);
-}
-
-static struct pbase_tree {
- struct pbase_tree *next;
- /* This is a phony "cache" entry; we are not
- * going to evict it nor find it through _get()
- * mechanism -- this is for the toplevel node that
- * would almost always change with any commit.
- */
- struct pbase_tree_cache pcache;
-} *pbase_tree;
-
-static struct pbase_tree_cache *pbase_tree_get(const unsigned char *sha1)
-{
- struct pbase_tree_cache *ent, *nent;
- void *data;
- unsigned long size;
- char type[20];
- int neigh;
- int my_ix = pbase_tree_cache_ix(sha1);
- int available_ix = -1;
-
- /* pbase-tree-cache acts as a limited hashtable.
- * your object will be found at your index or within a few
- * slots after that slot if it is cached.
- */
- for (neigh = 0; neigh < 8; neigh++) {
- ent = pbase_tree_cache[my_ix];
- if (ent && !memcmp(ent->sha1, sha1, 20)) {
- ent->ref++;
- return ent;
- }
- else if (((available_ix < 0) && (!ent || !ent->ref)) ||
- ((0 <= available_ix) &&
- (!ent && pbase_tree_cache[available_ix])))
- available_ix = my_ix;
- if (!ent)
- break;
- my_ix = pbase_tree_cache_ix_incr(my_ix);
- }
-
- /* Did not find one. Either we got a bogus request or
- * we need to read and perhaps cache.
- */
- data = read_sha1_file(sha1, type, &size);
- if (!data)
- return NULL;
- if (strcmp(type, tree_type)) {
- free(data);
- return NULL;
- }
-
- /* We need to either cache or return a throwaway copy */
-
- if (available_ix < 0)
- ent = NULL;
- else {
- ent = pbase_tree_cache[available_ix];
- my_ix = available_ix;
- }
-
- if (!ent) {
- nent = xmalloc(sizeof(*nent));
- nent->temporary = (available_ix < 0);
- }
- else {
- /* evict and reuse */
- free(ent->tree_data);
- nent = ent;
- }
- memcpy(nent->sha1, sha1, 20);
- nent->tree_data = data;
- nent->tree_size = size;
- nent->ref = 1;
- if (!nent->temporary)
- pbase_tree_cache[my_ix] = nent;
- return nent;
-}
-
-static void pbase_tree_put(struct pbase_tree_cache *cache)
-{
- if (!cache->temporary) {
- cache->ref--;
- return;
- }
- free(cache->tree_data);
- free(cache);
-}
-
-static int name_cmp_len(const char *name)
-{
- int i;
- for (i = 0; name[i] && name[i] != '\n' && name[i] != '/'; i++)
- ;
- return i;
-}
-
-static void add_pbase_object(struct tree_desc *tree,
- const char *name,
- int cmplen,
- const char *fullname)
-{
- struct name_entry entry;
-
- while (tree_entry(tree,&entry)) {
- unsigned long size;
- char type[20];
-
- if (entry.pathlen != cmplen ||
- memcmp(entry.path, name, cmplen) ||
- !has_sha1_file(entry.sha1) ||
- sha1_object_info(entry.sha1, type, &size))
- continue;
- if (name[cmplen] != '/') {
- unsigned hash = name_hash(fullname);
- add_object_entry(entry.sha1, hash, 1);
- return;
- }
- if (!strcmp(type, tree_type)) {
- struct tree_desc sub;
- struct pbase_tree_cache *tree;
- const char *down = name+cmplen+1;
- int downlen = name_cmp_len(down);
-
- tree = pbase_tree_get(entry.sha1);
- if (!tree)
- return;
- sub.buf = tree->tree_data;
- sub.size = tree->tree_size;
-
- add_pbase_object(&sub, down, downlen, fullname);
- pbase_tree_put(tree);
- }
- }
-}
-
-static unsigned *done_pbase_paths;
-static int done_pbase_paths_num;
-static int done_pbase_paths_alloc;
-static int done_pbase_path_pos(unsigned hash)
-{
- int lo = 0;
- int hi = done_pbase_paths_num;
- while (lo < hi) {
- int mi = (hi + lo) / 2;
- if (done_pbase_paths[mi] == hash)
- return mi;
- if (done_pbase_paths[mi] < hash)
- hi = mi;
- else
- lo = mi + 1;
- }
- return -lo-1;
-}
-
-static int check_pbase_path(unsigned hash)
-{
- int pos = (!done_pbase_paths) ? -1 : done_pbase_path_pos(hash);
- if (0 <= pos)
- return 1;
- pos = -pos - 1;
- if (done_pbase_paths_alloc <= done_pbase_paths_num) {
- done_pbase_paths_alloc = alloc_nr(done_pbase_paths_alloc);
- done_pbase_paths = xrealloc(done_pbase_paths,
- done_pbase_paths_alloc *
- sizeof(unsigned));
- }
- done_pbase_paths_num++;
- if (pos < done_pbase_paths_num)
- memmove(done_pbase_paths + pos + 1,
- done_pbase_paths + pos,
- (done_pbase_paths_num - pos - 1) * sizeof(unsigned));
- done_pbase_paths[pos] = hash;
- return 0;
-}
-
-static void add_preferred_base_object(char *name, unsigned hash)
-{
- struct pbase_tree *it;
- int cmplen = name_cmp_len(name);
-
- if (check_pbase_path(hash))
- return;
-
- for (it = pbase_tree; it; it = it->next) {
- if (cmplen == 0) {
- hash = name_hash("");
- add_object_entry(it->pcache.sha1, hash, 1);
- }
- else {
- struct tree_desc tree;
- tree.buf = it->pcache.tree_data;
- tree.size = it->pcache.tree_size;
- add_pbase_object(&tree, name, cmplen, name);
- }
- }
-}
-
-static void add_preferred_base(unsigned char *sha1)
-{
- struct pbase_tree *it;
- void *data;
- unsigned long size;
- unsigned char tree_sha1[20];
-
- data = read_object_with_reference(sha1, tree_type, &size, tree_sha1);
- if (!data)
- return;
-
- for (it = pbase_tree; it; it = it->next) {
- if (!memcmp(it->pcache.sha1, tree_sha1, 20)) {
- free(data);
- return;
- }
- }
-
- it = xcalloc(1, sizeof(*it));
- it->next = pbase_tree;
- pbase_tree = it;
-
- memcpy(it->pcache.sha1, tree_sha1, 20);
- it->pcache.tree_data = data;
- it->pcache.tree_size = size;
-}
-
-static void check_object(struct object_entry *entry)
-{
- char type[20];
-
- if (entry->in_pack && !entry->preferred_base) {
- unsigned char base[20];
- unsigned long size;
- struct object_entry *base_entry;
-
- /* We want in_pack_type even if we do not reuse delta.
- * There is no point not reusing non-delta representations.
- */
- check_reuse_pack_delta(entry->in_pack,
- entry->in_pack_offset,
- base, &size,
- &entry->in_pack_type);
-
- /* Check if it is delta, and the base is also an object
- * we are going to pack. If so we will reuse the existing
- * delta.
- */
- if (!no_reuse_delta &&
- entry->in_pack_type == OBJ_DELTA &&
- (base_entry = locate_object_entry(base)) &&
- (!base_entry->preferred_base)) {
-
- /* Depth value does not matter - find_deltas()
- * will never consider reused delta as the
- * base object to deltify other objects
- * against, in order to avoid circular deltas.
- */
-
- /* uncompressed size of the delta data */
- entry->size = entry->delta_size = size;
- entry->delta = base_entry;
- entry->type = OBJ_DELTA;
-
- entry->delta_sibling = base_entry->delta_child;
- base_entry->delta_child = entry;
-
- return;
- }
- /* Otherwise we would do the usual */
- }
-
- if (sha1_object_info(entry->sha1, type, &entry->size))
- die("unable to get type of object %s",
- sha1_to_hex(entry->sha1));
-
- if (!strcmp(type, commit_type)) {
- entry->type = OBJ_COMMIT;
- } else if (!strcmp(type, tree_type)) {
- entry->type = OBJ_TREE;
- } else if (!strcmp(type, blob_type)) {
- entry->type = OBJ_BLOB;
- } else if (!strcmp(type, tag_type)) {
- entry->type = OBJ_TAG;
- } else
- die("unable to pack object %s of type %s",
- sha1_to_hex(entry->sha1), type);
-}
-
-static unsigned int check_delta_limit(struct object_entry *me, unsigned int n)
-{
- struct object_entry *child = me->delta_child;
- unsigned int m = n;
- while (child) {
- unsigned int c = check_delta_limit(child, n + 1);
- if (m < c)
- m = c;
- child = child->delta_sibling;
- }
- return m;
-}
-
-static void get_object_details(void)
-{
- int i;
- struct object_entry *entry;
-
- prepare_pack_ix();
- for (i = 0, entry = objects; i < nr_objects; i++, entry++)
- check_object(entry);
-
- if (nr_objects == nr_result) {
- /*
- * Depth of objects that depend on the entry -- this
- * is subtracted from depth-max to break too deep
- * delta chain because of delta data reusing.
- * However, we loosen this restriction when we know we
- * are creating a thin pack -- it will have to be
- * expanded on the other end anyway, so do not
- * artificially cut the delta chain and let it go as
- * deep as it wants.
- */
- for (i = 0, entry = objects; i < nr_objects; i++, entry++)
- if (!entry->delta && entry->delta_child)
- entry->delta_limit =
- check_delta_limit(entry, 1);
- }
-}
-
-typedef int (*entry_sort_t)(const struct object_entry *, const struct object_entry *);
-
-static entry_sort_t current_sort;
-
-static int sort_comparator(const void *_a, const void *_b)
-{
- struct object_entry *a = *(struct object_entry **)_a;
- struct object_entry *b = *(struct object_entry **)_b;
- return current_sort(a,b);
-}
-
-static struct object_entry **create_sorted_list(entry_sort_t sort)
-{
- struct object_entry **list = xmalloc(nr_objects * sizeof(struct object_entry *));
- int i;
-
- for (i = 0; i < nr_objects; i++)
- list[i] = objects + i;
- current_sort = sort;
- qsort(list, nr_objects, sizeof(struct object_entry *), sort_comparator);
- return list;
-}
-
-static int sha1_sort(const struct object_entry *a, const struct object_entry *b)
-{
- return memcmp(a->sha1, b->sha1, 20);
-}
-
-static struct object_entry **create_final_object_list(void)
-{
- struct object_entry **list;
- int i, j;
-
- for (i = nr_result = 0; i < nr_objects; i++)
- if (!objects[i].preferred_base)
- nr_result++;
- list = xmalloc(nr_result * sizeof(struct object_entry *));
- for (i = j = 0; i < nr_objects; i++) {
- if (!objects[i].preferred_base)
- list[j++] = objects + i;
- }
- current_sort = sha1_sort;
- qsort(list, nr_result, sizeof(struct object_entry *), sort_comparator);
- return list;
-}
-
-static int type_size_sort(const struct object_entry *a, const struct object_entry *b)
-{
- if (a->type < b->type)
- return -1;
- if (a->type > b->type)
- return 1;
- if (a->hash < b->hash)
- return -1;
- if (a->hash > b->hash)
- return 1;
- if (a->preferred_base < b->preferred_base)
- return -1;
- if (a->preferred_base > b->preferred_base)
- return 1;
- if (a->size < b->size)
- return -1;
- if (a->size > b->size)
- return 1;
- return a < b ? -1 : (a > b);
-}
-
-struct unpacked {
- struct object_entry *entry;
- void *data;
- struct delta_index *index;
-};
-
-/*
- * We search for deltas _backwards_ in a list sorted by type and
- * by size, so that we see progressively smaller and smaller files.
- * That's because we prefer deltas to be from the bigger file
- * to the smaller - deletes are potentially cheaper, but perhaps
- * more importantly, the bigger file is likely the more recent
- * one.
- */
-static int try_delta(struct unpacked *trg, struct unpacked *src,
- struct delta_index *src_index, unsigned max_depth)
-{
- struct object_entry *trg_entry = trg->entry;
- struct object_entry *src_entry = src->entry;
- unsigned long size, src_size, delta_size, sizediff, max_size;
- void *delta_buf;
-
- /* Don't bother doing diffs between different types */
- if (trg_entry->type != src_entry->type)
- return -1;
-
- /* We do not compute delta to *create* objects we are not
- * going to pack.
- */
- if (trg_entry->preferred_base)
- return -1;
-
- /*
- * If the current object is at pack edge, take the depth the
- * objects that depend on the current object into account --
- * otherwise they would become too deep.
- */
- if (trg_entry->delta_child) {
- if (max_depth <= trg_entry->delta_limit)
- return 0;
- max_depth -= trg_entry->delta_limit;
- }
- if (src_entry->depth >= max_depth)
- return 0;
-
- /* Now some size filtering heuristics. */
- size = trg_entry->size;
- max_size = size/2 - 20;
- max_size = max_size * (max_depth - src_entry->depth) / max_depth;
- if (max_size == 0)
- return 0;
- if (trg_entry->delta && trg_entry->delta_size <= max_size)
- max_size = trg_entry->delta_size-1;
- src_size = src_entry->size;
- sizediff = src_size < size ? size - src_size : 0;
- if (sizediff >= max_size)
- return 0;
-
- delta_buf = create_delta(src_index, trg->data, size, &delta_size, max_size);
- if (!delta_buf)
- return 0;
-
- trg_entry->delta = src_entry;
- trg_entry->delta_size = delta_size;
- trg_entry->depth = src_entry->depth + 1;
- free(delta_buf);
- return 1;
-}
-
-static void progress_interval(int signum)
-{
- progress_update = 1;
-}
-
-static void find_deltas(struct object_entry **list, int window, int depth)
-{
- int i, idx;
- unsigned int array_size = window * sizeof(struct unpacked);
- struct unpacked *array = xmalloc(array_size);
- unsigned processed = 0;
- unsigned last_percent = 999;
-
- memset(array, 0, array_size);
- i = nr_objects;
- idx = 0;
- if (progress)
- fprintf(stderr, "Deltifying %d objects.\n", nr_result);
-
- while (--i >= 0) {
- struct object_entry *entry = list[i];
- struct unpacked *n = array + idx;
- unsigned long size;
- char type[10];
- int j;
-
- if (!entry->preferred_base)
- processed++;
-
- if (progress) {
- unsigned percent = processed * 100 / nr_result;
- if (percent != last_percent || progress_update) {
- fprintf(stderr, "%4u%% (%u/%u) done\r",
- percent, processed, nr_result);
- progress_update = 0;
- last_percent = percent;
- }
- }
-
- if (entry->delta)
- /* This happens if we decided to reuse existing
- * delta from a pack. "!no_reuse_delta &&" is implied.
- */
- continue;
-
- if (entry->size < 50)
- continue;
- free_delta_index(n->index);
- n->index = NULL;
- free(n->data);
- n->entry = entry;
- n->data = read_sha1_file(entry->sha1, type, &size);
- if (size != entry->size)
- die("object %s inconsistent object length (%lu vs %lu)",
- sha1_to_hex(entry->sha1), size, entry->size);
-
- j = window;
- while (--j > 0) {
- unsigned int other_idx = idx + j;
- struct unpacked *m;
- if (other_idx >= window)
- other_idx -= window;
- m = array + other_idx;
- if (!m->entry)
- break;
- if (try_delta(n, m, m->index, depth) < 0)
- break;
- }
- /* if we made n a delta, and if n is already at max
- * depth, leaving it in the window is pointless. we
- * should evict it first.
- */
- if (entry->delta && depth <= entry->depth)
- continue;
-
- n->index = create_delta_index(n->data, size);
- if (!n->index)
- die("out of memory");
-
- idx++;
- if (idx >= window)
- idx = 0;
- }
-
- if (progress)
- fputc('\n', stderr);
-
- for (i = 0; i < window; ++i) {
- free_delta_index(array[i].index);
- free(array[i].data);
- }
- free(array);
-}
-
-static void prepare_pack(int window, int depth)
-{
- get_object_details();
- sorted_by_type = create_sorted_list(type_size_sort);
- if (window && depth)
- find_deltas(sorted_by_type, window+1, depth);
-}
-
-static int reuse_cached_pack(unsigned char *sha1, int pack_to_stdout)
-{
- static const char cache[] = "pack-cache/pack-%s.%s";
- char *cached_pack, *cached_idx;
- int ifd, ofd, ifd_ix = -1;
-
- cached_pack = git_path(cache, sha1_to_hex(sha1), "pack");
- ifd = open(cached_pack, O_RDONLY);
- if (ifd < 0)
- return 0;
-
- if (!pack_to_stdout) {
- cached_idx = git_path(cache, sha1_to_hex(sha1), "idx");
- ifd_ix = open(cached_idx, O_RDONLY);
- if (ifd_ix < 0) {
- close(ifd);
- return 0;
- }
- }
-
- if (progress)
- fprintf(stderr, "Reusing %d objects pack %s\n", nr_objects,
- sha1_to_hex(sha1));
-
- if (pack_to_stdout) {
- if (copy_fd(ifd, 1))
- exit(1);
- close(ifd);
- }
- else {
- char name[PATH_MAX];
- snprintf(name, sizeof(name),
- "%s-%s.%s", base_name, sha1_to_hex(sha1), "pack");
- ofd = open(name, O_CREAT | O_EXCL | O_WRONLY, 0666);
- if (ofd < 0)
- die("unable to open %s (%s)", name, strerror(errno));
- if (copy_fd(ifd, ofd))
- exit(1);
- close(ifd);
-
- snprintf(name, sizeof(name),
- "%s-%s.%s", base_name, sha1_to_hex(sha1), "idx");
- ofd = open(name, O_CREAT | O_EXCL | O_WRONLY, 0666);
- if (ofd < 0)
- die("unable to open %s (%s)", name, strerror(errno));
- if (copy_fd(ifd_ix, ofd))
- exit(1);
- close(ifd_ix);
- puts(sha1_to_hex(sha1));
- }
-
- return 1;
-}
-
-static void setup_progress_signal(void)
-{
- struct sigaction sa;
- struct itimerval v;
-
- memset(&sa, 0, sizeof(sa));
- sa.sa_handler = progress_interval;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART;
- sigaction(SIGALRM, &sa, NULL);
-
- v.it_interval.tv_sec = 1;
- v.it_interval.tv_usec = 0;
- v.it_value = v.it_interval;
- setitimer(ITIMER_REAL, &v, NULL);
-}
-
-int main(int argc, char **argv)
-{
- SHA_CTX ctx;
- char line[40 + 1 + PATH_MAX + 2];
- int window = 10, depth = 10, pack_to_stdout = 0;
- struct object_entry **list;
- int num_preferred_base = 0;
- int i;
-
- setup_git_directory();
-
- progress = isatty(2);
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
-
- if (*arg == '-') {
- if (!strcmp("--non-empty", arg)) {
- non_empty = 1;
- continue;
- }
- if (!strcmp("--local", arg)) {
- local = 1;
- continue;
- }
- if (!strcmp("--progress", arg)) {
- progress = 1;
- continue;
- }
- if (!strcmp("--incremental", arg)) {
- incremental = 1;
- continue;
- }
- if (!strncmp("--window=", arg, 9)) {
- char *end;
- window = strtoul(arg+9, &end, 0);
- if (!arg[9] || *end)
- usage(pack_usage);
- continue;
- }
- if (!strncmp("--depth=", arg, 8)) {
- char *end;
- depth = strtoul(arg+8, &end, 0);
- if (!arg[8] || *end)
- usage(pack_usage);
- continue;
- }
- if (!strcmp("--progress", arg)) {
- progress = 1;
- continue;
- }
- if (!strcmp("-q", arg)) {
- progress = 0;
- continue;
- }
- if (!strcmp("--no-reuse-delta", arg)) {
- no_reuse_delta = 1;
- continue;
- }
- if (!strcmp("--stdout", arg)) {
- pack_to_stdout = 1;
- continue;
- }
- usage(pack_usage);
- }
- if (base_name)
- usage(pack_usage);
- base_name = arg;
- }
-
- if (pack_to_stdout != !base_name)
- usage(pack_usage);
-
- prepare_packed_git();
-
- if (progress) {
- fprintf(stderr, "Generating pack...\n");
- setup_progress_signal();
- }
-
- for (;;) {
- unsigned char sha1[20];
- unsigned hash;
-
- if (!fgets(line, sizeof(line), stdin)) {
- if (feof(stdin))
- break;
- if (!ferror(stdin))
- die("fgets returned NULL, not EOF, not error!");
- if (errno != EINTR)
- die("fgets: %s", strerror(errno));
- clearerr(stdin);
- continue;
- }
-
- if (line[0] == '-') {
- if (get_sha1_hex(line+1, sha1))
- die("expected edge sha1, got garbage:\n %s",
- line+1);
- if (num_preferred_base++ < window)
- add_preferred_base(sha1);
- continue;
- }
- if (get_sha1_hex(line, sha1))
- die("expected sha1, got garbage:\n %s", line);
- hash = name_hash(line+41);
- add_preferred_base_object(line+41, hash);
- add_object_entry(sha1, hash, 0);
- }
- if (progress)
- fprintf(stderr, "Done counting %d objects.\n", nr_objects);
- sorted_by_sha = create_final_object_list();
- if (non_empty && !nr_result)
- return 0;
-
- SHA1_Init(&ctx);
- list = sorted_by_sha;
- for (i = 0; i < nr_result; i++) {
- struct object_entry *entry = *list++;
- SHA1_Update(&ctx, entry->sha1, 20);
- }
- SHA1_Final(object_list_sha1, &ctx);
- if (progress && (nr_objects != nr_result))
- fprintf(stderr, "Result has %d objects.\n", nr_result);
-
- if (reuse_cached_pack(object_list_sha1, pack_to_stdout))
- ;
- else {
- if (nr_result)
- prepare_pack(window, depth);
- if (progress && pack_to_stdout) {
- /* the other end usually displays progress itself */
- struct itimerval v = {{0,},};
- setitimer(ITIMER_REAL, &v, NULL);
- signal(SIGALRM, SIG_IGN );
- progress_update = 0;
- }
- write_pack_file();
- if (!pack_to_stdout) {
- write_index_file();
- puts(sha1_to_hex(object_list_sha1));
- }
- }
- if (progress)
- fprintf(stderr, "Total %d, written %d (delta %d), reused %d (delta %d)\n",
- nr_result, written, written_delta, reused, reused_delta);
- return 0;
-}
#ifndef PACK_H
#define PACK_H
-/*
- * The packed object type is stored in 3 bits.
- * The type value 0 is a reserved prefix if ever there is more than 7
- * object types, or any future format extensions.
- */
-enum object_type {
- OBJ_EXT = 0,
- OBJ_COMMIT = 1,
- OBJ_TREE = 2,
- OBJ_BLOB = 3,
- OBJ_TAG = 4,
- /* 5/6 for future expansion */
- OBJ_DELTA = 7,
-};
+#include "object.h"
/*
* Packed object header
{
pid_t pid;
int fd[2];
- const char *pager = getenv("PAGER");
+ const char *pager = getenv("GIT_PAGER");
if (!isatty(1))
return;
+ if (!pager)
+ pager = getenv("PAGER");
if (!pager)
pager = "less";
else if (!*pager || !strcmp(pager, "cat"))
return;
+ pager_in_use = 1; /* means we are emitting to terminal */
+
if (pipe(fd) < 0)
return;
pid = fork();
--- /dev/null
+#include <stdio.h>
+#include "cache.h"
+#include "path-list.h"
+
+/* if there is no exact match, point to the index where the entry could be
+ * inserted */
+static int get_entry_index(const struct path_list *list, const char *path,
+ int *exact_match)
+{
+ int left = -1, right = list->nr;
+
+ while (left + 1 < right) {
+ int middle = (left + right) / 2;
+ int compare = strcmp(path, list->items[middle].path);
+ if (compare < 0)
+ right = middle;
+ else if (compare > 0)
+ left = middle;
+ else {
+ *exact_match = 1;
+ return middle;
+ }
+ }
+
+ *exact_match = 0;
+ return right;
+}
+
+/* returns -1-index if already exists */
+static int add_entry(struct path_list *list, const char *path)
+{
+ int exact_match;
+ int index = get_entry_index(list, path, &exact_match);
+
+ if (exact_match)
+ return -1 - index;
+
+ if (list->nr + 1 >= list->alloc) {
+ list->alloc += 32;
+ list->items = xrealloc(list->items, list->alloc
+ * sizeof(struct path_list_item));
+ }
+ if (index < list->nr)
+ memmove(list->items + index + 1, list->items + index,
+ (list->nr - index)
+ * sizeof(struct path_list_item));
+ list->items[index].path = list->strdup_paths ?
+ strdup(path) : (char *)path;
+ list->items[index].util = NULL;
+ list->nr++;
+
+ return index;
+}
+
+struct path_list_item *path_list_insert(const char *path, struct path_list *list)
+{
+ int index = add_entry(list, path);
+
+ if (index < 0)
+		index = -1 - index;
+
+ return list->items + index;
+}
+
+int path_list_has_path(const struct path_list *list, const char *path)
+{
+ int exact_match;
+ get_entry_index(list, path, &exact_match);
+ return exact_match;
+}
+
+struct path_list_item *path_list_lookup(const char *path, struct path_list *list)
+{
+ int exact_match, i = get_entry_index(list, path, &exact_match);
+ if (!exact_match)
+ return NULL;
+ return list->items + i;
+}
+
+void path_list_clear(struct path_list *list, int free_items)
+{
+ if (list->items) {
+ int i;
+ if (free_items)
+ for (i = 0; i < list->nr; i++) {
+ if (list->strdup_paths)
+ free(list->items[i].path);
+ if (list->items[i].util)
+ free(list->items[i].util);
+ }
+ free(list->items);
+ }
+ list->items = NULL;
+ list->nr = list->alloc = 0;
+}
+
+void print_path_list(const char *text, const struct path_list *p)
+{
+ int i;
+	if (text)
+ printf("%s\n", text);
+ for (i = 0; i < p->nr; i++)
+ printf("%s:%p\n", p->items[i].path, p->items[i].util);
+}
+
--- /dev/null
+#ifndef _PATH_LIST_H_
+#define _PATH_LIST_H_
+
+struct path_list_item {
+ char *path;
+ void *util;
+};
+struct path_list
+{
+ struct path_list_item *items;
+ unsigned int nr, alloc;
+ unsigned int strdup_paths:1;
+};
+
+void print_path_list(const char *text, const struct path_list *p);
+
+int path_list_has_path(const struct path_list *list, const char *path);
+void path_list_clear(struct path_list *list, int free_items);
+struct path_list_item *path_list_insert(const char *path, struct path_list *list);
+struct path_list_item *path_list_lookup(const char *path, struct path_list *list);
+
+#endif /* _PATH_LIST_H_ */
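
As an illustration only (not part of the patch), a minimal caller of the
path-list API declared above might look like the sketch below; the function
name example_paths and the path strings are made up:

#include <stdio.h>
#include "path-list.h"

static void example_paths(void)
{
	struct path_list list = { NULL, 0, 0, 1 };	/* strdup_paths = 1 */

	path_list_insert("Documentation/git.txt", &list);
	path_list_insert("Makefile", &list);

	if (path_list_has_path(&list, "Makefile"))
		printf("Makefile is in the list\n");

	/* items stay sorted by path, so lookups are binary searches */
	print_path_list("paths:", &list);

	/* 1: free the strdup()ed paths (and any non-NULL util data) */
	path_list_clear(&list, 1);
}
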
"git-peek-remote [--exec=upload-pack] [host:]directory";
static const char *exec = "git-upload-pack";
-static int peek_remote(int fd[2])
+static int peek_remote(int fd[2], unsigned flags)
{
struct ref *ref;
- get_remote_heads(fd[0], &ref, 0, NULL, 0);
+ get_remote_heads(fd[0], &ref, 0, NULL, flags);
packet_flush(fd[1]);
while (ref) {
int fd[2];
pid_t pid;
int nongit = 0;
+ unsigned flags = 0;
setup_git_directory_gently(&nongit);
char *arg = argv[i];
if (*arg == '-') {
- if (!strncmp("--exec=", arg, 7))
+ if (!strncmp("--exec=", arg, 7)) {
exec = arg + 7;
- else
- usage(peek_remote_usage);
- continue;
+ continue;
+ }
+ if (!strcmp("--tags", arg)) {
+ flags |= REF_TAGS;
+ continue;
+ }
+ if (!strcmp("--heads", arg)) {
+ flags |= REF_HEADS;
+ continue;
+ }
+ if (!strcmp("--refs", arg)) {
+ flags |= REF_NORMAL;
+ continue;
+ }
+ usage(peek_remote_usage);
}
dest = arg;
break;
}
+
if (!dest || i != argc - 1)
usage(peek_remote_usage);
pid = git_connect(fd, dest, exec);
if (pid < 0)
return 1;
- ret = peek_remote(fd);
+ ret = peek_remote(fd, flags);
close(fd[0]);
close(fd[1]);
finish_connect(pid);
*
* Copyright (C) 2005 Paul Mackerras <paulus@samba.org>
*/
-#define FS 80
/*
- * We roll the registers for T, A, B, C, D, E around on each
- * iteration; T on iteration t is A on iteration t+1, and so on.
- * We use registers 7 - 12 for this.
+ * PowerPC calling convention:
+ * %r0 - volatile temp
+ * %r1 - stack pointer.
+ * %r2 - reserved
+ * %r3-%r12 - Incoming arguments & return values; volatile.
+ * %r13-%r31 - Callee-save registers
+ * %lr - Return address, volatile
+ * %ctr - volatile
+ *
+ * Register usage in this routine:
+ * %r0 - temp
+ * %r3 - argument (pointer to 5 words of SHA state)
+ * %r4 - argument (pointer to data to hash)
+ * %r5 - Constant K in SHA round (initially number of blocks to hash)
+ * %r6-%r10 - Working copies of SHA variables A..E (actually E..A order)
+ * %r11-%r26 - Data being hashed W[].
+ * %r27-%r31 - Previous copies of A..E, for final add back.
+ * %ctr - loop count
+ */
+
+
+/*
+ * We roll the registers for A, B, C, D, E around on each
+ * iteration; E on iteration t is D on iteration t+1, and so on.
+ * We use registers 6 - 10 for this. (Registers 27 - 31 hold
+ * the previous values.)
*/
-#define RT(t) ((((t)+5)%6)+7)
-#define RA(t) ((((t)+4)%6)+7)
-#define RB(t) ((((t)+3)%6)+7)
-#define RC(t) ((((t)+2)%6)+7)
-#define RD(t) ((((t)+1)%6)+7)
-#define RE(t) ((((t)+0)%6)+7)
-
-/* We use registers 16 - 31 for the W values */
-#define W(t) (((t)%16)+16)
-
-#define STEPD0(t) \
- and %r6,RB(t),RC(t); \
- andc %r0,RD(t),RB(t); \
- rotlwi RT(t),RA(t),5; \
- rotlwi RB(t),RB(t),30; \
- or %r6,%r6,%r0; \
- add %r0,RE(t),%r15; \
- add RT(t),RT(t),%r6; \
- add %r0,%r0,W(t); \
- add RT(t),RT(t),%r0
-
-#define STEPD1(t) \
- xor %r6,RB(t),RC(t); \
- rotlwi RT(t),RA(t),5; \
- rotlwi RB(t),RB(t),30; \
- xor %r6,%r6,RD(t); \
- add %r0,RE(t),%r15; \
- add RT(t),RT(t),%r6; \
- add %r0,%r0,W(t); \
- add RT(t),RT(t),%r0
-
-#define STEPD2(t) \
- and %r6,RB(t),RC(t); \
- and %r0,RB(t),RD(t); \
- rotlwi RT(t),RA(t),5; \
- rotlwi RB(t),RB(t),30; \
- or %r6,%r6,%r0; \
- and %r0,RC(t),RD(t); \
- or %r6,%r6,%r0; \
- add %r0,RE(t),%r15; \
- add RT(t),RT(t),%r6; \
- add %r0,%r0,W(t); \
- add RT(t),RT(t),%r0
-
-#define LOADW(t) \
- lwz W(t),(t)*4(%r4)
-
-#define UPDATEW(t) \
- xor %r0,W((t)-3),W((t)-8); \
- xor W(t),W((t)-16),W((t)-14); \
- xor W(t),W(t),%r0; \
- rotlwi W(t),W(t),1
-
-#define STEP0LD4(t) \
- STEPD0(t); LOADW((t)+4); \
- STEPD0((t)+1); LOADW((t)+5); \
- STEPD0((t)+2); LOADW((t)+6); \
- STEPD0((t)+3); LOADW((t)+7)
-
-#define STEPUP4(t, fn) \
- STEP##fn(t); UPDATEW((t)+4); \
- STEP##fn((t)+1); UPDATEW((t)+5); \
- STEP##fn((t)+2); UPDATEW((t)+6); \
- STEP##fn((t)+3); UPDATEW((t)+7)
-
-#define STEPUP20(t, fn) \
- STEPUP4(t, fn); \
- STEPUP4((t)+4, fn); \
- STEPUP4((t)+8, fn); \
- STEPUP4((t)+12, fn); \
- STEPUP4((t)+16, fn)
+#define RA(t) (((t)+4)%5+6)
+#define RB(t) (((t)+3)%5+6)
+#define RC(t) (((t)+2)%5+6)
+#define RD(t) (((t)+1)%5+6)
+#define RE(t) (((t)+0)%5+6)
+
+/* We use registers 11 - 26 for the W values */
+#define W(t) ((t)%16+11)
+
+/* Register 5 is used for the constant k */
+
+/*
+ * The basic SHA-1 round function is:
+ * E += ROTL(A,5) + F(B,C,D) + W[i] + K; B = ROTL(B,30)
+ * Then the variables are renamed: (A,B,C,D,E) = (E,A,B,C,D).
+ *
+ * Every 20 rounds, the function F() and the constant K change:
+ * - 20 rounds of f0(b,c,d) = "bit wise b ? c : d" = (^b & d) + (b & c)
+ * - 20 rounds of f1(b,c,d) = b^c^d = (b^d)^c
+ * - 20 rounds of f2(b,c,d) = majority(b,c,d) = (b&d) + ((b^d)&c)
+ * - 20 more rounds of f1(b,c,d)
+ *
+ * These are all scheduled for near-optimal performance on a G4.
+ * The G4 is a 3-issue out-of-order machine with 3 ALUs, but it can only
+ * *consider* starting the oldest 3 instructions per cycle. So to get
+ * maximum performance out of it, you have to treat it as an in-order
+ * machine. Which means interleaving the computation round t with the
+ * computation of W[t+4].
+ *
+ * The first 16 rounds use W values loaded directly from memory, while the
+ * remaining 64 use values computed from those first 16. We preload
+ * 4 values before starting, so there are three kinds of rounds:
+ * - The first 12 (all f0) also load the W values from memory.
+ * - The next 64 compute W(i+4) in parallel. 8*f0, 20*f1, 20*f2, 16*f1.
+ * - The last 4 (all f1) do not do anything with W.
+ *
+ * Therefore, we have 6 different round functions:
+ * STEPD0_LOAD(t,s) - Perform round t and load W(s). s < 16
+ * STEPD0_UPDATE(t,s) - Perform round t and compute W(s). s >= 16.
+ * STEPD1_UPDATE(t,s)
+ * STEPD2_UPDATE(t,s)
+ * STEPD1(t) - Perform round t with no load or update.
+ *
+ * The G5 is more fully out-of-order, and can find the parallelism
+ * by itself. The big limit is that it has a 2-cycle ALU latency, so
+ * even though it's 2-way, the code has to be scheduled as if it's
+ * 4-way, which can be a limit. To help it, we try to schedule the
+ * read of RA(t) as late as possible so it doesn't stall waiting for
+ * the previous round's RE(t-1), and we try to rotate RB(t) as early
+ * as possible while reading RC(t) (= RB(t-1)) as late as possible.
+ */
+
+/* the initial loads. */
+#define LOADW(s) \
+ lwz W(s),(s)*4(%r4)
+
+/*
+ * Perform a step with F0, and load W(s). Uses W(s) as a temporary
+ * before loading it.
+ * This is actually 10 instructions, which is an awkward fit.
+ * It can execute grouped as listed, or delayed one instruction.
+ * (If delayed two instructions, there is a stall before the start of the
+ * second line.) Thus, two iterations take 7 cycles, 3.5 cycles per round.
+ */
+#define STEPD0_LOAD(t,s) \
+add RE(t),RE(t),W(t); andc %r0,RD(t),RB(t); and W(s),RC(t),RB(t); \
+add RE(t),RE(t),%r0; rotlwi %r0,RA(t),5; rotlwi RB(t),RB(t),30; \
+add RE(t),RE(t),W(s); add %r0,%r0,%r5; lwz W(s),(s)*4(%r4); \
+add RE(t),RE(t),%r0
+
+/*
+ * This is likewise awkward, 13 instructions. However, it can also
+ * execute starting with 2 out of 3 possible moduli, so it does 2 rounds
+ * in 9 cycles, 4.5 cycles/round.
+ */
+#define STEPD0_UPDATE(t,s,loadk...) \
+add RE(t),RE(t),W(t); andc %r0,RD(t),RB(t); xor W(s),W((s)-16),W((s)-3); \
+add RE(t),RE(t),%r0; and %r0,RC(t),RB(t); xor W(s),W(s),W((s)-8); \
+add RE(t),RE(t),%r0; rotlwi %r0,RA(t),5; xor W(s),W(s),W((s)-14); \
+add RE(t),RE(t),%r5; loadk; rotlwi RB(t),RB(t),30; rotlwi W(s),W(s),1; \
+add RE(t),RE(t),%r0
+
+/* Nicely optimal. Conveniently, also the most common. */
+#define STEPD1_UPDATE(t,s,loadk...) \
+add RE(t),RE(t),W(t); xor %r0,RD(t),RB(t); xor W(s),W((s)-16),W((s)-3); \
+add RE(t),RE(t),%r5; loadk; xor %r0,%r0,RC(t); xor W(s),W(s),W((s)-8); \
+add RE(t),RE(t),%r0; rotlwi %r0,RA(t),5; xor W(s),W(s),W((s)-14); \
+add RE(t),RE(t),%r0; rotlwi RB(t),RB(t),30; rotlwi W(s),W(s),1
+
+/*
+ * The naked version, no UPDATE, for the last 4 rounds. 3 cycles per.
+ * We could use W(s) as a temp register, but we don't need it.
+ */
+#define STEPD1(t) \
+ add RE(t),RE(t),W(t); xor %r0,RD(t),RB(t); \
+rotlwi RB(t),RB(t),30; add RE(t),RE(t),%r5; xor %r0,%r0,RC(t); \
+add RE(t),RE(t),%r0; rotlwi %r0,RA(t),5; /* spare slot */ \
+add RE(t),RE(t),%r0
+
+/*
+ * 14 instructions, 5 cycles per. The majority function is a bit
+ * awkward to compute. This can execute with a 1-instruction delay,
+ * but it causes a 2-instruction delay, which triggers a stall.
+ */
+#define STEPD2_UPDATE(t,s,loadk...) \
+add RE(t),RE(t),W(t); and %r0,RD(t),RB(t); xor W(s),W((s)-16),W((s)-3); \
+add RE(t),RE(t),%r0; xor %r0,RD(t),RB(t); xor W(s),W(s),W((s)-8); \
+add RE(t),RE(t),%r5; loadk; and %r0,%r0,RC(t); xor W(s),W(s),W((s)-14); \
+add RE(t),RE(t),%r0; rotlwi %r0,RA(t),5; rotlwi W(s),W(s),1; \
+add RE(t),RE(t),%r0; rotlwi RB(t),RB(t),30
+
+#define STEP0_LOAD4(t,s) \
+ STEPD0_LOAD(t,s); \
+ STEPD0_LOAD((t+1),(s)+1); \
+ STEPD0_LOAD((t)+2,(s)+2); \
+ STEPD0_LOAD((t)+3,(s)+3)
+
+#define STEPUP4(fn, t, s, loadk...) \
+ STEP##fn##_UPDATE(t,s,); \
+ STEP##fn##_UPDATE((t)+1,(s)+1,); \
+ STEP##fn##_UPDATE((t)+2,(s)+2,); \
+ STEP##fn##_UPDATE((t)+3,(s)+3,loadk)
+
+#define STEPUP20(fn, t, s, loadk...) \
+ STEPUP4(fn, t, s,); \
+ STEPUP4(fn, (t)+4, (s)+4,); \
+ STEPUP4(fn, (t)+8, (s)+8,); \
+ STEPUP4(fn, (t)+12, (s)+12,); \
+ STEPUP4(fn, (t)+16, (s)+16, loadk)
.globl sha1_core
sha1_core:
- stwu %r1,-FS(%r1)
- stw %r15,FS-68(%r1)
- stw %r16,FS-64(%r1)
- stw %r17,FS-60(%r1)
- stw %r18,FS-56(%r1)
- stw %r19,FS-52(%r1)
- stw %r20,FS-48(%r1)
- stw %r21,FS-44(%r1)
- stw %r22,FS-40(%r1)
- stw %r23,FS-36(%r1)
- stw %r24,FS-32(%r1)
- stw %r25,FS-28(%r1)
- stw %r26,FS-24(%r1)
- stw %r27,FS-20(%r1)
- stw %r28,FS-16(%r1)
- stw %r29,FS-12(%r1)
- stw %r30,FS-8(%r1)
- stw %r31,FS-4(%r1)
+ stwu %r1,-80(%r1)
+ stmw %r13,4(%r1)
/* Load up A - E */
- lwz RA(0),0(%r3) /* A */
- lwz RB(0),4(%r3) /* B */
- lwz RC(0),8(%r3) /* C */
- lwz RD(0),12(%r3) /* D */
- lwz RE(0),16(%r3) /* E */
+ lmw %r27,0(%r3)
mtctr %r5
-1: LOADW(0)
+1:
+ LOADW(0)
+ lis %r5,0x5a82
+ mr RE(0),%r31
LOADW(1)
+ mr RD(0),%r30
+ mr RC(0),%r29
LOADW(2)
+ ori %r5,%r5,0x7999 /* K0-19 */
+ mr RB(0),%r28
LOADW(3)
+ mr RA(0),%r27
+
+ STEP0_LOAD4(0, 4)
+ STEP0_LOAD4(4, 8)
+ STEP0_LOAD4(8, 12)
+ STEPUP4(D0, 12, 16,)
+ STEPUP4(D0, 16, 20, lis %r5,0x6ed9)
- lis %r15,0x5a82 /* K0-19 */
- ori %r15,%r15,0x7999
- STEP0LD4(0)
- STEP0LD4(4)
- STEP0LD4(8)
- STEPUP4(12, D0)
- STEPUP4(16, D0)
-
- lis %r15,0x6ed9 /* K20-39 */
- ori %r15,%r15,0xeba1
- STEPUP20(20, D1)
-
- lis %r15,0x8f1b /* K40-59 */
- ori %r15,%r15,0xbcdc
- STEPUP20(40, D2)
-
- lis %r15,0xca62 /* K60-79 */
- ori %r15,%r15,0xc1d6
- STEPUP4(60, D1)
- STEPUP4(64, D1)
- STEPUP4(68, D1)
- STEPUP4(72, D1)
+ ori %r5,%r5,0xeba1 /* K20-39 */
+ STEPUP20(D1, 20, 24, lis %r5,0x8f1b)
+
+ ori %r5,%r5,0xbcdc /* K40-59 */
+ STEPUP20(D2, 40, 44, lis %r5,0xca62)
+
+ ori %r5,%r5,0xc1d6 /* K60-79 */
+ STEPUP4(D1, 60, 64,)
+ STEPUP4(D1, 64, 68,)
+ STEPUP4(D1, 68, 72,)
+ STEPUP4(D1, 72, 76,)
+ addi %r4,%r4,64
STEPD1(76)
STEPD1(77)
STEPD1(78)
STEPD1(79)
- lwz %r20,16(%r3)
- lwz %r19,12(%r3)
- lwz %r18,8(%r3)
- lwz %r17,4(%r3)
- lwz %r16,0(%r3)
- add %r20,RE(80),%r20
- add RD(0),RD(80),%r19
- add RC(0),RC(80),%r18
- add RB(0),RB(80),%r17
- add RA(0),RA(80),%r16
- mr RE(0),%r20
- stw RA(0),0(%r3)
- stw RB(0),4(%r3)
- stw RC(0),8(%r3)
- stw RD(0),12(%r3)
- stw RE(0),16(%r3)
+ /* Add results to original values */
+ add %r31,%r31,RE(0)
+ add %r30,%r30,RD(0)
+ add %r29,%r29,RC(0)
+ add %r28,%r28,RB(0)
+ add %r27,%r27,RA(0)
- addi %r4,%r4,64
bdnz 1b
- lwz %r15,FS-68(%r1)
- lwz %r16,FS-64(%r1)
- lwz %r17,FS-60(%r1)
- lwz %r18,FS-56(%r1)
- lwz %r19,FS-52(%r1)
- lwz %r20,FS-48(%r1)
- lwz %r21,FS-44(%r1)
- lwz %r22,FS-40(%r1)
- lwz %r23,FS-36(%r1)
- lwz %r24,FS-32(%r1)
- lwz %r25,FS-28(%r1)
- lwz %r26,FS-24(%r1)
- lwz %r27,FS-20(%r1)
- lwz %r28,FS-16(%r1)
- lwz %r29,FS-12(%r1)
- lwz %r30,FS-8(%r1)
- lwz %r31,FS-4(%r1)
- addi %r1,%r1,FS
+ /* Save final hash, restore registers, and return */
+ stmw %r27,0(%r3)
+ lmw %r13,4(%r1)
+ addi %r1,%r1,80
blr
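
For reference only (not part of the patch), the round function described in
the comment block above can be restated in plain C as below; the names ROTL,
f0, f1, f2 and sha1_round are illustrative, chosen to match the notation
used in that comment:

#include <stdint.h>

#define ROTL(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

static inline uint32_t f0(uint32_t b, uint32_t c, uint32_t d)
{
	return (b & c) | (~b & d);	/* "bit wise b ? c : d" */
}

static inline uint32_t f1(uint32_t b, uint32_t c, uint32_t d)
{
	return b ^ c ^ d;
}

static inline uint32_t f2(uint32_t b, uint32_t c, uint32_t d)
{
	return (b & d) | ((b ^ d) & c);	/* majority(b,c,d) */
}

/*
 * One round: E += ROTL(A,5) + F(B,C,D) + W[t] + K; B = ROTL(B,30);
 * then rename (A,B,C,D,E) = (E,A,B,C,D).  The caller picks f = f0/f1/f2
 * and the constant k according to the 20-round group that t falls in.
 */
static void sha1_round(uint32_t h[5], uint32_t f, uint32_t w, uint32_t k)
{
	uint32_t temp = ROTL(h[0], 5) + f + h[4] + w + k;

	h[4] = h[3];		/* E = D */
	h[3] = h[2];		/* D = C */
	h[2] = ROTL(h[1], 30);	/* C = ROTL(B,30) */
	h[1] = h[0];		/* B = A */
	h[0] = temp;		/* A = E + ROTL(A,5) + F + W + K */
}
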
+++ /dev/null
-#include "cache.h"
-
-static const char prune_packed_usage[] =
-"git-prune-packed [-n]";
-
-static int dryrun;
-
-static void prune_dir(int i, DIR *dir, char *pathname, int len)
-{
- struct dirent *de;
- char hex[40];
-
- sprintf(hex, "%02x", i);
- while ((de = readdir(dir)) != NULL) {
- unsigned char sha1[20];
- if (strlen(de->d_name) != 38)
- continue;
- memcpy(hex+2, de->d_name, 38);
- if (get_sha1_hex(hex, sha1))
- continue;
- if (!has_sha1_pack(sha1))
- continue;
- memcpy(pathname + len, de->d_name, 38);
- if (dryrun)
- printf("rm -f %s\n", pathname);
- else if (unlink(pathname) < 0)
- error("unable to unlink %s", pathname);
- }
- pathname[len] = 0;
- rmdir(pathname);
-}
-
-static void prune_packed_objects(void)
-{
- int i;
- static char pathname[PATH_MAX];
- const char *dir = get_object_directory();
- int len = strlen(dir);
-
- if (len > PATH_MAX - 42)
- die("impossible object directory");
- memcpy(pathname, dir, len);
- if (len && pathname[len-1] != '/')
- pathname[len++] = '/';
- for (i = 0; i < 256; i++) {
- DIR *d;
-
- sprintf(pathname + len, "%02x/", i);
- d = opendir(pathname);
- if (!d)
- continue;
- prune_dir(i, d, pathname, len + 3);
- closedir(d);
- }
-}
-
-int main(int argc, char **argv)
-{
- int i;
-
- setup_git_directory();
-
- for (i = 1; i < argc; i++) {
- const char *arg = argv[i];
-
- if (*arg == '-') {
- if (!strcmp(arg, "-n"))
- dryrun = 1;
- else
- usage(prune_packed_usage);
- continue;
- }
- /* Handle arguments here .. */
- usage(prune_packed_usage);
- }
- sync();
- prune_packed_objects();
- return 0;
-}
* a!b ==> a'\!'b ==> 'a'\!'b'
*/
#undef EMIT
-#define EMIT(x) ( (++len < n) && (*bp++ = (x)) )
+#define EMIT(x) do { if (++len < n) *bp++ = (x); } while(0)
static inline int need_bs_quote(char c)
{
return len;
}
+void sq_quote_print(FILE *stream, const char *src)
+{
+ char c;
+
+ fputc('\'', stream);
+ while ((c = *src++)) {
+ if (need_bs_quote(c)) {
+ fputs("'\\", stream);
+ fputc(c, stream);
+ fputc('\'', stream);
+ } else {
+ fputc(c, stream);
+ }
+ }
+ fputc('\'', stream);
+}
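
Illustration only (not part of the patch): fed the "a!b" string from the
comment near the top of this file, sq_quote_print() writes 'a'\!'b' to the
given stream.  The standalone sketch below assumes the declarations shown
further down live in quote.h:

#include <stdio.h>
#include "quote.h"

int main(void)
{
	sq_quote_print(stdout, "a!b");	/* prints 'a'\!'b' */
	fputc('\n', stdout);
	return 0;
}
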
+
char *sq_quote(const char *src)
{
char *buf;
*/
extern char *sq_quote(const char *src);
+extern void sq_quote_print(FILE *stream, const char *src);
extern size_t sq_quote_buf(char *dst, size_t n, const char *src);
/* This unwraps what sq_quote() produces in place, but returns
struct cache_tree *active_cache_tree = NULL;
+int cache_errno = 0;
+
+static void *cache_mmap = NULL;
+static size_t cache_mmap_size = 0;
+
/*
* This only updates the "non-critical" parts of the directory
* cache, ie the parts that aren't tracked by GIT, and only used
unsigned char sha1[20];
if (!index_fd(sha1, fd, st, 0, NULL))
match = memcmp(sha1, ce->sha1, 20);
- close(fd);
+ /* index_fd() closed the file descriptor already */
}
return match;
}
return 0;
}
+int add_file_to_index(const char *path, int verbose)
+{
+ int size, namelen;
+ struct stat st;
+ struct cache_entry *ce;
+
+ if (lstat(path, &st))
+ die("%s: unable to stat (%s)", path, strerror(errno));
+
+ if (!S_ISREG(st.st_mode) && !S_ISLNK(st.st_mode))
+ die("%s: can only add regular files or symbolic links", path);
+
+ namelen = strlen(path);
+ size = cache_entry_size(namelen);
+ ce = xcalloc(1, size);
+ memcpy(ce->name, path, namelen);
+ ce->ce_flags = htons(namelen);
+ fill_stat_cache_info(ce, &st);
+
+ ce->ce_mode = create_ce_mode(st.st_mode);
+ if (!trust_executable_bit) {
+ /* If there is an existing entry, pick the mode bits
+ * from it.
+ */
+ int pos = cache_name_pos(path, namelen);
+ if (pos >= 0)
+ ce->ce_mode = active_cache[pos]->ce_mode;
+ }
+
+ if (index_path(ce->sha1, path, &st, 1))
+ die("unable to index file %s", path);
+ if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD))
+ die("unable to add %s to index",path);
+ if (verbose)
+ printf("add '%s'\n", path);
+ cache_tree_invalidate_path(active_cache_tree, path);
+ return 0;
+}
+
int ce_same_name(struct cache_entry *a, struct cache_entry *b)
{
int len = ce_namelen(a);
return 0;
}
-/* Three functions to allow overloaded pointer return; see linux/err.h */
-static inline void *ERR_PTR(long error)
-{
- return (void *) error;
-}
-
-static inline long PTR_ERR(const void *ptr)
-{
- return (long) ptr;
-}
-
-static inline long IS_ERR(const void *ptr)
-{
- return (unsigned long)ptr > (unsigned long)-1000L;
-}
-
/*
* "refresh" does not calculate a new sha1 file or bring the
* cache up-to-date for mode/content changes. But what it
* For example, you'd want to do this after doing a "git-read-tree",
* to link up the stat cache details with the proper files.
*/
-static struct cache_entry *refresh_entry(struct cache_entry *ce, int really)
+struct cache_entry *refresh_cache_entry(struct cache_entry *ce, int really)
{
struct stat st;
struct cache_entry *updated;
int changed, size;
- if (lstat(ce->name, &st) < 0)
- return ERR_PTR(-errno);
+ if (lstat(ce->name, &st) < 0) {
+ cache_errno = errno;
+ return NULL;
+ }
changed = ce_match_stat(ce, &st, really);
if (!changed) {
!(ce->ce_flags & htons(CE_VALID)))
; /* mark this one VALID again */
else
- return NULL;
+ return ce;
}
- if (ce_modified(ce, &st, really))
- return ERR_PTR(-EINVAL);
+ if (ce_modified(ce, &st, really)) {
+ cache_errno = EINVAL;
+ return NULL;
+ }
size = ce_size(ce);
updated = xmalloc(size);
continue;
}
- new = refresh_entry(ce, really);
- if (!new)
+ new = refresh_cache_entry(ce, really);
+ if (new == ce)
continue;
- if (IS_ERR(new)) {
- if (not_new && PTR_ERR(new) == -ENOENT)
+ if (!new) {
+ if (not_new && cache_errno == ENOENT)
continue;
- if (really && PTR_ERR(new) == -EINVAL) {
+ if (really && cache_errno == EINVAL) {
/* If we are doing --really-refresh that
* means the index is not valid anymore.
*/
}
int read_cache(void)
+{
+ return read_cache_from(get_index_file());
+}
+
+/* remember to discard_cache() before reading a different cache! */
+int read_cache_from(const char *path)
{
int fd, i;
struct stat st;
- unsigned long size, offset;
- void *map;
+ unsigned long offset;
struct cache_header *hdr;
errno = EBUSY;
- if (active_cache)
+ if (cache_mmap)
return active_nr;
errno = ENOENT;
index_file_timestamp = 0;
- fd = open(get_index_file(), O_RDONLY);
+ fd = open(path, O_RDONLY);
if (fd < 0) {
if (errno == ENOENT)
return 0;
die("index file open failed (%s)", strerror(errno));
}
- size = 0; // avoid gcc warning
- map = MAP_FAILED;
+ cache_mmap = MAP_FAILED;
if (!fstat(fd, &st)) {
- size = st.st_size;
+ cache_mmap_size = st.st_size;
errno = EINVAL;
- if (size >= sizeof(struct cache_header) + 20)
- map = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+ if (cache_mmap_size >= sizeof(struct cache_header) + 20)
+ cache_mmap = mmap(NULL, cache_mmap_size, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
}
close(fd);
- if (map == MAP_FAILED)
+ if (cache_mmap == MAP_FAILED)
die("index file mmap failed (%s)", strerror(errno));
- hdr = map;
- if (verify_hdr(hdr, size) < 0)
+ hdr = cache_mmap;
+ if (verify_hdr(hdr, cache_mmap_size) < 0)
goto unmap;
active_nr = ntohl(hdr->hdr_entries);
offset = sizeof(*hdr);
for (i = 0; i < active_nr; i++) {
- struct cache_entry *ce = (struct cache_entry *) ((char *) map + offset);
+ struct cache_entry *ce = (struct cache_entry *) ((char *) cache_mmap + offset);
offset = offset + ce_size(ce);
active_cache[i] = ce;
}
index_file_timestamp = st.st_mtime;
- while (offset <= size - 20 - 8) {
+ while (offset <= cache_mmap_size - 20 - 8) {
/* After an array of active_nr index entries,
* there can be arbitrary number of extended
* sections, each of which is prefixed with
* in 4-byte network byte order.
*/
unsigned long extsize;
- memcpy(&extsize, (char *) map + offset + 4, 4);
+ memcpy(&extsize, (char *) cache_mmap + offset + 4, 4);
extsize = ntohl(extsize);
- if (read_index_extension(((const char *) map) + offset,
- (char *) map + offset + 8,
+ if (read_index_extension(((const char *) cache_mmap) + offset,
+ (char *) cache_mmap + offset + 8,
extsize) < 0)
goto unmap;
offset += 8;
return active_nr;
unmap:
- munmap(map, size);
+ munmap(cache_mmap, cache_mmap_size);
errno = EINVAL;
die("index file corrupt");
}
namelen = strlen(de->d_name);
if (namelen > 255)
continue;
- if (namelen>5 && !strcmp(de->d_name+namelen-5,".lock"))
+ if (has_extension(de->d_name, ".lock"))
continue;
memcpy(path + baselen, de->d_name, namelen+1);
if (stat(git_path("%s", path), &st) < 0)
int plen,
const unsigned char *old_sha1, int mustexist)
{
+ const char *orig_path = path;
struct ref_lock *lock;
struct stat st;
plen = strlen(path) - plen;
path = resolve_ref(path, lock->old_sha1, mustexist);
if (!path) {
+ int last_errno = errno;
+ error("unable to resolve reference %s: %s",
+ orig_path, strerror(errno));
unlock_ref(lock);
+ errno = last_errno;
return NULL;
}
lock->lk = xcalloc(1, sizeof(struct lock_file));
if (safe_create_leading_directories(lock->ref_file))
die("unable to create directory for %s", lock->ref_file);
- lock->lock_fd = hold_lock_file_for_update(lock->lk, lock->ref_file);
- if (lock->lock_fd < 0) {
- error("Couldn't open lock file %s: %s",
- lock->lk->filename, strerror(errno));
- unlock_ref(lock);
- return NULL;
- }
+ lock->lock_fd = hold_lock_file_for_update(lock->lk, lock->ref_file, 1);
return old_sha1 ? verify_lock(lock, old_sha1, mustexist) : lock;
}
int logfd, written, oflags = O_APPEND | O_WRONLY;
unsigned maxlen, len;
char *logrec;
- const char *comitter;
+ const char *committer;
if (log_all_ref_updates) {
if (safe_create_leading_directories(lock->log_file) < 0)
lock->log_file, strerror(errno));
}
- setup_ident();
- comitter = git_committer_info(1);
+ committer = git_committer_info(1);
if (msg) {
- maxlen = strlen(comitter) + strlen(msg) + 2*40 + 5;
+ maxlen = strlen(committer) + strlen(msg) + 2*40 + 5;
logrec = xmalloc(maxlen);
len = snprintf(logrec, maxlen, "%s %s %s\t%s\n",
sha1_to_hex(lock->old_sha1),
sha1_to_hex(sha1),
- comitter,
+ committer,
msg);
}
else {
- maxlen = strlen(comitter) + 2*40 + 4;
+ maxlen = strlen(committer) + 2*40 + 4;
logrec = xmalloc(maxlen);
len = snprintf(logrec, maxlen, "%s %s %s\n",
sha1_to_hex(lock->old_sha1),
sha1_to_hex(sha1),
- comitter);
+ committer);
}
written = len <= maxlen ? write(logfd, logrec, len) : -1;
free(logrec);
+++ /dev/null
-#include "cache.h"
-#include <regex.h>
-
-static const char git_config_set_usage[] =
-"git-repo-config [ --bool | --int ] [--get | --get-all | --get-regexp | --replace-all | --unset | --unset-all] name [value [value_regex]] | --list";
-
-static char* key = NULL;
-static regex_t* key_regexp = NULL;
-static regex_t* regexp = NULL;
-static int show_keys = 0;
-static int use_key_regexp = 0;
-static int do_all = 0;
-static int do_not_match = 0;
-static int seen = 0;
-static enum { T_RAW, T_INT, T_BOOL } type = T_RAW;
-
-static int show_all_config(const char *key_, const char *value_)
-{
- if (value_)
- printf("%s=%s\n", key_, value_);
- else
- printf("%s\n", key_);
- return 0;
-}
-
-static int show_config(const char* key_, const char* value_)
-{
- char value[256];
- const char *vptr = value;
- int dup_error = 0;
-
- if (!use_key_regexp && strcmp(key_, key))
- return 0;
- if (use_key_regexp && regexec(key_regexp, key_, 0, NULL, 0))
- return 0;
- if (regexp != NULL &&
- (do_not_match ^
- regexec(regexp, (value_?value_:""), 0, NULL, 0)))
- return 0;
-
- if (show_keys)
- printf("%s ", key_);
- if (seen && !do_all)
- dup_error = 1;
- if (type == T_INT)
- sprintf(value, "%d", git_config_int(key_, value_?value_:""));
- else if (type == T_BOOL)
- vptr = git_config_bool(key_, value_) ? "true" : "false";
- else
- vptr = value_?value_:"";
- seen++;
- if (dup_error) {
- error("More than one value for the key %s: %s",
- key_, vptr);
- }
- else
- printf("%s\n", vptr);
-
- return 0;
-}
-
-static int get_value(const char* key_, const char* regex_)
-{
- int ret = -1;
- char *tl;
- char *global = NULL, *repo_config = NULL;
- const char *local;
-
- local = getenv("GIT_CONFIG");
- if (!local) {
- const char *home = getenv("HOME");
- local = getenv("GIT_CONFIG_LOCAL");
- if (!local)
- local = repo_config = strdup(git_path("config"));
- if (home)
- global = strdup(mkpath("%s/.gitconfig", home));
- }
-
- key = strdup(key_);
- for (tl=key+strlen(key)-1; tl >= key && *tl != '.'; --tl)
- *tl = tolower(*tl);
- for (tl=key; *tl && *tl != '.'; ++tl)
- *tl = tolower(*tl);
-
- if (use_key_regexp) {
- key_regexp = (regex_t*)malloc(sizeof(regex_t));
- if (regcomp(key_regexp, key, REG_EXTENDED)) {
- fprintf(stderr, "Invalid key pattern: %s\n", key_);
- goto free_strings;
- }
- }
-
- if (regex_) {
- if (regex_[0] == '!') {
- do_not_match = 1;
- regex_++;
- }
-
- regexp = (regex_t*)malloc(sizeof(regex_t));
- if (regcomp(regexp, regex_, REG_EXTENDED)) {
- fprintf(stderr, "Invalid pattern: %s\n", regex_);
- goto free_strings;
- }
- }
-
- if (do_all && global)
- git_config_from_file(show_config, global);
- git_config_from_file(show_config, local);
- if (!do_all && !seen && global)
- git_config_from_file(show_config, global);
-
- free(key);
- if (regexp) {
- regfree(regexp);
- free(regexp);
- }
-
- if (do_all)
- ret = !seen;
- else
- ret = (seen == 1) ? 0 : 1;
-
-free_strings:
- if (repo_config)
- free(repo_config);
- if (global)
- free(global);
- return ret;
-}
-
-int main(int argc, const char **argv)
-{
- int nongit = 0;
- setup_git_directory_gently(&nongit);
-
- while (1 < argc) {
- if (!strcmp(argv[1], "--int"))
- type = T_INT;
- else if (!strcmp(argv[1], "--bool"))
- type = T_BOOL;
- else if (!strcmp(argv[1], "--list") || !strcmp(argv[1], "-l"))
- return git_config(show_all_config);
- else
- break;
- argc--;
- argv++;
- }
-
- switch (argc) {
- case 2:
- return get_value(argv[1], NULL);
- case 3:
- if (!strcmp(argv[1], "--unset"))
- return git_config_set(argv[2], NULL);
- else if (!strcmp(argv[1], "--unset-all"))
- return git_config_set_multivar(argv[2], NULL, NULL, 1);
- else if (!strcmp(argv[1], "--get"))
- return get_value(argv[2], NULL);
- else if (!strcmp(argv[1], "--get-all")) {
- do_all = 1;
- return get_value(argv[2], NULL);
- } else if (!strcmp(argv[1], "--get-regexp")) {
- show_keys = 1;
- use_key_regexp = 1;
- do_all = 1;
- return get_value(argv[2], NULL);
- } else
-
- return git_config_set(argv[1], argv[2]);
- case 4:
- if (!strcmp(argv[1], "--unset"))
- return git_config_set_multivar(argv[2], NULL, argv[3], 0);
- else if (!strcmp(argv[1], "--unset-all"))
- return git_config_set_multivar(argv[2], NULL, argv[3], 1);
- else if (!strcmp(argv[1], "--get"))
- return get_value(argv[2], argv[3]);
- else if (!strcmp(argv[1], "--get-all")) {
- do_all = 1;
- return get_value(argv[2], argv[3]);
- } else if (!strcmp(argv[1], "--get-regexp")) {
- show_keys = 1;
- use_key_regexp = 1;
- do_all = 1;
- return get_value(argv[2], argv[3]);
- } else if (!strcmp(argv[1], "--replace-all"))
-
- return git_config_set_multivar(argv[2], argv[3], NULL, 1);
- else
-
- return git_config_set_multivar(argv[1], argv[2], argv[3], 0);
- case 5:
- if (!strcmp(argv[1], "--replace-all"))
- return git_config_set_multivar(argv[2], argv[3], argv[4], 1);
- case 1:
- default:
- usage(git_config_set_usage);
- }
- return 0;
-}
/*
* Tag object? Look what it points to..
*/
- while (object->type == TYPE_TAG) {
+ while (object->type == OBJ_TAG) {
struct tag *tag = (struct tag *) object;
if (revs->tag_objects && !(flags & UNINTERESTING))
add_pending_object(revs, object, tag->tag);
* Commit object? Just return it, we'll do all the complex
* reachability crud.
*/
- if (object->type == TYPE_COMMIT) {
+ if (object->type == OBJ_COMMIT) {
struct commit *commit = (struct commit *)object;
if (parse_commit(commit) < 0)
die("unable to parse commit %s", name);
* Tree object? Either mark it uniniteresting, or add it
* to the list of objects to look at later..
*/
- if (object->type == TYPE_TREE) {
+ if (object->type == OBJ_TREE) {
struct tree *tree = (struct tree *)object;
if (!revs->tree_objects)
return NULL;
/*
* Blob object? You know the drill by now..
*/
- if (object->type == TYPE_BLOB) {
+ if (object->type == OBJ_BLOB) {
struct blob *blob = (struct blob *)object;
if (!revs->blob_objects)
return NULL;
static void try_to_simplify_commit(struct rev_info *revs, struct commit *commit)
{
struct commit_list **pp, *parent;
- int tree_changed = 0;
+ int tree_changed = 0, tree_same = 0;
if (!commit->tree)
return;
parse_commit(p);
switch (rev_compare_tree(revs, p->tree, commit->tree)) {
case REV_TREE_SAME:
+ tree_same = 1;
if (!revs->simplify_history || (p->object.flags & UNINTERESTING)) {
/* Even if a merge with an uninteresting
* side branch brought the entire change
}
die("bad tree compare for commit %s", sha1_to_hex(commit->object.sha1));
}
- if (tree_changed)
+ if (tree_changed && !tree_same)
commit->object.flags |= TREECHANGE;
}
return 0;
while (1) {
it = get_reference(revs, arg, sha1, 0);
- if (it->type != TYPE_TAG)
+ if (it->type != OBJ_TAG)
break;
memcpy(sha1, ((struct tag*)it)->tagged->sha1, 20);
}
- if (it->type != TYPE_COMMIT)
+ if (it->type != OBJ_COMMIT)
return 0;
commit = (struct commit *)it;
for (parents = commit->parents; parents; parents = parents->next) {
return 1;
}
-void init_revisions(struct rev_info *revs)
+void init_revisions(struct rev_info *revs, const char *prefix)
{
memset(revs, 0, sizeof(*revs));
revs->pruning.change = file_change;
revs->lifo = 1;
revs->dense = 1;
- revs->prefix = setup_git_directory();
+ revs->prefix = prefix;
revs->max_age = -1;
revs->min_age = -1;
revs->max_count = -1;
diff_setup(&revs->diffopt);
}
+static void add_pending_commit_list(struct rev_info *revs,
+ struct commit_list *commit_list,
+ unsigned int flags)
+{
+ while (commit_list) {
+ struct object *object = &commit_list->item->object;
+ object->flags |= flags;
+ add_pending_object(revs, object, sha1_to_hex(object->sha1));
+ commit_list = commit_list->next;
+ }
+}
+
+static void prepare_show_merge(struct rev_info *revs)
+{
+ struct commit_list *bases;
+ struct commit *head, *other;
+ unsigned char sha1[20];
+ const char **prune = NULL;
+ int i, prune_num = 1; /* counting terminating NULL */
+
+ if (get_sha1("HEAD", sha1) || !(head = lookup_commit(sha1)))
+ die("--merge without HEAD?");
+ if (get_sha1("MERGE_HEAD", sha1) || !(other = lookup_commit(sha1)))
+ die("--merge without MERGE_HEAD?");
+ add_pending_object(revs, &head->object, "HEAD");
+ add_pending_object(revs, &other->object, "MERGE_HEAD");
+ bases = get_merge_bases(head, other, 1);
+ while (bases) {
+ struct commit *it = bases->item;
+ struct commit_list *n = bases->next;
+ free(bases);
+ bases = n;
+ it->object.flags |= UNINTERESTING;
+ add_pending_object(revs, &it->object, "(merge-base)");
+ }
+
+ if (!active_nr)
+ read_cache();
+ for (i = 0; i < active_nr; i++) {
+ struct cache_entry *ce = active_cache[i];
+ if (!ce_stage(ce))
+ continue;
+ if (ce_path_match(ce, revs->prune_data)) {
+ prune_num++;
+ prune = xrealloc(prune, sizeof(*prune) * prune_num);
+ prune[prune_num-2] = ce->name;
+ prune[prune_num-1] = NULL;
+ }
+ while ((i+1 < active_nr) &&
+ ce_same_name(ce, active_cache[i+1]))
+ i++;
+ }
+ revs->prune_data = prune;
+}
+
/*
* Parse revision information, filling in the "rev_info" structure,
* and removing the used arguments from the argument list.
*/
int setup_revisions(int argc, const char **argv, struct rev_info *revs, const char *def)
{
- int i, flags, seen_dashdash;
+ int i, flags, seen_dashdash, show_merge;
const char **unrecognized = argv + 1;
int left = 1;
break;
}
- flags = 0;
+ flags = show_merge = 0;
for (i = 1; i < argc; i++) {
struct object *object;
const char *arg = argv[i];
def = argv[i];
continue;
}
+ if (!strcmp(arg, "--merge")) {
+ show_merge = 1;
+ continue;
+ }
if (!strcmp(arg, "--topo-order")) {
revs->topo_order = 1;
continue;
unsigned char from_sha1[20];
const char *next = dotdot + 2;
const char *this = arg;
+ int symmetric = *next == '.';
+ unsigned int flags_exclude = flags ^ UNINTERESTING;
+
*dotdot = 0;
+ next += symmetric;
+
if (!*next)
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
if (!get_sha1(this, from_sha1) &&
!get_sha1(next, sha1)) {
- struct object *exclude;
- struct object *include;
-
- exclude = get_reference(revs, this, from_sha1, flags ^ UNINTERESTING);
- include = get_reference(revs, next, sha1, flags);
- if (!exclude || !include)
- die("Invalid revision range %s..%s", arg, next);
+ struct commit *a, *b;
+ struct commit_list *exclude;
+
+ a = lookup_commit_reference(from_sha1);
+ b = lookup_commit_reference(sha1);
+ if (!a || !b) {
+ die(symmetric ?
+ "Invalid symmetric difference expression %s...%s" :
+ "Invalid revision range %s..%s",
+ arg, next);
+ }
if (!seen_dashdash) {
*dotdot = '.';
verify_non_filename(revs->prefix, arg);
}
- add_pending_object(revs, exclude, this);
- add_pending_object(revs, include, next);
+
+ if (symmetric) {
+ exclude = get_merge_bases(a, b, 1);
+ add_pending_commit_list(revs, exclude,
+ flags_exclude);
+ free_commit_list(exclude);
+ a->object.flags |= flags;
+ } else
+ a->object.flags |= flags_exclude;
+ b->object.flags |= flags;
+ add_pending_object(revs, &a->object, this);
+ add_pending_object(revs, &b->object, next);
continue;
}
*dotdot = '.';
object = get_reference(revs, arg, sha1, flags ^ local_flags);
add_pending_object(revs, object, arg);
}
+ if (show_merge)
+ prepare_show_merge(revs);
if (def && !revs->pending.nr) {
unsigned char sha1[20];
struct object *object;
revs->diffopt.output_format = DIFF_FORMAT_PATCH;
}
revs->diffopt.abbrev = revs->abbrev;
- diff_setup_done(&revs->diffopt);
+ if (diff_setup_done(&revs->diffopt) < 0)
+ die("diff_setup_done failed");
return left;
}
struct commit *p = *pp;
if (!revs->limited)
add_parents_to_list(revs, p, &revs->commits);
+ if (p->parents && p->parents->next)
+ return 0;
if (p->object.flags & (TREECHANGE | UNINTERESTING))
return 0;
if (!p->parents)
commit->parents && commit->parents->next)
continue;
if (revs->prune_fn && revs->dense) {
- if (!(commit->object.flags & TREECHANGE))
- continue;
+ /* Commit without changes? */
+ if (!(commit->object.flags & TREECHANGE)) {
+ /* drop merges unless we want parenthood */
+ if (!revs->parents)
+ continue;
+ /* non-merge - always ignore it */
+ if (!commit->parents || !commit->parents->next)
+ continue;
+ }
if (revs->parents)
rewrite_parents(revs, commit);
}
struct log_info *loginfo;
int nr, total;
const char *mime_boundary;
+ const char *message_id;
+ const char *ref_message_id;
const char *add_signoff;
const char *extra_headers;
extern int rev_same_tree_as_empty(struct rev_info *, struct tree *t1);
extern int rev_compare_tree(struct rev_info *, struct tree *t1, struct tree *t2);
-extern void init_revisions(struct rev_info *revs);
+extern void init_revisions(struct rev_info *revs, const char *prefix);
extern int setup_revisions(int argc, const char **argv, struct rev_info *revs, const char *def);
extern void prepare_revision_walk(struct rev_info *revs);
extern struct commit *get_revision(struct rev_info *revs);
* old. Otherwise we require --force.
*/
o = deref_tag(parse_object(old_sha1), NULL, 0);
- if (!o || o->type != TYPE_COMMIT)
+ if (!o || o->type != OBJ_COMMIT)
return 0;
old = (struct commit *) o;
o = deref_tag(parse_object(new_sha1), NULL, 0);
- if (!o || o->type != TYPE_COMMIT)
+ if (!o || o->type != OBJ_COMMIT)
return 0;
new = (struct commit *) o;
int expect_status_report = 0;
/* No funny business with the matcher */
- remote_tail = get_remote_heads(in, &remote_refs, 0, NULL, 1);
+ remote_tail = get_remote_heads(in, &remote_refs, 0, NULL, REF_NORMAL);
get_local_heads();
/* Does the other end support the reporting? */
struct object *o = parse_object(sha1);
fprintf(info_ref_fp, "%s %s\n", sha1_to_hex(sha1), path);
- if (o->type == TYPE_TAG) {
+ if (o->type == OBJ_TAG) {
o = deref_tag(o, path, 0);
if (o)
fprintf(info_ref_fp, "%s %s^{}\n",
fp = fopen(infofile, "r");
if (!fp)
- return 1; /* nonexisting is not an error. */
+ return 1; /* nonexistent is not an error. */
while (fgets(line, sizeof(line), fp)) {
int len = strlen(line);
}
return NULL;
bad_dir_environ:
+ if (nongit_ok) {
+ *nongit_ok = 1;
+ return NULL;
+ }
path[len] = 0;
die("Not a git repository: '%s'", path);
}
void prepare_alt_odb(void)
{
- char *alt;
+ const char *alt;
alt = getenv(ALTERNATE_DB_ENVIRONMENT);
if (!alt) alt = "";
{
if (!p->pack_size) {
struct stat st;
- // We created the struct before we had the pack
+ /* We created the struct before we had the pack */
stat(p->pack_name, &st);
if (!S_ISREG(st.st_mode))
die("packfile %s not a regular file", p->pack_name);
int namelen = strlen(de->d_name);
struct packed_git *p;
- if (strcmp(de->d_name + namelen - 4, ".idx"))
+ if (!has_extension(de->d_name, ".idx"))
continue;
/* we have .idx. Is it a file we can map? */
return memcmp(sha1, real_sha1, 20) ? -1 : 0;
}
-static void *map_sha1_file_internal(const unsigned char *sha1,
- unsigned long *size)
+void *map_sha1_file(const unsigned char *sha1, unsigned long *size)
{
struct stat st;
void *map;
return map;
}
-int unpack_sha1_header(z_stream *stream, void *map, unsigned long mapsize, void *buffer, unsigned long size)
+int legacy_loose_object(unsigned char *map)
{
+ unsigned int word;
+
+ /*
+ * Is it a zlib-compressed buffer? If so, the first byte
+ * must be 0x78 (15-bit window size, deflated), and the
+ * first 16-bit word is evenly divisible by 31
+ */
+ word = (map[0] << 8) + map[1];
+ if (map[0] == 0x78 && !(word % 31))
+ return 1;
+ else
+ return 0;
+}
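
As a worked check of the heuristic above (illustration, not part of the
patch): zlib's usual header bytes with default settings are 0x78 0x9c, and
0x789c = 30876 = 31 * 996, so both conditions hold; the zlib FCHECK field is
chosen precisely so that this 16-bit word is divisible by 31.

#include <stdio.h>

int main(void)
{
	unsigned char map[2] = { 0x78, 0x9c };
	unsigned int word = (map[0] << 8) + map[1];	/* 0x789c == 30876 */

	printf("first byte ok: %d\n", map[0] == 0x78);	/* 1 */
	printf("word %% 31 == %u\n", word % 31);	/* 0 (31 * 996) */
	return 0;
}
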
+
+static int unpack_sha1_header(z_stream *stream, unsigned char *map, unsigned long mapsize, void *buffer, unsigned long bufsiz)
+{
+ unsigned char c;
+ unsigned int bits;
+ unsigned long size;
+ static const char *typename[8] = {
+ NULL, /* OBJ_EXT */
+ "commit", "tree", "blob", "tag",
+ NULL, NULL, NULL
+ };
+ const char *type;
+
/* Get the data stream */
memset(stream, 0, sizeof(*stream));
stream->next_in = map;
stream->avail_in = mapsize;
stream->next_out = buffer;
- stream->avail_out = size;
+ stream->avail_out = bufsiz;
+ if (legacy_loose_object(map)) {
+ inflateInit(stream);
+ return inflate(stream, 0);
+ }
+
+ c = *map++;
+ mapsize--;
+ type = typename[(c >> 4) & 7];
+ if (!type)
+ return -1;
+
+ bits = 4;
+ size = c & 0xf;
+ while ((c & 0x80)) {
+ if (bits >= 8*sizeof(long))
+ return -1;
+ c = *map++;
+ size += (c & 0x7f) << bits;
+ bits += 7;
+ mapsize--;
+ }
+
+ /* Set up the stream for the rest.. */
+ stream->next_in = map;
+ stream->avail_in = mapsize;
inflateInit(stream);
- return inflate(stream, 0);
+
+ /* And generate the fake traditional header */
+ stream->total_out = 1 + snprintf(buffer, bufsiz, "%s %lu", type, size);
+ return 0;
}
static void *unpack_sha1_rest(z_stream *stream, void *buffer, unsigned long size)
{
int bytes = strlen(buffer) + 1;
unsigned char *buf = xmalloc(1+size);
+ unsigned long n;
- memcpy(buf, (char *) buffer + bytes, stream->total_out - bytes);
- bytes = stream->total_out - bytes;
+ n = stream->total_out - bytes;
+ if (n > size)
+ n = size;
+ memcpy(buf, (char *) buffer + bytes, n);
+ bytes = n;
if (bytes < size) {
stream->next_out = buf + bytes;
stream->avail_out = size - bytes;
* too permissive for what we want to check. So do an anal
* object header parse by hand.
*/
-int parse_sha1_header(char *hdr, char *type, unsigned long *sizep)
+static int parse_sha1_header(char *hdr, char *type, unsigned long *sizep)
{
int i;
unsigned long size;
z_stream stream;
char hdr[128];
- map = map_sha1_file_internal(sha1, &mapsize);
+ map = map_sha1_file(sha1, &mapsize);
if (!map) {
struct pack_entry e;
if (find_pack_entry(sha1, &e))
return read_packed_sha1(sha1, type, size);
- map = map_sha1_file_internal(sha1, &mapsize);
+ map = map_sha1_file(sha1, &mapsize);
if (map) {
buf = unpack_sha1_file(map, mapsize, type, size);
munmap(map, mapsize);
static int link_temp_to_file(const char *tmpfile, char *filename)
{
int ret;
+ char *dir;
if (!link(tmpfile, filename))
return 0;
/*
- * Try to mkdir the last path component if that failed
- * with an ENOENT.
+ * Try to mkdir the last path component if that failed.
*
* Re-try the "link()" regardless of whether the mkdir
* succeeds, since a race might mean that somebody
* else succeeded.
*/
ret = errno;
- if (ret == ENOENT) {
- char *dir = strrchr(filename, '/');
- if (dir) {
- *dir = 0;
- mkdir(filename, 0777);
- if (adjust_shared_perm(filename))
- return -2;
- *dir = '/';
- if (!link(tmpfile, filename))
- return 0;
- ret = errno;
- }
+ dir = strrchr(filename, '/');
+ if (dir) {
+ *dir = 0;
+ mkdir(filename, 0777);
+ if (adjust_shared_perm(filename))
+ return -2;
+ *dir = '/';
+ if (!link(tmpfile, filename))
+ return 0;
+ ret = errno;
}
return ret;
}
return 0;
}
+static int write_binary_header(unsigned char *hdr, enum object_type type, unsigned long len)
+{
+ int hdr_len;
+ unsigned char c;
+
+ c = (type << 4) | (len & 15);
+ len >>= 4;
+ hdr_len = 1;
+ while (len) {
+ *hdr++ = c | 0x80;
+ hdr_len++;
+ c = (len & 0x7f);
+ len >>= 7;
+ }
+ *hdr = c;
+ return hdr_len;
+}
+
+static void setup_object_header(z_stream *stream, const char *type, unsigned long len)
+{
+ int obj_type, hdr;
+
+ if (use_legacy_headers) {
+ while (deflate(stream, 0) == Z_OK)
+ /* nothing */;
+ return;
+ }
+ if (!strcmp(type, blob_type))
+ obj_type = OBJ_BLOB;
+ else if (!strcmp(type, tree_type))
+ obj_type = OBJ_TREE;
+ else if (!strcmp(type, commit_type))
+ obj_type = OBJ_COMMIT;
+ else if (!strcmp(type, tag_type))
+ obj_type = OBJ_TAG;
+ else
+ die("trying to generate bogus object of type '%s'", type);
+ hdr = write_binary_header(stream->next_out, obj_type, len);
+ stream->total_out = hdr;
+ stream->next_out += hdr;
+ stream->avail_out -= hdr;
+}
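
Illustration only (not part of the patch): a standalone round-trip of the
new binary loose-object header.  A blob (type 3) of 1000 bytes encodes to
the two bytes 0xb8 0x3e, and the decode loop mirrors the one added to
unpack_sha1_header() earlier in this file; the constants below are copied
purely for the example.

#include <stdio.h>

int main(void)
{
	unsigned char hdr[10], *p = hdr;
	unsigned long len = 1000, size;
	unsigned int type = 3 /* OBJ_BLOB */, c, bits;
	int hdr_len = 1, i;

	/* encode, as write_binary_header() does */
	c = (type << 4) | (len & 15);
	len >>= 4;
	while (len) {
		*p++ = c | 0x80;
		hdr_len++;
		c = len & 0x7f;
		len >>= 7;
	}
	*p = c;

	for (i = 0; i < hdr_len; i++)
		printf("%02x ", hdr[i]);	/* b8 3e */

	/* decode, as unpack_sha1_header() does */
	c = hdr[0];
	size = c & 0xf;
	bits = 4;
	for (i = 1; c & 0x80; i++) {
		c = hdr[i];
		size += (c & 0x7f) << bits;
		bits += 7;
	}
	printf("-> type %d, size %lu\n", (hdr[0] >> 4) & 7, size);	/* type 3, size 1000 */
	return 0;
}
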
+
int write_sha1_file(void *buf, unsigned long len, const char *type, unsigned char *returnsha1)
{
int size;
/* Set it up */
memset(&stream, 0, sizeof(stream));
- deflateInit(&stream, Z_BEST_COMPRESSION);
- size = deflateBound(&stream, len+hdrlen);
+ deflateInit(&stream, zlib_compression_level);
+ size = 8 + deflateBound(&stream, len+hdrlen);
compressed = xmalloc(size);
/* Compress it */
/* First header.. */
stream.next_in = hdr;
stream.avail_in = hdrlen;
- while (deflate(&stream, 0) == Z_OK)
- /* nothing */;
+ setup_object_header(&stream, type, len);
/* Then the data itself.. */
stream.next_in = buf;
int hdrlen;
void *buf;
- // need to unpack and recompress it by itself
+ /* need to unpack and recompress it by itself */
unpacked = read_packed_sha1(sha1, type, &len);
hdrlen = sprintf(hdr, "%s %lu", type, len) + 1;
/* Set it up */
memset(&stream, 0, sizeof(stream));
- deflateInit(&stream, Z_BEST_COMPRESSION);
+ deflateInit(&stream, zlib_compression_level);
size = deflateBound(&stream, len + hdrlen);
buf = xmalloc(size);
{
int retval;
unsigned long objsize;
- void *buf = map_sha1_file_internal(sha1, &objsize);
+ void *buf = map_sha1_file(sha1, &objsize);
if (buf) {
retval = write_buffer(fd, buf, objsize);
/*
* reads from fd as long as possible into a supplied buffer of size bytes.
- * If neccessary the buffer's size is increased using realloc()
+ * If necessary the buffer's size is increased using realloc()
*
* returns 0 if anything went fine and -1 otherwise
*
is_null = !memcmp(sha1, null_sha1, 20);
memcpy(hex, sha1_to_hex(sha1), 40);
- if (len == 40)
+ if (len == 40 || !len)
return hex;
while (len < 40) {
unsigned char sha1_ret[20];
sp++; /* beginning of type name, or closing brace for empty */
if (!strncmp(commit_type, sp, 6) && sp[6] == '}')
- expected_type = TYPE_COMMIT;
+ expected_type = OBJ_COMMIT;
else if (!strncmp(tree_type, sp, 4) && sp[4] == '}')
- expected_type = TYPE_TREE;
+ expected_type = OBJ_TREE;
else if (!strncmp(blob_type, sp, 4) && sp[4] == '}')
- expected_type = TYPE_BLOB;
+ expected_type = OBJ_BLOB;
else if (sp[0] == '}')
- expected_type = TYPE_NONE;
+ expected_type = OBJ_NONE;
else
return -1;
memcpy(sha1, o->sha1, 20);
return 0;
}
- if (o->type == TYPE_TAG)
+ if (o->type == OBJ_TAG)
o = ((struct tag*) o)->tagged;
- else if (o->type == TYPE_COMMIT)
+ else if (o->type == OBJ_COMMIT)
o = &(((struct commit *) o)->tree->object);
else
return error("%.*s: expected %s type, but the object dereferences to %s type",
struct object_list *temp;
if (memcmp(sha1, in_transit->item->sha1, 20)) {
- // we must have already fetched it to clean the queue
+ /* we must have already fetched it to clean the queue */
return has_sha1_file(sha1) ? 0 : -1;
}
prefetches--;
if (read(fd_in, &remote, 1) < 1)
return -1;
}
- //fprintf(stderr, "Got %d\n", remote);
+ /* fprintf(stderr, "Got %d\n", remote); */
if (remote < 0)
return remote;
ret = write_sha1_from_fd(sha1, fd_in, conn_buf, 4096, &conn_buf_posn);
static const char ssh_fetch_usage[] =
MY_PROGRAM_NAME
- " [-c] [-t] [-a] [-v] [-d] [--recover] [-w ref] commit-id url";
+ " [-c] [-t] [-a] [-v] [--recover] [-w ref] commit-id url";
int main(int argc, char **argv)
{
+ const char *write_ref = NULL;
char *commit_id;
char *url;
int arg = 1;
prog = getenv("GIT_SSH_PUSH");
if (!prog) prog = "git-ssh-upload";
+ setup_ident();
setup_git_directory();
git_config(git_default_config);
}
commit_id = argv[arg];
url = argv[arg + 1];
- write_ref_log_details = url;
if (setup_connection(&fd_in, &fd_out, prog, url, arg, argv + 1))
return 1;
if (get_version())
return 1;
- if (pull(commit_id))
+ if (pull(1, &commit_id, &write_ref, url))
return 1;
return 0;
+++ /dev/null
-#include "cache.h"
-
-static const char git_symbolic_ref_usage[] =
-"git-symbolic-ref name [ref]";
-
-static void check_symref(const char *HEAD)
-{
- unsigned char sha1[20];
- const char *git_HEAD = strdup(git_path("%s", HEAD));
- const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 0);
- if (git_refs_heads_master) {
- /* we want to strip the .git/ part */
- int pfxlen = strlen(git_HEAD) - strlen(HEAD);
- puts(git_refs_heads_master + pfxlen);
- }
- else
- die("No such ref: %s", HEAD);
-}
-
-int main(int argc, const char **argv)
-{
- setup_git_directory();
- git_config(git_default_config);
- switch (argc) {
- case 2:
- check_symref(argv[1]);
- break;
- case 3:
- create_symref(strdup(git_path("%s", argv[1])), argv[2]);
- break;
- default:
- usage(git_symbolic_ref_usage);
- }
- return 0;
-}
SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+TSVN = $(wildcard t91[0-9][0-9]-*.sh)
ifdef NO_PYTHON
GIT_TEST_OPTS += --no-python
clean:
rm -fr trash
+# we can test NO_OPTIMIZE_COMMITS independently of LC_ALL
+full-svn-test:
+ $(MAKE) $(TSVN) GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C
+ $(MAKE) $(TSVN) GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=1 LC_ALL=C
+ $(MAKE) $(TSVN) GIT_SVN_NO_LIB=1 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \
+ LC_ALL=en_US.UTF-8
+ $(MAKE) $(TSVN) GIT_SVN_NO_LIB=0 GIT_SVN_NO_OPTIMIZE_COMMITS=0 \
+ LC_ALL=en_US.UTF-8
+
.PHONY: $(T) clean
.NOTPARALLEL:
4 - the diff commands
5 - the pull and exporting commands
6 - the revision tree commands (even e.g. merge-base)
+ 7 - the porcelainish commands concerning the working tree
Second digit tells the particular command we are testing.
check_count () {
head=
case "$1" in -h) head="$2"; shift; shift ;; esac
- $PROG file $head | perl -e '
+ $PROG file $head >.result || return 1
+ cat .result | perl -e '
my %expect = (@ARGV);
my %count = ();
while (<STDIN>) {
test_expect_success \
'merge-setup part 4' \
'echo "evil merge." >>file &&
- EDITOR=: VISUAL=: git commit -a --amend'
+ git commit -a --amend'
test_expect_success \
'Two lines blamed on A, one on B, two on B1, one on B2, one on A U Thor' \
--- /dev/null
+. ./test-lib.sh
+
+if test -n "$NO_SVN_TESTS"
+then
+ test_expect_success 'skipping git-svn tests, NO_SVN_TESTS defined' :
+ test_done
+ exit
+fi
+
+GIT_DIR=$PWD/.git
+GIT_SVN_DIR=$GIT_DIR/svn/git-svn
+SVN_TREE=$GIT_SVN_DIR/svn-tree
+
+perl -e 'use SVN::Core' >/dev/null 2>&1
+if test $? -ne 0
+then
+ echo 'Perl SVN libraries not found, tests requiring those will be skipped'
+ GIT_SVN_NO_LIB=1
+fi
+
+svnadmin >/dev/null 2>&1
+if test $? -ne 1
+then
+ test_expect_success 'skipping git-svn tests, svnadmin not found' :
+ test_done
+ exit
+fi
+
+svn >/dev/null 2>&1
+if test $? -ne 1
+then
+ test_expect_success 'skipping git-svn tests, svn not found' :
+ test_done
+ exit
+fi
+
+svnrepo=$PWD/svnrepo
+
+set -e
+
+if svnadmin create --help | grep fs-type >/dev/null
+then
+ svnadmin create --fs-type fsfs "$svnrepo"
+else
+ svnadmin create "$svnrepo"
+fi
+
+svnrepo="file://$svnrepo/test-git-svn"
+
+
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+test_description='git-read-tree --prefix test.
+'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+ echo hello >one &&
+ git-update-index --add one &&
+ tree=`git-write-tree` &&
+ echo tree is $tree
+'
+
+echo 'one
+two/one' >expect
+
+test_expect_success 'read-tree --prefix' '
+ git-read-tree --prefix=two/ $tree &&
+ git-ls-files >actual &&
+ cmp expect actual
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+test_description='Try various core-level commands in subdirectory.
+'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+ long="a b c d e f g h i j k l m n o p q r s t u v w x y z" &&
+ for c in $long; do echo $c; done >one &&
+ mkdir dir &&
+ for c in x y z $long a b c; do echo $c; done >dir/two &&
+ cp one original.one &&
+ cp dir/two original.two
+'
+HERE=`pwd`
+LF='
+'
+
+test_expect_success 'update-index and ls-files' '
+ cd $HERE &&
+ git-update-index --add one &&
+ case "`git-ls-files`" in
+ one) echo ok one ;;
+ *) echo bad one; exit 1 ;;
+ esac &&
+ cd dir &&
+ git-update-index --add two &&
+ case "`git-ls-files`" in
+ two) echo ok two ;;
+ *) echo bad two; exit 1 ;;
+ esac &&
+ cd .. &&
+ case "`git-ls-files`" in
+ dir/two"$LF"one) echo ok both ;;
+ *) echo bad; exit 1 ;;
+ esac
+'
+
+test_expect_success 'cat-file' '
+ cd $HERE &&
+ two=`git-ls-files -s dir/two` &&
+ two=`expr "$two" : "[0-7]* \\([0-9a-f]*\\)"` &&
+ echo "$two" &&
+ git-cat-file -p "$two" >actual &&
+ cmp dir/two actual &&
+ cd dir &&
+ git-cat-file -p "$two" >actual &&
+ cmp two actual
+'
+rm -f actual dir/actual
+
+test_expect_success 'diff-files' '
+ cd $HERE &&
+ echo a >>one &&
+ echo d >>dir/two &&
+ case "`git-diff-files --name-only`" in
+ dir/two"$LF"one) echo ok top ;;
+ *) echo bad top; exit 1 ;;
+ esac &&
+ # diff should not omit leading paths
+ cd dir &&
+ case "`git-diff-files --name-only`" in
+ dir/two"$LF"one) echo ok subdir ;;
+ *) echo bad subdir; exit 1 ;;
+ esac &&
+ case "`git-diff-files --name-only .`" in
+ dir/two) echo ok subdir limited ;;
+ *) echo bad subdir limited; exit 1 ;;
+ esac
+'
+
+test_expect_success 'write-tree' '
+ cd $HERE &&
+ top=`git-write-tree` &&
+ echo $top &&
+ cd dir &&
+ sub=`git-write-tree` &&
+ echo $sub &&
+ test "z$top" = "z$sub"
+'
+
+test_expect_success 'checkout-index' '
+ cd $HERE &&
+ git-checkout-index -f -u one &&
+ cmp one original.one &&
+ cd dir &&
+ git-checkout-index -f -u two &&
+ cmp two ../original.two
+'
+
+test_expect_success 'read-tree' '
+ cd $HERE &&
+ rm -f one dir/two &&
+ tree=`git-write-tree` &&
+ git-read-tree --reset -u "$tree" &&
+ cmp one original.one &&
+ cmp dir/two original.two &&
+ cd dir &&
+ rm -f two &&
+ git-read-tree --reset -u "$tree" &&
+ cmp two ../original.two &&
+ cmp ../one ../original.one
+'
+
+test_done
E=5555555555555555555555555555555555555555
F=6666666666666666666666666666666666666666
m=refs/heads/master
+n_dir=refs/heads/gu
+n=$n_dir/fixes
test_expect_success \
"create $m" \
test $B = $(cat .git/$m)'
rm -f .git/$m
+test_expect_success \
+ "fail to create $n" \
+ 'touch .git/$n_dir
+ git-update-ref $n $A >out 2>err
+ test $? = 1 &&
+ test "" = "$(cat out)" &&
+ grep "error: unable to resolve reference" err &&
+ grep $n err'
+rm -f .git/$n_dir out err
+
test_expect_success \
"create $m (by HEAD)" \
'git-update-ref HEAD $A &&
echo OTHER >F &&
GIT_AUTHOR_DATE="2005-05-26 23:41" \
GIT_COMMITTER_DATE="2005-05-26 23:41" git-commit -F M -a &&
- h_OTHER=$(git-rev-parse --verify HEAD)
+ h_OTHER=$(git-rev-parse --verify HEAD) &&
+ echo FIXED >F &&
+ GIT_AUTHOR_DATE="2005-05-26 23:44" \
+ GIT_COMMITTER_DATE="2005-05-26 23:44" git-commit --amend &&
+ h_FIXED=$(git-rev-parse --verify HEAD) &&
+ echo TEST+FIXED >F &&
+ echo Merged initial commit and a later commit. >M &&
+ echo $h_TEST >.git/MERGE_HEAD &&
+ GIT_AUTHOR_DATE="2005-05-26 23:45" \
+ GIT_COMMITTER_DATE="2005-05-26 23:45" git-commit -F M &&
+ h_MERGED=$(git-rev-parse --verify HEAD)
rm -f M'
cat >expect <<EOF
-$Z $h_TEST $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117150200 +0000 commit: add
+$Z $h_TEST $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117150200 +0000 commit (initial): add
$h_TEST $h_OTHER $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117150860 +0000 commit: The other day this did not work.
+$h_OTHER $h_FIXED $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117151040 +0000 commit (amend): The other day this did not work.
+$h_FIXED $h_MERGED $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> 1117151100 +0000 commit (merge): Merged initial commit and a later commit.
EOF
test_expect_success \
'git-commit logged updates' \
'diff expect .git/logs/$m'
-unset h_TEST h_OTHER
+unset h_TEST h_OTHER h_FIXED h_MERGED
test_expect_success \
'git-cat-file blob master:F (expect OTHER)' \
--- /dev/null
+#!/bin/sh
+#
+#
+
+test_description='git-mktag: tag object verify test'
+
+. ./test-lib.sh
+
+###########################################################
+# check the tag.sig file, expecting verify_tag() to fail,
+# and checking that the error message matches the pattern
+# given in the expect.pat file.
+
+check_verify_failure () {
+ test_expect_success \
+ "$1" \
+ 'git-mktag <tag.sig 2>message ||
+ egrep -q -f expect.pat message'
+}
+
+###########################################################
+# first create a commit, so we have a valid object/type
+# for the tag.
+echo Hello >A
+git-update-index --add A
+git-commit -m "Initial commit"
+head=$(git-rev-parse --verify HEAD)
+
+############################################################
+# 1. length check
+
+cat >tag.sig <<EOF
+too short for a tag
+EOF
+
+cat >expect.pat <<EOF
+^error: .*size wrong.*$
+EOF
+
+check_verify_failure 'Tag object length check'
+
+############################################################
+# 2. object line label check
+
+cat >tag.sig <<EOF
+xxxxxx 139e9b33986b1c2670fff52c5067603117b3e895
+type tag
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char0: .*"object "$
+EOF
+
+check_verify_failure '"object" line label check'
+
+############################################################
+# 3. object line SHA1 check
+
+cat >tag.sig <<EOF
+object zz9e9b33986b1c2670fff52c5067603117b3e895
+type tag
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char7: .*SHA1 hash$
+EOF
+
+check_verify_failure '"object" line SHA1 check'
+
+############################################################
+# 4. type line label check
+
+cat >tag.sig <<EOF
+object 779e9b33986b1c2670fff52c5067603117b3e895
+xxxx tag
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char47: .*"[\]ntype "$
+EOF
+
+check_verify_failure '"type" line label check'
+
+############################################################
+# 5. type line eol check
+
+echo "object 779e9b33986b1c2670fff52c5067603117b3e895" >tag.sig
+echo -n "type tagsssssssssssssssssssssssssssssss" >>tag.sig
+
+cat >expect.pat <<EOF
+^error: char48: .*"[\]n"$
+EOF
+
+check_verify_failure '"type" line eol check'
+
+############################################################
+# 6. tag line label check #1
+
+cat >tag.sig <<EOF
+object 779e9b33986b1c2670fff52c5067603117b3e895
+type tag
+xxx mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char57: no "tag " found$
+EOF
+
+check_verify_failure '"tag" line label check #1'
+
+############################################################
+# 7. tag line label check #2
+
+cat >tag.sig <<EOF
+object 779e9b33986b1c2670fff52c5067603117b3e895
+type taggggggggggggggggggggggggggggggg
+tag
+EOF
+
+cat >expect.pat <<EOF
+^error: char87: no "tag " found$
+EOF
+
+check_verify_failure '"tag" line label check #2'
+
+############################################################
+# 8. type line type-name length check
+
+cat >tag.sig <<EOF
+object 779e9b33986b1c2670fff52c5067603117b3e895
+type taggggggggggggggggggggggggggggggg
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char53: type too long$
+EOF
+
+check_verify_failure '"type" line type-name length check'
+
+############################################################
+# 9. verify object (SHA1/type) check
+
+cat >tag.sig <<EOF
+object 779e9b33986b1c2670fff52c5067603117b3e895
+type tagggg
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char7: could not verify object.*$
+EOF
+
+check_verify_failure 'verify object (SHA1/type) check'
+
+############################################################
+# 10. verify tag-name check
+
+cat >tag.sig <<EOF
+object $head
+type commit
+tag my tag
+EOF
+
+cat >expect.pat <<EOF
+^error: char67: could not verify tag name$
+EOF
+
+check_verify_failure 'verify tag-name check'
+
+############################################################
+# 11. tagger line label check #1
+
+cat >tag.sig <<EOF
+object $head
+type commit
+tag mytag
+EOF
+
+cat >expect.pat <<EOF
+^error: char70: could not find "tagger"$
+EOF
+
+check_verify_failure '"tagger" line label check #1'
+
+############################################################
+# 12. tagger line label check #2
+
+cat >tag.sig <<EOF
+object $head
+type commit
+tag mytag
+tagger
+EOF
+
+cat >expect.pat <<EOF
+^error: char70: could not find "tagger"$
+EOF
+
+check_verify_failure '"tagger" line label check #2'
+
+############################################################
+# 13. create valid tag
+
+cat >tag.sig <<EOF
+object $head
+type commit
+tag mytag
+tagger another@example.com
+EOF
+
+test_expect_success \
+ 'create valid tag' \
+ 'git-mktag <tag.sig >.git/refs/tags/mytag 2>message'
+
+############################################################
+# 14. check mytag
+
+test_expect_success \
+ 'check mytag' \
+ 'git-tag -l | grep mytag'
+
+
+test_done
. ./test-lib.sh
+LF='
+'
+
test_expect_success setup '
GIT_AUTHOR_DATE="2006-06-26 00:00:00 +0000" &&
for i in C D; do echo $i; done >>dir/sub &&
rm -f file2 &&
git update-index --remove file0 file2 dir/sub &&
- git commit -m Second &&
+ git commit -m "Second${LF}${LF}This is the second commit." &&
GIT_AUTHOR_DATE="2006-06-26 00:02:00 +0000" &&
GIT_COMMITTER_DATE="2006-06-26 00:02:00 +0000" &&
for i in 1 2; do echo $i; done >>dir/sub &&
git update-index file0 dir/sub &&
- EDITOR=: VISUAL=: git commit --amend &&
+ git commit --amend &&
git show-branch
'
+*+ [initial] Initial
EOF
-V=`git version | sed -e 's/^git version //'`
+V=`git version | sed -e 's/^git version //' -e 's/\./\\./g'`
while read cmd
do
case "$cmd" in
test_expect_success "git $cmd" '
{
echo "\$ git $cmd"
- git $cmd | sed -e "s/$V/g-i-t--v-e-r-s-i-o-n/"
+ git $cmd |
+ sed -e "s/^\\(-*\\)$V\\(-*\\)\$/\\1g-i-t--v-e-r-s-i-o-n\2/" \
+ -e "s/^\\( *boundary=\"-*\\)$V\\(-*\\)\"\$/\\1g-i-t--v-e-r-s-i-o-n\2\"/"
echo "\$"
} >"$actual" &&
if test -f "$expect"
format-patch --attach --stdout initial..master^
format-patch --attach --stdout initial..master
+diff --abbrev initial..side
+diff -r initial..side
+diff --stat initial..side
+diff -r --stat initial..side
+diff initial..side
+diff --patch-with-stat initial..side
+diff --patch-with-raw initial..side
+diff --patch-with-stat -r initial..side
+diff --patch-with-raw -r initial..side
EOF
test_done
$ git diff-tree --cc --patch-with-stat --summary master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree --cc --patch-with-stat master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree --cc --stat --summary master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree --cc --stat master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree --cc master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
diff --cc dir/sub
index cead32e,7289e35..992913c
--- a/dir/sub
$ git diff-tree -c --abbrev master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
::100644 100644 100644 cead32e... 7289e35... 992913c... MM dir/sub
::100644 100644 100644 b414108... f4615da... 10a8a9f... MM file0
$
$ git diff-tree -c --stat --summary master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree -c --stat master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
dir/sub | 2 ++
file0 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
$ git diff-tree -c master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
::100644 100644 100644 cead32e925b1420c84c14cbf7cf755e7e45af8ad 7289e35bff32727c08dda207511bec138fdb9ea5 992913c5aa0a5476d10c49ed0f21fc0c6d1aedf3 MM dir/sub
::100644 100644 100644 b414108e81e5091fe0974a1858b4d0d22b107f70 f4615da674c09df322d6ba8d6b21ecfb1b1ba510 10a8a9f3657f91a156b9f0184ed79a20adef9f7f MM file0
$
$ git diff-tree -p -m master
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
diff --git a/dir/sub b/dir/sub
index cead32e..992913c 100644
--- a/dir/sub
+A
+B
+C
-176b998f5d647cbd77a9d8acf4531e930754d16d
+59d314ad6f356dd08601a4cd5e530381da3e3c64
diff --git a/dir/sub b/dir/sub
index 7289e35..992913c 100644
--- a/dir/sub
--- /dev/null
+$ git diff --abbrev initial..side
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff --patch-with-raw -r initial..side
+:100644 100644 35d242b... 7289e35... M dir/sub
+:100644 100644 01e79c3... f4615da... M file0
+:000000 100644 0000000... 7289e35... A file3
+
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff --patch-with-raw initial..side
+:100644 100644 35d242b... 7289e35... M dir/sub
+:100644 100644 01e79c3... f4615da... M file0
+:000000 100644 0000000... 7289e35... A file3
+
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff --patch-with-stat -r initial..side
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
+ 3 files changed, 9 insertions(+), 0 deletions(-)
+
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff --patch-with-stat initial..side
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
+ 3 files changed, 9 insertions(+), 0 deletions(-)
+
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff --stat initial..side
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
+ 3 files changed, 9 insertions(+), 0 deletions(-)
+$
--- /dev/null
+$ git diff -r --stat initial..side
+ dir/sub | 2 ++
+ file0 | 3 +++
+ file3 | 4 ++++
+ 3 files changed, 9 insertions(+), 0 deletions(-)
+$
--- /dev/null
+$ git diff -r initial..side
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
--- /dev/null
+$ git diff initial..side
+diff --git a/dir/sub b/dir/sub
+index 35d242b..7289e35 100644
+--- a/dir/sub
++++ b/dir/sub
+@@ -1,2 +1,4 @@
+ A
+ B
++1
++2
+diff --git a/file0 b/file0
+index 01e79c3..f4615da 100644
+--- a/file0
++++ b/file0
+@@ -1,3 +1,6 @@
+ 1
+ 2
+ 3
++A
++B
++C
+diff --git a/file3 b/file3
+new file mode 100644
+index 0000000..7289e35
+--- /dev/null
++++ b/file3
+@@ -0,0 +1,4 @@
++A
++B
++1
++2
+$
$ git format-patch --attach --stdout initial..master
-From 7952a93e09bf565b5592766a438b40cd81f4846f Mon Sep 17 00:00:00 2001
+From 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:01:00 +0000
Subject: [PATCH] Second
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
+
+This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
3 files changed, 5 insertions(+), 3 deletions(-)
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/x-patch;
- name="7952a93e09bf565b5592766a438b40cd81f4846f.diff"
+ name="1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44.diff"
Content-Transfer-Encoding: 8bit
Content-Disposition: inline;
- filename="7952a93e09bf565b5592766a438b40cd81f4846f.diff"
+ filename="1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44.diff"
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
-From 889b315013ef9f2e2f90aa0b054b267c8a557847 Mon Sep 17 00:00:00 2001
+From 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:02:00 +0000
Subject: [PATCH] Third
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
---
dir/sub | 2 ++
file1 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/x-patch;
- name="889b315013ef9f2e2f90aa0b054b267c8a557847.diff"
+ name="9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0.diff"
Content-Transfer-Encoding: 8bit
Content-Disposition: inline;
- filename="889b315013ef9f2e2f90aa0b054b267c8a557847.diff"
+ filename="9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0.diff"
diff --git a/dir/sub b/dir/sub
index 8422d40..cead32e 100644
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
---
dir/sub | 2 ++
file0 | 3 +++
$ git format-patch --attach --stdout initial..master^
-From 7952a93e09bf565b5592766a438b40cd81f4846f Mon Sep 17 00:00:00 2001
+From 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:01:00 +0000
Subject: [PATCH] Second
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
+
+This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
3 files changed, 5 insertions(+), 3 deletions(-)
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/x-patch;
- name="7952a93e09bf565b5592766a438b40cd81f4846f.diff"
+ name="1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44.diff"
Content-Transfer-Encoding: 8bit
Content-Disposition: inline;
- filename="7952a93e09bf565b5592766a438b40cd81f4846f.diff"
+ filename="1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44.diff"
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
-From 889b315013ef9f2e2f90aa0b054b267c8a557847 Mon Sep 17 00:00:00 2001
+From 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:02:00 +0000
Subject: [PATCH] Third
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
---
dir/sub | 2 ++
file1 | 3 +++
2 files changed, 5 insertions(+), 0 deletions(-)
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/x-patch;
- name="889b315013ef9f2e2f90aa0b054b267c8a557847.diff"
+ name="9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0.diff"
Content-Transfer-Encoding: 8bit
Content-Disposition: inline;
- filename="889b315013ef9f2e2f90aa0b054b267c8a557847.diff"
+ filename="9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0.diff"
diff --git a/dir/sub b/dir/sub
index 8422d40..cead32e 100644
--------------g-i-t--v-e-r-s-i-o-n
Content-Type: text/plain; charset=UTF-8; format=fixed
Content-Transfer-Encoding: 8bit
+
---
dir/sub | 2 ++
file0 | 3 +++
$ git format-patch --stdout initial..master
-From 7952a93e09bf565b5592766a438b40cd81f4846f Mon Sep 17 00:00:00 2001
+From 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:01:00 +0000
Subject: [PATCH] Second
+
+This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
g-i-t--v-e-r-s-i-o-n
-From 889b315013ef9f2e2f90aa0b054b267c8a557847 Mon Sep 17 00:00:00 2001
+From 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:02:00 +0000
Subject: [PATCH] Third
+
---
dir/sub | 2 ++
file1 | 3 +++
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:03:00 +0000
Subject: [PATCH] Side
+
---
dir/sub | 2 ++
file0 | 3 +++
$ git format-patch --stdout initial..master^
-From 7952a93e09bf565b5592766a438b40cd81f4846f Mon Sep 17 00:00:00 2001
+From 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:01:00 +0000
Subject: [PATCH] Second
+
+This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
g-i-t--v-e-r-s-i-o-n
-From 889b315013ef9f2e2f90aa0b054b267c8a557847 Mon Sep 17 00:00:00 2001
+From 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0 Mon Sep 17 00:00:00 2001
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:02:00 +0000
Subject: [PATCH] Third
+
---
dir/sub | 2 ++
file1 | 3 +++
From: A U Thor <author@example.com>
Date: Mon, 26 Jun 2006 00:03:00 +0000
Subject: [PATCH] Side
+
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --patch-with-stat --summary master -- dir/
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+E
+F
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
1 files changed, 2 insertions(+), 0 deletions(-)
$ git log --patch-with-stat master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --patch-with-stat master -- dir/
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+E
+F
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
1 files changed, 2 insertions(+), 0 deletions(-)
$ git log --root --cc --patch-with-stat --summary master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --root --patch-with-stat --summary master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --root --patch-with-stat master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --root -c --patch-with-stat --summary master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git log --root -p master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
$ git log --root master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
Side
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
Third
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
commit 444ac553ac7612cc88969031b02b3767fb8a353a
Author: A U Thor <author@example.com>
$ git log -SF -p master
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
$ git log -SF master
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
$ git log -p master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
$ git log master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
Side
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
Third
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
commit 444ac553ac7612cc88969031b02b3767fb8a353a
Author: A U Thor <author@example.com>
$ git show master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+E
+F
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
1 files changed, 2 insertions(+), 0 deletions(-)
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+E
+F
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
1 files changed, 2 insertions(+), 0 deletions(-)
$ git whatchanged --root --cc --patch-with-stat --summary master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
$ git whatchanged --root -c --patch-with-stat --summary master
-commit 176b998f5d647cbd77a9d8acf4531e930754d16d
-Merge: 889b315... c7a2ab9...
+commit 59d314ad6f356dd08601a4cd5e530381da3e3c64
+Merge: 9a6d494... c7a2ab9...
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:04:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
---
dir/sub | 2 ++
file0 | 3 +++
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
:100644 100644 01e79c3... f4615da... M file0
:000000 100644 0000000... 7289e35... A file3
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
:100644 100644 8422d40... cead32e... M dir/sub
:000000 100644 0000000... b1e6722... A file1
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
:100644 100644 35d242b... 8422d40... M dir/sub
:100644 100644 01e79c3... b414108... M file0
$ git whatchanged -SF -p master
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
$ git whatchanged -SF master
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+1
+2
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
+B
+C
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
diff --git a/dir/sub b/dir/sub
index 35d242b..8422d40 100644
:100644 100644 01e79c3... f4615da... M file0
:000000 100644 0000000... 7289e35... A file3
-commit 889b315013ef9f2e2f90aa0b054b267c8a557847
+commit 9a6d4949b6b76956d9d5e26f2791ec2ceff5fdc0
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:02:00 2006 +0000
:100644 100644 8422d40... cead32e... M dir/sub
:000000 100644 0000000... b1e6722... A file1
-commit 7952a93e09bf565b5592766a438b40cd81f4846f
+commit 1bde4ae5f36c8d9abe3a0fce0c6aab3c4a12fe44
Author: A U Thor <author@example.com>
Date: Mon Jun 26 00:01:00 2006 +0000
Second
+
+ This is the second commit.
:100644 100644 35d242b... 8422d40... M dir/sub
:100644 100644 01e79c3... b414108... M file0
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+test_description='Format-patch skipping already incorporated patches'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+
+ for i in 1 2 3 4 5 6 7 8 9 10; do echo "$i"; done >file &&
+ git add file &&
+ git commit -m Initial &&
+ git checkout -b side &&
+
+ for i in 1 2 5 6 A B C 7 8 9 10; do echo "$i"; done >file &&
+ git update-index file &&
+ git commit -m "Side change #1" &&
+
+ for i in D E F; do echo "$i"; done >>file &&
+ git update-index file &&
+ git commit -m "Side change #2" &&
+ git tag C2 &&
+
+ for i in 5 6 1 2 3 A 4 B C 7 8 9 10 D E F; do echo "$i"; done >file &&
+ git update-index file &&
+ git commit -m "Side change #3" &&
+
+ git checkout master &&
+ git diff-tree -p C2 | git apply --index &&
+ git commit -m "Master accepts moral equivalent of #2"
+
+'
+
+test_expect_success "format-patch --ignore-if-in-upstream" '
+
+ git format-patch --stdout master..side >patch0 &&
+ cnt=`grep "^From " patch0 | wc -l` &&
+ test $cnt = 3
+
+'
+
+test_expect_success "format-patch --ignore-if-in-upstream" '
+
+ git format-patch --stdout \
+ --ignore-if-in-upstream master..side >patch1 &&
+ cnt=`grep "^From " patch1 | wc -l` &&
+ test $cnt = 2
+
+'
+
+test_expect_success "format-patch result applies" '
+
+ git checkout -b rebuild-0 master &&
+ git am -3 patch0 &&
+ cnt=`git rev-list master.. | wc -l` &&
+ test $cnt = 2
+'
+
+test_expect_success "format-patch --ignore-if-in-upstream result applies" '
+
+ git checkout -b rebuild-1 master &&
+ git am -3 patch1 &&
+ cnt=`git rev-list master.. | wc -l` &&
+ test $cnt = 2
+'
+
+test_done
cat >test-patch <<\EOF
diff --git a/foo b/bar
similarity index 47%
-copy from foo
-copy to bar
+rename from foo
+rename to bar
--- a/foo
+++ b/bar
@@ -1 +1 @@
'test -f bar && ls -l bar | grep "^-..x......"'
fi
+test_expect_success 'apply reverse' \
+ 'git-apply -R --index --stat --summary --apply test-patch &&
+ test "$(cat foo)" = "This is foo"'
+
+cat >test-patch <<\EOF
+diff --git a/foo b/bar
+similarity index 47%
+copy from foo
+copy to bar
+--- a/foo
++++ b/bar
+@@ -1 +1 @@
+-This is foo
++This is bar
+EOF
+
+test_expect_success 'apply copy' \
+ 'git-apply --index --stat --summary --apply test-patch &&
+ test "$(cat bar)" = "This is bar" -a "$(cat foo)" = "This is foo"'
+
test_done
git-diff-tree -p master binary >B.diff
git-diff-tree -p -C master binary >C.diff
-git-diff-tree -p --full-index master binary >BF.diff
-git-diff-tree -p --full-index -C master binary >CF.diff
+git-diff-tree -p --binary master binary >BF.diff
+git-diff-tree -p --binary -C master binary >CF.diff
test_expect_success 'stat binary diff -- should not fail.' \
'git-checkout master
# setup
-mkdir -p include/arch/x86_64/klibc klibc/arch/x86_64/include/klibc
-
-cat >include/arch/x86_64/klibc/archsetjmp.h <<\EOF
-/*
- * arch/x86_64/include/klibc/archsetjmp.h
- */
-
-#ifndef _KLIBC_ARCHSETJMP_H
-#define _KLIBC_ARCHSETJMP_H
-
-struct __jmp_buf {
- unsigned long __rbx;
- unsigned long __rsp;
- unsigned long __rbp;
- unsigned long __r12;
- unsigned long __r13;
- unsigned long __r14;
- unsigned long __r15;
- unsigned long __rip;
-};
-
-typedef struct __jmp_buf jmp_buf[1];
-
-#endif /* _SETJMP_H */
-EOF
+mkdir -p klibc/arch/x86_64/include/klibc
cat >klibc/arch/x86_64/include/klibc/archsetjmp.h <<\EOF
/*
+#endif /* _KLIBC_ARCHSETJMP_H */
EOF
-find include klibc -type f -print | xargs git-update-index --add --
+find klibc -type f -print | xargs git-update-index --add --
test_expect_success 'check rename/copy patch' 'git-apply --check patch'
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+
+test_description='git-apply should not get confused with type changes.
+
+'
+
+. ./test-lib.sh
+
+test_expect_success 'setup repository and commits' '
+ echo "hello world" > foo &&
+ echo "hi planet" > bar &&
+ git update-index --add foo bar &&
+ git commit -m initial &&
+ git branch initial &&
+ rm -f foo &&
+ ln -s bar foo &&
+ git update-index foo &&
+ git commit -m "foo symlinked to bar" &&
+ git branch foo-symlinked-to-bar &&
+ rm -f foo &&
+ echo "how far is the sun?" > foo &&
+ git update-index foo &&
+ git commit -m "foo back to file" &&
+ git branch foo-back-to-file &&
+ rm -f foo &&
+ git update-index --remove foo &&
+ mkdir foo &&
+ echo "if only I knew" > foo/baz &&
+ git update-index --add foo/baz &&
+ git commit -m "foo becomes a directory" &&
+ git branch "foo-becomes-a-directory" &&
+ echo "hello world" > foo/baz &&
+ git update-index foo/baz &&
+ git commit -m "foo/baz is the original foo" &&
+ git branch foo-baz-renamed-from-foo
+ '
+
+test_expect_success 'file renamed from foo to foo/baz' '
+ git checkout -f initial &&
+ git diff-tree -M -p HEAD foo-baz-renamed-from-foo > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'file renamed from foo/baz to foo' '
+ git checkout -f foo-baz-renamed-from-foo &&
+ git diff-tree -M -p HEAD initial > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'directory becomes file' '
+ git checkout -f foo-becomes-a-directory &&
+ git diff-tree -p HEAD initial > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'file becomes directory' '
+ git checkout -f initial &&
+ git diff-tree -p HEAD foo-becomes-a-directory > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'file becomes symlink' '
+ git checkout -f initial &&
+ git diff-tree -p HEAD foo-symlinked-to-bar > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'symlink becomes file' '
+ git checkout -f foo-symlinked-to-bar &&
+ git diff-tree -p HEAD foo-back-to-file > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'symlink becomes directory' '
+ git checkout -f foo-symlinked-to-bar &&
+ git diff-tree -p HEAD foo-becomes-a-directory > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_expect_success 'directory becomes symlink' '
+ git checkout -f foo-becomes-a-directory &&
+ git diff-tree -p HEAD foo-symlinked-to-bar > patch &&
+ git apply --index < patch
+ '
+test_debug 'cat patch'
+
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2005 Junio C Hamano
+#
+
+test_description='git-apply symlinks and partial files
+
+'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+
+ ln -s path1/path2/path3/path4/path5 link1 &&
+ git add link? &&
+ git commit -m initial &&
+
+ git branch side &&
+
+ rm -f link? &&
+
+ ln -s htap6 link1 &&
+ git update-index link? &&
+ git commit -m second &&
+
+ git diff-tree -p HEAD^ HEAD >patch &&
+ git apply --stat --summary patch
+
+'
+
+test_expect_success 'apply symlink patch' '
+
+ git checkout side &&
+ git apply patch &&
+ git diff-files -p >patched &&
+ diff -u patch patched
+
+'
+
+test_expect_success 'apply --index symlink patch' '
+
+ git checkout -f side &&
+ git apply --index patch &&
+ git diff-index --cached -p HEAD >patched &&
+ diff -u patch patched
+
+'
+
+test_done
cd "$base_dir"
-test_expect_success 'existance of info/alternates' \
+test_expect_success 'existence of info/alternates' \
'test `wc -l <C/.git/objects/info/alternates` = 2'
cd "$base_dir"
cd "$base_dir"
-test_expect_failure 'that info/alternates is neccessary' \
+test_expect_failure 'that info/alternates is necessary' \
'cd C &&
rm .git/objects/info/alternates &&
test_valid_repo'
test_bisection_diff 0 $_bisect_option u5 ^U
#
-# the following illustrate's Linus' binary bug blatt idea.
+# the following illustrates Linus' binary bug blatt idea.
#
# assume the bug is actually at l3, but you don't know that - all you know is that l3 is broken
# and it wasn't broken before
--- /dev/null
+#!/bin/sh
+
+test_description='git-rev-list trivial path optimization test'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+echo Hello > a &&
+git add a &&
+git commit -m "Initial commit" a
+'
+
+test_expect_success path-optimization '
+ commit=$(echo "Unchanged tree" | git-commit-tree "HEAD^{tree}" -p HEAD) &&
+ test $(git-rev-list $commit | wc -l) = 2 &&
+ test $(git-rev-list $commit -- . | wc -l) = 1
+'
+
+test_done
G=$(doit 7 G $B $E)
H=$(doit 8 H $A $F)
+# Setup for second test to demonstrate that relying on timestamps in a
+# distributed SCM to provide a _consistent_ partial ordering of commits
+# leads to insanity.
+#
+# Relative
+# Structure timestamps
+#
+# PL PR +4 +4
+# / \/ \ / \/ \
+# L2 C2 R2 +3 -1 +3
+# | | | | | |
+# L1 C1 R1 +2 -2 +2
+# | | | | | |
+# L0 C0 R0 +1 -3 +1
+# \ | / \ | /
+# S 0
+#
+# The left and right chains of commits can be of any length and complexity as
+# long as all of the timestamps are greater than that of S.
+
+S=$(doit 0 S)
+
+C0=$(doit -3 C0 $S)
+C1=$(doit -2 C1 $C0)
+C2=$(doit -1 C2 $C1)
+
+L0=$(doit 1 L0 $S)
+L1=$(doit 2 L1 $L0)
+L2=$(doit 3 L2 $L1)
+
+R0=$(doit 1 R0 $S)
+R1=$(doit 2 R1 $R0)
+R2=$(doit 3 R2 $R1)
+
+PL=$(doit 4 PL $L2 $C2)
+PR=$(doit 4 PR $C2 $R2)
+
test_expect_success 'compute merge-base (single)' \
'MB=$(git-merge-base G H) &&
expr "$(git-name-rev "$MB")" : "[0-9a-f]* tags/B"'
'MB=$(git-show-branch --merge-base G H) &&
expr "$(git-name-rev "$MB")" : "[0-9a-f]* tags/B"'
+test_expect_success 'compute merge-base (single)' \
+ 'MB=$(git-merge-base PL PR) &&
+ expr "$(git-name-rev "$MB")" : "[0-9a-f]* tags/C2"'
+
+test_expect_success 'compute merge-base (all)' \
+ 'MB=$(git-merge-base --all PL PR) &&
+ expr "$(git-name-rev "$MB")" : "[0-9a-f]* tags/C2"'
+
test_done
#
# See http://marc.theaimsgroup.com/?l=git&m=111463358500362&w=2 for a
-# nice decription of what this is about.
+# nice description of what this is about.
test_description='Test criss-cross merge'
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006, Junio C Hamano
+#
+
+test_description='fmt-merge-msg test'
+
+. ./test-lib.sh
+
+datestamp=1151939923
+setdate () {
+ GIT_COMMITTER_DATE="$datestamp +0200"
+ GIT_AUTHOR_DATE="$datestamp +0200"
+ datestamp=`expr "$datestamp" + 1`
+ export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
+}
+
+test_expect_success setup '
+ echo one >one &&
+ git add one &&
+ setdate &&
+ git commit -m "Initial" &&
+
+ echo uno >one &&
+ echo dos >two &&
+ git add two &&
+ setdate &&
+ git commit -a -m "Second" &&
+
+ git checkout -b left &&
+
+ echo $datestamp >one &&
+ setdate &&
+ git commit -a -m "Common #1" &&
+
+ echo $datestamp >one &&
+ setdate &&
+ git commit -a -m "Common #2" &&
+
+ git branch right &&
+
+ echo $datestamp >two &&
+ setdate &&
+ git commit -a -m "Left #3" &&
+
+ echo $datestamp >two &&
+ setdate &&
+ git commit -a -m "Left #4" &&
+
+ echo $datestamp >two &&
+ setdate &&
+ git commit -a -m "Left #5" &&
+
+ git checkout right &&
+
+ echo $datestamp >three &&
+ git add three &&
+ setdate &&
+ git commit -a -m "Right #3" &&
+
+ echo $datestamp >three &&
+ setdate &&
+ git commit -a -m "Right #4" &&
+
+ echo $datestamp >three &&
+ setdate &&
+ git commit -a -m "Right #5" &&
+
+ git show-branch
+'
+
+cat >expected <<\EOF
+Merge branch 'left'
+EOF
+
+test_expect_success 'merge-msg test #1' '
+
+ git checkout master &&
+ git fetch . left &&
+
+ git fmt-merge-msg <.git/FETCH_HEAD >actual &&
+ diff -u actual expected
+'
+
+cat >expected <<\EOF
+Merge branch 'left' of ../trash
+EOF
+
+test_expect_success 'merge-msg test #2' '
+
+ git checkout master &&
+ git fetch ../trash left &&
+
+ git fmt-merge-msg <.git/FETCH_HEAD >actual &&
+ diff -u actual expected
+'
+
+cat >expected <<\EOF
+Merge branch 'left'
+
+* left:
+ Left #5
+ Left #4
+ Left #3
+ Common #2
+ Common #1
+EOF
+
+test_expect_success 'merge-msg test #3' '
+
+ git repo-config merge.summary true &&
+
+ git checkout master &&
+ setdate &&
+ git fetch . left &&
+
+ git fmt-merge-msg <.git/FETCH_HEAD >actual &&
+ diff -u actual expected
+'
+
+cat >expected <<\EOF
+Merge branches 'left' and 'right'
+
+* left:
+ Left #5
+ Left #4
+ Left #3
+ Common #2
+ Common #1
+
+* right:
+ Right #5
+ Right #4
+ Right #3
+ Common #2
+ Common #1
+EOF
+
+test_expect_success 'merge-msg test #4' '
+
+ git repo-config merge.summary true &&
+
+ git checkout master &&
+ setdate &&
+ git fetch . left right &&
+
+ git fmt-merge-msg <.git/FETCH_HEAD >actual &&
+ diff -u actual expected
+'
+
+test_expect_success 'merge-msg test #5' '
+
+ git repo-config merge.summary yes &&
+
+ git checkout master &&
+ setdate &&
+ git fetch . left right &&
+
+ git fmt-merge-msg <.git/FETCH_HEAD >actual &&
+ diff -u actual expected
+'
+
+test_done
'git-diff-tree -r -M --name-status HEAD^ HEAD | \
grep -E "^R100.+path1/COPYING.+path0/COPYING"'
+test_expect_success \
+ 'adding another file' \
+ 'cp ../../README path0/README &&
+ git-add path0/README &&
+ git-commit -m add2 -a'
+
+test_expect_success \
+ 'moving whole subdirectory' \
+ 'git-mv path0 path2'
+
+test_expect_success \
+ 'committing the change' \
+ 'git-commit -m dir-move -a'
+
+test_expect_success \
+ 'checking the commit' \
+ 'git-diff-tree -r -M --name-status HEAD^ HEAD | \
+ grep -E "^R100.+path0/COPYING.+path2/COPYING" &&
+ git-diff-tree -r -M --name-status HEAD^ HEAD | \
+ grep -E "^R100.+path0/README.+path2/README"'
+
+test_expect_success \
+ 'moving whole subdirectory into subdirectory' \
+ 'git-mv path2 path1'
+
+test_expect_success \
+ 'committing the change' \
+ 'git-commit -m dir-move -a'
+
+test_expect_success \
+ 'checking the commit' \
+ 'git-diff-tree -r -M --name-status HEAD^ HEAD | \
+ grep -E "^R100.+path2/COPYING.+path1/path2/COPYING" &&
+ git-diff-tree -r -M --name-status HEAD^ HEAD | \
+ grep -E "^R100.+path2/README.+path1/path2/README"'
+
+test_expect_failure \
+ 'do not move directory over existing directory' \
+ 'mkdir path0 && mkdir path0/path2 && git-mv path2 path0'
+
test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+test_description='git grep various.
+'
+
+. ./test-lib.sh
+
+test_expect_success setup '
+ {
+ echo foo mmap bar
+ echo foo_mmap bar
+ echo foo_mmap bar mmap
+ echo foo mmap bar_mmap
+ echo foo_mmap bar mmap baz
+ } >file &&
+ echo x x xx x >x &&
+ echo y yy >y &&
+ echo zzz > z &&
+ mkdir t &&
+ echo test >t/t &&
+ git add file x y z t/t &&
+ git commit -m initial
+'
+
+for H in HEAD ''
+do
+ case "$H" in
+ HEAD) HC='HEAD:' L='HEAD' ;;
+ '') HC= L='in working tree' ;;
+ esac
+
+ test_expect_success "grep -w $L" '
+ {
+ echo ${HC}file:1:foo mmap bar
+ echo ${HC}file:3:foo_mmap bar mmap
+ echo ${HC}file:4:foo mmap bar_mmap
+ echo ${HC}file:5:foo_mmap bar mmap baz
+ } >expected &&
+ git grep -n -w -e mmap $H >actual &&
+ diff expected actual
+ '
+
+ test_expect_success "grep -w $L (x)" '
+ {
+ echo ${HC}x:1:x x xx x
+ } >expected &&
+ git grep -n -w -e "x xx* x" $H >actual &&
+ diff expected actual
+ '
+
+ test_expect_success "grep -w $L (y-1)" '
+ {
+ echo ${HC}y:1:y yy
+ } >expected &&
+ git grep -n -w -e "^y" $H >actual &&
+ diff expected actual
+ '
+
+ test_expect_success "grep -w $L (y-2)" '
+ : >expected &&
+ if git grep -n -w -e "^y y" $H >actual
+ then
+ echo should not have matched
+ cat actual
+ false
+ else
+ diff expected actual
+ fi
+ '
+
+ test_expect_success "grep -w $L (z)" '
+ : >expected &&
+ if git grep -n -w -e "^z" $H >actual
+ then
+ echo should not have matched
+ cat actual
+ false
+ else
+ diff expected actual
+ fi
+ '
+
+ test_expect_success "grep $L (t-1)" '
+ echo "${HC}t/t:1:test" >expected &&
+ git grep -n -e test $H >actual &&
+ diff expected actual
+ '
+
+ test_expect_success "grep $L (t-2)" '
+ echo "${HC}t:1:test" >expected &&
+ (
+ cd t &&
+ git grep -n -e test $H
+ ) >actual &&
+ diff expected actual
+ '
+
+ test_expect_success "grep $L (t-3)" '
+ echo "${HC}t/t:1:test" >expected &&
+ (
+ cd t &&
+ git grep --full-name -n -e test $H
+ ) >actual &&
+ diff expected actual
+ '
+
+done
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Junio C Hamano
+#
+
+test_description='git-checkout tests.'
+
+. ./test-lib.sh
+
+fill () {
+ for i
+ do
+ echo "$i"
+ done
+}
+
+test_expect_success setup '
+
+ fill 1 2 3 4 5 >one &&
+ fill a b c d e >two &&
+ git add one two &&
+ git commit -m "Initial A one, A two" &&
+
+ git checkout -b side &&
+ fill 1 2 3 >one &&
+ fill A B C D E >three &&
+ rm -f two &&
+ git update-index --add --remove one two three &&
+ git commit -m "Side M one, D two, A three" &&
+
+ git checkout master
+'
+
+test_expect_success "checkout with dirty tree without -m" '
+
+ fill 0 1 2 3 4 5 >one &&
+ if git checkout side
+ then
+ echo Not happy
+ false
+ else
+ echo "happy - failed correctly"
+ fi
+
+'
+
+test_expect_success "checkout -m with dirty tree" '
+
+ git checkout -f master &&
+ git clean &&
+
+ fill 0 1 2 3 4 5 >one &&
+ git checkout -m side &&
+
+ fill " master" "* side" >expect.branch &&
+ git branch >current.branch &&
+ diff expect.branch current.branch &&
+
+ fill "M one" "A three" "D two" >expect.master &&
+ git diff --name-status master >current.master &&
+ diff expect.master current.master &&
+
+ fill "M one" >expect.side &&
+ git diff --name-status side >current.side &&
+ diff expect.side current.side &&
+
+ : >expect.index &&
+ git diff --cached >current.index &&
+ diff expect.index current.index
+'
+
+test_done
PROG='git annotate'
. ../annotate-tests.sh
+test_expect_success \
+ 'Annotating an old revision works' \
+ '[ $(git annotate file master | awk "{print \$3}" | grep -c "^A$") -eq 2 ] && \
+ [ $(git annotate file master | awk "{print \$3}" | grep -c "^B$") -eq 2 ]'
+
+
test_done
git add fake.sendmail
GIT_AUTHOR_NAME="A" git commit -a -m "Second."'
-test_expect_success \
- 'Extract patches and send' \
- 'git format-patch -n HEAD^1
- git send-email -from="Example <nobody@example.com>" --to=nobody@example.com --smtp-server="$(pwd)/fake.sendmail" ./0001*txt'
+test_expect_success 'Extract patches' '
+ patches=`git format-patch -n HEAD^1`
+'
+
+test_expect_success 'Send patches' '
+ git send-email -from="Example <nobody@example.com>" --to=nobody@example.com --smtp-server="$(pwd)/fake.sendmail" $patches 2>errors
+'
cat >expected <<\EOF
!nobody@example.com!
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+
+test_description='git-svn basic tests'
+GIT_SVN_LC_ALL=$LC_ALL
+
+case "$LC_ALL" in
+*.UTF-8)
+ have_utf8=t
+ ;;
+*)
+ have_utf8=
+ ;;
+esac
+
+. ./lib-git-svn.sh
+
+echo 'define NO_SVN_TESTS to skip git-svn tests'
+
+mkdir import
+cd import
+
+echo foo > foo
+if test -z "$NO_SYMLINK"
+then
+ ln -s foo foo.link
+fi
+mkdir -p dir/a/b/c/d/e
+echo 'deep dir' > dir/a/b/c/d/e/file
+mkdir -p bar
+echo 'zzz' > bar/zzz
+echo '#!/bin/sh' > exec.sh
+chmod +x exec.sh
+svn import -m 'import for git-svn' . "$svnrepo" >/dev/null
+
+cd ..
+rm -rf import
+
+test_expect_success \
+ 'initialize git-svn' \
+ "git-svn init $svnrepo"
+
+test_expect_success \
+ 'import an SVN revision into git' \
+ 'git-svn fetch'
+
+test_expect_success "checkout from svn" "svn co $svnrepo $SVN_TREE"
+
+name='try a deep --rmdir with a commit'
+git checkout -f -b mybranch remotes/git-svn
+mv dir/a/b/c/d/e/file dir/file
+cp dir/file file
+git update-index --add --remove dir/a/b/c/d/e/file dir/file file
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch &&
+ svn up $SVN_TREE &&
+ test -d $SVN_TREE/dir && test ! -d $SVN_TREE/dir/a"
+
+
+name='detect node change from file to directory #1'
+mkdir dir/new_file
+mv dir/file dir/new_file/file
+mv dir/new_file dir/file
+git update-index --remove dir/file
+git update-index --add dir/file/file
+git commit -m "$name"
+
+test_expect_failure "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch' \
+ || true
+
+
+name='detect node change from directory to file #1'
+rm -rf dir $GIT_DIR/index
+git checkout -f -b mybranch2 remotes/git-svn
+mv bar/zzz zzz
+rm -rf bar
+mv zzz bar
+git update-index --remove -- bar/zzz
+git update-index --add -- bar
+git commit -m "$name"
+
+test_expect_failure "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch2' \
+ || true
+
+
+name='detect node change from file to directory #2'
+rm -f $GIT_DIR/index
+git checkout -f -b mybranch3 remotes/git-svn
+rm bar/zzz
+git-update-index --remove bar/zzz
+mkdir bar/zzz
+echo yyy > bar/zzz/yyy
+git-update-index --add bar/zzz/yyy
+git commit -m "$name"
+
+test_expect_failure "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch3' \
+ || true
+
+
+name='detect node change from directory to file #2'
+rm -f $GIT_DIR/index
+git checkout -f -b mybranch4 remotes/git-svn
+rm -rf dir
+git update-index --remove -- dir/file
+touch dir
+echo asdf > dir
+git update-index --add -- dir
+git commit -m "$name"
+
+test_expect_failure "$name" \
+ 'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch4' \
+ || true
+
+
+name='remove executable bit from a file'
+rm -f $GIT_DIR/index
+git checkout -f -b mybranch5 remotes/git-svn
+chmod -x exec.sh
+git update-index exec.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ svn up $SVN_TREE &&
+ test ! -x $SVN_TREE/exec.sh"
+
+
+name='add executable bit back to file'
+chmod +x exec.sh
+git update-index exec.sh
+git commit -m "$name"
+
+test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ svn up $SVN_TREE &&
+ test -x $SVN_TREE/exec.sh"
+
+
+
+if test -z "$NO_SYMLINK"
+then
+ name='executable file becomes a symlink to bar/zzz (file)'
+ rm exec.sh
+ ln -s bar/zzz exec.sh
+ git update-index exec.sh
+ git commit -m "$name"
+
+ test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ svn up $SVN_TREE &&
+ test -L $SVN_TREE/exec.sh"
+
+ name='new symlink is added to a file that was also just made executable'
+ chmod +x bar/zzz
+ ln -s bar/zzz exec-2.sh
+ git update-index --add bar/zzz exec-2.sh
+ git commit -m "$name"
+
+ test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ svn up $SVN_TREE &&
+ test -x $SVN_TREE/bar/zzz &&
+ test -L $SVN_TREE/exec-2.sh"
+
+ name='modify a symlink to become a file'
+ echo git help > help || true
+ rm exec-2.sh
+ cp help exec-2.sh
+ git update-index exec-2.sh
+ git commit -m "$name"
+
+ test_expect_success "$name" \
+ "git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
+ svn up $SVN_TREE &&
+ test -f $SVN_TREE/exec-2.sh &&
+ test ! -L $SVN_TREE/exec-2.sh &&
+ diff -u help $SVN_TREE/exec-2.sh"
+fi
+
+
+if test "$have_utf8" = t
+then
+ name="commit with UTF-8 message: locale: $GIT_SVN_LC_ALL"
+ echo '# hello' >> exec-2.sh
+ git update-index exec-2.sh
+ git commit -m 'éï∏'
+ export LC_ALL="$GIT_SVN_LC_ALL"
+ test_expect_success "$name" "git-svn commit HEAD"
+ unset LC_ALL
+else
+ echo "UTF-8 locale not set, test skipped ($GIT_SVN_LC_ALL)"
+fi
+
+name='test fetch functionality (svn => git) with alternate GIT_SVN_ID'
+GIT_SVN_ID=alt
+export GIT_SVN_ID
+test_expect_success "$name" \
+ "git-svn init $svnrepo && git-svn fetch &&
+ git-rev-list --pretty=raw remotes/git-svn | grep ^tree | uniq > a &&
+ git-rev-list --pretty=raw remotes/alt | grep ^tree | uniq > b &&
+ diff -u a b"
+
+if test -n "$NO_SYMLINK"
+then
+ test_done
+ exit 0
+fi
+
+name='check imported tree checksums against expected tree checksums'
+rm -f expected
+if test "$have_utf8" = t
+then
+ echo tree bf522353586b1b883488f2bc73dab0d9f774b9a9 > expected
+fi
+cat >> expected <<\EOF
+tree 83654bb36f019ae4fe77a0171f81075972087624
+tree 031b8d557afc6fea52894eaebb45bec52f1ba6d1
+tree 0b094cbff17168f24c302e297f55bfac65eb8bd3
+tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
+tree 56a30b966619b863674f5978696f4a3594f2fca9
+tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
+tree 8f51f74cf0163afc9ad68a4b1537288c4558b5a4
+EOF
+test_expect_success "$name" "diff -u a expected"
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+
+test_description='git-svn property tests'
+. ./lib-git-svn.sh
+
+mkdir import
+
+a_crlf=
+a_lf=
+a_cr=
+a_ne_crlf=
+a_ne_lf=
+a_ne_cr=
+a_empty=
+a_empty_lf=
+a_empty_cr=
+a_empty_crlf=
+
+cd import
+ cat >> kw.c <<\EOF
+/* Somebody prematurely put a keyword into this file */
+/* $Id$ */
+EOF
+
+ printf "Hello\r\nWorld\r\n" > crlf
+ a_crlf=`git-hash-object -w crlf`
+ printf "Hello\rWorld\r" > cr
+ a_cr=`git-hash-object -w cr`
+ printf "Hello\nWorld\n" > lf
+ a_lf=`git-hash-object -w lf`
+
+ printf "Hello\r\nWorld" > ne_crlf
+ a_ne_crlf=`git-hash-object -w ne_crlf`
+ printf "Hello\nWorld" > ne_lf
+ a_ne_lf=`git-hash-object -w ne_lf`
+ printf "Hello\rWorld" > ne_cr
+ a_ne_cr=`git-hash-object -w ne_cr`
+
+ touch empty
+ a_empty=`git-hash-object -w empty`
+ printf "\n" > empty_lf
+ a_empty_lf=`git-hash-object -w empty_lf`
+ printf "\r" > empty_cr
+ a_empty_cr=`git-hash-object -w empty_cr`
+ printf "\r\n" > empty_crlf
+ a_empty_crlf=`git-hash-object -w empty_crlf`
+
+ svn import -m 'import for git-svn' . "$svnrepo" >/dev/null
+cd ..
+
+rm -rf import
+test_expect_success 'checkout working copy from svn' "svn co $svnrepo test_wc"
+test_expect_success 'setup some commits to svn' \
+ 'cd test_wc &&
+ echo Greetings >> kw.c &&
+ svn commit -m "Not yet an Id" &&
+ svn up &&
+ echo Hello world >> kw.c &&
+ svn commit -m "Modified file, but still not yet an Id" &&
+ svn up &&
+ svn propset svn:keywords Id kw.c &&
+ svn commit -m "Propset Id" &&
+ svn up &&
+ cd ..'
+
+test_expect_success 'initialize git-svn' "git-svn init $svnrepo"
+test_expect_success 'fetch revisions from svn' 'git-svn fetch'
+
+name='test svn:keywords ignoring'
+test_expect_success "$name" \
+ 'git checkout -b mybranch remotes/git-svn &&
+ echo Hi again >> kw.c &&
+ git commit -a -m "test keywoards ignoring" &&
+ git-svn commit remotes/git-svn..mybranch &&
+ git pull . remotes/git-svn'
+
+expect='/* $Id$ */'
+got="`sed -ne 2p kw.c`"
+test_expect_success 'raw $Id$ found in kw.c' "test '$expect' = '$got'"
+
+test_expect_success "propset CR on crlf files" \
+ 'cd test_wc &&
+ svn propset svn:eol-style CR empty &&
+ svn propset svn:eol-style CR crlf &&
+ svn propset svn:eol-style CR ne_crlf &&
+ svn commit -m "propset CR on crlf files" &&
+ svn up &&
+ cd ..'
+
+test_expect_success 'fetch and pull latest from svn and checkout a new wc' \
+ "git-svn fetch &&
+ git pull . remotes/git-svn &&
+ svn co $svnrepo new_wc"
+
+for i in crlf ne_crlf lf ne_lf cr ne_cr empty_cr empty_lf empty empty_crlf
+do
+ test_expect_success "Comparing $i" "cmp $i new_wc/$i"
+done
+
+
+cd test_wc
+ printf '$Id$\rHello\rWorld\r' > cr
+ printf '$Id$\rHello\rWorld' > ne_cr
+ a_cr=`printf '$Id$\r\nHello\r\nWorld\r\n' | git-hash-object --stdin`
+ a_ne_cr=`printf '$Id$\r\nHello\r\nWorld' | git-hash-object --stdin`
+ test_expect_success 'Set CRLF on cr files' \
+ 'svn propset svn:eol-style CRLF cr &&
+ svn propset svn:eol-style CRLF ne_cr &&
+ svn propset svn:keywords Id cr &&
+ svn propset svn:keywords Id ne_cr &&
+ svn commit -m "propset CRLF on cr files" &&
+ svn up'
+cd ..
+test_expect_success 'fetch and pull latest from svn' \
+ 'git-svn fetch && git pull . remotes/git-svn'
+
+b_cr="`git-hash-object cr`"
+b_ne_cr="`git-hash-object ne_cr`"
+
+test_expect_success 'CRLF + $Id$' "test '$a_cr' = '$b_cr'"
+test_expect_success 'CRLF + $Id$ (no newline)' "test '$a_ne_cr' = '$b_ne_cr'"
+
+test_done
--- /dev/null
+test_description='git-svn rmdir'
+. ./lib-git-svn.sh
+
+test_expect_success 'initialize repo' "
+ mkdir import &&
+ cd import &&
+ mkdir -p deeply/nested/directory/number/1 &&
+ mkdir -p deeply/nested/directory/number/2 &&
+ echo foo > deeply/nested/directory/number/1/file &&
+ echo foo > deeply/nested/directory/number/2/another &&
+ svn import -m 'import for git-svn' . $svnrepo &&
+ cd ..
+ "
+
+test_expect_success 'mirror via git-svn' "
+ git-svn init $svnrepo &&
+ git-svn fetch &&
+ git checkout -f -b test-rmdir remotes/git-svn
+ "
+
+test_expect_success 'Try a commit on rmdir' "
+ git rm -f deeply/nested/directory/number/2/another &&
+ git commit -a -m 'remove another' &&
+ git-svn commit --rmdir HEAD &&
+ svn ls -R $svnrepo | grep ^deeply/nested/directory/number/1
+ "
+
+
+test_done
--- /dev/null
+test_description='git-svn graft-branches'
+. ./lib-git-svn.sh
+
+test_expect_success 'initialize repo' "
+ mkdir import &&
+ cd import &&
+ mkdir -p trunk branches tags &&
+ echo hello > trunk/readme &&
+ svn import -m 'import for git-svn' . $svnrepo &&
+ cd .. &&
+ svn cp -m 'tag a' $svnrepo/trunk $svnrepo/tags/a &&
+ svn cp -m 'branch a' $svnrepo/trunk $svnrepo/branches/a &&
+ svn co $svnrepo wc &&
+ cd wc &&
+ echo feedme >> branches/a/readme &&
+ svn commit -m hungry &&
+ svn up &&
+ cd trunk &&
+ svn merge -r3:4 $svnrepo/branches/a &&
+ svn commit -m 'merge with a' &&
+ cd ../.. &&
+ svn log -v $svnrepo &&
+ git-svn init -i trunk $svnrepo/trunk &&
+ git-svn init -i a $svnrepo/branches/a &&
+ git-svn init -i tags/a $svnrepo/tags/a &&
+ git-svn fetch -i tags/a &&
+ git-svn fetch -i a &&
+ git-svn fetch -i trunk
+ "
+
+r1=`git-rev-list remotes/trunk | tail -n1`
+r2=`git-rev-list remotes/tags/a | tail -n1`
+r3=`git-rev-list remotes/a | tail -n1`
+r4=`git-rev-list remotes/a | head -n1`
+r5=`git-rev-list remotes/trunk | head -n1`
+
+test_expect_success 'test graft-branches regexes and copies' "
+ test -n "$r1" &&
+ test -n "$r2" &&
+ test -n "$r3" &&
+ test -n "$r4" &&
+ test -n "$r5" &&
+ git-svn graft-branches &&
+ grep '^$r2 $r1' $GIT_DIR/info/grafts &&
+ grep '^$r3 $r1' $GIT_DIR/info/grafts &&
+ grep '^$r5 ' $GIT_DIR/info/grafts | grep '$r4' | grep '$r1'
+ "
+
+test_debug 'gitk --all & sleep 1'
+
+test_expect_success 'test graft-branches with tree-joins' "
+ rm $GIT_DIR/info/grafts &&
+ git-svn graft-branches --no-default-regex --no-graft-copy -B &&
+ grep '^$r3 ' $GIT_DIR/info/grafts | grep '$r1' | grep '$r2' &&
+ grep '^$r2 $r1' $GIT_DIR/info/grafts &&
+ grep '^$r5 ' $GIT_DIR/info/grafts | grep '$r1' | grep '$r4'
+ "
+
+# the result of this is kinda funky; we end up with a strange history,
+# but this is just a test :)
+test_debug 'gitk --all &'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+#
+
+test_description='git-svn --follow-parent fetching'
+. ./lib-git-svn.sh
+
+if test -n "$GIT_SVN_NO_LIB" && test "$GIT_SVN_NO_LIB" -ne 0
+then
+ echo 'Skipping: --follow-parent needs SVN libraries'
+ test_done
+ exit 0
+fi
+
+test_expect_success 'initialize repo' "
+ mkdir import &&
+ cd import &&
+ mkdir -p trunk &&
+ echo hello > trunk/readme &&
+ svn import -m 'initial' . $svnrepo &&
+ cd .. &&
+ svn co $svnrepo wc &&
+ cd wc &&
+ echo world >> trunk/readme &&
+ svn commit -m 'another commit' &&
+ svn up &&
+ svn mv -m 'rename to thunk' trunk thunk &&
+ svn up &&
+ echo goodbye >> thunk/readme &&
+ svn commit -m 'bye now' &&
+ cd ..
+ "
+
+test_expect_success 'init and fetch --follow-parent a moved directory' "
+ git-svn init -i thunk $svnrepo/thunk &&
+ git-svn fetch --follow-parent -i thunk &&
+ git-rev-parse --verify refs/remotes/trunk &&
+ test '$?' -eq '0'
+ "
+
+test_debug 'gitk --all &'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2006 Eric Wong
+test_description='git-svn commit-diff'
+. ./lib-git-svn.sh
+
+if test -n "$GIT_SVN_NO_LIB" && test "$GIT_SVN_NO_LIB" -ne 0
+then
+ echo 'Skipping: commit-diff needs SVN libraries'
+ test_done
+ exit 0
+fi
+
+test_expect_success 'initialize repo' "
+ mkdir import &&
+ cd import &&
+ echo hello > readme &&
+ svn import -m 'initial' . $svnrepo &&
+ cd .. &&
+ echo hello > readme &&
+ git update-index --add readme &&
+ git commit -a -m 'initial' &&
+ echo world >> readme &&
+ git commit -a -m 'another'
+ "
+
+head=`git rev-parse --verify HEAD^0`
+prev=`git rev-parse --verify HEAD^1`
+
+# the internals of the commit-diff command are the same as the regular
+# commit, so only a basic test of functionality is needed since we've
+# already tested commit extensively elsewhere
+
+test_expect_success 'test the commit-diff command' "
+ test -n '$prev' && test -n '$head' &&
+ git-svn commit-diff '$prev' '$head' '$svnrepo' &&
+ svn co $svnrepo wc &&
+ cmp readme wc/readme
+ "
+
+test_done
PAGER=cat
TZ=UTC
export LANG LC_ALL PAGER TZ
+EDITOR=:
+VISUAL=:
unset AUTHOR_DATE
unset AUTHOR_EMAIL
unset AUTHOR_NAME
unset GIT_EXTERNAL_DIFF
unset GIT_INDEX_FILE
unset GIT_OBJECT_DIRECTORY
+unset GIT_TRACE
unset SHA1_FILE_DIRECTORIES
unset SHA1_FILE_DIRECTORY
export GIT_AUTHOR_EMAIL GIT_AUTHOR_NAME
export GIT_COMMITTER_EMAIL GIT_COMMITTER_NAME
+export EDITOR VISUAL
# Each test should start with something like this, after copyright notices:
#
struct object *deref_tag(struct object *o, const char *warn, int warnlen)
{
- while (o && o->type == TYPE_TAG)
+ while (o && o->type == OBJ_TAG)
o = parse_object(((struct tag *)o)->tagged->sha1);
if (!o && warn) {
if (!warnlen)
if (!obj) {
struct tag *ret = alloc_tag_node();
created_object(sha1, &ret->object);
- ret->object.type = TYPE_TAG;
+ ret->object.type = OBJ_TAG;
return ret;
}
if (!obj->type)
- obj->type = TYPE_TAG;
- if (obj->type != TYPE_TAG) {
+ obj->type = OBJ_TAG;
+ if (obj->type != OBJ_TAG) {
error("Object %s is a %s, not a tree",
sha1_to_hex(sha1), typename(obj->type));
return NULL;
template_dir ?= $(prefix)/share/git-core/templates/
# DESTDIR=
-# Shell quote;
-# Result of this needs to be placed inside ''
-shq = $(subst ','\'',$(1))
-# This has surrounding ''
-shellquote = '$(call shq,$(1))'
+# Shell quote (do not use $(call) to accommodate ancient setups);
+DESTDIR_SQ = $(subst ','\'',$(DESTDIR))
+template_dir_SQ = $(subst ','\'',$(template_dir))
all: boilerplates.made custom
rm -rf blt boilerplates.made
install: all
- $(INSTALL) -d -m755 $(call shellquote,$(DESTDIR)$(template_dir))
+ $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(template_dir_SQ)'
(cd blt && $(TAR) cf - .) | \
- (cd $(call shellquote,$(DESTDIR)$(template_dir)) && $(TAR) xf -)
+ (cd '$(DESTDIR_SQ)$(template_dir_SQ)' && $(TAR) xf -)
echo "Changes since $prev:"
git rev-list --pretty $prev..$3 | $short
echo ---
- git diff $prev..$3 | diffstat -p1
+ git diff --stat $prev..$3
echo ---
fi
;;
base=$(git-merge-base "$2" "$3")
case "$base" in
"$2")
- git diff "$3" "^$base" | diffstat -p1
+ git diff --stat "$3" "^$base"
echo
echo "New commits:"
;;
--- /dev/null
+#include "cache.h"
+
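+/*
+ * Read everything on stdin in large chunks (the chunk size in MiB may
+ * be given as the first argument), feed it through SHA1_Update(), and
+ * print the resulting hex digest.  The accompanying shell script runs
+ * this against known inputs and compares the output to precomputed
+ * digests.
+ */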
+int main(int ac, char **av)
+{
+ SHA_CTX ctx;
+ unsigned char sha1[20];
+ unsigned bufsz = 8192;
+ char *buffer;
+
+ if (ac == 2)
+ bufsz = strtoul(av[1], NULL, 10) * 1024 * 1024;
+
+ if (!bufsz)
+ bufsz = 8192;
+
+ while ((buffer = malloc(bufsz)) == NULL) {
+ fprintf(stderr, "bufsz %u is too big, halving...\n", bufsz);
+ bufsz /= 2;
+ if (bufsz < 1024)
+ die("OOPS");
+ }
+
+ SHA1_Init(&ctx);
+
+ while (1) {
+ ssize_t sz, this_sz;
+ char *cp = buffer;
+ unsigned room = bufsz;
+ this_sz = 0;
+ while (room) {
+ sz = xread(0, cp, room);
+ if (sz == 0)
+ break;
+ if (sz < 0)
+ die("test-sha1: %s", strerror(errno));
+ this_sz += sz;
+ cp += sz;
+ room -= sz;
+ }
+ if (this_sz == 0)
+ break;
+ SHA1_Update(&ctx, buffer, this_sz);
+ }
+ SHA1_Final(sha1, &ctx);
+ puts(sha1_to_hex(sha1));
+ exit(0);
+}
--- /dev/null
+#!/bin/sh
+
+dd if=/dev/zero bs=1048576 count=100 2>/dev/null |
+/usr/bin/time ./test-sha1 >/dev/null
+
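+# Each non-comment line below gives "expected-sha1 megabytes prefix":
+# the input is the optional prefix line followed by that many MiB of
+# 'g' bytes (NULs from /dev/zero run through tr), and the output of
+# ./test-sha1 must match the expected digest.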
+while read expect cnt pfx
+do
+ case "$expect" in '#'*) continue ;; esac
+ actual=`
+ {
+ test -z "$pfx" || echo "$pfx"
+ dd if=/dev/zero bs=1048576 count=$cnt 2>/dev/null |
+ tr '[\0]' '[g]'
+ } | ./test-sha1 $cnt
+ `
+ if test "$expect" = "$actual"
+ then
+ echo "OK: $expect $cnt $pfx"
+ else
+ echo >&2 "OOPS: $cnt"
+ echo >&2 "expect: $expect"
+ echo >&2 "actual: $actual"
+ exit 1
+ fi
+done <<EOF
+da39a3ee5e6b4b0d3255bfef95601890afd80709 0
+3f786850e387550fdab836ed7e6dc881de23001b 0 a
+5277cbb45a15902137d332d97e89cf8136545485 0 ab
+03cfd743661f07975fa2f1220c5194cbaff48451 0 abc
+3330b4373640f9e4604991e73c7e86bfd8da2dc3 0 abcd
+ec11312386ad561674f724b8cca7cf1796e26d1d 0 abcde
+bdc37c074ec4ee6050d68bc133c6b912f36474df 0 abcdef
+69bca99b923859f2dc486b55b87f49689b7358c7 0 abcdefg
+e414af7161c9554089f4106d6f1797ef14a73666 0 abcdefgh
+0707f2970043f9f7c22029482db27733deaec029 0 abcdefghi
+a4dd8aa74a5636728fe52451636e2e17726033aa 1
+9986b45e2f4d7086372533bb6953a8652fa3644a 1 frotz
+23d8d4f788e8526b4877548a32577543cbaaf51f 10
+8cd23f822ab44c7f481b8c92d591f6d1fcad431c 10 frotz
+f3b5604a4e604899c1233edb3bf1cc0ede4d8c32 512
+b095bd837a371593048136e429e9ac4b476e1bb3 512 frotz
+08fa81d6190948de5ccca3966340cc48c10cceac 1200 xyzzy
+e33a291f42c30a159733dd98b8b3e4ff34158ca0 4090 4G
+#a3bf783bc20caa958f6cb24dd140a7b21984838d 9999 nitfol
+EOF
+
+exit
+
+# generating test vectors
+# inputs are a number of megabytes followed by an optional prefix string.
+
+while read cnt pfx
+do
+ actual=`
+ {
+ test -z "$pfx" || echo "$pfx"
+ dd if=/dev/zero bs=1048576 count=$cnt 2>/dev/null |
+ tr '[\0]' '[g]'
+ } | sha1sum |
+ sed -e 's/ .*//'
+ `
+ echo "$actual $cnt $pfx"
+done <<EOF
+0
+0 a
+0 ab
+0 abc
+0 abcd
+0 abcde
+0 abcdef
+0 abcdefg
+0 abcdefgh
+0 abcdefghi
+1
+1 frotz
+10
+10 frotz
+512
+512 frotz
+1200 xyzzy
+4090 4G
+9999 nitfol
+EOF
if (!obj) {
struct tree *ret = alloc_tree_node();
created_object(sha1, &ret->object);
- ret->object.type = TYPE_TREE;
+ ret->object.type = OBJ_TREE;
return ret;
}
if (!obj->type)
- obj->type = TYPE_TREE;
- if (obj->type != TYPE_TREE) {
+ obj->type = OBJ_TREE;
+ if (obj->type != OBJ_TREE) {
error("Object %s is a %s, not a tree",
sha1_to_hex(sha1), typename(obj->type));
return NULL;
do {
if (!obj)
return NULL;
- if (obj->type == TYPE_TREE)
+ if (obj->type == OBJ_TREE)
return (struct tree *) obj;
- else if (obj->type == TYPE_COMMIT)
+ else if (obj->type == OBJ_COMMIT)
obj = &(((struct commit *) obj)->tree->object);
- else if (obj->type == TYPE_TAG)
+ else if (obj->type == OBJ_TAG)
obj = ((struct tag *) obj)->tagged;
else
return NULL;
+++ /dev/null
-#include "cache.h"
-#include "object.h"
-#include "delta.h"
-#include "pack.h"
-#include "blob.h"
-#include "commit.h"
-#include "tag.h"
-#include "tree.h"
-
-#include <sys/time.h>
-
-static int dry_run, quiet;
-static const char unpack_usage[] = "git-unpack-objects [-n] [-q] < pack-file";
-
-/* We always read in 4kB chunks. */
-static unsigned char buffer[4096];
-static unsigned long offset, len, eof;
-static SHA_CTX ctx;
-
-/*
- * Make sure at least "min" bytes are available in the buffer, and
- * return the pointer to the buffer.
- */
-static void * fill(int min)
-{
- if (min <= len)
- return buffer + offset;
- if (eof)
- die("unable to fill input");
- if (min > sizeof(buffer))
- die("cannot fill %d bytes", min);
- if (offset) {
- SHA1_Update(&ctx, buffer, offset);
- memcpy(buffer, buffer + offset, len);
- offset = 0;
- }
- do {
- int ret = xread(0, buffer + len, sizeof(buffer) - len);
- if (ret <= 0) {
- if (!ret)
- die("early EOF");
- die("read error on input: %s", strerror(errno));
- }
- len += ret;
- } while (len < min);
- return buffer;
-}
-
-static void use(int bytes)
-{
- if (bytes > len)
- die("used more bytes than were available");
- len -= bytes;
- offset += bytes;
-}
-
-static void *get_data(unsigned long size)
-{
- z_stream stream;
- void *buf = xmalloc(size);
-
- memset(&stream, 0, sizeof(stream));
-
- stream.next_out = buf;
- stream.avail_out = size;
- stream.next_in = fill(1);
- stream.avail_in = len;
- inflateInit(&stream);
-
- for (;;) {
- int ret = inflate(&stream, 0);
- use(len - stream.avail_in);
- if (stream.total_out == size && ret == Z_STREAM_END)
- break;
- if (ret != Z_OK)
- die("inflate returned %d\n", ret);
- stream.next_in = fill(1);
- stream.avail_in = len;
- }
- inflateEnd(&stream);
- return buf;
-}
-
-struct delta_info {
- unsigned char base_sha1[20];
- unsigned long size;
- void *delta;
- struct delta_info *next;
-};
-
-static struct delta_info *delta_list;
-
-static void add_delta_to_list(unsigned char *base_sha1, void *delta, unsigned long size)
-{
- struct delta_info *info = xmalloc(sizeof(*info));
-
- memcpy(info->base_sha1, base_sha1, 20);
- info->size = size;
- info->delta = delta;
- info->next = delta_list;
- delta_list = info;
-}
-
-static void added_object(unsigned char *sha1, const char *type, void *data, unsigned long size);
-
-static void write_object(void *buf, unsigned long size, const char *type)
-{
- unsigned char sha1[20];
- if (write_sha1_file(buf, size, type, sha1) < 0)
- die("failed to write object");
- added_object(sha1, type, buf, size);
-}
-
-static int resolve_delta(const char *type,
- void *base, unsigned long base_size,
- void *delta, unsigned long delta_size)
-{
- void *result;
- unsigned long result_size;
-
- result = patch_delta(base, base_size,
- delta, delta_size,
- &result_size);
- if (!result)
- die("failed to apply delta");
- free(delta);
- write_object(result, result_size, type);
- free(result);
- return 0;
-}
-
-static void added_object(unsigned char *sha1, const char *type, void *data, unsigned long size)
-{
- struct delta_info **p = &delta_list;
- struct delta_info *info;
-
- while ((info = *p) != NULL) {
- if (!memcmp(info->base_sha1, sha1, 20)) {
- *p = info->next;
- p = &delta_list;
- resolve_delta(type, data, size, info->delta, info->size);
- free(info);
- continue;
- }
- p = &info->next;
- }
-}
-
-static int unpack_non_delta_entry(enum object_type kind, unsigned long size)
-{
- void *buf = get_data(size);
- const char *type;
-
- switch (kind) {
- case OBJ_COMMIT: type = commit_type; break;
- case OBJ_TREE: type = tree_type; break;
- case OBJ_BLOB: type = blob_type; break;
- case OBJ_TAG: type = tag_type; break;
- default: die("bad type %d", kind);
- }
- if (!dry_run)
- write_object(buf, size, type);
- free(buf);
- return 0;
-}
-
-static int unpack_delta_entry(unsigned long delta_size)
-{
- void *delta_data, *base;
- unsigned long base_size;
- char type[20];
- unsigned char base_sha1[20];
- int result;
-
- memcpy(base_sha1, fill(20), 20);
- use(20);
-
- delta_data = get_data(delta_size);
- if (dry_run) {
- free(delta_data);
- return 0;
- }
-
- if (!has_sha1_file(base_sha1)) {
- add_delta_to_list(base_sha1, delta_data, delta_size);
- return 0;
- }
- base = read_sha1_file(base_sha1, type, &base_size);
- if (!base)
- die("failed to read delta-pack base object %s", sha1_to_hex(base_sha1));
- result = resolve_delta(type, base, base_size, delta_data, delta_size);
- free(base);
- return result;
-}
-
-static void unpack_one(unsigned nr, unsigned total)
-{
- unsigned shift;
- unsigned char *pack, c;
- unsigned long size;
- enum object_type type;
-
- pack = fill(1);
- c = *pack;
- use(1);
- type = (c >> 4) & 7;
- size = (c & 15);
- shift = 4;
- while (c & 0x80) {
- pack = fill(1);
- c = *pack++;
- use(1);
- size += (c & 0x7f) << shift;
- shift += 7;
- }
- if (!quiet) {
- static unsigned long last_sec;
- static unsigned last_percent;
- struct timeval now;
- unsigned percentage = (nr * 100) / total;
-
- gettimeofday(&now, NULL);
- if (percentage != last_percent || now.tv_sec != last_sec) {
- last_sec = now.tv_sec;
- last_percent = percentage;
- fprintf(stderr, "%4u%% (%u/%u) done\r", percentage, nr, total);
- }
- }
- switch (type) {
- case OBJ_COMMIT:
- case OBJ_TREE:
- case OBJ_BLOB:
- case OBJ_TAG:
- unpack_non_delta_entry(type, size);
- return;
- case OBJ_DELTA:
- unpack_delta_entry(size);
- return;
- default:
- die("bad object type %d", type);
- }
-}
-
-/*
- * We unpack from the end, older files first. Now, usually
- * there are deltas etc, so we'll not actually write the
- * objects in that order, but we might as well try..
- */
-static void unpack_all(void)
-{
- int i;
- struct pack_header *hdr = fill(sizeof(struct pack_header));
- unsigned nr_objects = ntohl(hdr->hdr_entries);
-
- if (ntohl(hdr->hdr_signature) != PACK_SIGNATURE)
- die("bad pack file");
- if (!pack_version_ok(hdr->hdr_version))
- die("unknown pack file version %d", ntohl(hdr->hdr_version));
- fprintf(stderr, "Unpacking %d objects\n", nr_objects);
-
- use(sizeof(struct pack_header));
- for (i = 0; i < nr_objects; i++)
- unpack_one(i+1, nr_objects);
- if (delta_list)
- die("unresolved deltas left after unpacking");
-}
-
-int main(int argc, char **argv)
-{
- int i;
- unsigned char sha1[20];
-
- setup_git_directory();
-
- quiet = !isatty(2);
-
- for (i = 1 ; i < argc; i++) {
- const char *arg = argv[i];
-
- if (*arg == '-') {
- if (!strcmp(arg, "-n")) {
- dry_run = 1;
- continue;
- }
- if (!strcmp(arg, "-q")) {
- quiet = 1;
- continue;
- }
- usage(unpack_usage);
- }
-
- /* We don't take any non-flag arguments now.. Maybe some day */
- usage(unpack_usage);
- }
- SHA1_Init(&ctx);
- unpack_all();
- SHA1_Update(&ctx, buffer, offset);
- SHA1_Final(sha1, &ctx);
- if (memcmp(fill(20), sha1, 20))
- die("final sha1 did not match");
- use(20);
-
- /* Write the last part of the buffer to stdout */
- while (len) {
- int ret = xwrite(1, buffer + offset, len);
- if (ret <= 0)
- break;
- len -= ret;
- offset += ret;
- }
-
- /* All done */
- if (!quiet)
- fprintf(stderr, "\n");
- return 0;
-}
--- /dev/null
+#include <signal.h>
+#include <sys/time.h>
+#include "cache.h"
+#include "tree.h"
+#include "tree-walk.h"
+#include "cache-tree.h"
+#include "unpack-trees.h"
+
+#define DBRT_DEBUG 1
+
+struct tree_entry_list {
+ struct tree_entry_list *next;
+ unsigned directory : 1;
+ unsigned executable : 1;
+ unsigned symlink : 1;
+ unsigned int mode;
+ const char *name;
+ const unsigned char *sha1;
+};
+
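+/*
+ * Flatten the entries of a (parsed) tree object into a simple linked
+ * list, noting for each entry its name, sha1, mode, and whether it is
+ * a directory, executable or symlink.
+ */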
+static struct tree_entry_list *create_tree_entry_list(struct tree *tree)
+{
+ struct tree_desc desc;
+ struct name_entry one;
+ struct tree_entry_list *ret = NULL;
+ struct tree_entry_list **list_p = &ret;
+
+ if (!tree->object.parsed)
+ parse_tree(tree);
+
+ desc.buf = tree->buffer;
+ desc.size = tree->size;
+
+ while (tree_entry(&desc, &one)) {
+ struct tree_entry_list *entry;
+
+ entry = xmalloc(sizeof(struct tree_entry_list));
+ entry->name = one.path;
+ entry->sha1 = one.sha1;
+ entry->mode = one.mode;
+ entry->directory = S_ISDIR(one.mode) != 0;
+ entry->executable = (one.mode & S_IXUSR) != 0;
+ entry->symlink = S_ISLNK(one.mode) != 0;
+ entry->next = NULL;
+
+ *list_p = entry;
+ list_p = &entry->next;
+ }
+ return ret;
+}
+
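+/*
+ * Compare two entry names in tree order: a name that refers to a
+ * directory is compared as if it ended in '/', so that entries coming
+ * from trees and from the index interleave consistently.
+ */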
+static int entcmp(const char *name1, int dir1, const char *name2, int dir2)
+{
+ int len1 = strlen(name1);
+ int len2 = strlen(name2);
+ int len = len1 < len2 ? len1 : len2;
+ int ret = memcmp(name1, name2, len);
+ unsigned char c1, c2;
+ if (ret)
+ return ret;
+ c1 = name1[len];
+ c2 = name2[len];
+ if (!c1 && dir1)
+ c1 = '/';
+ if (!c2 && dir2)
+ c2 = '/';
+ ret = (c1 < c2) ? -1 : (c1 > c2) ? 1 : 0;
+ if (c1 && c2 && !ret)
+ ret = len1 - len2;
+ return ret;
+}
+
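+/*
+ * Walk the given trees (and, when merging, the index) in parallel,
+ * one path at a time: collect the matching entry from each source
+ * into src[], hand them to the merge function (or add them directly
+ * when not merging), and recurse into subdirectories.
+ */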
+static int unpack_trees_rec(struct tree_entry_list **posns, int len,
+ const char *base, struct unpack_trees_options *o,
+ int *indpos,
+ struct tree_entry_list *df_conflict_list)
+{
+ int baselen = strlen(base);
+ int src_size = len + 1;
+ do {
+ int i;
+ const char *first;
+ int firstdir = 0;
+ int pathlen;
+ unsigned ce_size;
+ struct tree_entry_list **subposns;
+ struct cache_entry **src;
+ int any_files = 0;
+ int any_dirs = 0;
+ char *cache_name;
+ int ce_stage;
+
+ /* Find the first name in the input. */
+
+ first = NULL;
+ cache_name = NULL;
+
+ /* Check the cache */
+ if (o->merge && *indpos < active_nr) {
+ /* This is a bit tricky: */
+ /* If the index has a subdirectory (with
+ * contents) as the first name, it'll get a
+ * filename like "foo/bar". But that's after
+ * "foo", so the entry in trees will get
+ * handled first, at which point we'll go into
+ * "foo", and deal with "bar" from the index,
+ * because the base will be "foo/". The only
+ * way we can actually have "foo/bar" first of
+ * all the things is if the trees don't
+ * contain "foo" at all, in which case we'll
+ * handle "foo/bar" without going into the
+ * directory, but that's fine (and will return
+ * an error anyway, with the added unknown
+ * file case).
+ */
+
+ cache_name = active_cache[*indpos]->name;
+ if (strlen(cache_name) > baselen &&
+ !memcmp(cache_name, base, baselen)) {
+ cache_name += baselen;
+ first = cache_name;
+ } else {
+ cache_name = NULL;
+ }
+ }
+
+#if DBRT_DEBUG > 1
+ if (first)
+ printf("index %s\n", first);
+#endif
+ for (i = 0; i < len; i++) {
+ if (!posns[i] || posns[i] == df_conflict_list)
+ continue;
+#if DBRT_DEBUG > 1
+ printf("%d %s\n", i + 1, posns[i]->name);
+#endif
+ if (!first || entcmp(first, firstdir,
+ posns[i]->name,
+ posns[i]->directory) > 0) {
+ first = posns[i]->name;
+ firstdir = posns[i]->directory;
+ }
+ }
+ /* No name means we're done */
+ if (!first)
+ return 0;
+
+ pathlen = strlen(first);
+ ce_size = cache_entry_size(baselen + pathlen);
+
+ src = xcalloc(src_size, sizeof(struct cache_entry *));
+
+ subposns = xcalloc(len, sizeof(struct tree_entry_list *));
+
+ if (cache_name && !strcmp(cache_name, first)) {
+ any_files = 1;
+ src[0] = active_cache[*indpos];
+ remove_cache_entry_at(*indpos);
+ }
+
+ for (i = 0; i < len; i++) {
+ struct cache_entry *ce;
+
+ if (!posns[i] ||
+ (posns[i] != df_conflict_list &&
+ strcmp(first, posns[i]->name))) {
+ continue;
+ }
+
+ if (posns[i] == df_conflict_list) {
+ src[i + o->merge] = o->df_conflict_entry;
+ continue;
+ }
+
+ if (posns[i]->directory) {
+ struct tree *tree = lookup_tree(posns[i]->sha1);
+ any_dirs = 1;
+ parse_tree(tree);
+ subposns[i] = create_tree_entry_list(tree);
+ posns[i] = posns[i]->next;
+ src[i + o->merge] = o->df_conflict_entry;
+ continue;
+ }
+
+ if (!o->merge)
+ ce_stage = 0;
+ else if (i + 1 < o->head_idx)
+ ce_stage = 1;
+ else if (i + 1 > o->head_idx)
+ ce_stage = 3;
+ else
+ ce_stage = 2;
+
+ ce = xcalloc(1, ce_size);
+ ce->ce_mode = create_ce_mode(posns[i]->mode);
+ ce->ce_flags = create_ce_flags(baselen + pathlen,
+ ce_stage);
+ memcpy(ce->name, base, baselen);
+ memcpy(ce->name + baselen, first, pathlen + 1);
+
+ any_files = 1;
+
+ memcpy(ce->sha1, posns[i]->sha1, 20);
+ src[i + o->merge] = ce;
+ subposns[i] = df_conflict_list;
+ posns[i] = posns[i]->next;
+ }
+ if (any_files) {
+ if (o->merge) {
+ int ret;
+
+#if DBRT_DEBUG > 1
+ printf("%s:\n", first);
+ for (i = 0; i < src_size; i++) {
+ printf(" %d ", i);
+ if (src[i])
+ printf("%s\n", sha1_to_hex(src[i]->sha1));
+ else
+ printf("\n");
+ }
+#endif
+ ret = o->fn(src, o);
+
+#if DBRT_DEBUG > 1
+ printf("Added %d entries\n", ret);
+#endif
+ *indpos += ret;
+ } else {
+ for (i = 0; i < src_size; i++) {
+ if (src[i]) {
+ add_cache_entry(src[i], ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
+ }
+ }
+ }
+ }
+ if (any_dirs) {
+ char *newbase = xmalloc(baselen + 2 + pathlen);
+ memcpy(newbase, base, baselen);
+ memcpy(newbase + baselen, first, pathlen);
+ newbase[baselen + pathlen] = '/';
+ newbase[baselen + pathlen + 1] = '\0';
+ if (unpack_trees_rec(subposns, len, newbase, o,
+ indpos, df_conflict_list))
+ return -1;
+ free(newbase);
+ }
+ free(subposns);
+ free(src);
+ } while (1);
+}
+
+/* Unlink the last component and attempt to remove leading
+ * directories, in case this unlink is the removal of the
+ * last entry in the directory -- empty directories are removed.
+ */
+static void unlink_entry(char *name)
+{
+ char *cp, *prev;
+
+ if (unlink(name))
+ return;
+ prev = NULL;
+ while (1) {
+ int status;
+ cp = strrchr(name, '/');
+ if (prev)
+ *prev = '/';
+ if (!cp)
+ break;
+
+ *cp = 0;
+ status = rmdir(name);
+ if (status) {
+ *cp = '/';
+ break;
+ }
+ prev = cp;
+ }
+}
+
+static volatile int progress_update = 0;
+
+static void progress_interval(int signum)
+{
+ progress_update = 1;
+}
+
+static void setup_progress_signal(void)
+{
+ struct sigaction sa;
+ struct itimerval v;
+
+ memset(&sa, 0, sizeof(sa));
+ sa.sa_handler = progress_interval;
+ sigemptyset(&sa.sa_mask);
+ sa.sa_flags = SA_RESTART;
+ sigaction(SIGALRM, &sa, NULL);
+
+ v.it_interval.tv_sec = 1;
+ v.it_interval.tv_usec = 0;
+ v.it_value = v.it_interval;
+ setitimer(ITIMER_REAL, &v, NULL);
+}
+
+static struct checkout state;
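+/*
+ * Bring the working tree in line with the result: entries whose
+ * ce_mode has been cleared are unlinked, entries flagged CE_UPDATE
+ * are checked out, with a simple progress meter for larger updates.
+ */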
+static void check_updates(struct cache_entry **src, int nr,
+ struct unpack_trees_options *o)
+{
+ unsigned short mask = htons(CE_UPDATE);
+ unsigned last_percent = 200, cnt = 0, total = 0;
+
+ if (o->update && o->verbose_update) {
+ for (total = cnt = 0; cnt < nr; cnt++) {
+ struct cache_entry *ce = src[cnt];
+ if (!ce->ce_mode || ce->ce_flags & mask)
+ total++;
+ }
+
+ /* Don't bother doing this for very small updates */
+ if (total < 250)
+ total = 0;
+
+ if (total) {
+ fprintf(stderr, "Checking files out...\n");
+ setup_progress_signal();
+ progress_update = 1;
+ }
+ cnt = 0;
+ }
+
+ while (nr--) {
+ struct cache_entry *ce = *src++;
+
+ if (total) {
+ if (!ce->ce_mode || ce->ce_flags & mask) {
+ unsigned percent;
+ cnt++;
+ percent = (cnt * 100) / total;
+ if (percent != last_percent ||
+ progress_update) {
+ fprintf(stderr, "%4u%% (%u/%u) done\r",
+ percent, cnt, total);
+ last_percent = percent;
+ progress_update = 0;
+ }
+ }
+ }
+ if (!ce->ce_mode) {
+ if (o->update)
+ unlink_entry(ce->name);
+ continue;
+ }
+ if (ce->ce_flags & mask) {
+ ce->ce_flags &= ~mask;
+ if (o->update)
+ checkout_entry(ce, &state, NULL);
+ }
+ }
+ if (total) {
+ signal(SIGALRM, SIG_IGN);
+ fputc('\n', stderr);
+ }
+}
+
+int unpack_trees(struct object_list *trees, struct unpack_trees_options *o)
+{
+ int indpos = 0;
+ unsigned len = object_list_length(trees);
+ struct tree_entry_list **posns;
+ int i;
+ struct object_list *posn = trees;
+ struct tree_entry_list df_conflict_list;
+ struct cache_entry df_conflict_entry;
+
+ memset(&df_conflict_list, 0, sizeof(df_conflict_list));
+ df_conflict_list.next = &df_conflict_list;
+ memset(&state, 0, sizeof(state));
+ state.base_dir = "";
+ state.force = 1;
+ state.quiet = 1;
+ state.refresh_cache = 1;
+
+ o->merge_size = len;
+ memset(&df_conflict_entry, 0, sizeof(df_conflict_entry));
+ o->df_conflict_entry = &df_conflict_entry;
+
+ if (len) {
+ posns = xmalloc(len * sizeof(struct tree_entry_list *));
+ for (i = 0; i < len; i++) {
+ posns[i] = create_tree_entry_list((struct tree *) posn->item);
+ posn = posn->next;
+ }
+ if (unpack_trees_rec(posns, len, o->prefix ? o->prefix : "",
+ o, &indpos, &df_conflict_list))
+ return -1;
+ }
+
+ if (o->trivial_merges_only && o->nontrivial_merge)
+ die("Merge requires file-level merging");
+
+ check_updates(active_cache, active_nr, o);
+ return 0;
+}
+
+/* Here come the merge functions */
+
+static void reject_merge(struct cache_entry *ce)
+{
+ die("Entry '%s' would be overwritten by merge. Cannot merge.",
+ ce->name);
+}
+
+static int same(struct cache_entry *a, struct cache_entry *b)
+{
+ if (!!a != !!b)
+ return 0;
+ if (!a && !b)
+ return 1;
+ return a->ce_mode == b->ce_mode &&
+ !memcmp(a->sha1, b->sha1, 20);
+}
+
+
+/*
+ * When a CE gets turned into an unmerged entry, we
+ * want it to be up-to-date
+ */
+static void verify_uptodate(struct cache_entry *ce,
+ struct unpack_trees_options *o)
+{
+ struct stat st;
+
+ if (o->index_only || o->reset)
+ return;
+
+ if (!lstat(ce->name, &st)) {
+ unsigned changed = ce_match_stat(ce, &st, 1);
+ if (!changed)
+ return;
+ errno = 0;
+ }
+ if (o->reset) {
+ ce->ce_flags |= htons(CE_UPDATE);
+ return;
+ }
+ if (errno == ENOENT)
+ return;
+ die("Entry '%s' not uptodate. Cannot merge.", ce->name);
+}
+
+static void invalidate_ce_path(struct cache_entry *ce)
+{
+ if (ce)
+ cache_tree_invalidate_path(active_cache_tree, ce->name);
+}
+
+/*
+ * We do not want to remove or overwrite a working tree file that
+ * is not tracked.
+ */
+static void verify_absent(const char *path, const char *action,
+ struct unpack_trees_options *o)
+{
+ struct stat st;
+
+ if (o->index_only || o->reset || !o->update)
+ return;
+ if (!lstat(path, &st))
+ die("Untracked working tree file '%s' "
+ "would be %s by merge.", path, action);
+}
+
+static int merged_entry(struct cache_entry *merge, struct cache_entry *old,
+ struct unpack_trees_options *o)
+{
+ merge->ce_flags |= htons(CE_UPDATE);
+ if (old) {
+ /*
+ * See if we can re-use the old CE directly?
+ * That way we get the uptodate stat info.
+ *
+ * This also removes the UPDATE flag on
+ * a match.
+ */
+ if (same(old, merge)) {
+ *merge = *old;
+ } else {
+ verify_uptodate(old, o);
+ invalidate_ce_path(old);
+ }
+ }
+ else {
+ verify_absent(merge->name, "overwritten", o);
+ invalidate_ce_path(merge);
+ }
+
+ merge->ce_flags &= ~htons(CE_STAGEMASK);
+ add_cache_entry(merge, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
+ return 1;
+}
+
+static int deleted_entry(struct cache_entry *ce, struct cache_entry *old,
+ struct unpack_trees_options *o)
+{
+ if (old)
+ verify_uptodate(old, o);
+ else
+ verify_absent(ce->name, "removed", o);
+ ce->ce_mode = 0;
+ add_cache_entry(ce, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
+ invalidate_ce_path(ce);
+ return 1;
+}
+
+static int keep_entry(struct cache_entry *ce)
+{
+ add_cache_entry(ce, ADD_CACHE_OK_TO_ADD);
+ return 1;
+}
+
+#if DBRT_DEBUG
+static void show_stage_entry(FILE *o,
+ const char *label, const struct cache_entry *ce)
+{
+ if (!ce)
+ fprintf(o, "%s (missing)\n", label);
+ else
+ fprintf(o, "%s%06o %s %d\t%s\n",
+ label,
+ ntohl(ce->ce_mode),
+ sha1_to_hex(ce->sha1),
+ ce_stage(ce),
+ ce->name);
+}
+#endif
+
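+/*
+ * Three-way merge.
+ *
+ * stages[0] is the index entry, stages[1..head_idx-1] are the merge
+ * ancestors, stages[head_idx] is HEAD and stages[head_idx+1] is the
+ * other tree being merged.  The numbered cases in the comments below
+ * follow the trivial-merge rules in <Documentation/git-read-tree.txt>.
+ */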
+int threeway_merge(struct cache_entry **stages,
+ struct unpack_trees_options *o)
+{
+ struct cache_entry *index;
+ struct cache_entry *head;
+ struct cache_entry *remote = stages[o->head_idx + 1];
+ int count;
+ int head_match = 0;
+ int remote_match = 0;
+ const char *path = NULL;
+
+ int df_conflict_head = 0;
+ int df_conflict_remote = 0;
+
+ int any_anc_missing = 0;
+ int no_anc_exists = 1;
+ int i;
+
+ for (i = 1; i < o->head_idx; i++) {
+ if (!stages[i])
+ any_anc_missing = 1;
+ else {
+ if (!path)
+ path = stages[i]->name;
+ no_anc_exists = 0;
+ }
+ }
+
+ index = stages[0];
+ head = stages[o->head_idx];
+
+ if (head == o->df_conflict_entry) {
+ df_conflict_head = 1;
+ head = NULL;
+ }
+
+ if (remote == o->df_conflict_entry) {
+ df_conflict_remote = 1;
+ remote = NULL;
+ }
+
+ if (!path && index)
+ path = index->name;
+ if (!path && head)
+ path = head->name;
+ if (!path && remote)
+ path = remote->name;
+
+ /* First, if there's a #16 situation, note that to prevent #13
+ * and #14.
+ */
+ if (!same(remote, head)) {
+ for (i = 1; i < o->head_idx; i++) {
+ if (same(stages[i], head)) {
+ head_match = i;
+ }
+ if (same(stages[i], remote)) {
+ remote_match = i;
+ }
+ }
+ }
+
+ /* We start with cases where the index is allowed to match
+ * something other than the head: #14(ALT) and #2ALT, where it
+ * is permitted to match the result instead.
+ */
+ /* #14, #14ALT, #2ALT */
+ if (remote && !df_conflict_head && head_match && !remote_match) {
+ if (index && !same(index, remote) && !same(index, head))
+ reject_merge(index);
+ return merged_entry(remote, index, o);
+ }
+ /*
+ * If we have an entry in the index cache, then we want to
+ * make sure that it matches head.
+ */
+ if (index && !same(index, head)) {
+ reject_merge(index);
+ }
+
+ if (head) {
+ /* #5ALT, #15 */
+ if (same(head, remote))
+ return merged_entry(head, index, o);
+ /* #13, #3ALT */
+ if (!df_conflict_remote && remote_match && !head_match)
+ return merged_entry(head, index, o);
+ }
+
+ /* #1 */
+ if (!head && !remote && any_anc_missing)
+ return 0;
+
+ /* Under the new "aggressive" rule, we resolve mostly trivial
+ * cases that we historically had git-merge-one-file resolve.
+ */
+ if (o->aggressive) {
+ int head_deleted = !head && !df_conflict_head;
+ int remote_deleted = !remote && !df_conflict_remote;
+ /*
+ * Deleted in both.
+ * Deleted in one and unchanged in the other.
+ */
+ if ((head_deleted && remote_deleted) ||
+ (head_deleted && remote && remote_match) ||
+ (remote_deleted && head && head_match)) {
+ if (index)
+ return deleted_entry(index, index, o);
+ else if (path)
+ verify_absent(path, "removed", o);
+ return 0;
+ }
+ /*
+ * Added in both, identically.
+ */
+ if (no_anc_exists && head && remote && same(head, remote))
+ return merged_entry(head, index, o);
+
+ }
+
+ /* Below are "no merge" cases, which require that the index be
+ * up-to-date to avoid the files getting overwritten with
+ * conflict resolution files.
+ */
+ if (index) {
+ verify_uptodate(index, o);
+ }
+ else if (path)
+ verify_absent(path, "overwritten", o);
+
+ o->nontrivial_merge = 1;
+
+ /* #2, #3, #4, #6, #7, #9, #11. */
+ count = 0;
+ if (!head_match || !remote_match) {
+ for (i = 1; i < o->head_idx; i++) {
+ if (stages[i]) {
+ keep_entry(stages[i]);
+ count++;
+ break;
+ }
+ }
+ }
+#if DBRT_DEBUG
+ else {
+ fprintf(stderr, "read-tree: warning #16 detected\n");
+ show_stage_entry(stderr, "head ", stages[head_match]);
+ show_stage_entry(stderr, "remote ", stages[remote_match]);
+ }
+#endif
+ if (head) { count += keep_entry(head); }
+ if (remote) { count += keep_entry(remote); }
+ return count;
+}
+
+/*
+ * Two-way merge.
+ *
+ * The rule is to "carry forward" what is in the index without losing
+ * information across a "fast forward", favoring a successful merge
+ * over a merge failure when it makes sense. For details of the
+ * "carry forward" rule, please see <Documentation/git-read-tree.txt>.
+ *
+ */
+int twoway_merge(struct cache_entry **src,
+ struct unpack_trees_options *o)
+{
+ struct cache_entry *current = src[0];
+ struct cache_entry *oldtree = src[1], *newtree = src[2];
+
+ if (o->merge_size != 2)
+ return error("Cannot do a twoway merge of %d trees",
+ o->merge_size);
+
+ if (current) {
+ if ((!oldtree && !newtree) || /* 4 and 5 */
+ (!oldtree && newtree &&
+ same(current, newtree)) || /* 6 and 7 */
+ (oldtree && newtree &&
+ same(oldtree, newtree)) || /* 14 and 15 */
+ (oldtree && newtree &&
+ !same(oldtree, newtree) && /* 18 and 19 */
+ same(current, newtree))) {
+ return keep_entry(current);
+ }
+ else if (oldtree && !newtree && same(current, oldtree)) {
+ /* 10 or 11 */
+ return deleted_entry(oldtree, current, o);
+ }
+ else if (oldtree && newtree &&
+ same(current, oldtree) && !same(current, newtree)) {
+ /* 20 or 21 */
+ return merged_entry(newtree, current, o);
+ }
+ else {
+ /* all other failures */
+ if (oldtree)
+ reject_merge(oldtree);
+ if (current)
+ reject_merge(current);
+ if (newtree)
+ reject_merge(newtree);
+ return -1;
+ }
+ }
+ else if (newtree)
+ return merged_entry(newtree, current, o);
+ else
+ return deleted_entry(oldtree, current, o);
+}
+
+/*
+ * Bind merge.
+ *
+ * Keep the index entries at stage0, collapse stage1 but make sure
+ * stage0 does not have anything there.
+ */
+int bind_merge(struct cache_entry **src,
+ struct unpack_trees_options *o)
+{
+ struct cache_entry *old = src[0];
+ struct cache_entry *a = src[1];
+
+ if (o->merge_size != 1)
+ return error("Cannot do a bind merge of %d trees\n",
+ o->merge_size);
+ if (a && old)
+ die("Entry '%s' overlaps. Cannot bind.", a->name);
+ if (!a)
+ return keep_entry(old);
+ else
+ return merged_entry(a, NULL, o);
+}
+
+/*
+ * One-way merge.
+ *
+ * The rule is:
+ * - take the stat information from stage0, take the data from stage1
+ */
+int oneway_merge(struct cache_entry **src,
+ struct unpack_trees_options *o)
+{
+ struct cache_entry *old = src[0];
+ struct cache_entry *a = src[1];
+
+ if (o->merge_size != 1)
+ return error("Cannot do a oneway merge of %d trees",
+ o->merge_size);
+
+ if (!a)
+ return deleted_entry(old, old, o);
+ if (old && same(old, a)) {
+ if (o->reset) {
+ struct stat st;
+ if (lstat(old->name, &st) ||
+ ce_match_stat(old, &st, 1))
+ old->ce_flags |= htons(CE_UPDATE);
+ }
+ return keep_entry(old);
+ }
+ return merged_entry(a, old, o);
+}
--- /dev/null
+#ifndef UNPACK_TREES_H
+#define UNPACK_TREES_H
+
+struct unpack_trees_options;
+
+typedef int (*merge_fn_t)(struct cache_entry **src,
+ struct unpack_trees_options *options);
+
+struct unpack_trees_options {
+ int reset;
+ int merge;
+ int update;
+ int index_only;
+ int nontrivial_merge;
+ int trivial_merges_only;
+ int verbose_update;
+ int aggressive;
+ const char *prefix;
+ merge_fn_t fn;
+
+ int head_idx;
+ int merge_size;
+
+ struct cache_entry *df_conflict_entry;
+};
+
+extern int unpack_trees(struct object_list *trees,
+ struct unpack_trees_options *options);
+
+int threeway_merge(struct cache_entry **stages, struct unpack_trees_options *o);
+int twoway_merge(struct cache_entry **src, struct unpack_trees_options *o);
+int bind_merge(struct cache_entry **src, struct unpack_trees_options *o);
+int oneway_merge(struct cache_entry **src, struct unpack_trees_options *o);
+
+#endif
+#include <signal.h>
+#include <sys/wait.h>
+#include <sys/poll.h>
#include "cache.h"
#include "refs.h"
#include "pkt-line.h"
#include "object.h"
#include "commit.h"
#include "exec_cmd.h"
-#include <signal.h>
-#include <sys/poll.h>
-#include <sys/wait.h>
static const char upload_pack_usage[] = "git-upload-pack [--strict] [--timeout=nn] <dir>";
if (fd == 3)
/* emergency quit */
fd = 2;
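+ /* Error/progress messages are best-effort: write them out with
+ * plain xwrite() and return, rather than going through
+ * safe_write().
+ */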
+ if (fd == 2) {
+ xwrite(fd, data, sz);
+ return sz;
+ }
return safe_write(fd, data, sz);
}
p = data;
int i;
int args;
const char **argv;
+ const char **p;
char *buf;
- char **p;
if (create_full_pack) {
args = 10;
ssize_t sz;
int pe, pu, pollsize;
+ reset_timeout();
+
pollsize = 0;
pe = pu = -1;
o = parse_object(sha1);
if (!o)
die("oops (%s)", sha1_to_hex(sha1));
- if (o->type == TYPE_COMMIT) {
+ if (o->type == OBJ_COMMIT) {
struct commit_list *parents;
if (o->flags & THEY_HAVE)
return 0;
static int send_ref(const char *refname, const unsigned char *sha1)
{
- static char *capabilities = "multi_ack thin-pack side-band";
+ static const char *capabilities = "multi_ack thin-pack side-band";
struct object *o = parse_object(sha1);
if (!o)
o->flags |= OUR_REF;
nr_our_refs++;
}
- if (o->type == TYPE_TAG) {
+ if (o->type == OBJ_TAG) {
o = deref_tag(o, refname, 0);
packet_write(1, "%s %s^{}\n", sha1_to_hex(o->sha1), refname);
}
+++ /dev/null
-#include "cache.h"
-#include "pack.h"
-
-static int verify_one_pack(char *arg, int verbose)
-{
- int len = strlen(arg);
- struct packed_git *g;
-
- while (1) {
- /* Should name foo.idx, but foo.pack may be named;
- * convert it to foo.idx
- */
- if (!strcmp(arg + len - 5, ".pack")) {
- strcpy(arg + len - 5, ".idx");
- len--;
- }
- /* Should name foo.idx now */
- if ((g = add_packed_git(arg, len, 1)))
- break;
- /* No? did you name just foo? */
- strcpy(arg + len, ".idx");
- len += 4;
- if ((g = add_packed_git(arg, len, 1)))
- break;
- return error("packfile %s not found.", arg);
- }
- return verify_pack(g, verbose);
-}
-
-static const char verify_pack_usage[] = "git-verify-pack [-v] <pack>...";
-
-int main(int ac, char **av)
-{
- int errs = 0;
- int verbose = 0;
- int no_more_options = 0;
-
- while (1 < ac) {
- char path[PATH_MAX];
-
- if (!no_more_options && av[1][0] == '-') {
- if (!strcmp("-v", av[1]))
- verbose = 1;
- else if (!strcmp("--", av[1]))
- no_more_options = 1;
- else
- usage(verify_pack_usage);
- }
- else {
- strcpy(path, av[1]);
- if (verify_one_pack(path, verbose))
- errs++;
- }
- ac--; av++;
- }
- return !!errs;
-}
#define XDL_PATCH_IGNOREBSPACE (1 << 8)
#define XDL_EMIT_FUNCNAMES (1 << 0)
+#define XDL_EMIT_COMMON (1 << 1)
#define XDL_MMB_READONLY (1 << 0)
* We need to extent the diagonal "domain" by one. If the next
* values exits the box boundaries we need to change it in the
* opposite direction because (max - min) must be a power of two.
- * Also we initialize the extenal K value to -1 so that we can
+ * Also we initialize the external K value to -1 so that we can
* avoid extra conditions check inside the core loop.
*/
if (fmin > dmin)
* We need to extent the diagonal "domain" by one. If the next
* values exits the box boundaries we need to change it in the
* opposite direction because (max - min) must be a power of two.
- * Also we initialize the extenal K value to -1 so that we can
+ * Also we initialize the external K value to -1 so that we can
* avoid extra conditions check inside the core loop.
*/
if (bmin > dmin)
/*
* This is the same of what GNU diff does. Move back and forward
* change groups for a consistent and pretty diff output. This also
- * helps in finding joineable change groups and reduce the diff size.
+ * helps in finding joinable change groups and reduce the diff size.
*/
for (ix = ixo = 0;;) {
/*
}
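+/*
+ * Emit only the unchanged records of the first file, i.e. the lines
+ * the two sides have in common.
+ */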
+int xdl_emit_common(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
+ xdemitconf_t const *xecfg) {
+ xdfile_t *xdf = &xe->xdf1;
+ const char *rchg = xdf->rchg;
+ long ix;
+
+ for (ix = 0; ix < xdf->nrec; ix++) {
+ if (rchg[ix])
+ continue;
+ if (xdl_emit_record(xdf, ix, "", ecb))
+ return -1;
+ }
+ return 0;
+}
+
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg) {
long s1, s2, e1, e2, lctx;
char funcbuf[40];
long funclen = 0;
+ if (xecfg->flags & XDL_EMIT_COMMON)
+ return xdl_emit_common(xe, xscr, ecb, xecfg);
+
for (xch = xche = xscr; xch; xch = xche->next) {
xche = xdl_get_hunk(xch, xecfg);