/GIT-CFLAGS
/GIT-LDFLAGS
/GIT-GUI-VARS
+/GIT-PREFIX
+/GIT-SCRIPT-DEFINES
+/GIT-USER-AGENT
/GIT-VERSION-FILE
/bin-wrappers/
/git
UI, Workflows & Features
+ * Git can be told to normalize pathnames it reads from readdir(3) and
+   all arguments it receives on the command line into precomposed UTF-8
+ (assuming that they come as decomposed UTF-8), in order to work
+ around issues on Mac OS.
+
+   There still are other places that need conversion (e.g. paths that
+   are read from the standard input for some commands), but this
+   should be a good first step in the right direction.
+
* Per-user $HOME/.gitconfig file can optionally be stored in
$HOME/.config/git/config instead, which is in line with XDG.

 * The value of core.attributesfile and core.excludesfile default to
   $HOME/.config/git/attributes and $HOME/.config/git/ignore
   respectively when these files exist.
+ * "git apply" learned to wiggle the base version and perform three-way
+ merge when a patch does not exactly apply to the version you have.
+
* Scripted Porcelain writers now have access to the credential API via
the "git credential" plumbing command.
default location where the build procedure installs them locally;
   the variable can even point at an http:// URL.
+ * "git rebase [-i] --root $tip" can now be used to rewrite all the
+ history leading to "$tip" down to the root commit.
+
* "git rebase -i" learned "-x <cmd>" to insert "exec <cmd>" after
each commit in the resulting history.
Foreign Interface
+ * "mediawiki" remote helper (in contrib/) learned to handle file
+ attachments.
+
+ * "git p4" now uses "Jobs:" and "p4 move" when appropriate.
+
+ * vcs-svn has been updated to clean up compilation, lift 32-bit
+ limitations, etc.
+
Performance, Internal Implementation, etc. (please report possible regressions)
* We no longer use AsciiDoc7 syntax in our documentation and favor a
more modern style.
+ * "git am --rebasing" codepath was taught to grab authorship, log
+ message and the patch text directly out of existing commits. This
+ will help rebasing commits that have confusing "diff" output in
+ their log messages.
+
* "git index-pack" and "git pack-objects" use streaming API to read
from the object store to avoid having to hold a large blob object
in-core while they are doing their thing.
releases are contained in this release (see their release notes for
details).
+ * The error message from "git push $there :bogo" (and its equivalent
+   "git push $there --delete bogo") implied that we tried and failed
+   to guess what ref is being deleted based on the LHS of the refspec,
+   even though we make no such guess.
+ (merge 5742c82 jk/push-delete-ref-error-message later to maint).
+
+ * A handful of files and directories we create had tighter than
+ necessary permission bits when the user wanted to have group
+ writability (e.g. by setting "umask 002").
+ (merge 6ff2b72 ar/clone-honor-umask-at-top later to maint).
+
+ * "commit --amend" used to refuse amending a commit with an empty log
+   message, even when "--allow-empty-message" was given.
+ (merge d9a9357 cw/amend-commit-without-message later to maint).
+
+ * "git commit --amend --only --" was meant to allow "Clever" people to
+   rewrite the commit message without making any change even when they
+   already have changes for the next commit added to their index, but
+   it never worked as advertised since it was introduced in the 1.3.0 era.
+ (merge ea2d4ed jk/maint-commit-amend-only-no-paths later to maint).
+
+ * Even though the index can record pathnames longer than 1<<12 bytes,
+ in some places we were not comparing them in full, potentially
+   replacing index entries instead of adding new ones.
+ (merge d5f5333 tg/maint-cache-name-compare later to maint).
+
+ * "git show"'s auto-walking behaviour was an unreliable and
+ unpredictable hack; it now behaves just like "git log" does when it
+ walks.
+ (merge c5941f1 tr/maint-show-walk later to maint).
+
+ * "git diff", "git status" and anything that internally uses the
+ comparison machinery was utterly broken when the difference
+ involved a file with "-" as its name. This was due to the way "git
+ diff --no-index" was incorrectly bolted on to the system, making
+ any comparison that involves a file "-" at the root level
+ incorrectly read from the standard input.
+ (merge 4682d85 jc/refactor-diff-stdin later to maint).
+
* We did not have test to make sure "git rebase" without extra options
filters out an empty commit in the original history.
(merge 2b5ba7b mz/empty-rebase-test later to maint).
Advice shown when you used linkgit:git-checkout[1] to
	move to the detached HEAD state, to instruct how to create
a local branch after the fact.
+ amWorkDir::
+ Advice that shows the location of the patch file when
+ linkgit:git-am[1] fails to apply it.
--
core.fileMode::
will probe and set core.ignorecase true if appropriate when the repository
is created.
+core.precomposeunicode::
+	This option is only used by the Mac OS implementation of git.
+	When core.precomposeunicode=true, git reverts the unicode decomposition
+	of filenames done by Mac OS. This is useful when sharing a repository
+	between Mac OS and Linux or Windows.
+	(Git for Windows 1.7.10 or higher is needed, or git under cygwin 1.7).
+	When false, file names are handled fully transparently by git,
+	which is backward compatible with older versions of git.
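++
+For example, on the Mac OS side of a repository shared with Linux
+machines one would run (a minimal illustration):
++
+	git config core.precomposeunicode true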
+
core.trustctime::
If false, the ctime differences between the index and the
working tree are ignored; useful when the inode change time
SYNOPSIS
--------
[verse]
-'git apply' [--stat] [--numstat] [--summary] [--check] [--index]
+'git apply' [--stat] [--numstat] [--summary] [--check] [--index] [--3way]
[--apply] [--no-add] [--build-fake-ancestor=<file>] [-R | --reverse]
[--allow-binary-replacement | --binary] [--reject] [-z]
[-p<n>] [-C<n>] [--inaccurate-eof] [--recount] [--cached]
cached data, apply the patch, and store the result in the index
without using the working tree. This implies `--index`.
+-3::
+--3way::
+ When the patch does not apply cleanly, fall back on 3-way merge if
+ the patch records the identity of blobs it is supposed to apply to,
+ and we have those blobs available locally, possibly leaving the
+ conflict markers in the files in the working tree for the user to
+ resolve. This option implies the `--index` option, and is incompatible
+ with the `--reject` and the `--cached` options.
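++
+For example, a minimal illustration ("fix.patch" being a hypothetical
+patch file that no longer applies cleanly):
++
+	$ git apply --3way fix.patch
++
+Conflicted paths are left with conflict markers in the working tree
+and recorded at higher stages in the index, to be resolved and added
+as usual.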
+
--build-fake-ancestor=<file>::
Newer 'git diff' output has embedded 'index information'
for each blob to help identify the original version that
can push anything into the repository, including removal
of refs). This is solely meant for a closed LAN setting
where everybody is friendly. This service can be
- enabled by `daemon.receivepack` configuration item to
+	enabled by setting the `daemon.receivepack` configuration item to
`true`.
EXAMPLES
p4. By default, this is the most recent p4 commit reachable
from 'HEAD'.
--M[<n>]::
+-M::
Detect renames. See linkgit:git-diff[1]. Renames will be
represented in p4 using explicit 'move' operations. There
is no corresponding option to detect copies, but there are
Submit variables
~~~~~~~~~~~~~~~~
git-p4.detectRenames::
- Detect renames. See linkgit:git-diff[1].
+ Detect renames. See linkgit:git-diff[1]. This can be true,
+ false, or a score as expected by 'git diff -M'.
git-p4.detectCopies::
- Detect copies. See linkgit:git-diff[1].
+ Detect copies. See linkgit:git-diff[1]. This can be true,
+ false, or a score as expected by 'git diff -C'.
git-p4.detectCopiesHarder::
- Detect copies harder. See linkgit:git-diff[1].
+ Detect copies harder. See linkgit:git-diff[1]. A boolean.
git-p4.preserveUser::
On submit, re-author changes to reflect the git author,
[verse]
'git rebase' [-i | --interactive] [options] [--exec <cmd>] [--onto <newbase>]
[<upstream>] [<branch>]
-'git rebase' [-i | --interactive] [options] [--exec <cmd>] --onto <newbase>
+'git rebase' [-i | --interactive] [options] [--exec <cmd>] [--onto <newbase>]
--root [<branch>]
'git rebase' --continue | --skip | --abort
--root::
Rebase all commits reachable from <branch>, instead of
limiting them with an <upstream>. This allows you to rebase
- the root commit(s) on a branch. Must be used with --onto, and
+ the root commit(s) on a branch. When used with --onto, it
will skip changes already contained in <newbase> (instead of
- <upstream>). When used together with --preserve-merges, 'all'
- root commits will be rewritten to have <newbase> as parent
+	<upstream>), whereas without --onto it will operate on every change.
+ When used together with both --onto and --preserve-merges,
+ 'all' root commits will be rewritten to have <newbase> as parent
instead.
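++
+For example, to edit every commit on branch "topic", including the
+root commit (a hypothetical branch name):
++
+	$ git rebase -i --root topic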
--autosquash::
The option core.warnAmbiguousRefs is used to select the strict
abbreviation mode.
+--disambiguate=<prefix>::
+ Show every object whose name begins with the given prefix.
+ The <prefix> must be at least 4 hexadecimal digits long to
+ avoid listing each and every object in the repository by
+ mistake.
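++
+For example (a hypothetical prefix):
++
+	$ git rev-parse --disambiguate=de9f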
+
--all::
Show all refs found in `refs/`.
BUILT_INS =
COMPAT_CFLAGS =
COMPAT_OBJS =
-XDIFF_H =
XDIFF_OBJS =
-VCSSVN_H =
VCSSVN_OBJS =
-VCSSVN_TEST_OBJS =
-MISC_H =
+GENERATED_H =
EXTRA_CPPFLAGS =
LIB_H =
LIB_OBJS =
XDIFF_LIB=xdiff/lib.a
VCSSVN_LIB=vcs-svn/lib.a
-XDIFF_H += xdiff/xinclude.h
-XDIFF_H += xdiff/xmacros.h
-XDIFF_H += xdiff/xdiff.h
-XDIFF_H += xdiff/xtypes.h
-XDIFF_H += xdiff/xutils.h
-XDIFF_H += xdiff/xprepare.h
-XDIFF_H += xdiff/xdiffi.h
-XDIFF_H += xdiff/xemit.h
-
-VCSSVN_H += vcs-svn/line_buffer.h
-VCSSVN_H += vcs-svn/sliding_window.h
-VCSSVN_H += vcs-svn/repo_tree.h
-VCSSVN_H += vcs-svn/fast_export.h
-VCSSVN_H += vcs-svn/svndiff.h
-VCSSVN_H += vcs-svn/svndump.h
-
-MISC_H += bisect.h
-MISC_H += branch.h
-MISC_H += bundle.h
-MISC_H += common-cmds.h
-MISC_H += fetch-pack.h
-MISC_H += reachable.h
-MISC_H += send-pack.h
-MISC_H += shortlog.h
-MISC_H += tar.h
-MISC_H += thread-utils.h
-MISC_H += url.h
-MISC_H += walker.h
-MISC_H += wt-status.h
+LIB_H += xdiff/xinclude.h
+LIB_H += xdiff/xmacros.h
+LIB_H += xdiff/xdiff.h
+LIB_H += xdiff/xtypes.h
+LIB_H += xdiff/xutils.h
+LIB_H += xdiff/xprepare.h
+LIB_H += xdiff/xdiffi.h
+LIB_H += xdiff/xemit.h
+
+LIB_H += vcs-svn/line_buffer.h
+LIB_H += vcs-svn/sliding_window.h
+LIB_H += vcs-svn/repo_tree.h
+LIB_H += vcs-svn/fast_export.h
+LIB_H += vcs-svn/svndiff.h
+LIB_H += vcs-svn/svndump.h
+
+GENERATED_H += common-cmds.h
LIB_H += advice.h
LIB_H += archive.h
LIB_H += argv-array.h
LIB_H += attr.h
+LIB_H += bisect.h
LIB_H += blob.h
+LIB_H += branch.h
LIB_H += builtin.h
LIB_H += bulk-checkin.h
-LIB_H += cache.h
+LIB_H += bundle.h
LIB_H += cache-tree.h
+LIB_H += cache.h
LIB_H += color.h
+LIB_H += column.h
LIB_H += commit.h
LIB_H += compat/bswap.h
LIB_H += compat/cygwin.h
LIB_H += compat/mingw.h
LIB_H += compat/obstack.h
+LIB_H += compat/precompose_utf8.h
LIB_H += compat/terminal.h
LIB_H += compat/win32/dirent.h
LIB_H += compat/win32/poll.h
LIB_H += diffcore.h
LIB_H += dir.h
LIB_H += exec_cmd.h
+LIB_H += fetch-pack.h
LIB_H += fmt-merge-msg.h
LIB_H += fsck.h
LIB_H += gettext.h
LIB_H += grep.h
LIB_H += hash.h
LIB_H += help.h
+LIB_H += http.h
LIB_H += kwset.h
LIB_H += levenshtein.h
LIB_H += list-objects.h
LIB_H += merge-file.h
LIB_H += merge-recursive.h
LIB_H += mergesort.h
-LIB_H += notes.h
LIB_H += notes-cache.h
LIB_H += notes-merge.h
+LIB_H += notes.h
LIB_H += object.h
-LIB_H += pack.h
LIB_H += pack-refs.h
LIB_H += pack-revindex.h
+LIB_H += pack.h
LIB_H += parse-options.h
LIB_H += patch-ids.h
LIB_H += pkt-line.h
LIB_H += progress.h
LIB_H += prompt.h
LIB_H += quote.h
+LIB_H += reachable.h
LIB_H += reflog-walk.h
LIB_H += refs.h
LIB_H += remote.h
LIB_H += resolve-undo.h
LIB_H += revision.h
LIB_H += run-command.h
+LIB_H += send-pack.h
LIB_H += sequencer.h
LIB_H += sha1-array.h
LIB_H += sha1-lookup.h
+LIB_H += shortlog.h
LIB_H += sideband.h
LIB_H += sigchain.h
LIB_H += strbuf.h
LIB_H += string-list.h
LIB_H += submodule.h
LIB_H += tag.h
+LIB_H += tar.h
LIB_H += thread-utils.h
LIB_H += transport.h
-LIB_H += tree.h
LIB_H += tree-walk.h
+LIB_H += tree.h
LIB_H += unpack-trees.h
+LIB_H += url.h
LIB_H += userdiff.h
LIB_H += utf8.h
LIB_H += varint.h
+LIB_H += walker.h
+LIB_H += wt-status.h
LIB_H += xdiff-interface.h
LIB_H += xdiff/xdiff.h
NO_MEMMEM = YesPlease
USE_ST_TIMESPEC = YesPlease
HAVE_DEV_TTY = YesPlease
+ COMPAT_OBJS += compat/precompose_utf8.o
+ BASIC_CFLAGS += -DPRECOMPOSE_UNICODE
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
GIT_USER_AGENT_SQ = $(subst ','\'',$(GIT_USER_AGENT))
GIT_USER_AGENT_CQ = "$(subst ",\",$(subst \,\\,$(GIT_USER_AGENT)))"
GIT_USER_AGENT_CQ_SQ = $(subst ','\'',$(GIT_USER_AGENT_CQ))
-BASIC_CFLAGS += -DGIT_USER_AGENT='$(GIT_USER_AGENT_CQ_SQ)'
+GIT-USER-AGENT: FORCE
+ @if test x'$(GIT_USER_AGENT_SQ)' != x"`cat GIT-USER-AGENT 2>/dev/null`"; then \
+ echo '$(GIT_USER_AGENT_SQ)' >GIT-USER-AGENT; \
+ fi
ifdef DEFAULT_HELP_FORMAT
BASIC_CFLAGS += -DDEFAULT_HELP_FORMAT='"$(DEFAULT_HELP_FORMAT)"'
strip: $(PROGRAMS) git$X
$(STRIP) $(STRIP_OPTS) $(PROGRAMS) git$X
-git.o: common-cmds.h
+### Target-specific flags and dependencies
+
+# The generic compilation pattern rule and automatically
+# computed header dependencies (falling back to a dependency on
+# LIB_H) are enough to describe how most targets should be built,
+# but some targets are special enough to need something a little
+# different.
+#
+# - When a source file "foo.c" #includes a generated header file,
+# we need to list that dependency for the "foo.o" target.
+#
+#   We also list it from other targets that are built from foo.c,
+#   like "foo.sp" and "foo.s", even though that is easy to forget
+#   to do because the generated header is usually already present
+#   after a regular build attempt.
+#
+# - Some code depends on configuration kept in makefile
+# variables. The target-specific variable EXTRA_CPPFLAGS can
+# be used to convey that information to the C preprocessor
+# using -D options.
+#
+# The "foo.o" target should have a corresponding dependency on
+# a file that changes when the value of the makefile variable
+# changes. For example, targets making use of the
+# $(GIT_VERSION) variable depend on GIT-VERSION-FILE.
+#
+# Technically the ".sp" and ".s" targets do not need this
+# dependency because they are force-built, but they get the
+# same dependency for consistency. This way, you do not have to
+# know how each target is implemented. And it means the
+# dependencies here will not need to change if the force-build
+# details change some day.
+
+git.sp git.s git.o: GIT-PREFIX
git.sp git.s git.o: EXTRA_CPPFLAGS = \
'-DGIT_HTML_PATH="$(htmldir_SQ)"' \
'-DGIT_MAN_PATH="$(mandir_SQ)"' \
$(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ git.o \
$(BUILTIN_OBJS) $(ALL_LDFLAGS) $(LIBS)
-help.sp help.o: common-cmds.h
+help.sp help.s help.o: common-cmds.h
-builtin/help.sp builtin/help.o: common-cmds.h
+builtin/help.sp builtin/help.s builtin/help.o: common-cmds.h GIT-PREFIX
builtin/help.sp builtin/help.s builtin/help.o: EXTRA_CPPFLAGS = \
'-DGIT_HTML_PATH="$(htmldir_SQ)"' \
'-DGIT_MAN_PATH="$(mandir_SQ)"' \
'-DGIT_INFO_PATH="$(infodir_SQ)"'
+version.sp version.s version.o: GIT-VERSION-FILE GIT-USER-AGENT
version.sp version.s version.o: EXTRA_CPPFLAGS = \
- '-DGIT_VERSION="$(GIT_VERSION)"'
+ '-DGIT_VERSION="$(GIT_VERSION)"' \
+ '-DGIT_USER_AGENT=$(GIT_USER_AGENT_CQ_SQ)'
$(BUILT_INS): git$X
$(QUIET_BUILT_IN)$(RM) $@ && \
common-cmds.h: $(wildcard Documentation/git-*.txt)
$(QUIET_GEN)./generate-cmdlist.sh > $@+ && mv $@+ $@
+SCRIPT_DEFINES = $(SHELL_PATH_SQ):$(DIFF_SQ):$(GIT_VERSION):\
+ $(localedir_SQ):$(NO_CURL):$(USE_GETTEXT_SCHEME):$(SANE_TOOL_PATH_SQ):\
+ $(gitwebdir_SQ):$(PERL_PATH_SQ)
define cmd_munge_script
$(RM) $@ $@+ && \
sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
-e 's|@SHELL_PATH@|$(SHELL_PATH_SQ)|' \
-e 's|@@DIFF@@|$(DIFF_SQ)|' \
- -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
- -e 's|@@GIT_USER_AGENT@@|$(GIT_USER_AGENT_SQ)|g' \
-e 's|@@LOCALEDIR@@|$(localedir_SQ)|g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
-e 's/@@USE_GETTEXT_SCHEME@@/$(USE_GETTEXT_SCHEME)/g' \
-e $(BROKEN_PATH_FIX) \
+ -e 's|@@GITWEBDIR@@|$(gitwebdir_SQ)|g' \
+ -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
$@.sh >$@+
endef
-$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh
+GIT-SCRIPT-DEFINES: FORCE
+ @FLAGS='$(SCRIPT_DEFINES)'; \
+ if test x"$$FLAGS" != x"`cat $@ 2>/dev/null`" ; then \
+ echo 1>&2 " * new script parameters"; \
+ echo "$$FLAGS" >$@; \
+ fi
+
+
+$(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh GIT-SCRIPT-DEFINES
$(QUIET_GEN)$(cmd_munge_script) && \
chmod +x $@+ && \
mv $@+ $@
-$(SCRIPT_LIB) : % : %.sh
+$(SCRIPT_LIB) : % : %.sh GIT-SCRIPT-DEFINES
$(QUIET_GEN)$(cmd_munge_script) && \
mv $@+ $@
ifndef NO_PERL
$(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak
-perl/perl.mak: GIT-CFLAGS perl/Makefile perl/Makefile.PL
+perl/perl.mak: GIT-CFLAGS GIT-PREFIX perl/Makefile perl/Makefile.PL
$(QUIET_SUBDIR0)perl $(QUIET_SUBDIR1) PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F)
-$(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl
+$(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl GIT-VERSION-FILE
$(QUIET_GEN)$(RM) $@ $@+ && \
INSTLIBDIR=`MAKEFLAGS= $(MAKE) -C perl -s --no-print-directory instlibdir` && \
sed -e '1{' \
gitweb:
$(QUIET_SUBDIR0)gitweb $(QUIET_SUBDIR1) all
-git-instaweb: git-instaweb.sh gitweb
- $(QUIET_GEN)$(RM) $@ $@+ && \
- sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
- -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
- -e 's/@@NO_CURL@@/$(NO_CURL)/g' \
- -e 's|@@GITWEBDIR@@|$(gitwebdir_SQ)|g' \
- -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
- $@.sh > $@+ && \
+git-instaweb: git-instaweb.sh gitweb GIT-SCRIPT-DEFINES
+ $(QUIET_GEN)$(cmd_munge_script) && \
chmod +x $@+ && \
mv $@+ $@
else # NO_PERL
endif # NO_PERL
ifndef NO_PYTHON
-$(patsubst %.py,%,$(SCRIPT_PYTHON)): GIT-CFLAGS
+$(patsubst %.py,%,$(SCRIPT_PYTHON)): GIT-CFLAGS GIT-PREFIX
$(patsubst %.py,%,$(SCRIPT_PYTHON)): % : %.py
$(QUIET_GEN)$(RM) $@ $@+ && \
INSTLIBDIR=`MAKEFLAGS= $(MAKE) -C git_remote_helpers -s \
mv $@+ $@
endif # NO_PYTHON
-configure: configure.ac
+configure: configure.ac GIT-VERSION-FILE
$(QUIET_GEN)$(RM) $@ $<+ && \
sed -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
$< > $<+ && \
autoconf -o $@ $<+ && \
$(RM) $<+
-# These can record GIT_VERSION
-version.o git.spec \
- $(patsubst %.sh,%,$(SCRIPT_SH)) \
- $(patsubst %.perl,%,$(SCRIPT_PERL)) \
- : GIT-VERSION-FILE
-
-TEST_OBJS := $(patsubst test-%$X,test-%.o,$(TEST_PROGRAMS))
-GIT_OBJS := $(LIB_OBJS) $(BUILTIN_OBJS) $(PROGRAM_OBJS) $(TEST_OBJS) \
- git.o
-ifndef NO_CURL
- GIT_OBJS += http.o http-walker.o remote-curl.o
-endif
-
XDIFF_OBJS += xdiff/xdiffi.o
XDIFF_OBJS += xdiff/xprepare.o
XDIFF_OBJS += xdiff/xutils.o
VCSSVN_OBJS += vcs-svn/svndiff.o
VCSSVN_OBJS += vcs-svn/svndump.o
-VCSSVN_TEST_OBJS += test-line-buffer.o
-
-OBJECTS := $(GIT_OBJS) $(XDIFF_OBJS) $(VCSSVN_OBJS)
+TEST_OBJS := $(patsubst test-%$X,test-%.o,$(TEST_PROGRAMS))
+OBJECTS := $(LIB_OBJS) $(BUILTIN_OBJS) $(PROGRAM_OBJS) $(TEST_OBJS) \
+ $(XDIFF_OBJS) \
+ $(VCSSVN_OBJS) \
+ git.o
+ifndef NO_CURL
+ OBJECTS += http.o http-walker.o remote-curl.o
+endif
dep_files := $(foreach f,$(OBJECTS),$(dir $f).depend/$(notdir $f).d)
dep_dirs := $(addsuffix .depend,$(sort $(dir $(OBJECTS))))
# Dependencies on automatically generated headers such as common-cmds.h
# should _not_ be included here, since they are necessary even when
# building an object for the first time.
-#
-# XXX. Please check occasionally that these include all dependencies
-# gcc detects!
-
-$(GIT_OBJS): $(LIB_H)
-builtin/branch.o builtin/checkout.o builtin/clone.o builtin/reset.o branch.o transport.o: branch.h
-builtin/bundle.o bundle.o transport.o: bundle.h
-builtin/bisect--helper.o builtin/rev-list.o bisect.o: bisect.h
-builtin/clone.o builtin/fetch-pack.o transport.o: fetch-pack.h
-builtin/index-pack.o builtin/grep.o builtin/pack-objects.o transport-helper.o thread-utils.o: thread-utils.h
-builtin/send-pack.o transport.o: send-pack.h
-builtin/log.o builtin/shortlog.o: shortlog.h
-builtin/prune.o builtin/reflog.o reachable.o: reachable.h
-builtin/commit.o builtin/revert.o wt-status.o: wt-status.h
-builtin/tar-tree.o archive-tar.o: tar.h
-connect.o transport.o url.o http-backend.o: url.h
-builtin/branch.o builtin/commit.o builtin/tag.o column.o help.o pager.o: column.h
-http-fetch.o http-walker.o remote-curl.o transport.o walker.o: walker.h
-http.o http-walker.o http-push.o http-fetch.o remote-curl.o: http.h url.h
-
-xdiff-interface.o $(XDIFF_OBJS): $(XDIFF_H)
-$(VCSSVN_OBJS) $(VCSSVN_TEST_OBJS): $(LIB_H) $(VCSSVN_H)
+$(OBJECTS): $(LIB_H)
endif
+exec_cmd.sp exec_cmd.s exec_cmd.o: GIT-PREFIX
exec_cmd.sp exec_cmd.s exec_cmd.o: EXTRA_CPPFLAGS = \
'-DGIT_EXEC_PATH="$(gitexecdir_SQ)"' \
'-DBINDIR="$(bindir_relative_SQ)"' \
'-DPREFIX="$(prefix_SQ)"'
+builtin/init-db.sp builtin/init-db.s builtin/init-db.o: GIT-PREFIX
builtin/init-db.sp builtin/init-db.s builtin/init-db.o: EXTRA_CPPFLAGS = \
-DDEFAULT_GIT_TEMPLATE_DIR='"$(template_dir_SQ)"'
+config.sp config.s config.o: GIT-PREFIX
config.sp config.s config.o: EXTRA_CPPFLAGS = \
-DETC_GITCONFIG='"$(ETC_GITCONFIG_SQ)"'
+attr.sp attr.s attr.o: GIT-PREFIX
attr.sp attr.s attr.o: EXTRA_CPPFLAGS = \
-DETC_GITATTRIBUTES='"$(ETC_GITATTRIBUTES_SQ)"'
+gettext.sp gettext.s gettext.o: GIT-PREFIX
gettext.sp gettext.s gettext.o: EXTRA_CPPFLAGS = \
-DGIT_LOCALE_PATH='"$(localedir_SQ)"'
--keyword=_ --keyword=N_ --keyword="Q_:1,2"
XGETTEXT_FLAGS_SH = $(XGETTEXT_FLAGS) --language=Shell
XGETTEXT_FLAGS_PERL = $(XGETTEXT_FLAGS) --keyword=__ --language=Perl
-LOCALIZED_C := $(C_OBJ:o=c) $(LIB_H) $(XDIFF_H) $(VCSSVN_H) $(MISC_H)
+LOCALIZED_C := $(C_OBJ:o=c) $(LIB_H) $(GENERATED_H)
LOCALIZED_SH := $(SCRIPT_SH)
LOCALIZED_PERL := $(SCRIPT_PERL)
$(FIND_SOURCE_FILES) | xargs cscope -b
### Detect prefix changes
-TRACK_CFLAGS = $(CC):$(subst ','\'',$(ALL_CFLAGS)):\
- $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ):\
- $(localedir_SQ):$(USE_GETTEXT_SCHEME)
+TRACK_PREFIX = $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ):\
+ $(localedir_SQ)
+
+GIT-PREFIX: FORCE
+ @FLAGS='$(TRACK_PREFIX)'; \
+ if test x"$$FLAGS" != x"`cat GIT-PREFIX 2>/dev/null`" ; then \
+ echo 1>&2 " * new prefix flags"; \
+ echo "$$FLAGS" >GIT-PREFIX; \
+ fi
+
+TRACK_CFLAGS = $(CC):$(subst ','\'',$(ALL_CFLAGS)):$(USE_GETTEXT_SCHEME)
GIT-CFLAGS: FORCE
@FLAGS='$(TRACK_CFLAGS)'; \
if test x"$$FLAGS" != x"`cat GIT-CFLAGS 2>/dev/null`" ; then \
- echo 1>&2 " * new build flags or prefix"; \
+ echo 1>&2 " * new build flags"; \
echo "$$FLAGS" >GIT-CFLAGS; \
fi
### Maintainer's dist rules
-git.spec: git.spec.in
+git.spec: git.spec.in GIT-VERSION-FILE
sed -e 's/@@VERSION@@/$(GIT_VERSION)/g' < $< > $@+
mv $@+ $@
$(MAKE) -C git-gui clean
endif
$(RM) GIT-VERSION-FILE GIT-CFLAGS GIT-LDFLAGS GIT-GUI-VARS GIT-BUILD-OPTIONS
+ $(RM) GIT-USER-AGENT GIT-PREFIX GIT-SCRIPT-DEFINES
.PHONY: all install profile-clean clean strip
.PHONY: shell_compatibility_test please_set_SHELL_PATH_to_a_more_modern_shell
argc = setup_revisions(argc, argv, &rev, NULL);
rev.diffopt.output_format = DIFF_FORMAT_PATCH;
DIFF_OPT_SET(&rev.diffopt, IGNORE_DIRTY_SUBMODULES);
- out = open(file, O_CREAT | O_WRONLY, 0644);
+ out = open(file, O_CREAT | O_WRONLY, 0666);
if (out < 0)
die (_("Could not open '%s' for writing."), file);
rev.diffopt.file = xfdopen(out, "w");
#include "dir.h"
#include "diff.h"
#include "parse-options.h"
+#include "xdiff-interface.h"
+#include "ll-merge.h"
+#include "rerere.h"
/*
* --check turns on checking that the working tree matches the
static int apply_verbosely;
static int allow_overlap;
static int no_add;
+static int threeway;
static const char *fake_ancestor;
static int line_termination = '\n';
static unsigned int p_context = UINT_MAX;
unsigned int is_copy:1;
unsigned int is_rename:1;
unsigned int recount:1;
+ unsigned int conflicted_threeway:1;
+ unsigned int direct_to_threeway:1;
struct fragment *fragments;
char *result;
size_t resultsize;
char old_sha1_prefix[41];
char new_sha1_prefix[41];
struct patch *next;
+
+ /* three-way fallback result */
+ unsigned char threeway_stage[3][20];
};
static void free_fragment_list(struct fragment *list)
static void clear_image(struct image *image)
{
free(image->buf);
- image->buf = NULL;
- image->len = 0;
+ free(image->line_allocated);
+ memset(image, 0, sizeof(*image));
}
/* fmt must contain _one_ %s and no other substitution */
return 0;
}
-static int read_file_or_gitlink(struct cache_entry *ce, struct strbuf *buf)
+static int read_blob_object(struct strbuf *buf, const unsigned char *sha1, unsigned mode)
{
- if (!ce)
- return 0;
-
- if (S_ISGITLINK(ce->ce_mode)) {
+ if (S_ISGITLINK(mode)) {
strbuf_grow(buf, 100);
- strbuf_addf(buf, "Subproject commit %s\n", sha1_to_hex(ce->sha1));
+ strbuf_addf(buf, "Subproject commit %s\n", sha1_to_hex(sha1));
} else {
enum object_type type;
unsigned long sz;
char *result;
- result = read_sha1_file(ce->sha1, &type, &sz);
+ result = read_sha1_file(sha1, &type, &sz);
if (!result)
return -1;
/* XXX read_sha1_file NUL-terminates */
return 0;
}
+static int read_file_or_gitlink(struct cache_entry *ce, struct strbuf *buf)
+{
+ if (!ce)
+ return 0;
+ return read_blob_object(buf, ce->sha1, ce->ce_mode);
+}
+
static struct patch *in_fn_table(const char *name)
{
struct string_list_item *item;
* item->util in the filename table records the status of the path.
* Usually it points at a patch (whose result records the contents
* of it after applying it), but it could be PATH_WAS_DELETED for a
- * path that a previously applied patch has already removed.
+ * path that a previously applied patch has already removed, or
+ * PATH_TO_BE_DELETED for a path that a later patch would remove.
+ *
+ * The latter is needed to deal with a case where two paths A and B
+ * are swapped by first renaming A to B and then renaming B to A;
+ * moving A to B should not be prevented due to the presence of B as we
+ * will remove it in a later patch.
*/
- #define PATH_TO_BE_DELETED ((struct patch *) -2)
+#define PATH_TO_BE_DELETED ((struct patch *) -2)
#define PATH_WAS_DELETED ((struct patch *) -1)
static int to_be_deleted(struct patch *patch)
}
}
-static int apply_data(struct patch *patch, struct stat *st, struct cache_entry *ce)
+static int checkout_target(struct cache_entry *ce, struct stat *st)
+{
+ struct checkout costate;
+
+ memset(&costate, 0, sizeof(costate));
+ costate.base_dir = "";
+ costate.refresh_cache = 1;
+ if (checkout_entry(ce, &costate, NULL) || lstat(ce->name, st))
+ return error(_("cannot checkout %s"), ce->name);
+ return 0;
+}
+
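+/*
+ * Find the patch that appeared earlier in the input and already
+ * touched patch->old_name, if any.  Copies and renames ("git"
+ * patches) do not depend on the order, so they never have one.
+ * *gone is set when that earlier patch deleted the path.
+ */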
+static struct patch *previous_patch(struct patch *patch, int *gone)
+{
+ struct patch *previous;
+
+ *gone = 0;
+ if (patch->is_copy || patch->is_rename)
+ return NULL; /* "git" patches do not depend on the order */
+
+ previous = in_fn_table(patch->old_name);
+ if (!previous)
+ return NULL;
+
+ if (to_be_deleted(previous))
+ return NULL; /* the deletion hasn't happened yet */
+
+ if (was_deleted(previous))
+ *gone = 1;
+
+ return previous;
+}
+
+static int verify_index_match(struct cache_entry *ce, struct stat *st)
+{
+ if (S_ISGITLINK(ce->ce_mode)) {
+ if (!S_ISDIR(st->st_mode))
+ return -1;
+ return 0;
+ }
+ return ce_match_stat(ce, st, CE_MATCH_IGNORE_VALID|CE_MATCH_IGNORE_SKIP_WORKTREE);
+}
+
+#define SUBMODULE_PATCH_WITHOUT_INDEX 1
+
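+/*
+ * Read the contents the patch applies to into "buf": from the index
+ * under --cached (or for a submodule known to the index), and from
+ * the working tree otherwise.  Returns 0 on success, a negative
+ * value on error, or SUBMODULE_PATCH_WITHOUT_INDEX when a submodule
+ * patch cannot be looked up without the index.
+ */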
+static int load_patch_target(struct strbuf *buf,
+ struct cache_entry *ce,
+ struct stat *st,
+ const char *name,
+ unsigned expected_mode)
+{
+ if (cached) {
+ if (read_file_or_gitlink(ce, buf))
+ return error(_("read of %s failed"), name);
+ } else if (name) {
+ if (S_ISGITLINK(expected_mode)) {
+ if (ce)
+ return read_file_or_gitlink(ce, buf);
+ else
+ return SUBMODULE_PATCH_WITHOUT_INDEX;
+ } else {
+ if (read_old_data(st, name, buf))
+ return error(_("read of %s failed"), name);
+ }
+ }
+ return 0;
+}
+
+/*
+ * We are about to apply "patch"; populate the "image" with the
+ * current version we have, from the working tree or from the index,
+ * depending on the situation e.g. --cached/--index. If we are
+ * applying a non-git patch that incrementally updates the tree,
+ * we read from the result of a previous diff.
+ */
+static int load_preimage(struct image *image,
+ struct patch *patch, struct stat *st, struct cache_entry *ce)
{
struct strbuf buf = STRBUF_INIT;
- struct image image;
size_t len;
char *img;
- struct patch *tpatch;
+ struct patch *previous;
+ int status;
- if (!(patch->is_copy || patch->is_rename) &&
- (tpatch = in_fn_table(patch->old_name)) != NULL && !to_be_deleted(tpatch)) {
- if (was_deleted(tpatch)) {
- return error(_("patch %s has been renamed/deleted"),
- patch->old_name);
- }
+ previous = previous_patch(patch, &status);
+ if (status)
+ return error(_("path %s has been renamed/deleted"),
+ patch->old_name);
+ if (previous) {
/* We have a patched copy in memory; use that. */
- strbuf_add(&buf, tpatch->result, tpatch->resultsize);
- } else if (cached) {
- if (read_file_or_gitlink(ce, &buf))
+ strbuf_add(&buf, previous->result, previous->resultsize);
+ } else {
+ status = load_patch_target(&buf, ce, st,
+ patch->old_name, patch->old_mode);
+ if (status < 0)
+ return status;
+ else if (status == SUBMODULE_PATCH_WITHOUT_INDEX) {
+ /*
+ * There is no way to apply subproject
+ * patch without looking at the index.
+ * NEEDSWORK: shouldn't this be flagged
+ * as an error???
+ */
+ free_fragment_list(patch->fragments);
+ patch->fragments = NULL;
+ } else if (status) {
return error(_("read of %s failed"), patch->old_name);
- } else if (patch->old_name) {
- if (S_ISGITLINK(patch->old_mode)) {
- if (ce) {
- read_file_or_gitlink(ce, &buf);
- } else {
- /*
- * There is no way to apply subproject
- * patch without looking at the index.
- * NEEDSWORK: shouldn't this be flagged
- * as an error???
- */
- free_fragment_list(patch->fragments);
- patch->fragments = NULL;
- }
- } else {
- if (read_old_data(st, patch->old_name, &buf))
- return error(_("read of %s failed"), patch->old_name);
}
}
img = strbuf_detach(&buf, &len);
- prepare_image(&image, img, len, !patch->is_binary);
+ prepare_image(image, img, len, !patch->is_binary);
+ return 0;
+}
- if (apply_fragments(&image, patch) < 0)
- return -1; /* note with --reject this succeeds. */
- patch->result = image.buf;
- patch->resultsize = image.len;
- add_to_fn_table(patch);
- free(image.line_allocated);
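+/*
+ * Run an in-core three-way merge of blobs "ours" and "theirs" with
+ * "base" as the common ancestor, and replace "image" with the merged
+ * result.  Returns the ll_merge() status: negative on error, positive
+ * when the result contains conflict markers.
+ */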
+static int three_way_merge(struct image *image,
+ char *path,
+ const unsigned char *base,
+ const unsigned char *ours,
+ const unsigned char *theirs)
+{
+ mmfile_t base_file, our_file, their_file;
+ mmbuffer_t result = { NULL };
+ int status;
- if (0 < patch->is_delete && patch->resultsize)
- return error(_("removal patch leaves file contents"));
+ read_mmblob(&base_file, base);
+ read_mmblob(&our_file, ours);
+ read_mmblob(&their_file, theirs);
+ status = ll_merge(&result, path,
+ &base_file, "base",
+ &our_file, "ours",
+ &their_file, "theirs", NULL);
+ free(base_file.ptr);
+ free(our_file.ptr);
+ free(their_file.ptr);
+ if (status < 0 || !result.ptr) {
+ free(result.ptr);
+ return -1;
+ }
+ clear_image(image);
+ image->buf = result.ptr;
+ image->len = result.size;
+ return status;
+}
+
+/*
+ * When we fall back directly to an add/add three-way merge, we read
+ * from the current contents of new_name.  This function is called
+ * only in that case.
+ */
+static int load_current(struct image *image, struct patch *patch)
+{
+ struct strbuf buf = STRBUF_INIT;
+ int status, pos;
+ size_t len;
+ char *img;
+ struct stat st;
+ struct cache_entry *ce;
+ char *name = patch->new_name;
+ unsigned mode = patch->new_mode;
+
+ if (!patch->is_new)
+ die("BUG: patch to %s is not a creation", patch->old_name);
+
+ pos = cache_name_pos(name, strlen(name));
+ if (pos < 0)
+ return error(_("%s: does not exist in index"), name);
+ ce = active_cache[pos];
+ if (lstat(name, &st)) {
+ if (errno != ENOENT)
+ return error(_("%s: %s"), name, strerror(errno));
+ if (checkout_target(ce, &st))
+ return -1;
+ }
+ if (verify_index_match(ce, &st))
+ return error(_("%s: does not match index"), name);
+
+ status = load_patch_target(&buf, ce, &st, name, mode);
+ if (status < 0)
+ return status;
+ else if (status)
+ return -1;
+ img = strbuf_detach(&buf, &len);
+ prepare_image(image, img, len, !patch->is_binary);
return 0;
}
-static int check_to_create_blob(const char *new_name, int ok_if_exists)
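+/*
+ * Fall back to a three-way merge: reconstruct the preimage the patch
+ * was made for from the blob recorded on its "index" line (the base),
+ * apply the patch to it to get "theirs", take what we currently have
+ * as "ours", and merge the three, recording the stages in the patch
+ * when the result conflicts.
+ */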
+static int try_threeway(struct image *image, struct patch *patch,
+ struct stat *st, struct cache_entry *ce)
{
- struct stat nst;
- if (!lstat(new_name, &nst)) {
- if (S_ISDIR(nst.st_mode) || ok_if_exists)
- return 0;
- /*
- * A leading component of new_name might be a symlink
- * that is going to be removed with this patch, but
- * still pointing at somewhere that has the path.
- * In such a case, path "new_name" does not exist as
- * far as git is concerned.
- */
- if (has_symlink_leading_path(new_name, strlen(new_name)))
- return 0;
+ unsigned char pre_sha1[20], post_sha1[20], our_sha1[20];
+ struct strbuf buf = STRBUF_INIT;
+ size_t len;
+ int status;
+ char *img;
+ struct image tmp_image;
+
+ /* No point falling back to 3-way merge in these cases */
+ if (patch->is_delete ||
+ S_ISGITLINK(patch->old_mode) || S_ISGITLINK(patch->new_mode))
+ return -1;
- return error(_("%s: already exists in working directory"), new_name);
+ /* Preimage the patch was prepared for */
+ if (patch->is_new)
+ write_sha1_file("", 0, blob_type, pre_sha1);
+ else if (get_sha1(patch->old_sha1_prefix, pre_sha1) ||
+ read_blob_object(&buf, pre_sha1, patch->old_mode))
+ return error("repository lacks the necessary blob to fall back on 3-way merge.");
+
+ fprintf(stderr, "Falling back to three-way merge...\n");
+
+ img = strbuf_detach(&buf, &len);
+ prepare_image(&tmp_image, img, len, 1);
+ /* Apply the patch to get the post image */
+ if (apply_fragments(&tmp_image, patch) < 0) {
+ clear_image(&tmp_image);
+ return -1;
+ }
+ /* post_sha1[] is theirs */
+ write_sha1_file(tmp_image.buf, tmp_image.len, blob_type, post_sha1);
+ clear_image(&tmp_image);
+
+ /* our_sha1[] is ours */
+ if (patch->is_new) {
+ if (load_current(&tmp_image, patch))
+ return error("cannot read the current contents of '%s'",
+ patch->new_name);
+ } else {
+ if (load_preimage(&tmp_image, patch, st, ce))
+ return error("cannot read the current contents of '%s'",
+ patch->old_name);
+ }
+ write_sha1_file(tmp_image.buf, tmp_image.len, blob_type, our_sha1);
+ clear_image(&tmp_image);
+
+ /* in-core three-way merge between post and our using pre as base */
+ status = three_way_merge(image, patch->new_name,
+ pre_sha1, our_sha1, post_sha1);
+ if (status < 0) {
+ fprintf(stderr, "Failed to fall back on three-way merge...\n");
+ return status;
+ }
+
+ if (status) {
+ patch->conflicted_threeway = 1;
+ if (patch->is_new)
+ hashclr(patch->threeway_stage[0]);
+ else
+ hashcpy(patch->threeway_stage[0], pre_sha1);
+ hashcpy(patch->threeway_stage[1], our_sha1);
+ hashcpy(patch->threeway_stage[2], post_sha1);
+ fprintf(stderr, "Applied patch to '%s' with conflicts.\n", patch->new_name);
+ } else {
+ fprintf(stderr, "Applied patch to '%s' cleanly.\n", patch->new_name);
}
- else if ((errno != ENOENT) && (errno != ENOTDIR))
- return error("%s: %s", new_name, strerror(errno));
return 0;
}
-static int verify_index_match(struct cache_entry *ce, struct stat *st)
+static int apply_data(struct patch *patch, struct stat *st, struct cache_entry *ce)
{
- if (S_ISGITLINK(ce->ce_mode)) {
- if (!S_ISDIR(st->st_mode))
+ struct image image;
+
+ if (load_preimage(&image, patch, st, ce) < 0)
+ return -1;
+
+ if (patch->direct_to_threeway ||
+ apply_fragments(&image, patch) < 0) {
+ /* Note: with --reject, apply_fragments() returns 0 */
+ if (!threeway || try_threeway(&image, patch, st, ce) < 0)
return -1;
- return 0;
}
- return ce_match_stat(ce, st, CE_MATCH_IGNORE_VALID|CE_MATCH_IGNORE_SKIP_WORKTREE);
+ patch->result = image.buf;
+ patch->resultsize = image.len;
+ add_to_fn_table(patch);
+ free(image.line_allocated);
+
+ if (0 < patch->is_delete && patch->resultsize)
+ return error(_("removal patch leaves file contents"));
+
+ return 0;
}
+/*
+ * If "patch" that we are looking at modifies or deletes what we have,
+ * we would want it not to lose any local modification we have, either
+ * in the working tree or in the index.
+ *
+ * This also decides if a non-git patch is a creation patch or a
+ * modification to an existing empty file. We do not check the state
+ * of the current tree for a creation patch in this function; the caller
+ * check_patch() separately makes sure (and errors out otherwise) that
+ * the path the patch creates does not exist in the current tree.
+ */
static int check_preimage(struct patch *patch, struct cache_entry **ce, struct stat *st)
{
const char *old_name = patch->old_name;
- struct patch *tpatch = NULL;
- int stat_ret = 0;
+ struct patch *previous = NULL;
+ int stat_ret = 0, status;
unsigned st_mode = 0;
- /*
- * Make sure that we do not have local modifications from the
- * index when we are looking at the index. Also make sure
- * we have the preimage file to be patched in the work tree,
- * unless --cached, which tells git to apply only in the index.
- */
if (!old_name)
return 0;
assert(patch->is_new <= 0);
+ previous = previous_patch(patch, &status);
- if (!(patch->is_copy || patch->is_rename) &&
- (tpatch = in_fn_table(old_name)) != NULL && !to_be_deleted(tpatch)) {
- if (was_deleted(tpatch))
- return error(_("%s: has been deleted/renamed"), old_name);
- st_mode = tpatch->new_mode;
+ if (status)
+ return error(_("path %s has been renamed/deleted"), old_name);
+ if (previous) {
+ st_mode = previous->new_mode;
} else if (!cached) {
stat_ret = lstat(old_name, st);
if (stat_ret && errno != ENOENT)
return error(_("%s: %s"), old_name, strerror(errno));
}
- if (to_be_deleted(tpatch))
- tpatch = NULL;
-
- if (check_index && !tpatch) {
+ if (check_index && !previous) {
int pos = cache_name_pos(old_name, strlen(old_name));
if (pos < 0) {
if (patch->is_new < 0)
}
*ce = active_cache[pos];
if (stat_ret < 0) {
- struct checkout costate;
- /* checkout */
- memset(&costate, 0, sizeof(costate));
- costate.base_dir = "";
- costate.refresh_cache = 1;
- if (checkout_entry(*ce, &costate, NULL) ||
- lstat(old_name, st))
+ if (checkout_target(*ce, st))
return -1;
}
if (!cached && verify_index_match(*ce, st))
return error(_("%s: %s"), old_name, strerror(errno));
}
- if (!cached && !tpatch)
+ if (!cached && !previous)
st_mode = ce_mode_from_stat(*ce, st->st_mode);
if (patch->is_new < 0)
return 0;
}
+
+#define EXISTS_IN_INDEX 1
+#define EXISTS_IN_WORKTREE 2
+
+static int check_to_create(const char *new_name, int ok_if_exists)
+{
+ struct stat nst;
+
+ if (check_index &&
+ cache_name_pos(new_name, strlen(new_name)) >= 0 &&
+ !ok_if_exists)
+ return EXISTS_IN_INDEX;
+ if (cached)
+ return 0;
+
+ if (!lstat(new_name, &nst)) {
+ if (S_ISDIR(nst.st_mode) || ok_if_exists)
+ return 0;
+ /*
+ * A leading component of new_name might be a symlink
+ * that is going to be removed with this patch, but
+ * still pointing at somewhere that has the path.
+ * In such a case, path "new_name" does not exist as
+ * far as git is concerned.
+ */
+ if (has_symlink_leading_path(new_name, strlen(new_name)))
+ return 0;
+
+ return EXISTS_IN_WORKTREE;
+ } else if ((errno != ENOENT) && (errno != ENOTDIR)) {
+ return error("%s: %s", new_name, strerror(errno));
+ }
+ return 0;
+}
+
/*
* Check and apply the patch in-core; leave the result in patch->result
* for the caller to write it out to the final destination.
return status;
old_name = patch->old_name;
+ /*
+ * A type-change diff is always split into a patch to delete
+ * old, immediately followed by a patch to create new (see
+ * diff.c::run_diff()); in such a case it is Ok that the entry
+ * to be deleted by the previous patch is still in the working
+ * tree and in the index.
+ *
+ * A patch to swap-rename between A and B would first rename A
+ * to B and then rename B to A. While applying the first one,
+	 * the presence of B should not stop A from getting renamed to
+ * B; ask to_be_deleted() about the later rename. Removal of
+ * B and rename from A to B is handled the same way by asking
+ * was_deleted().
+ */
if ((tpatch = in_fn_table(new_name)) &&
- (was_deleted(tpatch) || to_be_deleted(tpatch)))
- /*
- * A type-change diff is always split into a patch to
- * delete old, immediately followed by a patch to
- * create new (see diff.c::run_diff()); in such a case
- * it is Ok that the entry to be deleted by the
- * previous patch is still in the working tree and in
- * the index.
- */
+ (was_deleted(tpatch) || to_be_deleted(tpatch)))
ok_if_exists = 1;
else
ok_if_exists = 0;
if (new_name &&
((0 < patch->is_new) | (0 < patch->is_rename) | patch->is_copy)) {
- if (check_index &&
- cache_name_pos(new_name, strlen(new_name)) >= 0 &&
- !ok_if_exists)
+ int err = check_to_create(new_name, ok_if_exists);
+
+ if (err && threeway) {
+ patch->direct_to_threeway = 1;
+ } else switch (err) {
+ case 0:
+ break; /* happy */
+ case EXISTS_IN_INDEX:
return error(_("%s: already exists in index"), new_name);
- if (!cached) {
- int err = check_to_create_blob(new_name, ok_if_exists);
- if (err)
- return err;
+ break;
+ case EXISTS_IN_WORKTREE:
+ return error(_("%s: already exists in working directory"),
+ new_name);
+ default:
+ return err;
}
+
if (!patch->new_mode) {
if (0 < patch->is_new)
patch->new_mode = S_IFREG | 0644;
name = patch->old_name ? patch->old_name : patch->new_name;
if (0 < patch->is_new)
continue;
- else if (get_sha1(patch->old_sha1_prefix, sha1))
+ else if (get_sha1_blob(patch->old_sha1_prefix, sha1))
/* git diff has no index line for mode/type changes */
if (!patch->lines_added && !patch->lines_deleted) {
if (get_current_sha1(patch->old_name, sha1))
die_errno(_("unable to write file '%s' mode %o"), path, mode);
}
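+/*
+ * Replace the stage #0 index entry of a conflicted path with the
+ * base/ours/theirs blobs of the three-way merge at stages #1 to #3,
+ * so that the conflict can be inspected and resolved as usual.
+ */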
+static void add_conflicted_stages_file(struct patch *patch)
+{
+ int stage, namelen;
+ unsigned ce_size, mode;
+ struct cache_entry *ce;
+
+ if (!update_index)
+ return;
+ namelen = strlen(patch->new_name);
+ ce_size = cache_entry_size(namelen);
+ mode = patch->new_mode ? patch->new_mode : (S_IFREG | 0644);
+
+ remove_file_from_cache(patch->new_name);
+ for (stage = 1; stage < 4; stage++) {
+ if (is_null_sha1(patch->threeway_stage[stage - 1]))
+ continue;
+ ce = xcalloc(1, ce_size);
+ memcpy(ce->name, patch->new_name, namelen);
+ ce->ce_mode = create_ce_mode(mode);
+ ce->ce_flags = create_ce_flags(namelen, stage);
+ hashcpy(ce->sha1, patch->threeway_stage[stage - 1]);
+ if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD) < 0)
+ die(_("unable to add cache entry for %s"), patch->new_name);
+ }
+}
+
static void create_file(struct patch *patch)
{
char *path = patch->new_name;
if (!mode)
mode = S_IFREG | 0644;
create_one_file(path, mode, buf, size);
- add_index_file(path, mode, buf, size);
+
+ if (patch->conflicted_threeway)
+ add_conflicted_stages_file(patch);
+ else
+ add_index_file(path, mode, buf, size);
}
/* phase zero is to remove, phase one is to create */
int phase;
int errs = 0;
struct patch *l;
+ struct string_list cpath = STRING_LIST_INIT_DUP;
for (phase = 0; phase < 2; phase++) {
l = list;
errs = 1;
else {
write_out_one_result(l, phase);
- if (phase == 1 && write_out_one_reject(l))
- errs = 1;
+ if (phase == 1) {
+ if (write_out_one_reject(l))
+ errs = 1;
+ if (l->conflicted_threeway) {
+ string_list_append(&cpath, l->new_name);
+ errs = 1;
+ }
+ }
}
l = l->next;
}
}
+
+ if (cpath.nr) {
+ struct string_list_item *item;
+
+ sort_string_list(&cpath);
+ for_each_string_list_item(item, &cpath)
+ fprintf(stderr, "U %s\n", item->string);
+ string_list_clear(&cpath, 0);
+
+ rerere(0);
+ }
+
return errs;
}
!apply_with_reject)
exit(1);
- if (apply && write_out_results(list))
- exit(1);
+ if (apply && write_out_results(list)) {
+ if (apply_with_reject)
+ exit(1);
+ /* with --3way, we still need to write the index out */
+ return 1;
+ }
if (fake_ancestor)
build_fake_ancestor(list, fake_ancestor);
N_("apply a patch without touching the working tree")),
OPT_BOOLEAN(0, "apply", &force_apply,
N_("also apply the patch (use with --stat/--summary/--check)")),
+	OPT_BOOL('3', "3way", &threeway,
+		N_("attempt three-way merge if a patch does not apply")),
OPT_FILENAME(0, "build-fake-ancestor", &fake_ancestor,
N_("build a temporary index based on embedded index information")),
{ OPTION_CALLBACK, 'z', NULL, NULL, NULL,
argc = parse_options(argc, argv, prefix, builtin_apply_options,
apply_usage, 0);
+ if (apply_with_reject && threeway)
+ die("--reject and --3way cannot be used together.");
+ if (cached && threeway)
+ die("--cached and --3way cannot be used together.");
+ if (threeway) {
+ if (is_not_gitdir)
+ die(_("--3way outside a repository"));
+ check_index = 1;
+ }
if (apply_with_reject)
apply = apply_verbosely = 1;
if (!force_apply && (diffstat || numstat || summary || check || fake_ancestor))
unsigned long size;
struct object_context obj_context;
- if (get_sha1_with_context(obj_name, sha1, &obj_context))
+ if (get_sha1_with_context(obj_name, 0, sha1, &obj_context))
die("Not a valid object name %s", obj_name);
buf = NULL;
if (safe_create_leading_directories_const(work_tree) < 0)
die_errno(_("could not create leading directories of '%s'"),
work_tree);
- if (!dest_exists && mkdir(work_tree, 0755))
+ if (!dest_exists && mkdir(work_tree, 0777))
die_errno(_("could not create work tree dir '%s'."),
work_tree);
set_git_work_tree(work_tree);
if (argc < 2 || !strcmp(argv[1], "-h"))
usage(commit_tree_usage);
- if (get_sha1(argv[1], tree_sha1))
- die("Not a valid object name %s", argv[1]);
+ if (get_sha1_tree(argv[1], tree_sha1))
+ die("Not a valid tree object name %s", argv[1]);
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
unsigned char sha1[20];
if (argc <= ++i)
usage(commit_tree_usage);
- if (get_sha1(argv[i], sha1))
+ if (get_sha1_commit(argv[i], sha1))
die("Not a valid object name %s", argv[i]);
assert_sha1_type(sha1, OBJ_COMMIT);
new_parent(lookup_commit(sha1), &parents);
continue;
}
- if (get_sha1(arg, tree_sha1))
+ if (get_sha1_tree(arg, tree_sha1))
die("Not a valid object name %s", arg);
if (got_tree)
die("Cannot give more than one trees");
int i;
char *m;
+ if (!pattern)
+ return 0;
+
for (i = 0; pattern[i]; i++)
;
m = xcalloc(1, i);
* and create commit from the_index.
* We still need to refresh the index here.
*/
- if (!pathspec || !*pathspec) {
+ if (!only && (!pathspec || !*pathspec)) {
fd = hold_locked_index(&index_lock, 1);
refresh_cache_or_die(refresh_flags);
if (active_cache_changed) {
hook_arg1 = "message";
} else if (use_message) {
buffer = strstr(use_message_buffer, "\n\n");
- if (!buffer || buffer[2] == '\0')
+ if (!use_editor && (!buffer || buffer[2] == '\0'))
die(_("commit has empty message"));
strbuf_add(&sb, buffer + 2, strlen(buffer + 2));
hook_arg1 = "commit";
home_config_paths(&user_config, &xdg_config, "config");
- if (access(user_config, R_OK) && !access(xdg_config, R_OK))
+ if (!user_config)
+ /*
+ * It is unknown if HOME/.gitconfig exists, so
+ * we do not know if we should write to XDG
+ * location; error out even if XDG_CONFIG_HOME
+ * is set and points at a sane location.
+ */
+ die("$HOME not set");
+
+ if (access(user_config, R_OK) &&
+ xdg_config && !access(xdg_config, R_OK))
given_config_file = xdg_config;
- else if (user_config)
- given_config_file = user_config;
else
- die("$HOME not set");
+ given_config_file = user_config;
}
else if (use_system_config)
given_config_file = git_etc_gitconfig();
stream.avail_out = consume ? 64*1024 : obj->size;
do {
- unsigned char *last_out = stream.next_out;
ssize_t n = (len < 64*1024) ? len : 64*1024;
n = pread(pack_fd, inbuf, n, from);
if (n < 0)
len -= n;
stream.next_in = inbuf;
stream.avail_in = n;
- status = git_inflate(&stream, 0);
- if (consume) {
- if (consume(last_out, stream.next_out - last_out, cb_data)) {
- free(inbuf);
- free(data);
- return NULL;
- }
- stream.next_out = data;
- stream.avail_out = 64*1024;
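+		/*
+		 * When a consume() callback is given, inflate into the
+		 * 64kB window repeatedly and hand each filled chunk to
+		 * the callback, so the whole object never needs to be
+		 * held in core at once.
+		 */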
+ if (!consume)
+ status = git_inflate(&stream, 0);
+ else {
+ do {
+ status = git_inflate(&stream, 0);
+ if (consume(data, stream.next_out - data, cb_data)) {
+ free(inbuf);
+ free(data);
+ return NULL;
+ }
+ stream.next_out = data;
+ stream.avail_out = 64*1024;
+ } while (status == Z_OK && stream.avail_in);
}
} while (len && status == Z_OK && !stream.avail_in);
strcpy(path + len, "CoNfIg");
if (!access(path, F_OK))
git_config_set("core.ignorecase", "true");
+ probe_utf8_pathname_composition(path, len);
}
return reinit;
rev.simplify_history = 0;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
opt.tweak = show_rev_tweak_rev;
cmd_log_init(argc, argv, prefix, &rev, &opt);
+ if (!rev.no_walk)
+ return cmd_log_walk(&rev);
+
count = rev.pending.nr;
objects = rev.pending.objects;
for (i = 0; i < count && !ret; i++) {
rev.always_show_header = 1;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
return cmd_log_walk(&rev);
}
rev.subject_prefix = fmt_patch_subject_prefix;
memset(&s_r_opt, 0, sizeof(s_r_opt));
s_r_opt.def = "HEAD";
+ s_r_opt.revarg_opt = REVARG_COMMITTISH;
if (default_attach) {
rev.mime_boundary = default_attach;
}
die("not a rev '%s'", line);
}
- if (handle_revision_arg(line, &revs, flags, 1))
+ if (handle_revision_arg(line, &revs, flags, REVARG_CANNOT_BE_FILENAME))
die("bad revision '%s'", line);
}
* Otherwise, argv[i] could be either <rev> or <paths> and
* has to be unambiguous.
*/
- else if (!get_sha1(argv[i], sha1)) {
+ else if (!get_sha1_committish(argv[i], sha1)) {
/*
* Ok, argv[i] looks like a rev; it should not
* be a filename.
}
}
- if (get_sha1(rev, sha1))
+ if (get_sha1_committish(rev, sha1))
die(_("Failed to resolve '%s' as a valid ref."), rev);
+ /*
+ * NOTE: As "git reset $treeish -- $path" should be usable on
+ * any tree-ish, this is not strictly correct. We are not
+ * moving the HEAD to any commit; we are merely resetting the
+	 * entries in the index to those of a treeish.
+ */
commit = lookup_commit_reference(sha1);
if (!commit)
die(_("Could not parse object '%s'."), rev);
return 0;
}
+static int show_abbrev(const unsigned char *sha1, void *cb_data)
+{
+ show_rev(NORMAL, sha1, NULL);
+ return 0;
+}
+
static void show_datestring(const char *flag, const char *datestr)
{
static char buffer[100];
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
- if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
+ if (!get_sha1_committish(this, sha1) && !get_sha1_committish(next, end)) {
show_rev(NORMAL, end, next);
show_rev(symmetric ? NORMAL : REVERSED, sha1, this);
if (symmetric) {
return 0;
*dotdot = 0;
- if (get_sha1(arg, sha1))
+ if (get_sha1_committish(arg, sha1))
return 0;
if (!parents_only)
for_each_ref(show_reference, NULL);
continue;
}
+ if (!prefixcmp(arg, "--disambiguate=")) {
+ for_each_abbrev(arg + 15, show_abbrev, NULL);
+ continue;
+ }
if (!strcmp(arg, "--bisect")) {
for_each_ref_in("refs/bisect/bad", show_reference, NULL);
for_each_ref_in("refs/bisect/good", anti_reference, NULL);
extern int fsync_object_files;
extern int core_preload_index;
extern int core_apply_sparse_checkout;
+extern int precomposed_unicode;
enum branch_track {
BRANCH_TRACK_UNSPECIFIED = -1,
unsigned mode;
};
+#define GET_SHA1_QUIETLY 01
+#define GET_SHA1_COMMIT 02
+#define GET_SHA1_COMMITTISH 04
+#define GET_SHA1_TREE 010
+#define GET_SHA1_TREEISH 020
+#define GET_SHA1_BLOB 040
+#define GET_SHA1_ONLY_TO_DIE 04000
+
extern int get_sha1(const char *str, unsigned char *sha1);
-extern int get_sha1_with_mode_1(const char *str, unsigned char *sha1, unsigned *mode, int only_to_die, const char *prefix);
-static inline int get_sha1_with_mode(const char *str, unsigned char *sha1, unsigned *mode)
-{
- return get_sha1_with_mode_1(str, sha1, mode, 0, NULL);
-}
-extern int get_sha1_with_context_1(const char *name, unsigned char *sha1, struct object_context *orc, int only_to_die, const char *prefix);
-static inline int get_sha1_with_context(const char *str, unsigned char *sha1, struct object_context *orc)
-{
- return get_sha1_with_context_1(str, sha1, orc, 0, NULL);
-}
+extern int get_sha1_commit(const char *str, unsigned char *sha1);
+extern int get_sha1_committish(const char *str, unsigned char *sha1);
+extern int get_sha1_tree(const char *str, unsigned char *sha1);
+extern int get_sha1_treeish(const char *str, unsigned char *sha1);
+extern int get_sha1_blob(const char *str, unsigned char *sha1);
+extern void maybe_die_on_misspelt_object_name(const char *name, const char *prefix);
+extern int get_sha1_with_context(const char *str, unsigned flags, unsigned char *sha1, struct object_context *orc);
+
+typedef int each_abbrev_fn(const unsigned char *sha1, void *);
+extern int for_each_abbrev(const char *prefix, each_abbrev_fn, void *);
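+/*
+ * Illustration (not part of the API): a caller that must resolve a
+ * name to a tree-ish could do
+ *
+ *	struct object_context oc;
+ *	unsigned char sha1[20];
+ *	if (get_sha1_with_context(name, GET_SHA1_TREEISH, sha1, &oc))
+ *		die("not a tree-ish: %s", name);
+ *
+ * get_sha1_treeish() above is shorthand for this pattern.
+ */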
/*
* Try to read a SHA1 in hexadecimal format from the 40 characters
unsigned char sha1[20];
struct commit *commit;
- if (get_sha1(name, sha1))
+ if (get_sha1_committish(name, sha1))
return NULL;
commit = lookup_commit_reference(sha1);
if (!commit || parse_commit(commit))
--- /dev/null
+/*
+ * Converts filenames from decomposed unicode into precomposed unicode.
+ * Used on MacOS X.
+ */
+
+
+#define PRECOMPOSE_UNICODE_C
+
+#include "cache.h"
+#include "utf8.h"
+#include "precompose_utf8.h"
+
+typedef char *iconv_ibp;
+static const char *repo_encoding = "UTF-8";
+static const char *path_encoding = "UTF-8-MAC";
+
+
+static size_t has_utf8(const char *s, size_t maxlen, size_t *strlen_c)
+{
+ const uint8_t *utf8p = (const uint8_t*) s;
+ size_t strlen_chars = 0;
+ size_t ret = 0;
+
+ if ((!utf8p) || (!*utf8p)) {
+ return 0;
+ }
+
+	while ((*utf8p) && maxlen) {
+ if (*utf8p & 0x80)
+ ret++;
+ strlen_chars++;
+ utf8p++;
+ maxlen--;
+ }
+ if (strlen_c)
+ *strlen_c = strlen_chars;
+
+ return ret;
+}
+
+
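+/*
+ * Create a probe file whose name contains a precomposed a-umlaut
+ * (NFC); if the same file is then reachable under the decomposed
+ * (NFD) spelling, the filesystem decomposes filenames.  In that case
+ * record core.precomposeunicode as false, both to stay backward
+ * compatible and to show the user the knob that can be set to true.
+ */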
+void probe_utf8_pathname_composition(char *path, int len)
+{
+	static const char *auml_nfc = "\xc3\xa4";
+	static const char *auml_nfd = "\x61\xcc\x88";
+ int output_fd;
+ if (precomposed_unicode != -1)
+ return; /* We found it defined in the global config, respect it */
+ path[len] = 0;
+ strcpy(path + len, auml_nfc);
+ output_fd = open(path, O_CREAT|O_EXCL|O_RDWR, 0600);
+	if (output_fd >= 0) {
+ close(output_fd);
+ path[len] = 0;
+ strcpy(path + len, auml_nfd);
+		/* Indicate to the user that we can configure it to true */
+ if (0 == access(path, R_OK))
+ git_config_set("core.precomposeunicode", "false");
+ /* To be backward compatible, set precomposed_unicode to 0 */
+ precomposed_unicode = 0;
+ path[len] = 0;
+ strcpy(path + len, auml_nfc);
+ unlink(path);
+ }
+}
+
+
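+/*
+ * Rewrite the command-line arguments in place from decomposed (NFD)
+ * to precomposed (NFC) UTF-8.  A no-op unless core.precomposeunicode
+ * is true; arguments that contain no bytes with the high bit set are
+ * left untouched.
+ */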
+void precompose_argv(int argc, const char **argv)
+{
+ int i = 0;
+ const char *oldarg;
+ char *newarg;
+ iconv_t ic_precompose;
+
+ if (precomposed_unicode != 1)
+ return;
+
+ ic_precompose = iconv_open(repo_encoding, path_encoding);
+ if (ic_precompose == (iconv_t) -1)
+ return;
+
+ while (i < argc) {
+ size_t namelen;
+ oldarg = argv[i];
+ if (has_utf8(oldarg, (size_t)-1, &namelen)) {
+ newarg = reencode_string_iconv(oldarg, namelen, ic_precompose);
+ if (newarg)
+ argv[i] = newarg;
+ }
+ i++;
+ }
+ iconv_close(ic_precompose);
+}
+
+
+PREC_DIR *precompose_utf8_opendir(const char *dirname)
+{
+ PREC_DIR *prec_dir = xmalloc(sizeof(PREC_DIR));
+ prec_dir->dirent_nfc = xmalloc(sizeof(dirent_prec_psx));
+ prec_dir->dirent_nfc->max_name_len = sizeof(prec_dir->dirent_nfc->d_name);
+
+ prec_dir->dirp = opendir(dirname);
+ if (!prec_dir->dirp) {
+ free(prec_dir->dirent_nfc);
+ free(prec_dir);
+ return NULL;
+ } else {
+ int ret_errno = errno;
+ prec_dir->ic_precompose = iconv_open(repo_encoding, path_encoding);
+ /* if iconv_open() fails, die() in readdir() if needed */
+ errno = ret_errno;
+ }
+
+ return prec_dir;
+}
+
+struct dirent_prec_psx *precompose_utf8_readdir(PREC_DIR *prec_dir)
+{
+ struct dirent *res;
+ res = readdir(prec_dir->dirp);
+ if (res) {
+ size_t namelenz = strlen(res->d_name) + 1; /* \0 */
+ size_t new_maxlen = namelenz;
+
+ int ret_errno = errno;
+
+ if (new_maxlen > prec_dir->dirent_nfc->max_name_len) {
+ size_t new_len = sizeof(dirent_prec_psx) + new_maxlen -
+ sizeof(prec_dir->dirent_nfc->d_name);
+
+ prec_dir->dirent_nfc = xrealloc(prec_dir->dirent_nfc, new_len);
+ prec_dir->dirent_nfc->max_name_len = new_maxlen;
+ }
+
+ prec_dir->dirent_nfc->d_ino = res->d_ino;
+ prec_dir->dirent_nfc->d_type = res->d_type;
+
+ if ((precomposed_unicode == 1) && has_utf8(res->d_name, (size_t)-1, NULL)) {
+ if (prec_dir->ic_precompose == (iconv_t)-1) {
+ die("iconv_open(%s,%s) failed, but needed:\n"
+ " precomposed unicode is not supported.\n"
+			    "    If you want to use decomposed unicode, run\n"
+ " \"git config core.precomposeunicode false\"\n",
+ repo_encoding, path_encoding);
+ } else {
+ iconv_ibp cp = (iconv_ibp)res->d_name;
+ size_t inleft = namelenz;
+ char *outpos = &prec_dir->dirent_nfc->d_name[0];
+ size_t outsz = prec_dir->dirent_nfc->max_name_len;
+ size_t cnt;
+ errno = 0;
+ cnt = iconv(prec_dir->ic_precompose, &cp, &inleft, &outpos, &outsz);
+ if (errno || inleft) {
+ /*
+ * iconv() failed and errno could be E2BIG, EILSEQ, EINVAL, EBADF
+				 * MacOS X avoids illegal byte sequences.
+				 * If they occur on a mounted drive (e.g. NFS) it is not
+				 * worth dying for; rather let the user see the original name
+ */
+ namelenz = 0; /* trigger strlcpy */
+ }
+ }
+ }
+ else
+ namelenz = 0;
+
+ if (!namelenz)
+ strlcpy(prec_dir->dirent_nfc->d_name, res->d_name,
+ prec_dir->dirent_nfc->max_name_len);
+
+ errno = ret_errno;
+ return prec_dir->dirent_nfc;
+ }
+ return NULL;
+}
+
+
+int precompose_utf8_closedir(PREC_DIR *prec_dir)
+{
+ int ret_value;
+ int ret_errno;
+ ret_value = closedir(prec_dir->dirp);
+ ret_errno = errno;
+ if (prec_dir->ic_precompose != (iconv_t)-1)
+ iconv_close(prec_dir->ic_precompose);
+ free(prec_dir->dirent_nfc);
+ free(prec_dir);
+ errno = ret_errno;
+ return ret_value;
+}
--- /dev/null
+#ifndef PRECOMPOSE_UNICODE_H
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <dirent.h>
+#include <iconv.h>
+
+
+typedef struct dirent_prec_psx {
+ ino_t d_ino; /* POSIX */
+ size_t max_name_len; /* See below */
+ unsigned char d_type; /* available on all systems git runs on */
+
+ /*
+ * See http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/dirent.h.html
+ * NAME_MAX + 1 should be enough, but some systems have
+ * NAME_MAX=255 and strlen(d_name) may return 508 or 510
+ * Solution: allocate more when needed, see precompose_utf8_readdir()
+ */
+ char d_name[NAME_MAX+1];
+} dirent_prec_psx;
+
+
+typedef struct {
+ iconv_t ic_precompose;
+ DIR *dirp;
+ struct dirent_prec_psx *dirent_nfc;
+} PREC_DIR;
+
+void precompose_argv(int argc, const char **argv);
+void probe_utf8_pathname_composition(char *, int);
+
+PREC_DIR *precompose_utf8_opendir(const char *dirname);
+struct dirent_prec_psx *precompose_utf8_readdir(PREC_DIR *dirp);
+int precompose_utf8_closedir(PREC_DIR *dirp);
+
+#ifndef PRECOMPOSE_UNICODE_C
+#define dirent dirent_prec_psx
+#define opendir(n) precompose_utf8_opendir(n)
+#define readdir(d) precompose_utf8_readdir(d)
+#define closedir(d) precompose_utf8_closedir(d)
+#define DIR PREC_DIR
+#endif /* PRECOMPOSE_UNICODE_C */
+
+#define PRECOMPOSE_UNICODE_H
+#endif /* PRECOMPOSE_UNICODE_H */
return 0;
}
+ if (!strcmp(var, "core.precomposeunicode")) {
+ precomposed_unicode = git_config_bool(var, value);
+ return 0;
+ }
+
/* Add other config variables here and to Documentation/config.txt. */
return 0;
}
found += 1;
}
- if (!access(xdg_config, R_OK)) {
+ if (xdg_config && !access(xdg_config, R_OK)) {
ret += git_config_from_file(fn, xdg_config, data);
found += 1;
}
- if (!access(user_config, R_OK)) {
+ if (user_config && !access(user_config, R_OK)) {
ret += git_config_from_file(fn, user_config, data);
found += 1;
}
--- /dev/null
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Build git-remote-mediawiki
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+ifndef PERL_PATH
+ PERL_PATH = /usr/bin/perl
+endif
+ifndef gitexecdir
+ gitexecdir = $(shell git --exec-path)
+endif
+
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+gitexecdir_SQ = $(subst ','\'',$(gitexecdir))
+SCRIPT = git-remote-mediawiki
+
+.PHONY: install help doc test clean
+
+help:
+ @echo 'This is the help target of the Makefile. Current configuration:'
+ @echo ' gitexecdir = $(gitexecdir_SQ)'
+ @echo ' PERL_PATH = $(PERL_PATH_SQ)'
+ @echo 'Run "$(MAKE) install" to install $(SCRIPT) in gitexecdir'
+ @echo 'Run "$(MAKE) test" to run the testsuite'
+
+install:
+ sed -e '1s|#!.*/perl|#!$(PERL_PATH_SQ)|' $(SCRIPT) \
+ > '$(gitexecdir_SQ)/$(SCRIPT)'
+ chmod +x '$(gitexecdir_SQ)/$(SCRIPT)'
+
+doc:
+ @echo 'Sorry, "make doc" is not implemented yet for $(SCRIPT)'
+
+test:
+ $(MAKE) -C t/ test
+
+clean:
+ $(RM) '$(gitexecdir_SQ)/$(SCRIPT)'
+ $(MAKE) -C t/ clean
#
# Known limitations:
#
-# - Only wiki pages are managed, no support for [[File:...]]
-# attachments.
-#
-# - Poor performance in the best case: it takes forever to check
-# whether we're up-to-date (on fetch or push) or to fetch a few
-# revisions from a large wiki, because we use exclusively a
-# page-based synchronization. We could switch to a wiki-wide
-# synchronization when the synchronization involves few revisions
-# but the wiki is large.
+# - Several strategies are provided to fetch modifications from the
+#   wiki, but no automatic heuristic is provided; the user has
+#   to understand and choose which strategy is appropriate.
#
# - Git renames could be turned into MediaWiki renames (see TODO
# below)
use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;
-use encoding 'utf8';
-# use encoding 'utf8' doesn't change STDERROR
-# but we're going to output UTF-8 filenames to STDERR
+# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
+binmode STDOUT, ":utf8";
use URI::Escape;
use IPC::Open2;
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
+# Import media files too.
+my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq "true");
+
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when the wiki has lots of pages and we fetch often
+# enough that each fetch concerns only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revisions
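+# Example: "git config remote.origin.fetchStrategy by_rev" ("origin" standing for the remote's name).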
+my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
+unless ($fetch_strategy) {
+ $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+}
+chomp($fetch_strategy);
+unless ($fetch_strategy) {
+ $fetch_strategy = "by_page";
+}
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
}
}
+## Functions for listing pages on the remote wiki
+sub get_mw_tracked_pages {
+ my $pages = shift;
+ get_mw_page_list(\@tracked_pages, $pages);
+}
+
+sub get_mw_page_list {
+ my $page_list = shift;
+ my $pages = shift;
+ my @some_pages = @$page_list;
+ while (@some_pages) {
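+ # query in slices of at most 50 titles: the MediaWiki API caps title lists at 50 per request for regular users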
+ my $last = 50;
+ if ($#some_pages < $last) {
+ $last = $#some_pages;
+ }
+ my @slice = @some_pages[0..$last];
+ get_mw_first_pages(\@slice, $pages);
+ @some_pages = @some_pages[51..$#some_pages];
+ }
+}
+
+sub get_mw_tracked_categories {
+ my $pages = shift;
+ foreach my $category (@tracked_categories) {
+ if (index($category, ':') < 0) {
+ # Mediawiki requires the Category
+ # prefix, but let's not force the user
+ # to specify it.
+ $category = "Category:" . $category;
+ }
+ my $mw_pages = $mediawiki->list( {
+ action => 'query',
+ list => 'categorymembers',
+ cmtitle => $category,
+ cmlimit => 'max' } )
+ || die $mediawiki->{error}->{code} . ': '
+ . $mediawiki->{error}->{details};
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+ }
+}
+
+sub get_mw_all_pages {
+ my $pages = shift;
+ # No user-provided list, get the list of pages from the API.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of wiki pages.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+# Queries the wiki for a set of pages. Meant to be used within a loop
+# querying the wiki for slices of a page list.
sub get_mw_first_pages {
my $some_pages = shift;
my @some_pages = @{$some_pages};
}
}
+# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
mw_connect_maybe();
$user_defined = 1;
# The user provided a list of page titles, but we
# still need to query the API to get the page IDs.
-
- my @some_pages = @tracked_pages;
- while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
- }
- my @slice = @some_pages[0..$last];
- get_mw_first_pages(\@slice, \%pages);
- @some_pages = @some_pages[51..$#some_pages];
- }
+ get_mw_tracked_pages(\%pages);
}
if (@tracked_categories) {
$user_defined = 1;
- foreach my $category (@tracked_categories) {
- if (index($category, ':') < 0) {
- # Mediawiki requires the Category
- # prefix, but let's not force the user
- # to specify it.
- $category = "Category:" . $category;
- }
- my $mw_pages = $mediawiki->list( {
- action => 'query',
- list => 'categorymembers',
- cmtitle => $category,
- cmlimit => 'max' } )
- || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
- foreach my $page (@{$mw_pages}) {
- $pages{$page->{title}} = $page;
- }
- }
+ get_mw_tracked_categories(\%pages);
}
if (!$user_defined) {
- # No user-provided list, get the list of pages from
- # the API.
- my $mw_pages = $mediawiki->list({
- action => 'query',
- list => 'allpages',
- aplimit => 500,
- });
- if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
- }
- foreach my $page (@{$mw_pages}) {
- $pages{$page->{title}} = $page;
+ get_mw_all_pages(\%pages);
+ }
+ if ($import_media) {
+ print STDERR "Getting media files for selected pages...\n";
+ if ($user_defined) {
+ get_linked_mediafiles(\%pages);
+ } else {
+ get_all_mediafiles(\%pages);
}
}
- return values(%pages);
+ return %pages;
}
+# usage: $out = run_git("command args");
+# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
- open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
+ my $args = shift;
+ my $encoding = (shift || "encoding(UTF-8)");
+ open(my $git, "-|:$encoding", "git " . $args);
my $res = do { local $/; <$git> };
close($git);
}
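
A quick sketch of the two call forms of run_git() above (illustrative
only; $sha1 stands in for some blob id and is not part of the patch):

    my $branch = run_git("symbolic-ref --short HEAD");  # output decoded as UTF-8
    my $blob = run_git("cat-file blob $sha1", "raw");   # raw bytes, e.g. image content
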
+sub get_all_mediafiles {
+ my $pages = shift;
+ # Get the list of media file pages from the API; they live in a
+ # different namespace, and only one namespace can be queried at a
+ # time.
+ my $mw_pages = $mediawiki->list({
+ action => 'query',
+ list => 'allpages',
+ apnamespace => get_mw_namespace_id("File"),
+ aplimit => 'max'
+ });
+ if (!defined($mw_pages)) {
+ print STDERR "fatal: could not get the list of pages for media files.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ exit 1;
+ }
+ foreach my $page (@{$mw_pages}) {
+ $pages->{$page->{title}} = $page;
+ }
+}
+
+sub get_linked_mediafiles {
+ my $pages = shift;
+ my @titles = map $_->{title}, values(%{$pages});
+
+ # The query is split into small batches because of the MW API limit on
+ # the number of links to be returned (500 links max).
+ my $batch = 10;
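+ # ten titles per request keeps each response comfortably under that cap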
+ while (@titles) {
+ if ($#titles < $batch) {
+ $batch = $#titles;
+ }
+ my @slice = @titles[0..$batch];
+
+ # pattern 'page1|page2|...' required by the API
+ my $mw_titles = join('|', @slice);
+
+ # Media files could be included in or linked from
+ # a page; get all of them.
+ my $query = {
+ action => 'query',
+ prop => 'links|images',
+ titles => $mw_titles,
+ plnamespace => get_mw_namespace_id("File"),
+ pllimit => 'max'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+ my @media_titles;
+ if (defined($page->{links})) {
+ my @link_titles = map $_->{title}, @{$page->{links}};
+ push(@media_titles, @link_titles);
+ }
+ if (defined($page->{images})) {
+ my @image_titles = map $_->{title}, @{$page->{images}};
+ push(@media_titles, @image_titles);
+ }
+ if (@media_titles) {
+ get_mw_page_list(\@media_titles, $pages);
+ }
+ }
+
+ @titles = @titles[($batch+1)..$#titles];
+ }
+}
+
+sub get_mw_mediafile_for_page_revision {
+ # Name of the file on Wiki, with the prefix.
+ my $filename = shift;
+ my $timestamp = shift;
+ my %mediafile;
+
+ # Search if a media file with the given timestamp exists on
+ # MediaWiki. If so, download the file.
+ my $query = {
+ action => 'query',
+ prop => 'imageinfo',
+ titles => "File:" . $filename,
+ iistart => $timestamp,
+ iiend => $timestamp,
+ iiprop => 'timestamp|archivename|url',
+ iilimit => 1
+ };
+ my $result = $mediawiki->api($query);
+
+ my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+ # If imageinfo is not defined, there is no revision of the file
+ # for the given timestamp.
+ if (defined($file->{imageinfo})) {
+ $mediafile{title} = $filename;
+
+ my $fileinfo = pop(@{$file->{imageinfo}});
+ $mediafile{timestamp} = $fileinfo->{timestamp};
+ # Mediawiki::API's download function doesn't support https URLs
+ # and can't download old versions of files.
+ print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ $mediafile{content} = download_mw_mediafile($fileinfo->{url});
+ }
+ return %mediafile;
+}
+
+sub download_mw_mediafile {
+ my $url = shift;
+
+ my $response = $mediawiki->{ua}->get($url);
+ if ($response->code == 200) {
+ return $response->decoded_content;
+ } else {
+ print STDERR "Error downloading mediafile from :\n";
+ print STDERR "URL: $url\n";
+ print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ exit 1;
+ }
+}
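
A minimal sketch of how the two helpers above combine (the file name and
timestamp are made-up examples):

    my %mediafile = get_mw_mediafile_for_page_revision("Logo.png", "2012-07-01T00:00:00Z");
    if (%mediafile) {
        # title, timestamp and the raw downloaded content are now available
        print STDERR "got $mediafile{title} ($mediafile{timestamp})\n";
    }
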
+
sub get_last_local_revision {
# Get note regarding last mediawiki revision
my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
# Remember the timestamp corresponding to a revision id.
my %basetimestamps;
+# Get the last remote revision without taking into account which pages
+# are tracked. This function makes a single request to the wiki, thus
+# avoiding a loop over all tracked pages. This is useful for the
+# fetch-by-rev option.
+sub get_last_global_remote_rev {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ list => 'recentchanges',
+ prop => 'revisions',
+ rclimit => '1',
+ rcdir => 'older',
+ };
+ my $result = $mediawiki->api($query);
+ return $result->{query}->{recentchanges}[0]->{revid};
+}
+
+# Get the last remote revision concerning the tracked pages and the tracked
+# categories.
sub get_last_remote_revision {
mw_connect_maybe();
- my @pages = get_mw_pages();
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
my $max_rev_num = 0;
print STDOUT "data ", bytes::length($content), "\n", $content;
}
+sub literal_data_raw {
+ # Output possibly binary content.
+ my ($content) = @_;
+ # Avoid confusion between size in bytes and in characters
+ utf8::downgrade($content);
+ binmode STDOUT, ":raw";
+ print STDOUT "data ", bytes::length($content), "\n", $content;
+ binmode STDOUT, ":utf8";
+}
+
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
my %commit = %{$commit};
my $full_import = shift;
my $n = shift;
+ my $mediafile = shift;
+ my %mediafile;
+ if ($mediafile) {
+ %mediafile = %{$mediafile};
+ }
my $title = $commit{title};
my $comment = $commit{comment};
if ($content ne DELETED_CONTENT) {
print STDOUT "M 644 inline $title.mw\n";
literal_data($content);
+ if (%mediafile) {
+ print STDOUT "M 644 inline $mediafile{title}\n";
+ literal_data_raw($mediafile{content});
+ }
print STDOUT "\n\n";
} else {
print STDOUT "D $title.mw\n";
mw_connect_maybe();
- my @pages = get_mw_pages();
-
print STDERR "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
} else {
print STDERR ", fetching from here.\n";
}
+
+ my $n = 0;
+ if ($fetch_strategy eq "by_rev") {
+ print STDERR "Fetching & writing export data by revs...\n";
+ $n = mw_import_ref_by_revs($fetch_from);
+ } elsif ($fetch_strategy eq "by_page") {
+ print STDERR "Fetching & writing export data by pages...\n";
+ $n = mw_import_ref_by_pages($fetch_from);
+ } else {
+ print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
+ print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ exit 1;
+ }
+
+ if ($fetch_from == 1 && $n == 0) {
+ print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ # Something has to be done on the remote-helper side. If nothing is
+ # done, an error is thrown saying that HEAD is referring to unknown
+ # object 0000000000000000000 and the clone fails.
+ }
+}
+
+sub mw_import_ref_by_pages {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+ my @pages = values(%pages_hash);
+
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
- # Creation of the fast-import stream
- print STDERR "Fetching & writing export data...\n";
+ @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+ my @revision_ids = map $_->{revid}, @revisions;
+
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+ my $fetch_from = shift;
+ my %pages_hash = get_mw_pages();
+
+ my $last_remote = get_last_global_remote_rev();
+ my @revision_ids = $fetch_from..$last_remote;
+ return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
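
For instance, if the last imported revision is 100 and the newest change
on the wiki is 103, the by_rev strategy simply enumerates the gap (a
sketch with made-up numbers):

    my @revision_ids = 101..103;  # every wiki revision in the range, tracked or not;
                                  # mw_import_revids() skips and reports untracked pages
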
+
+# Import revisions given in second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page titles)
+# will be imported.
+sub mw_import_revids {
+ my $fetch_from = shift;
+ my $revision_ids = shift;
+ my $pages = shift;
- $n = 0;
+ my $n = 0;
+ my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
+ foreach my $pagerevid (@$revision_ids) {
# fetch the content of the pages
my $query = {
action => 'query',
prop => 'revisions',
rvprop => 'content|timestamp|comment|user|ids',
- revids => $pagerevid->{revid},
+ revids => $pagerevid,
};
my $result = $mediawiki->api($query);
- my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});
+ if (!$result) {
+ die "Failed to retrieve modified page for revision $pagerevid";
+ }
+
+ if (!defined($result->{query}->{pages})) {
+ die "Invalid revision $pagerevid.";
+ }
+
+ my @result_pages = values(%{$result->{query}->{pages}});
+ my $result_page = $result_pages[0];
+ my $rev = $result_pages[0]->{revisions}->[0];
+ # Count page even if we skip it, since we display
+ # $n/$total and $total includes skipped pages.
$n++;
+ my $page_title = $result_page->{title};
+
+ if (!exists($pages->{$page_title})) {
+ print STDERR "$n/", scalar(@$revision_ids),
+ ": Skipping revision #$rev->{revid} of $page_title\n";
+ next;
+ }
+
+ $n_actual++;
+
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
$commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
- $commit{title} = mediawiki_smudge_filename(
- $result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
- );
- $commit{mw_revision} = $pagerevid->{revid};
+ $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
if (!defined($rev->{timestamp})) {
}
$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
- print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";
-
- import_file_revision(\%commit, ($fetch_from == 1), $n);
+ # Differentiate regular pages from media files.
+ my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+ my %mediafile;
+ if ($namespace && get_mw_namespace_id($namespace) == get_mw_namespace_id("File")) {
+ %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ }
+ # If this is a revision of the media page for a new version of a
+ # file, do one common commit for both the file and the media page.
+ # Otherwise, commit only for that page.
+ print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
- if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
- # Something has to be done remote-helper side. If nothing is done, an error is
- # thrown saying that HEAD is refering to unknown object 0000000000000000000
- # and the clone fails.
- }
+ return $n_actual;
}
sub error_non_fast_forward {
return 0;
}
+sub mw_upload_file {
+ my $complete_file_name = shift;
+ my $new_sha1 = shift;
+ my $extension = shift;
+ my $file_deleted = shift;
+ my $summary = shift;
+ my $newrevid;
+ my $path = "File:" . $complete_file_name;
+ my %hashFiles = get_allowed_file_extensions();
+ if (!exists($hashFiles{$extension})) {
+ print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
+ print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ return $newrevid;
+ }
+ # Deleting and uploading a file requires a privileged user
+ if ($file_deleted) {
+ mw_connect_maybe();
+ my $query = {
+ action => 'delete',
+ title => $path,
+ reason => $summary
+ };
+ if (!$mediawiki->edit($query)) {
+ print STDERR "Failed to delete file on remote wiki\n";
+ print STDERR "Check your permissions on the remote site. Error code:\n";
+ print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ exit 1;
+ }
+ } else {
+ # Don't let perl try to interpret file content as UTF-8 => use "raw"
+ my $content = run_git("cat-file blob $new_sha1", "raw");
+ if ($content ne "") {
+ mw_connect_maybe();
+ $mediawiki->{config}->{upload_url} =
+ "$url/index.php/Special:Upload";
+ $mediawiki->edit({
+ action => 'upload',
+ filename => $complete_file_name,
+ comment => $summary,
+ file => [undef,
+ $complete_file_name,
+ Content => $content],
+ ignorewarnings => 1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mediawiki->{error}->{code} . ':'
+ . $mediawiki->{error}->{details};
+ my $last_file_page = $mediawiki->get_page({title => $path});
+ $newrevid = $last_file_page->{revid};
+ print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ } else {
+ print STDERR "Empty file $complete_file_name not pushed.\n";
+ }
+ }
+ return $newrevid;
+}
+
sub mw_push_file {
my $diff_info = shift;
# $diff_info contains a string in this format:
my $summary = shift;
# MediaWiki revision number. Keep the previous one by default,
# in case there's no edit to perform.
- my $newrevid = shift;
+ my $oldrevid = shift;
+ my $newrevid;
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_deleted = ($new_sha1 eq NULL_SHA1);
$complete_file_name = mediawiki_clean_filename($complete_file_name);
- if (substr($complete_file_name,-3) eq ".mw") {
- my $title = substr($complete_file_name,0,-3);
-
+ my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
+ if (!defined($extension)) {
+ $extension = "";
+ }
+ if ($extension eq "mw") {
my $file_content;
if ($page_deleted) {
# Deleting a page usually requires
action => 'edit',
summary => $summary,
title => $title,
- basetimestamp => $basetimestamps{$newrevid},
+ basetimestamp => $basetimestamps{$oldrevid},
text => mediawiki_clean($file_content, $page_created),
}, {
skip_encoding => 1 # Helps with names with accentuated characters
$mediawiki->{error}->{code} .
' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($newrevid, "non-fast-forward");
+ return ($oldrevid, "non-fast-forward");
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$newrevid = $result->{edit}->{newrevid};
print STDERR "Pushed file: $new_sha1 - $title\n";
} else {
- print STDERR "$complete_file_name not a mediawiki file (Not pushable on this version of git-remote-mediawiki).\n"
+ $newrevid = mw_upload_file($complete_file_name, $new_sha1,
+ $extension, $page_deleted,
+ $summary);
}
+ $newrevid = ($newrevid or $oldrevid);
return ($newrevid, "ok");
}
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
- # Keep the first line of the commit message as mediawiki comment for the revision
- my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
+ # Keep the subject line of the commit message as mediawiki comment for the revision
+ my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
print STDOUT "ok $remote\n";
return 1;
}
+
+sub get_allowed_file_extensions {
+ mw_connect_maybe();
+
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'fileextensions'
+ };
+ my $result = $mediawiki->api($query);
+ my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
+ my %hashFile = map {$_ => 1}@file_extensions;
+
+ return %hashFile;
+}
+
+# In memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# e.g. "File", "Project".
+sub get_mw_namespace_id {
+ mw_connect_maybe();
+ my $name = shift;
+
+ if (!exists $namespace_id{$name}) {
+ # Look at the configuration file to see if the record for that
+ # namespace is already cached. Namespaces are stored in the form
+ # "Name_of_namespace:Id_namespace", e.g. "File:6".
+ my @temp = split(/[ \n]/, run_git("config --get-all remote."
+ . $remotename .".namespaceCache"));
+ chomp(@temp);
+ foreach my $ns (@temp) {
+ my ($n, $id) = split(/:/, $ns);
+ $namespace_id{$n} = $id;
+ $cached_mw_namespace_id{$n} = 1;
+ }
+ }
+
+ if (!exists $namespace_id{$name}) {
+ print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ # NS not found => get namespace id from MW and store it in
+ # configuration file.
+ my $query = {
+ action => 'query',
+ meta => 'siteinfo',
+ siprop => 'namespaces'
+ };
+ my $result = $mediawiki->api($query);
+
+ while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+ if (defined($ns->{id}) && defined($ns->{canonical})) {
+ $namespace_id{$ns->{canonical}} = $ns->{id};
+ if ($ns->{'*'}) {
+ # alias (e.g. french Fichier: as alias for canonical File:)
+ $namespace_id{$ns->{'*'}} = $ns->{id};
+ }
+ }
+ }
+ }
+
+ my $id = $namespace_id{$name};
+
+ if (defined $id) {
+ # Store explicitly requested namespaces on disk
+ if (!exists $cached_mw_namespace_id{$name}) {
+ run_git("config --add remote.". $remotename
+ .".namespaceCache \"". $name .":". $id ."\"");
+ $cached_mw_namespace_id{$name} = 1;
+ }
+ return $id;
+ } else {
+ die "No such namespace $name on MediaWiki.";
+ }
+}
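
A sketch of the caching behaviour across runs ("File" is namespace id 6
on a stock MediaWiki; the remote name "origin" is only an example):

    my $ns = get_mw_namespace_id("File");
    # first run:  queries the wiki, then stores the pair with
    #             git config --add remote.origin.namespaceCache "File:6"
    # later runs: the id is read back from the configuration, no wiki round-trip
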
--- /dev/null
+WEB/
+wiki/
+trash directory.t*/
+test-results/
--- /dev/null
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+ @echo 'Run "$(MAKE) test" to launch test scripts'
+ @echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+ @for t in $(T); do \
+ echo "$$t"; \
+ "./$$t" || exit 1; \
+ done
+
+clean:
+ $(RM) -r 'trash directory'.*
--- /dev/null
+Tests for Mediawiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with git installed.
+
+Prerequisites
+-------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the testsuite
+easily, the environment does not require root privileges (except to
+install the required packages if needed). It starts a webserver
+instance on the user's account (using lighttpd greatly helps for
+that), and does not need a separate database daemon (thanks to the use
+of sqlite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. If you already have one, it is still
+strongly recommended to install one with the provided script. Here's
+how to use it:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+   installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+
+a. Run all tests:
+To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+To run a given test <test_name>, run ./<test_name> from the
+contrib/mw-to-git/t directory.
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful to test its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files.
+
+** `test_check_precond`:
+Check if the tests must be skipped or not. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content in the
+directory passed as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, such as a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a git directory with one
+created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check if the given directory contains a given number of files.
+
+** `wiki_page_exists`:
+Tests if a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, except if the test reuses the same wiki
+(and history) as the previous test.
+
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards given by git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the function `wiki_reset`.
--- /dev/null
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+ WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+ echo "Usage: "
+ echo " ./install-wiki.sh <install | delete | --help>"
+ echo " install | -i : Install a wiki on your computer."
+ echo " delete | -d : Delete the wiki and all its pages and "
+ echo " content."
+}
+
+
+# Argument: install, delete, --help | -h
+case "$1" in
+ "install" | "-i")
+ wiki_install
+ exit 0
+ ;;
+ "delete" | "-d")
+ wiki_delete
+ exit 0
+ ;;
+ "--help" | "-h")
+ usage
+ exit 0
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+esac
--- /dev/null
+wikidb.sqlite
--- /dev/null
+<?php
+# This file was automatically generated by the MediaWiki 1.19.0
+# installer. If you make manual changes, please keep track in case you
+# need to recreate them later.
+#
+# See includes/DefaultSettings.php for all configurable settings
+# and their default values, but don't forget to make changes in _this_
+# file, not there.
+#
+# Further documentation for configuration settings may be found at:
+# http://www.mediawiki.org/wiki/Manual:Configuration_settings
+
+# Protect against web entry
+if ( !defined( 'MEDIAWIKI' ) ) {
+ exit;
+}
+
+## Uncomment this to disable output compression
+# $wgDisableOutputCompression = true;
+
+$wgSitename = "Git-MediaWiki-Test";
+$wgMetaNamespace = "Git-MediaWiki-Test";
+
+## The URL base path to the directory containing the wiki;
+## defaults for all runtime URL paths are based off of this.
+## For more information on customizing the URLs please see:
+## http://www.mediawiki.org/wiki/Manual:Short_URL
+$wgScriptPath = "@WG_SCRIPT_PATH@";
+$wgScriptExtension = ".php";
+
+## The protocol and server name to use in fully-qualified URLs
+$wgServer = "@WG_SERVER@";
+
+## The relative URL path to the skins directory
+$wgStylePath = "$wgScriptPath/skins";
+
+## The relative URL path to the logo. Make sure you change this from the default,
+## or else you'll overwrite your logo when you upgrade!
+$wgLogo = "$wgStylePath/common/images/wiki.png";
+
+## UPO means: this is also a user preference option
+
+$wgEnableEmail = true;
+$wgEnableUserEmail = true; # UPO
+
+$wgEmergencyContact = "apache@localhost";
+$wgPasswordSender = "apache@localhost";
+
+$wgEnotifUserTalk = false; # UPO
+$wgEnotifWatchlist = false; # UPO
+$wgEmailAuthentication = true;
+
+## Database settings
+$wgDBtype = "sqlite";
+$wgDBserver = "";
+$wgDBname = "@WG_SQLITE_DATAFILE@";
+$wgDBuser = "";
+$wgDBpassword = "";
+
+# SQLite-specific settings
+$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
+
+
+## Shared memory settings
+$wgMainCacheType = CACHE_NONE;
+$wgMemCachedServers = array();
+
+## To enable image uploads, make sure the 'images' directory
+## is writable, then set this to true:
+$wgEnableUploads = true;
+$wgUseImageMagick = true;
+$wgImageMagickConvertCommand ="@CONVERT@";
+$wgFileExtensions[] = 'txt';
+
+# InstantCommons allows wiki to use images from http://commons.wikimedia.org
+$wgUseInstantCommons = false;
+
+## If you use ImageMagick (or any other shell command) on a
+## Linux server, this will need to be set to the name of an
+## available UTF-8 locale
+$wgShellLocale = "en_US.utf8";
+
+## If you want to use image uploads under safe mode,
+## create the directories images/archive, images/thumb and
+## images/temp, and make them all writable. Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publically accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
--- /dev/null
+<?php
+/**
+ * This script generates a SQLite database for a MediaWiki version 1.19.0.
+ * You must specify the name of the database (argument 1), the login of
+ * the admin (argument 2), its password (argument 3), the folder where
+ * the database file is located (absolute path in argument 4) and the
+ * port of the web server (argument 5).
+ * It is used by the script install-wiki.sh in order to ease the
+ * installation of a MediaWiki.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in its web browser. This script simulates this
+ * behavior through the functions <get> and <submit>
+ *
+ */
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+ $curl = curl_init();
+ $page_name_add = "";
+ if ($page_name != "") {
+ $page_name_add = '?page='.$page_name;
+ }
+ $url = $GLOBALS['url'].$page_name_add;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_HEADER, true);
+ curl_setopt($curl, CURLOPT_URL, $url);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+ $curl = curl_init();
+ $datapost = 'submit-continue=Continue+%E2%86%92';
+ if ($option != "") {
+ $datapost = $option.'&'.$datapost;
+ }
+ $url = $GLOBALS['url'].'?page='.$page_name;
+ $tmp_cookie = $GLOBALS['tmp_cookie'];
+ curl_setopt($curl, CURLOPT_URL, $url);
+ curl_setopt($curl, CURLOPT_POST, true);
+ curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+ curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+ curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+ curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+ curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+ $page = curl_exec($curl);
+ if (!$page) {
+ die("Could not get page: $url\n");
+ }
+ curl_close($curl);
+ return "$page";
+}
+
+/*
+ * Here starts this script: it simulates the behavior of the user
+ * submitting forms to generate the database file.
+ * Note that this simulation was made for MediaWiki version 1.19.0;
+ * we can't assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+ $page, $matches)) {
+ echo "Unexpected content for page downloaded:\n";
+ echo "$page";
+ die;
+}
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
--- /dev/null
+test_push_pull () {
+
+ test_expect_success 'Git pull works after adding a new wiki page' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_editpage Foo "page created after the git clone" false &&
+
+ (
+ cd mw_dir_1 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+ '
+
+ test_expect_success 'Git pull works after editing a wiki page' '
+ wiki_reset &&
+
+ wiki_editpage Foo "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage Foo "new line added on the wiki" true &&
+
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+ '
+
+ test_expect_success 'git pull works on conflict handled by auto-merge' '
+ wiki_reset &&
+
+ wiki_editpage Foo "1 init
+3
+5
+ " false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+ wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+ " false &&
+
+ (
+ cd mw_dir_3 &&
+ echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+ git commit -am "conflicting change on foo" &&
+ git pull &&
+ git push
+ )
+ '
+
+ test_expect_success 'Git push works after adding a file .mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ wiki_getallpage ref_page_4 &&
+ (
+ cd mw_dir_4 &&
+ test_path_is_missing Foo.mw &&
+ touch Foo.mw &&
+ echo "hello world" >>Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Foo" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_4 &&
+ test_diff_directories mw_dir_4 ref_page_4
+ '
+
+ test_expect_success 'Git push works after editing a file .mw' '
+ wiki_reset &&
+ wiki_editpage "Foo" "page created before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+
+ (
+ cd mw_dir_5 &&
+ echo "new line added in the file Foo.mw" >>Foo.mw &&
+ git commit -am "edit file Foo.mw" &&
+ git push
+ ) &&
+
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5
+ '
+
+ test_expect_failure 'Git push works after deleting a file' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+
+ (
+ cd mw_dir_6 &&
+ git rm Foo.mw &&
+ git commit -am "page Foo.mw deleted" &&
+ git push
+ ) &&
+
+ ! wiki_page_exists Foo
+ '
+
+ test_expect_success 'Merge conflict expected and solving it' '
+ wiki_reset &&
+
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_editpage Foo "1 conflict
+3 wiki
+4" false &&
+
+ (
+ cd mw_dir_7 &&
+ echo "1 conflict
+2 git
+4" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "conflict created" &&
+ test_must_fail git pull &&
+ "$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
+ git commit -am "merge conflict solved" &&
+ git push
+ )
+ '
+
+ test_expect_failure 'git pull works after deleting a wiki page' '
+ wiki_reset &&
+ wiki_editpage Foo "wiki page added before the git clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+
+ wiki_delete_page Foo &&
+ (
+ cd mw_dir_8 &&
+ git pull &&
+ test_path_is_missing Foo.mw
+ )
+ '
+}
--- /dev/null
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+
+test_description='Test the Git Mediawiki remote helper: git clone'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone creates the expected git log with one file' '
+ wiki_reset &&
+ wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ (
+ cd mw_dir_1 &&
+ git log --format=%s HEAD^..HEAD >log.tmp
+ ) &&
+ echo "this must be the same" >msg.tmp &&
+ diff -b mw_dir_1/log.tmp msg.tmp
+'
+
+
+test_expect_success 'Git clone creates the expected git log with multiple files' '
+ wiki_reset &&
+ wiki_editpage daddy "this is not important" false -s="this must be the same" &&
+ wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
+ wiki_editpage daddy "neither is this" true -s="same same same" &&
+ wiki_editpage dj "dont care" false -s="identical" &&
+ wiki_editpage dj "dont care either" true -s="identical too" &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ (
+ cd mw_dir_2 &&
+ git log --format=%s Daddy.mw >logDaddy.tmp &&
+ git log --format=%s Dj.mw >logDj.tmp
+ ) &&
+ echo "same same same" >msgDaddy.tmp &&
+ echo "this must also be the same" >>msgDaddy.tmp &&
+ echo "this must be the same" >>msgDaddy.tmp &&
+ echo "identical too" >msgDj.tmp &&
+ echo "identical" >>msgDj.tmp &&
+ diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
+ diff -b mw_dir_2/logDj.tmp msgDj.tmp
+'
+
+
+test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ test_contains_N_files mw_dir_3 1 &&
+ test_path_is_file mw_dir_3/Main_Page.mw
+'
+
+test_expect_success 'Git clone does not fetch a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "this page must be deleted before the clone" false &&
+ wiki_delete_page foo &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 1 &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ test_path_is_missing mw_dir_4/Foo.mw
+'
+
+test_expect_success 'Git clone works with page added' '
+ wiki_reset &&
+ wiki_editpage foo " I will be cloned" false &&
+ wiki_editpage bar "I will be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ wiki_getallpage ref_page_5 &&
+ test_diff_directories mw_dir_5 ref_page_5 &&
+ wiki_delete_page foo &&
+ wiki_delete_page bar
+'
+
+test_expect_success 'Git clone works with an edited page' '
+ wiki_reset &&
+ wiki_editpage foo "this page will be edited" \
+ false -s "first edition of page foo" &&
+ wiki_editpage foo "this page has been edited and must be on the clone " true &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ test_path_is_file mw_dir_6/Foo.mw &&
+ test_path_is_file mw_dir_6/Main_Page.mw &&
+ wiki_getallpage mw_dir_6/page_ref_6 &&
+ test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
+ (
+ cd mw_dir_6 &&
+ git log --format=%s HEAD^ Foo.mw > ../Foo.log
+ ) &&
+ echo "first edition of page foo" > FooExpect.log &&
+ diff FooExpect.log Foo.log
+'
+
+
+test_expect_success 'Git clone works with several pages and some deleted' '
+ wiki_reset &&
+ wiki_editpage foo "this page will not be deleted" false &&
+ wiki_editpage bar "I must not be erased" false &&
+ wiki_editpage namnam "I will not be there at the end" false &&
+ wiki_editpage nyancat "nyan nyan nyan delete me" false &&
+ wiki_delete_page namnam &&
+ wiki_delete_page nyancat &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ test_path_is_file mw_dir_7/Foo.mw &&
+ test_path_is_file mw_dir_7/Bar.mw &&
+ test_path_is_missing mw_dir_7/Namnam.mw &&
+ test_path_is_missing mw_dir_7/Nyancat.mw &&
+ wiki_getallpage mw_dir_7/page_ref_7 &&
+ test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned' '
+ wiki_reset &&
+ wiki_editpage foo "I will not be cloned" false &&
+ wiki_editpage bar "Do not clone me" false &&
+ wiki_editpage namnam "I will be cloned :)" false -s="this log must stay" &&
+ wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+ git clone -c remote.origin.pages=namnam \
+ mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_contains_N_files mw_dir_8 1 &&
+ test_path_is_file mw_dir_8/Namnam.mw &&
+ test_path_is_missing mw_dir_8/Main_Page.mw &&
+ (
+ cd mw_dir_8 &&
+ echo "this log must stay" >msg.tmp &&
+ git log --format=%s >log.tmp &&
+ diff -b msg.tmp log.tmp
+ ) &&
+ wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific pages cloned' '
+ wiki_reset &&
+ wiki_editpage foo "I will be there" false &&
+ wiki_editpage bar "I will not disapear" false &&
+ wiki_editpage namnam "I be erased" false &&
+ wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+ wiki_delete_page namnam &&
+ git clone -c remote.origin.pages="foo bar nyancat namnam" \
+ mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ test_contains_N_files mw_dir_9 3 &&
+ test_path_is_missing mw_dir_9/Namnam.mw &&
+ test_path_is_file mw_dir_9/Foo.mw &&
+ test_path_is_file mw_dir_9/Nyancat.mw &&
+ test_path_is_file mw_dir_9/Bar.mw &&
+ wiki_check_content mw_dir_9/Foo.mw Foo &&
+ wiki_check_content mw_dir_9/Bar.mw Bar &&
+ wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+ wiki_reset &&
+ wiki_editpage foo "foo 1" false &&
+ wiki_editpage bar "bar 1" false &&
+ wiki_editpage dummy "dummy 1" false &&
+ wiki_editpage cloned_1 "cloned_1 1" false &&
+ wiki_editpage cloned_2 "cloned_2 2" false &&
+ wiki_editpage cloned_3 "cloned_3 3" false &&
+ mkdir -p ref_page_10 &&
+ wiki_getpage cloned_1 ref_page_10 &&
+ wiki_getpage cloned_2 ref_page_10 &&
+ wiki_getpage cloned_3 ref_page_10 &&
+ git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+ mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, should be cloned" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ test_contains_N_files mw_dir_11 4 &&
+ test_path_is_file mw_dir_11/Nyan.mw &&
+ test_path_is_file mw_dir_11/Foo.mw &&
+ test_path_is_file mw_dir_11/Bar.mw &&
+ test_path_is_file mw_dir_11/Main_Page.mw &&
+ (
+ cd mw_dir_11 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Foo.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_11/Foo.mw Foo &&
+ wiki_check_content mw_dir_11/Bar.mw Bar &&
+ wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option with a deleted page' '
+ wiki_reset &&
+ wiki_editpage foo "1st revision, will be deleted" false &&
+ wiki_editpage bar "1st revision, should be cloned" false &&
+ wiki_editpage nyan "1st revision, should not be cloned" false &&
+ wiki_editpage nyan "2nd revision, should be cloned" false &&
+ wiki_delete_page foo &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ test_contains_N_files mw_dir_12 3 &&
+ test_path_is_file mw_dir_12/Nyan.mw &&
+ test_path_is_missing mw_dir_12/Foo.mw &&
+ test_path_is_file mw_dir_12/Bar.mw &&
+ test_path_is_file mw_dir_12/Main_Page.mw &&
+ (
+ cd mw_dir_12 &&
+ test `git log --oneline Nyan.mw | wc -l` -eq 1 &&
+ test `git log --oneline Bar.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+ wiki_check_content mw_dir_12/Bar.mw Bar &&
+ wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+ wiki_reset &&
+ wiki_editpage Foo "I will be cloned" false -c=Category &&
+ wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+ wiki_editpage Dummy "I will not come" false &&
+ wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+ git clone -c remote.origin.categories="Category" \
+ mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ wiki_getallpage ref_page_13 Category &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resistance to modification of category on wiki for clone' '
+ wiki_reset &&
+ wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+ wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+ wiki_editpage Normalone "this page will not be modified and will be on git" false -c=Catone &&
+ wiki_editpage Notconsidered "this page will not appear on local" false &&
+ wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+ wiki_editpage Tobeedited "this page has been modified" true -c=Catone &&
+ wiki_delete_page Tobedeleted &&
+ git clone -c remote.origin.categories="Catone" \
+ mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ wiki_getallpage ref_page_14 Catone &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_push_pull
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test git-mediawiki with special characters in filenames'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone works for a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage féé "This page must be délétéd before clone" false &&
+ wiki_editpage kèè "This page must be deleted before clone" false &&
+ wiki_editpage hàà "This page must be deleted before clone" false &&
+ wiki_editpage kîî "This page must be deleted before clone" false &&
+ wiki_editpage foo "This page must be deleted before clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+ wiki_getallpage ref_page_1 &&
+ test_diff_directories mw_dir_1 ref_page_1
+'
+
+
+test_expect_success 'Git pull works with a wiki with accents in the page names' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+ wiki_editpage éàîôû "This page must be pulled" false &&
+ (
+ cd mw_dir_2 &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_2 &&
+ test_diff_directories mw_dir_2 ref_page_2
+'
+
+
+test_expect_success 'Cloning a chosen page works with accents' '
+ wiki_reset &&
+ wiki_editpage kîî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=kîî \
+ mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+ wiki_check_content mw_dir_3/Kîî.mw Kîî &&
+ test_path_is_file mw_dir_3/Kîî.mw &&
+ rm -rf mw_dir_3
+'
+
+
+test_expect_success 'The shallow option works with accents' '
+ wiki_reset &&
+ wiki_editpage néoà "1st revision, should not be cloned" false &&
+ wiki_editpage néoà "2nd revision, should be cloned" false &&
+ git -c remote.origin.shallow=true clone \
+ mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+ test_contains_N_files mw_dir_4 2 &&
+ test_path_is_file mw_dir_4/Néoà.mw &&
+ test_path_is_file mw_dir_4/Main_Page.mw &&
+ (
+ cd mw_dir_4 &&
+ test `git log --oneline Néoà.mw | wc -l` -eq 1 &&
+ test `git log --oneline Main_Page.mw | wc -l ` -eq 1
+ ) &&
+ wiki_check_content mw_dir_4/Néoà.mw Néoà &&
+ wiki_check_content mw_dir_4/Main_Page.mw Main_Page
+'
+
+
+test_expect_success 'Cloning works when page name first letter has an accent' '
+ wiki_reset &&
+ wiki_editpage îî "this page must be cloned" false &&
+ git clone -c remote.origin.pages=îî \
+ mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+ test_path_is_file mw_dir_5/Îî.mw &&
+ wiki_check_content mw_dir_5/Îî.mw Îî
+'
+
+
+test_expect_success 'Git push works with a wiki with accents' '
+ wiki_reset &&
+ wiki_editpage féé "lots of accents : éèà Ö" false &&
+ wiki_editpage foo "this page must be cloned" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+ (
+ cd mw_dir_6 &&
+ echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
+ git add Pîkächû.mw &&
+ git commit -m "A new page appears" &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_6 &&
+ test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+ wiki_reset &&
+ wiki_editpage "é à î" "this page must be délété before the clone" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+ wiki_getallpage ref_page_7 &&
+ test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+ wiki_reset &&
+ wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+ test_path_is_file mw_dir_8/File_\$_foo.mw &&
+ wiki_getallpage ref_page_8 &&
+ test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+
+test_expect_success 'character $ in file name (git -> mw)' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+ (
+ cd mw_dir_9 &&
+ echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+ git add . &&
+ git commit -am "file File_\$_foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_9 &&
+ test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+ (
+ cd mw_dir_10 &&
+ echo "my new file foo" >foo.mw &&
+ echo "my new file Foo... Finger crossed" >Foo.mw &&
+ git add . &&
+ git commit -am "file foo.mw" &&
+ git pull &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_10 &&
+ test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+ wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+ wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/{char_1 &&
+ test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+ wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+ wiki_editpage char_%_5b_2 "expect to be renamed char[_2" false &&
+ (
+ cd mw_dir_12 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_12/Char\{_1.mw &&
+ test_path_is_file mw_dir_12/Char\[_2.mw &&
+ wiki_getallpage ref_page_12 &&
+ mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+ mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+ test_diff_directories mw_dir_12 ref_page_12
+'
+
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+ (
+ cd mw_dir_13 &&
+ echo "my new file {char_1" >\{char_1.mw &&
+ echo "my new file [char_2" >\[char_2.mw &&
+ git add . &&
+ git commit -am "commiting some exotic file name..." &&
+ git push &&
+ git pull
+ ) &&
+ wiki_getallpage ref_page_13 &&
+ test_path_is_file ref_page_13/{char_1.mw &&
+ test_path_is_file ref_page_13/[char_2.mw &&
+ test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formatting for file name from git to mw' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+ (
+ cd mw_dir_14 &&
+ echo "my new file char{_1" >Char\{_1.mw &&
+ echo "my new file char[_2" >Char\[_2.mw &&
+ git add . &&
+ git commit -m "commiting some exotic file name..." &&
+ git push
+ ) &&
+ wiki_getallpage ref_page_14 &&
+ mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+ mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+ test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+ wiki_reset &&
+ wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+ wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+ (
+ cd mw_dir_16 &&
+ git add %2Ffo%2Fo.mw &&
+ git commit -m " %2Ffo%2Fo added" &&
+ git push
+ ) &&
+ wiki_page_exist \/fo\/o &&
+ wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+
+'
+
+
+test_expect_success 'git clone with \' '
+ wiki_reset &&
+ wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_path_is_file mw_dir_17/\\ko\\o.mw &&
+ wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+ echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+ (
+ cd mw_dir_18 &&
+ git add \\ko\\o.mw &&
+ git commit -m " \\ko\\o added" &&
+ git push
+ ) &&
+ wiki_page_exist \\ko\\o &&
+ wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+
+'
+
+test_expect_success 'git clone with \ in format control' '
+ wiki_reset &&
+ wiki_editpage \\no\\o "this is not important" false &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+ test_path_is_file mw_dir_19/\\no\\o.mw &&
+ wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+ echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+ (
+ cd mw_dir_20 &&
+ git add \\fo\\o.mw &&
+ git commit -m " \\fo\\o added" &&
+ git push
+ ) &&
+ wiki_page_exist \\fo\\o &&
+ wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+
+'
+
+
+test_done
--- /dev/null
+#!/bin/sh
+#
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull with media files'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
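+# test_git_reimport
+#
+# Push with remote.origin.dumbPush enabled, then pull --rebase with
+# remote.origin.mediaImport enabled, so that the pushed result is
+# re-imported from the wiki into the local history.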
+test_git_reimport () {
+ git -c remote.origin.dumbPush=true push &&
+ git -c remote.origin.mediaImport=true pull --rebase
+}
+
+# Don't bother with permissions, be administrator by default
+test_expect_success 'setup config' '
+ git config --global remote.origin.mwLogin WikiAdmin &&
+ git config --global remote.origin.mwPassword AdminPass &&
+ test_might_fail git config --global --unset remote.origin.mediaImport
+'
+
+test_expect_success 'git push can upload media (File:) files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file" &&
+ git push &&
+ "$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a text file with binary content" &&
+ git push
+ )
+'
+
+test_expect_success 'git clone works on previously created wiki with media files' '
+ test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+ git clone -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
+ (cd mw_dir_clone && git checkout HEAD^) &&
+ (cd mw_dir && git checkout HEAD^) &&
+ test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
+'
+
+test_expect_success 'git push & pull work with locally renamed media files' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "add a file" &&
+ git mv Foo.txt Bar.txt &&
+ git commit -m "Rename a file" &&
+ test_git_reimport &&
+ echo "A File" >expect &&
+ test_cmp expect Bar.txt &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push can propagate local page deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ test_path_is_missing Foo.mw &&
+ echo "hello world" >Foo.mw &&
+ git add Foo.mw &&
+ git commit -m "Add the page Foo" &&
+ git push &&
+ rm -f Foo.mw &&
+ git commit -am "Delete the page Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.mw
+ )
+'
+
+test_expect_success 'git push can propagate local media file deletion' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+# Known failure: the file is correctly uploaded and then deleted, but
+# since no page links to it, the import (which looks at page revisions)
+# does not notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it does not appear.
+test_expect_failure 'git pull correctly imports media file deletion when no page link to it' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "hello world" >Foo.txt &&
+ git add Foo.txt &&
+ git commit -m "Add the text file Foo" &&
+ git push &&
+ git rm Foo.txt &&
+ git commit -m "Delete the file Foo" &&
+ test_git_reimport &&
+ test_path_is_missing Foo.txt
+ )
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+ wiki_reset &&
+ git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+ test_when_finished "rm -fr mw_dir" &&
+ (
+ cd mw_dir &&
+ echo "A File" >foo.forbidden &&
+ git add foo.forbidden &&
+ git commit -m "add a file" &&
+ git push 2>actual &&
+ test_i18ngrep "foo.forbidden is not a permitted file" actual
+ )
+'
+
+test_expect_success 'setup a repository with media files' '
+ wiki_reset &&
+ wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+ echo "File content" >File.txt &&
+ wiki_upload_file File.txt &&
+ echo "Another file content" >AnotherFile.txt &&
+ wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+ git clone -c remote.origin.pages=testpage \
+ -c remote.origin.mediaimport=true \
+ mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+ test_when_finished "rm -rf mw_dir_15" &&
+ test_contains_N_files mw_dir_15 3 &&
+ test_path_is_file mw_dir_15/Testpage.mw &&
+ test_path_is_file mw_dir_15/File:File.txt.mw &&
+ test_path_is_file mw_dir_15/File.txt &&
+ test_path_is_missing mw_dir_15/Main_Page.mw &&
+ test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+ test_path_is_missing mw_dir_15/AnotherFile.txt &&
+ wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+ test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+ test_when_finished "rm -rf mw_dir_16" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+ test_contains_N_files mw_dir_16 1 &&
+ test_path_is_file mw_dir_16/Testpage.mw &&
+ test_path_is_missing mw_dir_16/File:File.txt.mw &&
+ test_path_is_missing mw_dir_16/File.txt &&
+ test_path_is_missing mw_dir_16/Main_Page.mw &&
+ wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+ test_when_finished "rm -fr mw_dir_17" &&
+ git clone -c remote.origin.pages=testpage \
+ mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+ test_contains_N_files mw_dir_17 1 &&
+ test_path_is_file mw_dir_17/Testpage.mw &&
+ test_path_is_missing mw_dir_17/File:File.txt.mw &&
+ test_path_is_missing mw_dir_17/File.txt &&
+ test_path_is_missing mw_dir_17/Main_Page.mw &&
+ wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+ git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
--- /dev/null
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these ones
+#
+
+. ./test.config
+
+WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+if test "$LIGHTTPD" = "false" ; then
+ PORT=80
+else
+ WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+wiki_upload_file () {
+ "$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+ "$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+ "$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+ "$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+ die_with_status 1 "$@"
+}
+
+die_with_status () {
+ status=$1
+ shift
+ echo >&2 "$*"
+ exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+ if ! test_have_prereq PERL
+ then
+ skip_all='skipping gateway git-mw tests, perl not available'
+ test_done
+ fi
+
+ if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
+ then
+ echo "No remote mediawiki for git found. Copying it in git"
+ echo "cp $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
+ ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
+ fi
+
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+ then
+ skip_all='skipping gateway git-mw tests, no mediawiki found'
+ test_done
+ fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with diff
+# and error out if they do not match. The comparison does not
+# look into .git in the process.
+# Warning: the first argument MUST be the directory containing the git data
+test_diff_directories () {
+ rm -rf "$1_tmp"
+ mkdir -p "$1_tmp"
+ cp "$1"/*.mw "$1_tmp"
+ diff -r -b "$1_tmp" "$2"
+}
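+
+# For instance, the clone tests call:
+#   test_diff_directories mw_dir_1 ref_page_1
+# to compare a fresh clone against the pages fetched with wiki_getallpage.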
+
+# $1=<dir>
+# $2=<N>
+#
+# Check that <dir> contains exactly <N> files
+test_contains_N_files () {
+ if test `ls -- "$1" | wc -l` -ne "$2"; then
+ echo "directory $1 sould contain $2 files"
+ echo "it contains these files:"
+ ls "$1"
+ false
+ fi
+}
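+
+# For instance:
+#   test_contains_N_files mw_dir_4 2
+# fails and lists the directory content when the file count differs.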
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compares the contents of the file <file_name> and the wiki page
+# <page_name> and exits with error 1 if they do not match.
+wiki_check_content () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$2" wiki_tmp
+ # replacement of forbidden character in file name
+ page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+ diff -b "$1" wiki_tmp/"$page_name".mw
+ if test $? -ne 0
+ then
+ rm -rf wiki_tmp
+ error "ERROR: file $2 not found on wiki"
+ fi
+ rm -rf wiki_tmp
+}
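+
+# For instance:
+#   wiki_check_content mw_dir_3/Kîî.mw Kîî
+# verifies that a cloned file matches the page content on the wiki.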
+
+# wiki_page_exist <page_name>
+#
+# Check that the page <page_name> exists on the wiki and exit
+# with an error if it is absent.
+wiki_page_exist () {
+ mkdir -p wiki_tmp
+ wiki_getpage "$1" wiki_tmp
+ page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+ if test -f wiki_tmp/"$page_name".mw ; then
+ rm -rf wiki_tmp
+ else
+ rm -rf wiki_tmp
+ error "test failed: file $1 not found on wiki"
+ fi
+}
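+
+# For instance, after pushing the page /fo/o the tests call:
+#   wiki_page_exist \/fo\/o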
+
+# wiki_getallpagename
+#
+# Fetch the name of each page on the wiki.
+wiki_getallpagename () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the name of each page belonging to <category> on the wiki.
+wiki_getallpagecategory () {
+ "$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, then wiki_getallpage fetches only the pages
+# included in <category>.
+wiki_getallpage () {
+ if test -z "$2";
+ then
+ wiki_getallpagename
+ else
+ wiki_getallpagecategory "$2"
+ fi
+ mkdir -p "$1"
+ while read -r line; do
+ wiki_getpage "$line" "$1"
+ done < all.txt
+}
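+
+# For instance:
+#   wiki_getallpage ref_page_2            # every page on the wiki
+#   wiki_getallpage ref_page_14 Catone    # only the pages in category Catone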
+
+# ================= Install part =================
+
+error () {
+ echo "$@" >&2
+ exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+ mkdir -p $WEB
+ mkdir -p $WEB_TMP
+ mkdir -p $WEB_WWW
+ cat > $WEB/lighttpd.conf <<EOF
+ server.document-root = "$CURR_DIR/$WEB_WWW"
+ server.port = $PORT
+ server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+ server.modules = (
+ "mod_rewrite",
+ "mod_redirect",
+ "mod_access",
+ "mod_accesslog",
+ "mod_fastcgi"
+ )
+
+ index-file.names = ("index.php" , "index.html")
+
+ mimetype.assign = (
+ ".pdf" => "application/pdf",
+ ".sig" => "application/pgp-signature",
+ ".spl" => "application/futuresplash",
+ ".class" => "application/octet-stream",
+ ".ps" => "application/postscript",
+ ".torrent" => "application/x-bittorrent",
+ ".dvi" => "application/x-dvi",
+ ".gz" => "application/x-gzip",
+ ".pac" => "application/x-ns-proxy-autoconfig",
+ ".swf" => "application/x-shockwave-flash",
+ ".tar.gz" => "application/x-tgz",
+ ".tgz" => "application/x-tgz",
+ ".tar" => "application/x-tar",
+ ".zip" => "application/zip",
+ ".mp3" => "audio/mpeg",
+ ".m3u" => "audio/x-mpegurl",
+ ".wma" => "audio/x-ms-wma",
+ ".wax" => "audio/x-ms-wax",
+ ".ogg" => "application/ogg",
+ ".wav" => "audio/x-wav",
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".xbm" => "image/x-xbitmap",
+ ".xpm" => "image/x-xpixmap",
+ ".xwd" => "image/x-xwindowdump",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".js" => "text/javascript",
+ ".asc" => "text/plain",
+ ".c" => "text/plain",
+ ".cpp" => "text/plain",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".mpeg" => "video/mpeg",
+ ".mpg" => "video/mpeg",
+ ".mov" => "video/quicktime",
+ ".qt" => "video/quicktime",
+ ".avi" => "video/x-msvideo",
+ ".asf" => "video/x-ms-asf",
+ ".asx" => "video/x-ms-asf",
+ ".wmv" => "video/x-ms-wmv",
+ ".bz2" => "application/x-bzip",
+ ".tbz" => "application/x-bzip-compressed-tar",
+ ".tar.bz2" => "application/x-bzip-compressed-tar",
+ "" => "text/plain"
+ )
+
+ fastcgi.server = ( ".php" =>
+ ("localhost" =>
+ ( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+ "bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+ )
+ )
+ )
+EOF
+
+ cat > $WEB/php.ini <<EOF
+ session.save_path ='$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon. On restart, the configuration files are rewritten.
+start_lighttpd () {
+ if test -f "$WEB_TMP/pid"; then
+ echo "Instance already running. Restarting..."
+ stop_lighttpd
+ fi
+ config_lighttpd
+ "$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+ if test $? -ne 0 ; then
+ echo "Could not execute http deamon lighttpd"
+ exit 1
+ fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon and remove its associated files and folders.
+stop_lighttpd () {
+ test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+ rm -rf "$WEB"
+}
+
+# Create the SQLite database of the MediaWiki. If the database file already
+# exists, it will be deleted.
+# This script should be run from the directory where $FILES_FOLDER is
+# located.
+create_db () {
+ rm -f "$TMP/$DB_FILE"
+
+ echo "Generating the SQLite database file. It can take some time ..."
+ # Run the php script to generate the SQLite database file
+ # with cURL calls.
+ php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+ "$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+ if [ ! -f "$TMP/$DB_FILE" ] ; then
+ error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+ fi
+
+ # Copy the generated database file into the directory the
+ # user indicated.
+ cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+ error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+ if test $LIGHTTPD = "true" ; then
+ start_lighttpd
+ fi
+
+ SERVER_ADDR=$SERVER_ADDR:$PORT
+ # In this part, we change directory to $TMP in order to download,
+ # unpack and copy the files of MediaWiki
+ (
+ mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+ error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+ Please create it and launch the script again."
+ fi
+
+ # Fetch MediaWiki's archive if not already present in the TMP directory
+ cd "$TMP"
+ if [ ! -f "$MW_VERSION.tar.gz" ] ; then
+ echo "Downloading $MW_VERSION sources ..."
+ wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
+ error "Unable to download "\
+ "http://download.wikimedia.org/mediawiki/1.19/"\
+ "mediawiki-1.19.0.tar.gz. "\
+ "Please fix your connection and launch the script again."
+ echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+ "You can delete it later if you want."
+ else
+ echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+ fi
+ archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+ error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+ tar xzf "$archive_abs_path" --strip-components=1 ||
+ error "Unable to extract WikiMedia's files from $archive_abs_path to "\
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+ ) || exit 1
+
+ create_db
+
+ # Copy the generic LocalSettings.php in the web server's directory
+ # And modify parameters according to the ones set at the top
+ # of this script.
+ # Note that LocalSettings.php is never modified.
+ if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+ error "Can't find $FILES_FOLDER/LocalSettings.php " \
+ "in the current folder. "\
+ "Please run the script inside its folder."
+ fi
+ cp "$FILES_FOLDER/LocalSettings.php" \
+ "$FILES_FOLDER/LocalSettings-tmp.php" ||
+ error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
+ "to $FILES_FOLDER/LocalSettings-tmp.php"
+
+ # Parse and set the LocalSettings file of the user according to the
+ # CONFIGURATION VARIABLES section at the beginning of this script
+ file_swap="$FILES_FOLDER/LocalSettings-swap.php"
+ sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATADIR@,$TMP," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+ sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
+ "$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+ mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+
+ mv "$FILES_FOLDER/LocalSettings-tmp.php" \
+ "$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
+ error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
+ "in $WIKI_DIR_INST/$WIKI_DIR_NAME"
+ echo "File $FILES_FOLDER/LocalSettings.php is set in" \
+ " $WIKI_DIR_INST/$WIKI_DIR_NAME"
+
+ echo "Your wiki has been installed. You can check it at
+ http://$SERVER_ADDR/$WIKI_DIR_NAME"
+}
+
+# Reset the database of the wiki and the password of the admin
+#
+# Warning: This function must be called only in a subdirectory of the t/ directory
+wiki_reset () {
+ # Copy initial database of the wiki
+ if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
+ error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
+ fi
+ cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
+ error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
+ echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
+}
+
+# Delete the wiki created in the web server's directory and all its content
+# saved in the database.
+wiki_delete () {
+ if test $LIGHTTPD = "true"; then
+ stop_lighttpd
+ else
+ # Delete the wiki's directory.
+ rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
+ error "Wiki's directory $WIKI_DIR_INST/" \
+ "$WIKI_DIR_NAME could not be deleted"
+ # Delete the wiki's SQLite database.
+ rm -f "$TMP/$DB_FILE" ||
+ error "Database $TMP/$DB_FILE could not be deleted."
+ fi
+
+ # Delete the wiki's SQLite database
+ rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
+ rm -f "$FILES_FOLDER/$DB_FILE"
+ rm -rf "$TMP/$MW_VERSION"
+}
--- /dev/null
+#!/usr/bin/perl -w -s
+# Copyright (C) 2012
+# Charles Roussel <charles.roussel@ensimag.imag.fr>
+# Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+# Julien Khayat <julien.khayat@ensimag.imag.fr>
+# Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+# Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Usage:
+# ./test-gitmw.pl <command> [argument]*
+# Execute in terminal using the name of the function to call as first
+# parameter, and the function's arguments as following parameters
+#
+# Example:
+# ./test-gitmw.pl "get_page" foo .
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+# "get_page"
+# "delete_page"
+# "edit_page"
+# "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+#Parsing of the config file
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<", $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+ chomp;
+ s/#.*//;
+ s/^\s+//;
+ s/\s+$//;
+ next unless length;
+ my ($key, $value) = split (/\s*=\s*/,$_, 2);
+ $config{$key} = $value;
+ last if ($key eq 'LIGHTTPD' and $value eq 'false');
+ last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in the user <name> with password <password> on the wiki
+# referenced by the global variable $mw.
+sub wiki_login {
+ $mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
+ || die "getpage: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch a page <wiki_page> from the wiki referenced in the global
+# variable $mw and copy its content into the directory <dest_path>.
+sub wiki_getpage {
+ my $pagename = $_[0];
+ my $destdir = $_[1];
+
+ my $page = $mw->get_page( { title => $pagename } );
+ if (!defined($page)) {
+ die "getpage: wiki does not exist";
+ }
+
+ my $content = $page->{'*'};
+ if (!defined($content)) {
+ die "getpage: page does not exist";
+ }
+
+ $pagename=$page->{'title'};
+ # Replace spaces by underscore in the page name
+ $pagename =~ s/ /_/g;
+ $pagename =~ s/\//%2F/g;
+ open(my $file, ">$destdir/$pagename.mw");
+ print $file "$content";
+ close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# delete the page with name <page_name> from the wiki referenced
+# in the global variable $mw
+sub wiki_delete_page {
+ my $pagename = $_[0];
+
+ my $exist=$mw->get_page({title => $pagename});
+
+ if (defined($exist->{'*'})){
+ $mw->edit({ action => 'delete',
+ title => $pagename})
+ || die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+ } else {
+ die "no page with such name found: $pagename\n";
+ }
+}
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit a page named <wiki_page> with content <wiki_content> on the wiki
+# referenced with the global variable $mw
+# If <wiki_append> == true, append <wiki_content> at the end of the
+# current content of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
+sub wiki_editpage {
+ my $wiki_page = $_[0];
+ my $wiki_content = $_[1];
+ my $wiki_append = $_[2];
+ my $summary = "";
+ my ($summ, $cat) = ();
+ GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+ my $append = 0;
+ if (defined($wiki_append) && $wiki_append eq 'true') {
+ $append=1;
+ }
+
+ my $previous_text ="";
+
+ if ($append) {
+ my $ref = $mw->get_page( { title => $wiki_page } );
+ $previous_text = $ref->{'*'};
+ }
+
+ my $text = $wiki_content;
+ if (defined($previous_text)) {
+ $text="$previous_text$text";
+ }
+
+ # Optionally, add this page to a category.
+ if (defined($cat)) {
+ my $category_name="[[Category:$cat]]";
+ $text="$text\n $category_name";
+ }
+ if(defined($summ)){
+ $summary=$summ;
+ }
+
+ $mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
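+
+# For instance, the shell wrapper wiki_editpage invokes this as:
+#   ./test-gitmw.pl edit_page foo "this page must be cloned" false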
+
+# wiki_getallpagename [<category>]
+#
+# Fetch all pages of the wiki referenced by the global variable $mw
+# and print the name of each one in the file all.txt, one name per
+# line ("\n" separated).
+# If the argument <category> is defined, this function gets only the pages
+# belonging to <category>.
+sub wiki_getallpagename {
+ # fetch the pages of the wiki
+ if (defined($_[0])) {
+ my $mw_pages = $mw->list ( { action => 'query',
+ list => 'categorymembers',
+ cmtitle => "Category:$_[0]",
+ cmnamespace => 0,
+ cmlimit => 500 },
+ )
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+
+ } else {
+ my $mw_pages = $mw->list({
+ action => 'query',
+ list => 'allpages',
+ aplimit => 500,
+ })
+ || die $mw->{error}->{code}.": ".$mw->{error}->{details};
+ open(my $file, ">all.txt");
+ foreach my $page (@{$mw_pages}) {
+ print $file "$page->{title}\n";
+ }
+ close ($file);
+ }
+}
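+
+# For instance, "./test-gitmw.pl getallpagename Catone" writes the name
+# of each page in category Catone to all.txt, one per line.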
+
+sub wiki_upload_file {
+ my $file_name = $_[0];
+ my $resultat = $mw->edit ( {
+ action => 'upload',
+ filename => $file_name,
+ comment => 'upload a file',
+ file => [ $file_name ],
+ ignorewarnings=>1,
+ }, {
+ skip_encoding => 1
+ } ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+ upload_file wiki_upload_file
+ get_page wiki_getpage
+ delete_page wiki_delete_page
+ edit_page wiki_editpage
+ getallpagename wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
--- /dev/null
+# WIKI_DIR_NAME is the name of the web server's directory dedicated to the wiki
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# SQLite database of the wiki, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.
+MW_VERSION=mediawiki-1.19.0
+FILES_FOLDER=install-wiki
+DB_INSTALL_SCRIPT=db_install.php
unsigned long stamp;
int ofs;
- if (*date < '0' || '9' <= *date)
+ if (*date < '0' || '9' < *date)
return -1;
stamp = strtoul(date, &end, 10);
if (*end != ' ' || stamp == ULONG_MAX || (end[1] != '+' && end[1] != '-'))
return 0;
}
+/*
+ * This should be "(standard input)" or something, but it will
+ * probably expose many more breakages in the way no-index code
+ * is bolted onto the diff callchain.
+ */
+static const char file_from_standard_input[] = "-";
+
static int get_mode(const char *path, int *mode)
{
struct stat st;
else if (!strcasecmp(path, "nul"))
*mode = 0;
#endif
- else if (!strcmp(path, "-"))
+ else if (path == file_from_standard_input)
*mode = create_ce_mode(0666);
else if (lstat(path, &st))
return error("Could not access '%s'", path);
return 0;
}
+static int populate_from_stdin(struct diff_filespec *s)
+{
+ struct strbuf buf = STRBUF_INIT;
+ size_t size = 0;
+
+ if (strbuf_read(&buf, 0, 0) < 0)
+ return error("error while reading from stdin %s",
+ strerror(errno));
+
+ s->should_munmap = 0;
+ s->data = strbuf_detach(&buf, &size);
+ s->size = size;
+ s->should_free = 1;
+ s->is_stdin = 1;
+ return 0;
+}
+
+static struct diff_filespec *noindex_filespec(const char *name, int mode)
+{
+ struct diff_filespec *s;
+
+ if (!name)
+ name = "/dev/null";
+ s = alloc_filespec(name);
+ fill_filespec(s, null_sha1, mode);
+ if (name == file_from_standard_input)
+ populate_from_stdin(s);
+ return s;
+}
+
static int queue_diff(struct diff_options *o,
const char *name1, const char *name2)
{
tmp_c = name1; name1 = name2; name2 = tmp_c;
}
- if (!name1)
- name1 = "/dev/null";
- if (!name2)
- name2 = "/dev/null";
- d1 = alloc_filespec(name1);
- d2 = alloc_filespec(name2);
- fill_filespec(d1, null_sha1, mode1);
- fill_filespec(d2, null_sha1, mode2);
-
+ d1 = noindex_filespec(name1, mode1);
+ d2 = noindex_filespec(name2, mode2);
diff_queue(&diff_queued_diff, d1, d2);
return 0;
}
int argc, const char **argv,
int nongit, const char *prefix)
{
- int i;
+ int i, prefixlen;
int no_index = 0;
unsigned options = 0;
+ const char *paths[2];
/* Were we asked to do --no-index explicitly? */
for (i = 1; i < argc; i++) {
}
}
- if (prefix) {
- int len = strlen(prefix);
- const char *paths[3];
- memset(paths, 0, sizeof(paths));
-
- for (i = 0; i < 2; i++) {
- const char *p = argv[argc - 2 + i];
+ prefixlen = prefix ? strlen(prefix) : 0;
+ for (i = 0; i < 2; i++) {
+ const char *p = argv[argc - 2 + i];
+ if (!strcmp(p, "-"))
/*
- * stdin should be spelled as '-'; if you have
- * path that is '-', spell it as ./-.
+ * stdin should be spelled as "-"; if you have
+ * path that is "-", spell it as "./-".
*/
- p = (strcmp(p, "-")
- ? xstrdup(prefix_filename(prefix, len, p))
- : p);
- paths[i] = p;
- }
- diff_tree_setup_paths(paths, &revs->diffopt);
+ p = file_from_standard_input;
+ else if (prefixlen)
+ p = xstrdup(prefix_filename(prefix, prefixlen, p));
+ paths[i] = p;
}
- else
- diff_tree_setup_paths(argv + argc - 2, &revs->diffopt);
revs->diffopt.skip_stat_unmatch = 1;
if (!revs->diffopt.output_format)
revs->diffopt.output_format = DIFF_FORMAT_PATCH;
setup_diff_pager(&revs->diffopt);
DIFF_OPT_SET(&revs->diffopt, EXIT_WITH_STATUS);
- if (queue_diff(&revs->diffopt, revs->diffopt.pathspec.raw[0],
- revs->diffopt.pathspec.raw[1]))
+ if (queue_diff(&revs->diffopt, paths[0], paths[1]))
exit(1);
diff_set_mnemonic_prefix(&revs->diffopt, "1/", "2/");
diffcore_std(&revs->diffopt);
return 0;
}
-static int populate_from_stdin(struct diff_filespec *s)
-{
- struct strbuf buf = STRBUF_INIT;
- size_t size = 0;
-
- if (strbuf_read(&buf, 0, 0) < 0)
- return error("error while reading from stdin %s",
- strerror(errno));
-
- s->should_munmap = 0;
- s->data = strbuf_detach(&buf, &size);
- s->size = size;
- s->should_free = 1;
- return 0;
-}
-
static int diff_populate_gitlink(struct diff_filespec *s, int size_only)
{
int len;
struct stat st;
int fd;
- if (!strcmp(s->path, "-"))
- return populate_from_stdin(s);
-
if (lstat(s->path, &st) < 0) {
if (errno == ENOENT) {
err_empty:
if (DIFF_FILE_VALID(one)) {
if (!one->sha1_valid) {
struct stat st;
- if (!strcmp(one->path, "-")) {
+ if (one->is_stdin) {
hashcpy(one->sha1, null_sha1);
return;
}
unsigned should_free : 1; /* data should be free()'ed */
unsigned should_munmap : 1; /* data should be munmap()'ed */
unsigned dirty_submodule : 2; /* For submodules: its work tree is dirty */
+ unsigned is_stdin : 1;
#define DIRTY_SUBMODULE_UNTRACKED 1
#define DIRTY_SUBMODULE_MODIFIED 2
unsigned has_more_entries : 1; /* only appear in combined diff */
int grafts_replace_parents = 1;
int core_apply_sparse_checkout;
int merge_log_config = -1;
+int precomposed_unicode = -1; /* see probe_utf8_pathname_composition() */
struct startup_info *startup_info;
unsigned long pack_size_limit_cfg;
split_patches () {
case "$patch_format" in
mbox)
- if test -n "$rebasing" || test t = "$keepcr"
+ if test t = "$keepcr"
then
keep_cr=--keep-cr
else
--abort)
abort=t ;;
--rebasing)
- rebasing=t threeway=t keep=t scissors=f no_inbody_headers=t ;;
+ rebasing=t threeway=t ;;
-d|--dotest)
die "$(gettext "-d option is no longer supported. Do not use.")"
;;
# by the user, or the user can tell us to do so by --resolved flag.
case "$resume" in
'')
- git mailinfo $keep $no_inbody_headers $scissors $utf8 "$dotest/msg" "$dotest/patch" \
- <"$dotest/$msgnum" >"$dotest/info" ||
- stop_here $this
-
- # skip pine's internal folder data
- sane_grep '^Author: Mail System Internal Data$' \
- <"$dotest"/info >/dev/null &&
- go_next && continue
-
- test -s "$dotest/patch" || {
- eval_gettextln "Patch is empty. Was it split wrong?
-If you would prefer to skip this patch, instead run \"\$cmdline --skip\".
-To restore the original branch and stop patching run \"\$cmdline --abort\"."
- stop_here $this
- }
- rm -f "$dotest/original-commit" "$dotest/author-script"
- if test -f "$dotest/rebasing" &&
+ if test -f "$dotest/rebasing"
+ then
commit=$(sed -e 's/^From \([0-9a-f]*\) .*/\1/' \
-e q "$dotest/$msgnum") &&
- test "$(git cat-file -t "$commit")" = commit
- then
+ test "$(git cat-file -t "$commit")" = commit ||
+ stop_here $this
git cat-file commit "$commit" |
sed -e '1,/^$/d' >"$dotest/msg-clean"
- echo "$commit" > "$dotest/original-commit"
- get_author_ident_from_commit "$commit" > "$dotest/author-script"
+ echo "$commit" >"$dotest/original-commit"
+ get_author_ident_from_commit "$commit" >"$dotest/author-script"
+ git diff-tree --root --binary "$commit" >"$dotest/patch"
else
+ git mailinfo $keep $no_inbody_headers $scissors $utf8 "$dotest/msg" "$dotest/patch" \
+ <"$dotest/$msgnum" >"$dotest/info" ||
+ stop_here $this
+
+ # skip pine's internal folder data
+ sane_grep '^Author: Mail System Internal Data$' \
+ <"$dotest"/info >/dev/null &&
+ go_next && continue
+
+ test -s "$dotest/patch" || {
+ eval_gettextln "Patch is empty. Was it split wrong?
+If you would prefer to skip this patch, instead run \"\$cmdline --skip\".
+To restore the original branch and stop patching run \"\$cmdline --abort\"."
+ stop_here $this
+ }
+ rm -f "$dotest/original-commit" "$dotest/author-script"
{
sed -n '/^Subject/ s/Subject: //p' "$dotest/info"
echo
if test $apply_status != 0
then
eval_gettextln 'Patch failed at $msgnum $FIRSTLINE'
+ if test "$(git config --bool advice.amworkdir)" != false
+ then
+ eval_gettextln "The copy of the patch that failed is found in:
+ $dotest/patch"
+ fi
stop_here_user_resolve $this
fi
#endif
#endif
+/* used on Mac OS X */
+#ifdef PRECOMPOSE_UNICODE
+#include "compat/precompose_utf8.h"
+#else
+#define precompose_str(in,i_nfd2nfc)
+#define precompose_argv(c,v)
+#define probe_utf8_pathname_composition(a,b)
+#endif
+
#ifndef NO_LIBGEN_H
#include <libgen.h>
#else
s/.*/GIT_'$uid'_EMAIL='\''&'\''; export GIT_'$uid'_EMAIL/p
g
- s/^'$lid' [^<]* <[^>]*> \(.*\)$/\1/
+ s/^'$lid' [^<]* <[^>]*> \(.*\)$/@\1/
s/'\''/'\''\'\'\''/g
s/.*/GIT_'$uid'_DATE='\''&'\''; export GIT_'$uid'_DATE/p
real_cmd = p4_build_cmd(c)
return read_pipe_lines(real_cmd)
+def p4_has_command(cmd):
+ """Ask p4 for help on this command. If it returns an error, the
+ command does not exist in this version of p4."""
+ real_cmd = p4_build_cmd(["help", cmd])
+ p = subprocess.Popen(real_cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ p.communicate()
+ return p.returncode == 0
+
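+# For instance, p4_has_command("move") is used below (p4HasMoveCommand)
+# to decide whether renames can be submitted with "p4 move".
+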
def system(cmd):
expand = isinstance(cmd,basestring)
if verbose:
def p4_reopen(type, f):
p4_system(["reopen", "-t", type, wildcard_encode(f)])
+def p4_move(src, dest):
+ p4_system(["move", "-k", wildcard_encode(src), wildcard_encode(dest)])
+
#
# Canonicalize the p4 type and return a tuple of the
# base type, plus any modifiers. See "p4 help filetypes"
]
self.description = "Submit changes from git to the perforce depot."
self.usage += " [name of git branch to submit into perforce depot]"
- self.interactive = True
self.origin = ""
self.detectRenames = False
self.preserveUser = gitConfig("git-p4.preserveUser").lower() == "true"
self.isWindows = (platform.system() == "Windows")
self.exportLabels = False
+ self.p4HasMoveCommand = p4_has_command("move")
def check(self):
if len(p4CmdList("opened ...")) > 0:
die("You have files opened with perforce! Close them before starting the sync.")
- # replaces everything between 'Description:' and the next P4 submit template field with the
- # commit message
- def prepareLogMessage(self, template, message):
+ def separate_jobs_from_description(self, message):
+ """Extract and return a possible Jobs field in the commit
+ message. It goes into a separate section in the p4 change
+ specification.
+
+ A jobs line starts with "Jobs:" and looks like a new field
+ in a form. Values are white-space separated on the same
+ line or on following lines that start with a tab.
+
+ This does not parse and extract the full git commit message
+ like a p4 form. It just sees the Jobs: line as a marker
+ to pass everything from then on directly into the p4 form,
+ but outside the description section.
+
+ Return a tuple (stripped log message, jobs string)."""
+
+ m = re.search(r'^Jobs:', message, re.MULTILINE)
+ if m is None:
+ return (message, None)
+
+ jobtext = message[m.start():]
+ stripped_message = message[:m.start()].rstrip()
+ return (stripped_message, jobtext)
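+
+ # For instance (hypothetical job names), a commit message of the form
+ #
+ #     fix a bug
+ #
+ #     Jobs: job000001 job000002
+ #
+ # is split into ("fix a bug", "Jobs: job000001 job000002\n").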
+
+ def prepareLogMessage(self, template, message, jobs):
+ """Edits the template returned from "p4 change -o" to insert
+ the message in the Description field, and the jobs text in
+ the Jobs field."""
result = ""
inDescriptionSection = False
if inDescriptionSection:
if line.startswith("Files:") or line.startswith("Jobs:"):
inDescriptionSection = False
+ # insert Jobs section
+ if jobs:
+ result += jobs + "\n"
else:
continue
else:
return 0
def prepareSubmitTemplate(self):
- # remove lines in the Files section that show changes to files outside the depot path we're committing into
+ """Run "p4 change -o" to grab a change specification template.
+ This does not use "p4 -G", as it is nice to keep the submission
+ template in original order, since a human might edit it.
+
+ Remove lines in the Files section that show changes to files
+ outside the depot path we're committing into."""
+
template = ""
inFilesSection = False
for line in p4_read_pipe_lines(['change', '-o']):
(p4User, gitEmail) = self.p4UserForCommit(id)
- if not self.detectRenames:
- # If not explicitly set check the config variable
- self.detectRenames = gitConfig("git-p4.detectRenames")
-
- if self.detectRenames.lower() == "false" or self.detectRenames == "":
- diffOpts = ""
- elif self.detectRenames.lower() == "true":
- diffOpts = "-M"
- else:
- diffOpts = "-M%s" % self.detectRenames
-
- detectCopies = gitConfig("git-p4.detectCopies")
- if detectCopies.lower() == "true":
- diffOpts += " -C"
- elif detectCopies != "" and detectCopies.lower() != "false":
- diffOpts += " -C%s" % detectCopies
-
- if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
- diffOpts += " --find-copies-harder"
-
- diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (diffOpts, id, id))
+ diff = read_pipe_lines("git diff-tree -r %s \"%s^\" \"%s\"" % (self.diffOpts, id, id))
filesToAdd = set()
filesToDelete = set()
editedFiles = set()
editedFiles.add(dest)
elif modifier == "R":
src, dest = diff['src'], diff['dst']
- p4_integrate(src, dest)
- if diff['src_sha1'] != diff['dst_sha1']:
- p4_edit(dest)
+ if self.p4HasMoveCommand:
+ p4_edit(src) # src must be open before move
+ p4_move(src, dest) # opens for (move/delete, move/add)
else:
- pureRenameCopy.add(dest)
+ p4_integrate(src, dest)
+ if diff['src_sha1'] != diff['dst_sha1']:
+ p4_edit(dest)
+ else:
+ pureRenameCopy.add(dest)
if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
- p4_edit(dest)
+ if not self.p4HasMoveCommand:
+ p4_edit(dest) # with move: already open, writable
filesToChangeExecBit[dest] = diff['dst_mode']
- os.unlink(dest)
+ if not self.p4HasMoveCommand:
+ os.unlink(dest)
+ filesToDelete.add(src)
editedFiles.add(dest)
- filesToDelete.add(src)
else:
die("unknown modifier %s for %s" % (modifier, path))
logMessage = extractLogMessageFromGitCommit(id)
logMessage = logMessage.strip()
+ (logMessage, jobs) = self.separate_jobs_from_description(logMessage)
template = self.prepareSubmitTemplate()
+ submitTemplate = self.prepareLogMessage(template, logMessage, jobs)
- if self.interactive:
- submitTemplate = self.prepareLogMessage(template, logMessage)
-
- if self.preserveUser:
- submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
-
- if os.environ.has_key("P4DIFF"):
- del(os.environ["P4DIFF"])
- diff = ""
- for editedFile in editedFiles:
- diff += p4_read_pipe(['diff', '-du',
- wildcard_encode(editedFile)])
-
- newdiff = ""
- for newFile in filesToAdd:
- newdiff += "==== new file ====\n"
- newdiff += "--- /dev/null\n"
- newdiff += "+++ %s\n" % newFile
- f = open(newFile, "r")
- for line in f.readlines():
- newdiff += "+" + line
- f.close()
-
- if self.checkAuthorship and not self.p4UserIsMe(p4User):
- submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
- submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
- submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
-
- separatorLine = "######## everything below this line is just the diff #######\n"
-
- (handle, fileName) = tempfile.mkstemp()
- tmpFile = os.fdopen(handle, "w+")
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\n", "\r\n")
- separatorLine = separatorLine.replace("\n", "\r\n")
- newdiff = newdiff.replace("\n", "\r\n")
- tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ if self.preserveUser:
+ submitTemplate = submitTemplate + ("\n######## Actual user %s, modified after commit\n" % p4User)
+
+ if os.environ.has_key("P4DIFF"):
+ del(os.environ["P4DIFF"])
+ diff = ""
+ for editedFile in editedFiles:
+ diff += p4_read_pipe(['diff', '-du',
+ wildcard_encode(editedFile)])
+
+ newdiff = ""
+ for newFile in filesToAdd:
+ newdiff += "==== new file ====\n"
+ newdiff += "--- /dev/null\n"
+ newdiff += "+++ %s\n" % newFile
+ f = open(newFile, "r")
+ for line in f.readlines():
+ newdiff += "+" + line
+ f.close()
+
+ if self.checkAuthorship and not self.p4UserIsMe(p4User):
+ submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
+ submitTemplate += "######## Use option --preserve-user to modify authorship.\n"
+ submitTemplate += "######## Variable git-p4.skipUserNameCheck hides this message.\n"
+
+ separatorLine = "######## everything below this line is just the diff #######\n"
+
+ (handle, fileName) = tempfile.mkstemp()
+ tmpFile = os.fdopen(handle, "w+")
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\n", "\r\n")
+ separatorLine = separatorLine.replace("\n", "\r\n")
+ newdiff = newdiff.replace("\n", "\r\n")
+ tmpFile.write(submitTemplate + separatorLine + diff + newdiff)
+ tmpFile.close()
+
+ if self.edit_template(fileName):
+ # read the edited message and submit
+ tmpFile = open(fileName, "rb")
+ message = tmpFile.read()
tmpFile.close()
+ submitTemplate = message[:message.index(separatorLine)]
+ if self.isWindows:
+ submitTemplate = submitTemplate.replace("\r\n", "\n")
+ p4_write_pipe(['submit', '-i'], submitTemplate)
- if self.edit_template(fileName):
- # read the edited message and submit
- tmpFile = open(fileName, "rb")
- message = tmpFile.read()
- tmpFile.close()
- submitTemplate = message[:message.index(separatorLine)]
- if self.isWindows:
- submitTemplate = submitTemplate.replace("\r\n", "\n")
- p4_write_pipe(['submit', '-i'], submitTemplate)
-
- if self.preserveUser:
- if p4User:
- # Get last changelist number. Cannot easily get it from
- # the submit command output as the output is
- # unmarshalled.
- changelist = self.lastP4Changelist()
- self.modifyChangelistUser(changelist, p4User)
-
- # The rename/copy happened by applying a patch that created a
- # new file. This leaves it writable, which confuses p4.
- for f in pureRenameCopy:
- p4_sync(f, "-f")
-
- else:
- # skip this patch
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
- p4_revert(f)
- for f in filesToAdd:
- p4_revert(f)
- os.remove(f)
+ if self.preserveUser:
+ if p4User:
+ # Get last changelist number. Cannot easily get it from
+ # the submit command output as the output is
+ # unmarshalled.
+ changelist = self.lastP4Changelist()
+ self.modifyChangelistUser(changelist, p4User)
+
+ # The rename/copy happened by applying a patch that created a
+ # new file. This leaves it writable, which confuses p4.
+ for f in pureRenameCopy:
+ p4_sync(f, "-f")
- os.remove(fileName)
else:
- fileName = "submit.txt"
- file = open(fileName, "w+")
- file.write(self.prepareLogMessage(template, logMessage))
- file.close()
- print ("Perforce submit template written as %s. "
- + "Please review/edit and then use p4 submit -i < %s to submit directly!"
- % (fileName, fileName))
+ # skip this patch
+ print "Submission cancelled, undoing p4 changes."
+ for f in editedFiles:
+ p4_revert(f)
+ for f in filesToAdd:
+ p4_revert(f)
+ os.remove(f)
+
+ os.remove(fileName)
# Export git tags as p4 labels. Create a p4 label and then tag
# with that.
if self.preserveUser:
self.checkValidP4Users(commits)
+ #
+ # Build up a set of options to be passed to diff when
+ # submitting each commit to p4.
+ #
+ if self.detectRenames:
+ # command-line -M arg
+ self.diffOpts = "-M"
+ else:
+ # If not explicitly set, check the config variable
+ detectRenames = gitConfig("git-p4.detectRenames")
+
+ if detectRenames.lower() == "false" or detectRenames == "":
+ self.diffOpts = ""
+ elif detectRenames.lower() == "true":
+ self.diffOpts = "-M"
+ else:
+ self.diffOpts = "-M%s" % detectRenames
+
+ # no command-line arg for -C or --find-copies-harder, just
+ # config variables
+ detectCopies = gitConfig("git-p4.detectCopies")
+ if detectCopies.lower() == "false" or detectCopies == "":
+ pass
+ elif detectCopies.lower() == "true":
+ self.diffOpts += " -C"
+ else:
+ self.diffOpts += " -C%s" % detectCopies
+
+ if gitConfig("git-p4.detectCopiesHarder", "--bool") == "true":
+ self.diffOpts += " --find-copies-harder"
+
while len(commits) > 0:
commit = commits[0]
commits = commits[1:]
self.applyCommit(commit)
- if not self.interactive:
- break
if len(commits) == 0:
print "All changes applied!"
# Copyright (c) 2010 Junio C Hamano.
#
-. git-sh-setup
-
case "$action" in
continue)
git am --resolved --resolvemsg="$resolvemsg" &&
#
# The original idea comes from Eric W. Biederman, in
# http://article.gmane.org/gmane.comp.version-control.git/22407
-
-. git-sh-setup
-
+#
# The file containing rebase commands, comments, and empty lines.
# This file is created by "git rebase -i" then edited by the user. As
# the lines are processed, they are removed from the front of this
esac
}
+do_pick () {
+ if test "$(git rev-parse HEAD)" = "$squash_onto"
+ then
+ # Set the correct commit message and author info on the
+ # sentinel root before cherry-picking the original changes
+ # without committing (-n). Finally, update the sentinel again
+ # to include these changes. If the cherry-pick results in a
+ # conflict, this means our behaviour is similar to a standard
+ # failed cherry-pick during rebase, with a dirty index to
+ # resolve before manually running git commit --amend then git
+ # rebase --continue.
+ git commit --allow-empty --allow-empty-message --amend \
+ --no-post-rewrite -n -q -C $1 &&
+ pick_one -n $1 &&
+ git commit --allow-empty --allow-empty-message \
+ --amend --no-post-rewrite -n -q -C $1 ||
+ die_with_patch $1 "Could not apply $1... $2"
+ else
+ pick_one $1 ||
+ die_with_patch $1 "Could not apply $1... $2"
+ fi
+}
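+
+# do_pick is used below for the pick, reword and edit actions in place
+# of a bare pick_one, so that picking onto the sentinel root commit
+# amends the sentinel in place instead of adding a child to it.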
+
do_next () {
rm -f "$msg" "$author_script" "$amend" || exit
read -r command sha1 rest < "$todo"
comment_for_reflog pick
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
record_in_rewritten $sha1
;;
reword|r)
comment_for_reflog reword
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
git commit --amend --no-post-rewrite || {
warn "Could not amend commit after successfully picking $sha1... $rest"
warn "This is most likely due to an empty commit message, or the pre-commit hook"
comment_for_reflog edit
mark_action_done
- pick_one $sha1 ||
- die_with_patch $sha1 "Could not apply $sha1... $rest"
+ do_pick $sha1 "$rest"
warn "Stopped at $sha1... $rest"
exit_with_patch $sha1 0
;;
# Copyright (c) 2010 Junio C Hamano.
#
-. git-sh-setup
-
prec=4
read_state () {
OPTIONS_KEEPDASHDASH=
OPTIONS_SPEC="\
git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] [<upstream>] [<branch>]
-git rebase [-i] [options] [--exec <cmd>] --onto <newbase> --root [<branch>]
+git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] --root [<branch>]
git-rebase [-i] --continue | --abort | --skip
--
Available options are
valuable there.'
fi
+if test -n "$rebase_root" && test -z "$onto"
+then
+ test -z "$interactive_rebase" && interactive_rebase=implied
+fi
+
if test -n "$interactive_rebase"
then
type=interactive
die "invalid upstream $upstream_name"
upstream_arg="$upstream_name"
else
- test -z "$onto" && die "You must specify --onto when using --root"
+ if test -z "$onto"
+ then
+ empty_tree=`git hash-object -t tree /dev/null`
+ onto=`git commit-tree $empty_tree </dev/null`
+ squash_onto="$onto"
+ fi
unset upstream_name
unset upstream
+ test $# -gt 1 && usage
upstream_arg=--root
fi
die "fatal: no such branch: $1"
fi
;;
-*)
+0)
# Do not need to switch branches, we are already on it.
if branch_name=`git symbolic-ref -q HEAD`
then
fi
orig_head=$(git rev-parse --verify "${branch_name}^0") || exit
;;
+*)
+ die "BUG: unexpected number of arguments left to parse"
+ ;;
esac
require_clean_work_tree "rebase" "Please commit or stash them."
die "$(eval_gettext "Clone of '\$url' into submodule path '\$sm_path' failed")"
fi
- a=$(cd "$gitdir" && pwd)/
- b=$(cd "$sm_path" && pwd)/
+ # We already are at the root of the work tree but cd_to_toplevel will
+ # resolve any symlinks that might be present in $PWD
+ a=$(cd_to_toplevel && cd "$gitdir" && pwd)/
+ b=$(cd_to_toplevel && cd "$sm_path" && pwd)/
# normalize Windows-style absolute paths to POSIX-style absolute paths
case $a in [a-zA-Z]:/*) a=/${a%%:*}${a#*:} ;; esac
case $b in [a-zA-Z]:/*) b=/${b%%:*}${b#*:} ;; esac
usage_with_options(usagestr, options);
}
+ precompose_argv(argc, argv);
return parse_options_end(&ctx);
}
int cache_name_compare(const char *name1, int flags1, const char *name2, int flags2)
{
- int len1 = flags1 & CE_NAMEMASK;
- int len2 = flags2 & CE_NAMEMASK;
- int len = len1 < len2 ? len1 : len2;
- int cmp;
+ int len1, len2, len, cmp;
+
+ len1 = flags1 & CE_NAMEMASK;
+ if (CE_NAMEMASK <= len1)
+ len1 = strlen(name1 + CE_NAMEMASK) + CE_NAMEMASK;
+ len2 = flags2 & CE_NAMEMASK;
+ if (CE_NAMEMASK <= len2)
+ len2 = strlen(name2 + CE_NAMEMASK) + CE_NAMEMASK;
+ len = len1 < len2 ? len1 : len2;
cmp = memcmp(name1, name2, len);
if (cmp)
continue;
if (pathspec &&
- !match_pathspec(pathspec, ce->name, strlen(ce->name), 0, seen))
+ !match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, seen))
filtered = 1;
if (ce_stage(ce)) {
if (!ce_stage(ce))
continue;
unmerged = 1;
- len = strlen(ce->name);
+ len = ce_namelen(ce);
size = cache_entry_size(len);
new_ce = xcalloc(1, size);
memcpy(new_ce->name, ce->name, len);
case 0:
if (!memcmp(dst_value, "refs/", 5))
matched_dst = make_linked_ref(dst_value, dst_tail);
+ else if (is_null_sha1(matched_src->new_sha1))
+ error("unable to delete '%s': remote ref does not exist",
+ dst_value);
else if ((dst_guess = guess_ref(dst_value, matched_src)))
matched_dst = make_linked_ref(dst_guess, dst_tail);
else
continue;
hex = xstrdup(sha1_to_hex(sha1));
string_list_insert(rr, path)->util = hex;
- if (mkdir(git_path("rr-cache/%s", hex), 0755))
+ if (mkdir_in_gitdir(git_path("rr-cache/%s", hex)))
continue;
handle_file(path, NULL, rerere_path(hex, "preimage"));
fprintf(stderr, "Recorded preimage for '%s'\n", path);
flags ^= UNINTERESTING;
arg++;
}
- if (get_sha1(arg, sha1))
+ if (get_sha1_committish(arg, sha1))
return 0;
while (1) {
it = get_reference(revs, arg, sha1, 0);
revs->limited = 1;
}
-int handle_revision_arg(const char *arg_, struct rev_info *revs,
- int flags,
- int cant_be_filename)
+int handle_revision_arg(const char *arg_, struct rev_info *revs, int flags, unsigned revarg_opt)
{
- unsigned mode;
+ struct object_context oc;
char *dotdot;
struct object *object;
unsigned char sha1[20];
int local_flags;
const char *arg = arg_;
+ int cant_be_filename = revarg_opt & REVARG_CANNOT_BE_FILENAME;
+ unsigned get_sha1_flags = 0;
dotdot = strstr(arg, "..");
if (dotdot) {
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
- if (!get_sha1(this, from_sha1) &&
- !get_sha1(next, sha1)) {
+ if (!get_sha1_committish(this, from_sha1) &&
+ !get_sha1_committish(next, sha1)) {
struct commit *a, *b;
struct commit_list *exclude;
local_flags = UNINTERESTING;
arg++;
}
- if (get_sha1_with_mode(arg, sha1, &mode))
+
+ if (revarg_opt & REVARG_COMMITTISH)
+ get_sha1_flags = GET_SHA1_COMMITTISH;
+
+ if (get_sha1_with_context(arg, get_sha1_flags, sha1, &oc))
return revs->ignore_missing ? 0 : -1;
if (!cant_be_filename)
verify_non_filename(revs->prefix, arg);
object = get_reference(revs, arg, sha1, flags ^ local_flags);
add_rev_cmdline(revs, object, arg_, REV_CMD_REV, flags ^ local_flags);
- add_pending_object_with_mode(revs, object, arg, mode);
+ add_pending_object_with_mode(revs, object, arg, oc.mode);
return 0;
}
}
die("options not supported in --stdin mode");
}
- if (handle_revision_arg(sb.buf, revs, 0, 1))
+ if (handle_revision_arg(sb.buf, revs, 0, REVARG_CANNOT_BE_FILENAME))
die("bad revision '%s'", sb.buf);
}
if (seen_dashdash)
*/
int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct setup_revision_opt *opt)
{
- int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0;
+ int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0, revarg_opt;
struct cmdline_pathspec prune_data;
const char *submodule = NULL;
/* Second, deal with arguments and options */
flags = 0;
+ revarg_opt = opt ? opt->revarg_opt : 0;
+ if (seen_dashdash)
+ revarg_opt |= REVARG_CANNOT_BE_FILENAME;
read_from_stdin = 0;
for (left = i = 1; i < argc; i++) {
const char *arg = argv[i];
continue;
}
- if (handle_revision_arg(arg, revs, flags, seen_dashdash)) {
+
+ if (handle_revision_arg(arg, revs, flags, revarg_opt)) {
int j;
if (seen_dashdash || *arg == '^')
die("bad revision '%s'", arg);
if (revs->def && !revs->pending.nr && !got_rev_arg) {
unsigned char sha1[20];
struct object *object;
- unsigned mode;
- if (get_sha1_with_mode(revs->def, sha1, &mode))
+ struct object_context oc;
+ if (get_sha1_with_context(revs->def, 0, sha1, &oc))
die("bad default revision '%s'", revs->def);
object = get_reference(revs, revs->def, sha1, 0);
- add_pending_object_with_mode(revs, object, revs->def, mode);
+ add_pending_object_with_mode(revs, object, revs->def, oc.mode);
}
/* Did the user ask for any diff output? Run the diff! */
void (*tweak)(struct rev_info *, struct setup_revision_opt *);
const char *submodule;
int assume_dashdash;
+ unsigned revarg_opt;
};
extern void init_revisions(struct rev_info *revs, const char *prefix);
extern void parse_revision_opt(struct rev_info *revs, struct parse_opt_ctx_t *ctx,
const struct option *options,
const char * const usagestr[]);
-extern int handle_revision_arg(const char *arg, struct rev_info *revs,int flags,int cant_be_filename);
+#define REVARG_CANNOT_BE_FILENAME 01
+#define REVARG_COMMITTISH 02
+extern int handle_revision_arg(const char *arg, struct rev_info *revs, int flags, unsigned revarg_opt);
extern void reset_revision_walk(void);
extern int prepare_revision_walk(struct rev_info *revs);
const char *arg,
int diagnose_misspelt_rev)
{
- unsigned char sha1[20];
- unsigned mode;
-
if (!diagnose_misspelt_rev)
die("%s: no such path in the working tree.\n"
"Use '-- <path>...' to specify paths that do not exist locally.",
* Saying "'(icase)foo' does not exist in the index" when the
* user gave us ":(icase)foo" is just stupid. A magic pathspec
* begins with a colon and is followed by a non-alnum; do not
- * let get_sha1_with_mode_1(only_to_die=1) to even trigger.
+ * let maybe_die_on_misspelt_object_name() even trigger.
*/
if (!(arg[0] == ':' && !isalnum(arg[1])))
- /* try a detailed diagnostic ... */
- get_sha1_with_mode_1(arg, sha1, &mode, 1, prefix);
+ maybe_die_on_misspelt_object_name(arg, prefix);
/* ... or fall back the most general message. */
die("ambiguous argument '%s': unknown revision or path not in the working tree.\n"
static int get_sha1_oneline(const char *, unsigned char *, struct commit_list *);
-static int find_short_object_filename(int len, const char *name, unsigned char *sha1)
+typedef int (*disambiguate_hint_fn)(const unsigned char *, void *);
+
+struct disambiguate_state {
+ disambiguate_hint_fn fn;
+ void *cb_data;
+ unsigned char candidate[20];
+ unsigned candidate_exists:1;
+ unsigned candidate_checked:1;
+ unsigned candidate_ok:1;
+ unsigned disambiguate_fn_used:1;
+ unsigned ambiguous:1;
+ unsigned always_call_fn:1;
+};
+
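+/*
+ * "candidate" remembers the first object name seen while traversing;
+ * candidate_checked and candidate_ok cache whether the hint callback
+ * "fn" accepted it, and "ambiguous" cuts the traversal short.  With
+ * always_call_fn set, every object name is handed straight to "fn"
+ * instead (see for_each_abbrev() further down).
+ */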
+static void update_candidates(struct disambiguate_state *ds, const unsigned char *current)
+{
+ if (ds->always_call_fn) {
+ ds->ambiguous = ds->fn(current, ds->cb_data) ? 1 : 0;
+ return;
+ }
+ if (!ds->candidate_exists) {
+ /* this is the first candidate */
+ hashcpy(ds->candidate, current);
+ ds->candidate_exists = 1;
+ return;
+ } else if (!hashcmp(ds->candidate, current)) {
+ /* the same as what we already have seen */
+ return;
+ }
+
+ if (!ds->fn) {
+ /* cannot disambiguate between ds->candidate and current */
+ ds->ambiguous = 1;
+ return;
+ }
+
+ if (!ds->candidate_checked) {
+ ds->candidate_ok = ds->fn(ds->candidate, ds->cb_data);
+ ds->disambiguate_fn_used = 1;
+ ds->candidate_checked = 1;
+ }
+
+ if (!ds->candidate_ok) {
+		/* discard the candidate; we know it does not satisfy fn */
+ hashcpy(ds->candidate, current);
+ ds->candidate_checked = 0;
+ return;
+ }
+
+ /* if we reach this point, we know ds->candidate satisfies fn */
+ if (ds->fn(current, ds->cb_data)) {
+ /*
+ * if both current and candidate satisfy fn, we cannot
+ * disambiguate.
+ */
+ ds->candidate_ok = 0;
+ ds->ambiguous = 1;
+ }
+
+ /* otherwise, current can be discarded and candidate is still good */
+}
+
+static void find_short_object_filename(int len, const char *hex_pfx, struct disambiguate_state *ds)
{
struct alternate_object_database *alt;
char hex[40];
- int found = 0;
static struct alternate_object_database *fakeent;
if (!fakeent) {
+ /*
+ * Create a "fake" alternate object database that
+ * points to our own object database, to make it
+ * easier to get a temporary working space in
+ * alt->name/alt->base while iterating over the
+ * object databases including our own.
+ */
const char *objdir = get_object_directory();
int objdir_len = strlen(objdir);
int entlen = objdir_len + 43;
}
fakeent->next = alt_odb_list;
- sprintf(hex, "%.2s", name);
- for (alt = fakeent; alt && found < 2; alt = alt->next) {
+ sprintf(hex, "%.2s", hex_pfx);
+ for (alt = fakeent; alt && !ds->ambiguous; alt = alt->next) {
struct dirent *de;
DIR *dir;
- sprintf(alt->name, "%.2s/", name);
+ sprintf(alt->name, "%.2s/", hex_pfx);
dir = opendir(alt->base);
if (!dir)
continue;
- while ((de = readdir(dir)) != NULL) {
+
+ while (!ds->ambiguous && (de = readdir(dir)) != NULL) {
+ unsigned char sha1[20];
+
if (strlen(de->d_name) != 38)
continue;
- if (memcmp(de->d_name, name + 2, len - 2))
+ if (memcmp(de->d_name, hex_pfx + 2, len - 2))
continue;
- if (!found) {
- memcpy(hex + 2, de->d_name, 38);
- found++;
- }
- else if (memcmp(hex + 2, de->d_name, 38)) {
- found = 2;
- break;
- }
+ memcpy(hex + 2, de->d_name, 38);
+ if (!get_sha1_hex(hex, sha1))
+ update_candidates(ds, sha1);
}
closedir(dir);
}
- if (found == 1)
- return get_sha1_hex(hex, sha1) == 0;
- return found;
}
static int match_sha(unsigned len, const unsigned char *a, const unsigned char *b)
return 1;
}
-static int find_short_packed_object(int len, const unsigned char *match, unsigned char *sha1)
+static void unique_in_pack(int len,
+ const unsigned char *bin_pfx,
+ struct packed_git *p,
+ struct disambiguate_state *ds)
{
- struct packed_git *p;
- const unsigned char *found_sha1 = NULL;
- int found = 0;
-
- prepare_packed_git();
- for (p = packed_git; p && found < 2; p = p->next) {
- uint32_t num, last;
- uint32_t first = 0;
- open_pack_index(p);
- num = p->num_objects;
- last = num;
- while (first < last) {
- uint32_t mid = (first + last) / 2;
- const unsigned char *now;
- int cmp;
-
- now = nth_packed_object_sha1(p, mid);
- cmp = hashcmp(match, now);
- if (!cmp) {
- first = mid;
- break;
- }
- if (cmp > 0) {
- first = mid+1;
- continue;
- }
- last = mid;
+ uint32_t num, last, i, first = 0;
+ const unsigned char *current = NULL;
+
+ open_pack_index(p);
+ num = p->num_objects;
+ last = num;
+ while (first < last) {
+ uint32_t mid = (first + last) / 2;
+ const unsigned char *current;
+ int cmp;
+
+ current = nth_packed_object_sha1(p, mid);
+ cmp = hashcmp(bin_pfx, current);
+ if (!cmp) {
+ first = mid;
+ break;
}
- if (first < num) {
- const unsigned char *now, *next;
- now = nth_packed_object_sha1(p, first);
- if (match_sha(len, match, now)) {
- next = nth_packed_object_sha1(p, first+1);
- if (!next|| !match_sha(len, match, next)) {
- /* unique within this pack */
- if (!found) {
- found_sha1 = now;
- found++;
- }
- else if (hashcmp(found_sha1, now)) {
- found = 2;
- break;
- }
- }
- else {
- /* not even unique within this pack */
- found = 2;
- break;
- }
- }
+ if (cmp > 0) {
+ first = mid+1;
+ continue;
}
+ last = mid;
+ }
+
+ /*
+ * At this point, "first" is the location of the lowest object
+ * with an object name that could match "bin_pfx". See if we have
+	 * 0, 1 or more objects that actually match.
+ */
+ for (i = first; i < num && !ds->ambiguous; i++) {
+ current = nth_packed_object_sha1(p, i);
+ if (!match_sha(len, bin_pfx, current))
+ break;
+ update_candidates(ds, current);
}
- if (found == 1)
- hashcpy(sha1, found_sha1);
- return found;
+}
+
+static void find_short_packed_object(int len, const unsigned char *bin_pfx,
+ struct disambiguate_state *ds)
+{
+ struct packed_git *p;
+
+ prepare_packed_git();
+ for (p = packed_git; p && !ds->ambiguous; p = p->next)
+ unique_in_pack(len, bin_pfx, p, ds);
}
#define SHORT_NAME_NOT_FOUND (-1)
#define SHORT_NAME_AMBIGUOUS (-2)
-static int find_unique_short_object(int len, char *canonical,
- unsigned char *res, unsigned char *sha1)
+static int finish_object_disambiguation(struct disambiguate_state *ds,
+ unsigned char *sha1)
{
- int has_unpacked, has_packed;
- unsigned char unpacked_sha1[20], packed_sha1[20];
+ if (ds->ambiguous)
+ return SHORT_NAME_AMBIGUOUS;
- prepare_alt_odb();
- has_unpacked = find_short_object_filename(len, canonical, unpacked_sha1);
- has_packed = find_short_packed_object(len, res, packed_sha1);
- if (!has_unpacked && !has_packed)
+ if (!ds->candidate_exists)
return SHORT_NAME_NOT_FOUND;
- if (1 < has_unpacked || 1 < has_packed)
+
+ if (!ds->candidate_checked)
+ /*
+ * If this is the only candidate, there is no point
+ * calling the disambiguation hint callback.
+ *
+ * On the other hand, if the current candidate
+ * replaced an earlier candidate that did _not_ pass
+		 * the disambiguation hint callback, then we do have
+		 * more than one object that matches the short name
+		 * given, so we should make sure this one matches;
+		 * otherwise, discovering this one and the previously
+		 * discarded one in the opposite order would end up
+		 * showing different results in the same repository!
+ */
+ ds->candidate_ok = (!ds->disambiguate_fn_used ||
+ ds->fn(ds->candidate, ds->cb_data));
+
+ if (!ds->candidate_ok)
return SHORT_NAME_AMBIGUOUS;
- if (has_unpacked != has_packed) {
- hashcpy(sha1, (has_packed ? packed_sha1 : unpacked_sha1));
+
+ hashcpy(sha1, ds->candidate);
+ return 0;
+}
+
+static int disambiguate_commit_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_COMMIT;
+}
+
+static int disambiguate_committish_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ struct object *obj;
+ int kind;
+
+ kind = sha1_object_info(sha1, NULL);
+ if (kind == OBJ_COMMIT)
+ return 1;
+ if (kind != OBJ_TAG)
return 0;
- }
- /* Both have unique ones -- do they match? */
- if (hashcmp(packed_sha1, unpacked_sha1))
- return SHORT_NAME_AMBIGUOUS;
- hashcpy(sha1, packed_sha1);
+
+ /* We need to do this the hard way... */
+ obj = deref_tag(lookup_object(sha1), NULL, 0);
+ if (obj && obj->type == OBJ_COMMIT)
+ return 1;
return 0;
}
-static int get_short_sha1(const char *name, int len, unsigned char *sha1,
- int quietly)
+static int disambiguate_tree_only(const unsigned char *sha1, void *cb_data_unused)
{
- int i, status;
- char canonical[40];
- unsigned char res[20];
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_TREE;
+}
- if (len < MINIMUM_ABBREV || len > 40)
- return -1;
- hashclr(res);
- memset(canonical, 'x', 40);
+static int disambiguate_treeish_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ struct object *obj;
+ int kind;
+
+ kind = sha1_object_info(sha1, NULL);
+ if (kind == OBJ_TREE || kind == OBJ_COMMIT)
+ return 1;
+ if (kind != OBJ_TAG)
+ return 0;
+
+ /* We need to do this the hard way... */
+ obj = deref_tag(lookup_object(sha1), NULL, 0);
+ if (obj && (obj->type == OBJ_TREE || obj->type == OBJ_COMMIT))
+ return 1;
+ return 0;
+}
+
+static int disambiguate_blob_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_BLOB;
+}
+
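+/*
+ * Turn a hex prefix such as "1234" into the two forms the lookups
+ * below want: hex_pfx padded with 'x' to 40 bytes ("1234xxxx...")
+ * for matching loose object file names, and bin_pfx zero-padded to
+ * 20 bytes ({0x12, 0x34, 0x00, ...}) for matching pack index
+ * entries.  Returns -1 if "name" contains a non-hex byte.
+ */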
+static int prepare_prefixes(const char *name, int len,
+ unsigned char *bin_pfx,
+ char *hex_pfx)
+{
+ int i;
+
+ hashclr(bin_pfx);
+ memset(hex_pfx, 'x', 40);
for (i = 0; i < len ;i++) {
unsigned char c = name[i];
unsigned char val;
}
else
return -1;
- canonical[i] = c;
+ hex_pfx[i] = c;
if (!(i & 1))
val <<= 4;
- res[i >> 1] |= val;
+ bin_pfx[i >> 1] |= val;
}
+ return 0;
+}
+
+static int get_short_sha1(const char *name, int len, unsigned char *sha1,
+ unsigned flags)
+{
+ int status;
+ char hex_pfx[40];
+ unsigned char bin_pfx[20];
+ struct disambiguate_state ds;
+ int quietly = !!(flags & GET_SHA1_QUIETLY);
+
+ if (len < MINIMUM_ABBREV || len > 40)
+ return -1;
+ if (prepare_prefixes(name, len, bin_pfx, hex_pfx) < 0)
+ return -1;
+
+ prepare_alt_odb();
+
+ memset(&ds, 0, sizeof(ds));
+ if (flags & GET_SHA1_COMMIT)
+ ds.fn = disambiguate_commit_only;
+ else if (flags & GET_SHA1_COMMITTISH)
+ ds.fn = disambiguate_committish_only;
+ else if (flags & GET_SHA1_TREE)
+ ds.fn = disambiguate_tree_only;
+ else if (flags & GET_SHA1_TREEISH)
+ ds.fn = disambiguate_treeish_only;
+ else if (flags & GET_SHA1_BLOB)
+ ds.fn = disambiguate_blob_only;
+
+ find_short_object_filename(len, hex_pfx, &ds);
+ find_short_packed_object(len, bin_pfx, &ds);
+ status = finish_object_disambiguation(&ds, sha1);
- status = find_unique_short_object(i, canonical, res, sha1);
if (!quietly && (status == SHORT_NAME_AMBIGUOUS))
- return error("short SHA1 %.*s is ambiguous.", len, canonical);
+ return error("short SHA1 %.*s is ambiguous.", len, hex_pfx);
return status;
}
+
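+/*
+ * Illustration (a sketch, not part of this change): a caller such as
+ * "git rev-parse --disambiguate=<prefix>" (exercised in the new test
+ * below) can list every object matching a prefix by passing a
+ * callback that prints each candidate and returns 0 to keep the
+ * traversal going:
+ *
+ *	static int show_abbrev(const unsigned char *sha1, void *unused)
+ *	{
+ *		puts(sha1_to_hex(sha1));
+ *		return 0;
+ *	}
+ *
+ *	... for_each_abbrev(prefix, show_abbrev, NULL);
+ */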
+int for_each_abbrev(const char *prefix, each_abbrev_fn fn, void *cb_data)
+{
+ char hex_pfx[40];
+ unsigned char bin_pfx[20];
+ struct disambiguate_state ds;
+ int len = strlen(prefix);
+
+ if (len < MINIMUM_ABBREV || len > 40)
+ return -1;
+ if (prepare_prefixes(prefix, len, bin_pfx, hex_pfx) < 0)
+ return -1;
+
+ prepare_alt_odb();
+
+ memset(&ds, 0, sizeof(ds));
+ ds.always_call_fn = 1;
+ ds.cb_data = cb_data;
+ ds.fn = fn;
+
+ find_short_object_filename(len, hex_pfx, &ds);
+ find_short_packed_object(len, bin_pfx, &ds);
+ return ds.ambiguous;
+}
+
const char *find_unique_abbrev(const unsigned char *sha1, int len)
{
int status, exists;
return hex;
while (len < 40) {
unsigned char sha1_ret[20];
- status = get_short_sha1(hex, len, sha1_ret, 1);
+ status = get_short_sha1(hex, len, sha1_ret, GET_SHA1_QUIETLY);
if (exists
? !status
: status == SHORT_NAME_NOT_FOUND) {
return 0;
}
-static int get_sha1_1(const char *name, int len, unsigned char *sha1);
+static int get_sha1_1(const char *name, int len, unsigned char *sha1, unsigned lookup_flags);
static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
{
ret = interpret_branch_name(str+at, &buf);
if (ret > 0) {
/* substitute this branch name and restart */
- return get_sha1_1(buf.buf, buf.len, sha1);
+ return get_sha1_1(buf.buf, buf.len, sha1, 0);
} else if (ret == 0) {
return -1;
}
unsigned char *result, int idx)
{
unsigned char sha1[20];
- int ret = get_sha1_1(name, len, sha1);
+ int ret = get_sha1_1(name, len, sha1, GET_SHA1_COMMITTISH);
struct commit *commit;
struct commit_list *p;
struct commit *commit;
int ret;
- ret = get_sha1_1(name, len, sha1);
+ ret = get_sha1_1(name, len, sha1, GET_SHA1_COMMITTISH);
if (ret)
return ret;
commit = lookup_commit_reference(sha1);
unsigned char outer[20];
const char *sp;
unsigned int expected_type = 0;
+ unsigned lookup_flags = 0;
struct object *o;
/*
else
return -1;
- if (get_sha1_1(name, sp - name - 2, outer))
+ if (expected_type == OBJ_COMMIT)
+ lookup_flags = GET_SHA1_COMMITTISH;
+
+ if (get_sha1_1(name, sp - name - 2, outer, lookup_flags))
return -1;
o = parse_object(outer);
static int get_describe_name(const char *name, int len, unsigned char *sha1)
{
const char *cp;
+ unsigned flags = GET_SHA1_QUIETLY | GET_SHA1_COMMIT;
for (cp = name + len - 1; name + 2 <= cp; cp--) {
char ch = *cp;
if (ch == 'g' && cp[-1] == '-') {
cp++;
len -= cp - name;
- return get_short_sha1(cp, len, sha1, 1);
+ return get_short_sha1(cp, len, sha1, flags);
}
}
}
return -1;
}
-static int get_sha1_1(const char *name, int len, unsigned char *sha1)
+static int get_sha1_1(const char *name, int len, unsigned char *sha1, unsigned lookup_flags)
{
int ret, has_suffix;
const char *cp;
if (!ret)
return 0;
- return get_short_sha1(name, len, sha1, 0);
+ return get_short_sha1(name, len, sha1, lookup_flags);
}
/*
struct strbuf sb;
strbuf_init(&sb, dots - name);
strbuf_add(&sb, name, dots - name);
- st = get_sha1(sb.buf, sha1_tmp);
+ st = get_sha1_committish(sb.buf, sha1_tmp);
strbuf_release(&sb);
}
if (st)
if (!one)
return -1;
- if (get_sha1(dots[3] ? (dots + 3) : "HEAD", sha1_tmp))
+ if (get_sha1_committish(dots[3] ? (dots + 3) : "HEAD", sha1_tmp))
return -1;
two = lookup_commit_reference_gently(sha1_tmp, 0);
if (!two)
int get_sha1(const char *name, unsigned char *sha1)
{
struct object_context unused;
- return get_sha1_with_context(name, sha1, &unused);
+ return get_sha1_with_context(name, 0, sha1, &unused);
+}
+
+/*
+ * Many callers know that the user meant to name a committish from the
+ * syntactic position in which the object name appears.  Calling this
+ * function allows the machinery to disambiguate shorter-than-unique
+ * abbreviated object names between committish and others.
+ *
+ * Note that this does NOT error out when the named object is not a
+ * committish. It is merely to give a hint to the disambiguation
+ * machinery.
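+ *
+ * Illustration (a sketch, not code from this patch): a caller that
+ * knows "name" sits in a committish slot would do
+ *
+ *	unsigned char sha1[20];
+ *	if (get_sha1_committish(name, sha1))
+ *		return error("bad revision '%s'", name);
+ *
+ * and a short prefix shared by, say, a blob and a commit then
+ * resolves to the commit instead of being reported as ambiguous.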
+ */
+int get_sha1_committish(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_COMMITTISH,
+ sha1, &unused);
+}
+
+int get_sha1_treeish(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_TREEISH,
+ sha1, &unused);
+}
+
+int get_sha1_commit(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_COMMIT,
+ sha1, &unused);
+}
+
+int get_sha1_tree(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_TREE,
+ sha1, &unused);
+}
+
+int get_sha1_blob(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_BLOB,
+ sha1, &unused);
}
/* Must be called only when object_name:filename doesn't exist. */
}
-int get_sha1_with_mode_1(const char *name, unsigned char *sha1, unsigned *mode,
- int only_to_die, const char *prefix)
-{
- struct object_context oc;
- int ret;
- ret = get_sha1_with_context_1(name, sha1, &oc, only_to_die, prefix);
- *mode = oc.mode;
- return ret;
-}
-
static char *resolve_relative_path(const char *rel)
{
if (prefixcmp(rel, "./") && prefixcmp(rel, "../"))
rel);
}
-int get_sha1_with_context_1(const char *name, unsigned char *sha1,
- struct object_context *oc,
- int only_to_die, const char *prefix)
+static int get_sha1_with_context_1(const char *name,
+ unsigned flags,
+ const char *prefix,
+ unsigned char *sha1,
+ struct object_context *oc)
{
int ret, bracket_depth;
int namelen = strlen(name);
const char *cp;
+ int only_to_die = flags & GET_SHA1_ONLY_TO_DIE;
memset(oc, 0, sizeof(*oc));
oc->mode = S_IFINVALID;
- ret = get_sha1_1(name, namelen, sha1);
+ ret = get_sha1_1(name, namelen, sha1, flags);
if (!ret)
return ret;
- /* sha1:path --> object name of path in ent sha1
+ /*
+ * sha1:path --> object name of path in ent sha1
* :path -> object name of absolute path in index
* :./path -> object name of path relative to cwd in index
* :[0-3]:path -> object name of path in index at stage
strncpy(object_name, name, cp-name);
object_name[cp-name] = '\0';
}
- if (!get_sha1_1(name, cp-name, tree_sha1)) {
+ if (!get_sha1_1(name, cp-name, tree_sha1, GET_SHA1_TREEISH)) {
const char *filename = cp+1;
char *new_filename = NULL;
}
return ret;
}
+
+/*
+ * Call this function when you know "name" given by the end user must
+ * name an object but it doesn't; the function _may_ die with a better
+ * diagnostic message than "no such object 'name'", e.g. "Path 'doc' does not
+ * exist in 'HEAD'" when given "HEAD:doc", or it may return, in which case
+ * you have a chance to diagnose the error further.
+ */
+void maybe_die_on_misspelt_object_name(const char *name, const char *prefix)
+{
+ struct object_context oc;
+ unsigned char sha1[20];
+ get_sha1_with_context_1(name, GET_SHA1_ONLY_TO_DIE, prefix, sha1, &oc);
+}
+
+int get_sha1_with_context(const char *str, unsigned flags, unsigned char *sha1, struct object_context *orc)
+{
+ return get_sha1_with_context_1(str, flags, NULL, sha1, orc);
+}
rm -rf "$git" &&
mkdir "$git"
}
+
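+# marshal_dump <what> [<n>]: read <n> (default 1) of the marshalled
+# Python dictionaries that "p4 -G" emits on stdout and print the
+# value of key <what> from the last one read.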
+marshal_dump() {
+ what=$1 &&
+ line=${2:-1} &&
+ cat >"$TRASH_DIRECTORY/marshal-dump.py" <<-EOF &&
+ import marshal
+ import sys
+	for i in range($line):
+	    d = marshal.load(sys.stdin)
+	print d['$what']
+ EOF
+ "$PYTHON_PATH" "$TRASH_DIRECTORY/marshal-dump.py"
+}
--- /dev/null
+#!/bin/sh
+
+test_description='object name disambiguation
+
+Create blobs, trees, commits and a tag that all share the same
+prefix, and make sure "git rev-parse" can take advantage of
+type information to disambiguate short object names that are
+not necessarily unique.
+
+The final history used in the test has five commits, with the bottom
+one tagged as v1.0.0. They all have one regular file each.
+
+ +-------------------------------------------+
+ | |
+ | .-------b3wettvi---- ad2uee |
+ | / / |
+ | a2onsxbvj---czy8f73t--ioiley5o |
+ | |
+ +-------------------------------------------+
+
+'
+
+. ./test-lib.sh
+
+test_expect_success 'blob and tree' '
+ test_tick &&
+ (
+ for i in 0 1 2 3 4 5 6 7 8 9
+ do
+ echo $i
+ done
+ echo
+ echo b1rwzyc3
+ ) >a0blgqsjc &&
+
+ # create one blob 0000000000b36
+ git add a0blgqsjc &&
+
+ # create one tree 0000000000cdc
+ git write-tree
+'
+
+test_expect_success 'warn ambiguity when no candidate matches type hint' '
+ test_must_fail git rev-parse --verify 000000000^{commit} 2>actual &&
+ grep "short SHA1 000000000 is ambiguous" actual
+'
+
+test_expect_success 'disambiguate tree-ish' '
+ # feed tree-ish in an unambiguous way
+ git rev-parse --verify 0000000000cdc:a0blgqsjc &&
+
+ # ambiguous at the object name level, but there is only one
+ # such tree-ish (the other is a blob)
+ git rev-parse --verify 000000000:a0blgqsjc
+'
+
+test_expect_success 'disambiguate blob' '
+ sed -e "s/|$//" >patch <<-EOF &&
+ diff --git a/frotz b/frotz
+ index 000000000..ffffff 100644
+ --- a/frotz
+ +++ b/frotz
+ @@ -10,3 +10,4 @@
+ 9
+ |
+ b1rwzyc3
+ +irwry
+ EOF
+ (
+ GIT_INDEX_FILE=frotz &&
+ export GIT_INDEX_FILE &&
+ git apply --build-fake-ancestor frotz patch &&
+ git cat-file blob :frotz >actual
+ ) &&
+ test_cmp a0blgqsjc actual
+'
+
+test_expect_success 'disambiguate tree' '
+ commit=$(echo "d7xm" | git commit-tree 000000000) &&
+ test $(git rev-parse $commit^{tree}) = $(git rev-parse 0000000000cdc)
+'
+
+test_expect_success 'first commit' '
+ # create one commit 0000000000e4f
+ git commit -m a2onsxbvj
+'
+
+test_expect_success 'disambiguate commit-ish' '
+ # feed commit-ish in an unambiguous way
+ git rev-parse --verify 0000000000e4f^{commit} &&
+
+ # ambiguous at the object name level, but there is only one
+ # such commit (the others are tree and blob)
+ git rev-parse --verify 000000000^{commit} &&
+
+ # likewise
+ git rev-parse --verify 000000000^0
+'
+
+test_expect_success 'disambiguate commit' '
+ commit=$(echo "hoaxj" | git commit-tree 0000000000cdc -p 000000000) &&
+ test $(git rev-parse $commit^) = $(git rev-parse 0000000000e4f)
+'
+
+test_expect_success 'log name1..name2 takes only commit-ishes on both ends' '
+ git log 000000000..000000000 &&
+ git log ..000000000 &&
+ git log 000000000.. &&
+ git log 000000000...000000000 &&
+ git log ...000000000 &&
+ git log 000000000...
+'
+
+test_expect_success 'rev-parse name1..name2 takes only commit-ishes on both ends' '
+ git rev-parse 000000000..000000000 &&
+ git rev-parse ..000000000 &&
+ git rev-parse 000000000..
+'
+
+test_expect_success 'git log takes only commit-ish' '
+ git log 000000000
+'
+
+test_expect_success 'git reset takes only commit-ish' '
+ git reset 000000000
+'
+
+test_expect_success 'first tag' '
+ # create one tag 0000000000f8f
+ git tag -a -m j7cp83um v1.0.0
+'
+
+test_expect_failure 'two semi-ambiguous commit-ish' '
+	# Once the parser becomes ultra-smart, it could notice that
+	# 110282 before ^{commit} names many different objects, but
+	# that only two (HEAD and the v1.0.0 tag) can be peeled to a
+	# commit, and that peeling them down to a commit yields the
+	# same commit without ambiguity.
+ git rev-parse --verify 110282^{commit} &&
+
+ # likewise
+ git log 000000000..000000000 &&
+ git log ..000000000 &&
+ git log 000000000.. &&
+ git log 000000000...000000000 &&
+ git log ...000000000 &&
+ git log 000000000...
+'
+
+test_expect_failure 'three semi-ambiguous tree-ish' '
+ # Likewise for tree-ish. HEAD, v1.0.0 and HEAD^{tree} share
+ # the prefix but peeling them to tree yields the same thing
+ git rev-parse --verify 000000000^{tree}
+'
+
+test_expect_success 'parse describe name' '
+ # feed an unambiguous describe name
+ git rev-parse --verify v1.0.0-0-g0000000000e4f &&
+
+ # ambiguous at the object name level, but there is only one
+ # such commit (others are blob, tree and tag)
+ git rev-parse --verify v1.0.0-0-g000000000
+'
+
+test_expect_success 'more history' '
+ # commit 0000000000043
+ git mv a0blgqsjc d12cr3h8t &&
+ echo h62xsjeu >>d12cr3h8t &&
+ git add d12cr3h8t &&
+
+ test_tick &&
+ git commit -m czy8f73t &&
+
+ # commit 00000000008ec
+ git mv d12cr3h8t j000jmpzn &&
+ echo j08bekfvt >>j000jmpzn &&
+ git add j000jmpzn &&
+
+ test_tick &&
+ git commit -m ioiley5o &&
+
+ # commit 0000000005b0
+ git checkout v1.0.0^0 &&
+ git mv a0blgqsjc f5518nwu &&
+
+ for i in h62xsjeu j08bekfvt kg7xflhm
+ do
+ echo $i
+ done >>f5518nwu &&
+ git add f5518nwu &&
+
+ test_tick &&
+ git commit -m b3wettvi &&
+ side=$(git rev-parse HEAD) &&
+
+ # commit 000000000066
+ git checkout master &&
+
+ # If you use recursive, merge will fail and you will need to
+ # clean up a0blgqsjc as well. If you use resolve, merge will
+ # succeed.
+ test_might_fail git merge --no-commit -s recursive $side &&
+ git rm -f f5518nwu j000jmpzn &&
+
+ test_might_fail git rm -f a0blgqsjc &&
+ (
+ git cat-file blob $side:f5518nwu
+ echo j3l0i9s6
+ ) >ab2gs879 &&
+ git add ab2gs879 &&
+
+ test_tick &&
+ git commit -m ad2uee
+
+'
+
+test_expect_failure 'parse describe name taking advantage of generation' '
+ # ambiguous at the object name level, but there is only one
+ # such commit at generation 0
+ git rev-parse --verify v1.0.0-0-g000000000 &&
+
+ # likewise for generation 2 and 4
+ git rev-parse --verify v1.0.0-2-g000000000 &&
+ git rev-parse --verify v1.0.0-4-g000000000
+'
+
+# Note: because rev-parse does not even try to disambiguate based on
+# the generation number, this test currently succeeds for a wrong
+# reason. When it learns to use the generation number, the previous
+# test should succeed, and also this test should fail because the
+# describe name with a generation number used in this test can name
+# two commits.  Make sure that such a future enhancement does not
+# randomly pick one.
+test_expect_success 'parse describe name not ignoring ambiguity' '
+ # ambiguous at the object name level, and there are two such
+ # commits at generation 1
+ test_must_fail git rev-parse --verify v1.0.0-1-g000000000
+'
+
+test_expect_success 'ambiguous commit-ish' '
+ # Now there are many commits that begin with the
+ # common prefix, none of these should pick one at
+ # random. They all should result in ambiguity errors.
+ test_must_fail git rev-parse --verify 110282^{commit} &&
+
+ # likewise
+ test_must_fail git log 000000000..000000000 &&
+ test_must_fail git log ..000000000 &&
+ test_must_fail git log 000000000.. &&
+ test_must_fail git log 000000000...000000000 &&
+ test_must_fail git log ...000000000 &&
+ test_must_fail git log 000000000...
+'
+
+test_expect_success 'rev-parse --disambiguate' '
+ # The test creates 16 objects that share the prefix and two
+ # commits created by commit-tree in earlier tests share a
+ # different prefix.
+ git rev-parse --disambiguate=000000000 >actual &&
+ test $(wc -l <actual) = 16 &&
+ test "$(sed -e "s/^\(.........\).*/\1/" actual | sort -u)" = 000000000
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='overly long paths'
+. ./test-lib.sh
+
+test_expect_success setup '
+ p=filefilefilefilefilefilefilefile &&
+ p=$p$p$p$p$p$p$p$p$p$p$p$p$p$p$p$p &&
+ p=$p$p$p$p$p$p$p$p$p$p$p$p$p$p$p$p &&
+
+ path_a=${p}_a &&
+ path_z=${p}_z &&
+
+ blob_a=$(echo frotz | git hash-object -w --stdin) &&
+ blob_z=$(echo nitfol | git hash-object -w --stdin) &&
+
+ pat="100644 %s 0\t%s\n"
+'
+
+test_expect_success 'overly-long path by itself is not a problem' '
+ printf "$pat" "$blob_a" "$path_a" |
+ git update-index --add --index-info &&
+ echo "$path_a" >expect &&
+ git ls-files >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'overly-long path does not replace another by mistake' '
+ printf "$pat" "$blob_a" "$path_a" "$blob_z" "$path_z" |
+ git update-index --add --index-info &&
+ (
+ echo "$path_a"
+ echo "$path_z"
+ ) >expect &&
+ git ls-files >actual &&
+ test_cmp expect actual
+'
+
+test_done
git checkout master
'
+test_expect_success 'rebase -i --root re-order and drop commits' '
+ git checkout E &&
+ FAKE_LINES="3 1 2 5" git rebase -i --root &&
+ test E = $(git cat-file commit HEAD | sed -ne \$p) &&
+ test B = $(git cat-file commit HEAD^ | sed -ne \$p) &&
+ test A = $(git cat-file commit HEAD^^ | sed -ne \$p) &&
+ test C = $(git cat-file commit HEAD^^^ | sed -ne \$p) &&
+ test 0 = $(git cat-file commit HEAD^^^ | grep -c ^parent\ )
+'
+
+test_expect_success 'rebase -i --root retain root commit author and message' '
+ git checkout A &&
+ echo B >file7 &&
+ git add file7 &&
+ GIT_AUTHOR_NAME="Twerp Snog" git commit -m "different author" &&
+ FAKE_LINES="2" git rebase -i --root &&
+ git cat-file commit HEAD | grep -q "^author Twerp Snog" &&
+ git cat-file commit HEAD | grep -q "^different author$"
+'
+
+test_expect_success 'rebase -i --root temporary sentinel commit' '
+ git checkout B &&
+ (
+ FAKE_LINES="2" &&
+ export FAKE_LINES &&
+ test_must_fail git rebase -i --root
+ ) &&
+ git cat-file commit HEAD | grep "^tree 4b825dc642cb" &&
+ git rebase --abort
+'
+
test_done
#!/bin/sh
-test_description='rebase should not insist on git message convention'
+test_description='rebase should handle arbitrary git messages'
. ./test-lib.sh
to oneline summary format.
EOF
+cat >G <<\EOF
+commit log message containing a diff
+EOF
+
+
test_expect_success setup '
>file1 &&
git add file1 file2 &&
test_tick &&
git commit -m "Initial commit" &&
+ git branch diff-in-message
- git checkout -b side &&
+ git checkout -b multi-line-subject &&
cat F >file2 &&
git add file2 &&
test_tick &&
git cat-file commit HEAD | sed -e "1,/^\$/d" >F0 &&
+ git checkout diff-in-message &&
+ echo "commit log message containing a diff" >G &&
+	echo "" >>G &&
+ cat G >file2 &&
+ git add file2 &&
+ git diff --cached >>G &&
+ test_tick &&
+ git commit -F G &&
+
+ git cat-file commit HEAD | sed -e "1,/^\$/d" >G0 &&
+
git checkout master &&
echo One >file1 &&
git commit -m "Second commit"
'
-test_expect_success rebase '
+test_expect_success 'rebase commit with multi-line subject' '
- git rebase master side &&
+ git rebase master multi-line-subject &&
git cat-file commit HEAD | sed -e "1,/^\$/d" >F1 &&
test_cmp F0 F1 &&
test_cmp F F0
'
+test_expect_success 'rebase commit with diff in message' '
+ git rebase master diff-in-message &&
+ git cat-file commit HEAD | sed -e "1,/^$/d" >G1 &&
+ test_cmp G0 G1 &&
+ test_cmp G G0
+'
+
test_done
test_commit 4 B
'
-test_expect_success 'rebase --root expects --onto' '
- test_must_fail git rebase --root
+test_expect_success 'rebase --root fails with too many args' '
+ git checkout -B fail other &&
+ test_must_fail git rebase --onto master --root fail fail
'
test_expect_success 'setup pre-rebase hook' '
EOF
test_expect_success 'rebase --root --onto <newbase>' '
- git checkout -b work &&
+ git checkout -b work other &&
git rebase --root --onto master &&
git log --pretty=tformat:"%s" > rebased &&
test_cmp expect rebased
--- /dev/null
+#!/bin/sh
+#
+# Copyright (c) 2012 Torsten Bögershausen
+#
+
+test_description='utf-8 decomposed (nfd) converted to precomposed (nfc)'
+
+. ./test-lib.sh
+
+Adiarnfc=`printf '\303\204'`
+Adiarnfd=`printf 'A\314\210'`
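+# Adiarnfc is U+00C4 (A with diaeresis) in precomposed (NFC) form;
+# Adiarnfd is the decomposed (NFD) form, "A" followed by the
+# combining diaeresis U+0308.  HFS+ stores file names in a
+# decomposed form, which is what the probe below checks for.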
+
+# check if the feature is compiled in
+mkdir junk &&
+>junk/"$Adiarnfc" &&
+case "$(cd junk && echo *)" in
+ "$Adiarnfd")
+ test_nfd=1
+ ;;
+ *) ;;
+esac
+rm -rf junk
+
+
+if test "$test_nfd"
+then
+ # create more utf-8 variables
+ Odiarnfc=`printf '\303\226'`
+ Odiarnfd=`printf 'O\314\210'`
+ AEligatu=`printf '\303\206'`
+ Invalidu=`printf '\303\377'`
+
+
+ #Create a string with 255 bytes (decomposed)
+ Alongd=$Adiarnfd$Adiarnfd$Adiarnfd$Adiarnfd$Adiarnfd$Adiarnfd$Adiarnfd #21 Byte
+ Alongd=$Alongd$Alongd$Alongd #63 Byte
+ Alongd=$Alongd$Alongd$Alongd$Alongd$Adiarnfd #255 Byte
+
+ #Create a string with 254 bytes (precomposed)
+ Alongc=$AEligatu$AEligatu$AEligatu$AEligatu$AEligatu #10 Byte
+ Alongc=$Alongc$Alongc$Alongc$Alongc$Alongc #50 Byte
+ Alongc=$Alongc$Alongc$Alongc$Alongc$Alongc #250 Byte
+ Alongc=$Alongc$AEligatu$AEligatu #254 Byte
+
+ test_expect_success "detect if nfd needed" '
+ precomposeunicode=`git config core.precomposeunicode` &&
+ test "$precomposeunicode" = false &&
+ git config core.precomposeunicode true
+ '
+ test_expect_success "setup" '
+ >x &&
+ git add x &&
+ git commit -m "1st commit" &&
+ git rm x &&
+ git commit -m "rm x"
+ '
+ test_expect_success "setup case mac" '
+ git checkout -b mac_os
+ '
+ # This will test nfd2nfc in readdir()
+ test_expect_success "add file Adiarnfc" '
+ echo f.Adiarnfc >f.$Adiarnfc &&
+ git add f.$Adiarnfc &&
+ git commit -m "add f.$Adiarnfc"
+ '
+ # This will test nfd2nfc in git stage()
+ test_expect_success "stage file d.Adiarnfd/f.Adiarnfd" '
+ mkdir d.$Adiarnfd &&
+ echo d.$Adiarnfd/f.$Adiarnfd >d.$Adiarnfd/f.$Adiarnfd &&
+ git stage d.$Adiarnfd/f.$Adiarnfd &&
+ git commit -m "add d.$Adiarnfd/f.$Adiarnfd"
+ '
+ test_expect_success "add link Adiarnfc" '
+ ln -s d.$Adiarnfd/f.$Adiarnfd l.$Adiarnfc &&
+ git add l.$Adiarnfc &&
+ git commit -m "add l.Adiarnfc"
+ '
+ # This will test git log
+ test_expect_success "git log f.Adiar" '
+ git log f.$Adiarnfc > f.Adiarnfc.log &&
+ git log f.$Adiarnfd > f.Adiarnfd.log &&
+ test -s f.Adiarnfc.log &&
+ test -s f.Adiarnfd.log &&
+ test_cmp f.Adiarnfc.log f.Adiarnfd.log &&
+ rm f.Adiarnfc.log f.Adiarnfd.log
+ '
+ # This will test git ls-files
+ test_expect_success "git lsfiles f.Adiar" '
+ git ls-files f.$Adiarnfc > f.Adiarnfc.log &&
+ git ls-files f.$Adiarnfd > f.Adiarnfd.log &&
+ test -s f.Adiarnfc.log &&
+ test -s f.Adiarnfd.log &&
+ test_cmp f.Adiarnfc.log f.Adiarnfd.log &&
+ rm f.Adiarnfc.log f.Adiarnfd.log
+ '
+ # This will test git mv
+ test_expect_success "git mv" '
+ git mv f.$Adiarnfd f.$Odiarnfc &&
+ git mv d.$Adiarnfd d.$Odiarnfc &&
+ git mv l.$Adiarnfd l.$Odiarnfc &&
+ git commit -m "mv Adiarnfd Odiarnfc"
+ '
+ # Files can be checked out as nfc
+ # And the link has been corrected from nfd to nfc
+ test_expect_success "git checkout nfc" '
+ rm f.$Odiarnfc &&
+ git checkout f.$Odiarnfc
+ '
+ # Make it possible to checkout files with their NFD names
+ test_expect_success "git checkout file nfd" '
+ rm -f f.* &&
+ git checkout f.$Odiarnfd
+ '
+ # Make it possible to checkout links with their NFD names
+ test_expect_success "git checkout link nfd" '
+ rm l.* &&
+ git checkout l.$Odiarnfd
+ '
+ test_expect_success "setup case mac2" '
+ git checkout master &&
+ git reset --hard &&
+ git checkout -b mac_os_2
+ '
+ # This will test nfd2nfc in git commit
+ test_expect_success "commit file d2.Adiarnfd/f.Adiarnfd" '
+ mkdir d2.$Adiarnfd &&
+ echo d2.$Adiarnfd/f.$Adiarnfd >d2.$Adiarnfd/f.$Adiarnfd &&
+ git add d2.$Adiarnfd/f.$Adiarnfd &&
+ git commit -m "add d2.$Adiarnfd/f.$Adiarnfd" -- d2.$Adiarnfd/f.$Adiarnfd
+ '
+ test_expect_success "setup for long decomposed filename" '
+ git checkout master &&
+ git reset --hard &&
+ git checkout -b mac_os_long_nfd_fn
+ '
+ test_expect_success "Add long decomposed filename" '
+ echo longd >$Alongd &&
+ git add * &&
+ git commit -m "Long filename"
+ '
+ test_expect_success "setup for long precomposed filename" '
+ git checkout master &&
+ git reset --hard &&
+ git checkout -b mac_os_long_nfc_fn
+ '
+ test_expect_success "Add long precomposed filename" '
+ echo longc >$Alongc &&
+ git add * &&
+ git commit -m "Long filename"
+ '
+ # Test if the global core.precomposeunicode stops autosensing
+ # Must be the last test case
+ test_expect_success "respect git config --global core.precomposeunicode" '
+ git config --global core.precomposeunicode true &&
+ rm -rf .git &&
+ git init &&
+ precomposeunicode=`git config core.precomposeunicode` &&
+ test "$precomposeunicode" = "true"
+ '
+else
+ say "Skipping nfc/nfd tests"
+fi
+
+test_done
- - d
EOF
-test_expect_success 'prepare repository' \
- 'echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
- git update-index --add a b c d &&
- echo git >a &&
- cat "$TEST_DIRECTORY"/test-binary-1.png >b &&
- echo git >c &&
- cat b b >d'
+test_expect_success 'prepare repository' '
+ echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
+ git update-index --add a b c d &&
+ echo git >a &&
+ cat "$TEST_DIRECTORY"/test-binary-1.png >b &&
+ echo git >c &&
+ cat b b >d
+'
cat > expected <<\EOF
a | 2 +-
d | Bin
4 files changed, 2 insertions(+), 2 deletions(-)
EOF
-test_expect_success '"apply --stat" output for binary file change' '
+test_expect_success 'apply --stat output for binary file change' '
git diff >diff &&
git apply --stat --summary <diff >current &&
test_i18ncmp expected current
'
test_expect_success 'diff --shortstat output for binary file change' '
- echo " 4 files changed, 2 insertions(+), 2 deletions(-)" >expected &&
+ tail -n 1 expected >expect &&
git diff --shortstat >current &&
- test_i18ncmp expected current
+ test_i18ncmp expect current
'
test_expect_success 'diff --shortstat output for binary file change only' '
# apply needs to be able to skip the binary material correctly
# in order to report the line number of a corrupt patch.
-test_expect_success 'apply detecting corrupt patch correctly' \
- 'git diff | sed -e 's/-CIT/xCIT/' >broken &&
- if git apply --stat --summary broken 2>detected
- then
- echo unhappy - should have detected an error
- (exit 1)
- else
- echo happy
- fi &&
- detected=`cat detected` &&
- detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
- detected=`sed -ne "${detected}p" broken` &&
- test "$detected" = xCIT'
-
-test_expect_success 'apply detecting corrupt patch correctly' \
- 'git diff --binary | sed -e 's/-CIT/xCIT/' >broken &&
- if git apply --stat --summary broken 2>detected
- then
- echo unhappy - should have detected an error
- (exit 1)
- else
- echo happy
- fi &&
- detected=`cat detected` &&
- detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
- detected=`sed -ne "${detected}p" broken` &&
- test "$detected" = xCIT'
+test_expect_success 'apply detecting corrupt patch correctly' '
+ git diff >output &&
+ sed -e "s/-CIT/xCIT/" <output >broken &&
+ test_must_fail git apply --stat --summary broken 2>detected &&
+ detected=`cat detected` &&
+ detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
+ detected=`sed -ne "${detected}p" broken` &&
+ test "$detected" = xCIT
+'
+
+test_expect_success 'apply detecting corrupt patch correctly' '
+ git diff --binary | sed -e "s/-CIT/xCIT/" >broken &&
+ test_must_fail git apply --stat --summary broken 2>detected &&
+ detected=`cat detected` &&
+ detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
+ detected=`sed -ne "${detected}p" broken` &&
+ test "$detected" = xCIT
+'
test_expect_success 'initial commit' 'git commit -a -m initial'
# Try removal (b), modification (d), and creation (e).
-test_expect_success 'diff-index with --binary' \
- 'echo AIT >a && mv b e && echo CIT >c && cat e >d &&
- git update-index --add --remove a b c d e &&
- tree0=`git write-tree` &&
- git diff --cached --binary >current &&
- git apply --stat --summary current'
-
-test_expect_success 'apply binary patch' \
- 'git reset --hard &&
- git apply --binary --index <current &&
- tree1=`git write-tree` &&
- test "$tree1" = "$tree0"'
+test_expect_success 'diff-index with --binary' '
+ echo AIT >a && mv b e && echo CIT >c && cat e >d &&
+ git update-index --add --remove a b c d e &&
+ tree0=`git write-tree` &&
+ git diff --cached --binary >current &&
+ git apply --stat --summary current
+'
+
+test_expect_success 'apply binary patch' '
+ git reset --hard &&
+ git apply --binary --index <current &&
+ tree1=`git write-tree` &&
+ test "$tree1" = "$tree0"
+'
test_expect_success 'diff --no-index with binary creation' '
echo Q | q_to_nul >binary &&
EOF
test_expect_success 'diff --stat with binary files and big change count' '
- echo X | dd of=binfile bs=1k seek=1 &&
+ printf "\01\00%1024d" 1 >binfile &&
git add binfile &&
i=0 &&
while test $i -lt 10000; do
--- /dev/null
+#!/bin/sh
+
+test_description='git apply --3way'
+
+. ./test-lib.sh
+
+create_file () {
+ for i
+ do
+ echo "$i"
+ done
+}
+
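+# Conflicted "git diff" output is not stable: "index" lines contain
+# abbreviated object names and the conflict markers carry labels
+# after them.  Strip both so two such diffs can be compared with
+# test_cmp.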
+sanitize_conflicted_diff () {
+ sed -e '
+ /^index /d
+ s/^\(+[<>][<>][<>][<>]*\) .*/\1/
+ '
+}
+
+test_expect_success setup '
+ test_tick &&
+ create_file >one 1 2 3 4 5 6 7 &&
+ cat one >two &&
+ git add one two &&
+ git commit -m initial &&
+
+ git branch side &&
+
+ test_tick &&
+ create_file >one 1 two 3 4 5 six 7 &&
+ create_file >two 1 two 3 4 5 6 7 &&
+ git commit -a -m master &&
+
+ git checkout side &&
+ create_file >one 1 2 3 4 five 6 7 &&
+ create_file >two 1 2 3 4 five 6 7 &&
+ git commit -a -m side &&
+
+ git checkout master
+'
+
+test_expect_success 'apply without --3way' '
+ git diff side^ side >P.diff &&
+
+ # should fail to apply
+ git reset --hard &&
+ git checkout master^0 &&
+ test_must_fail git apply --index P.diff &&
+ # should leave things intact
+ git diff-files --exit-code &&
+ git diff-index --exit-code --cached HEAD
+'
+
+test_expect_success 'apply with --3way' '
+ # Merging side should be similar to applying this patch
+ git diff ...side >P.diff &&
+
+ # The corresponding conflicted merge
+ git reset --hard &&
+ git checkout master^0 &&
+ test_must_fail git merge --no-commit side &&
+ git ls-files -s >expect.ls &&
+ git diff HEAD | sanitize_conflicted_diff >expect.diff &&
+
+ # should fail to apply
+ git reset --hard &&
+ git checkout master^0 &&
+ test_must_fail git apply --index --3way P.diff &&
+ git ls-files -s >actual.ls &&
+ git diff HEAD | sanitize_conflicted_diff >actual.diff &&
+
+ # The result should resemble the corresponding merge
+ test_cmp expect.ls actual.ls &&
+ test_cmp expect.diff actual.diff
+'
+
+test_expect_success 'apply with --3way with rerere enabled' '
+ git config rerere.enabled true &&
+
+ # Merging side should be similar to applying this patch
+ git diff ...side >P.diff &&
+
+ # The corresponding conflicted merge
+ git reset --hard &&
+ git checkout master^0 &&
+ test_must_fail git merge --no-commit side &&
+
+ # Manually resolve and record the resolution
+ create_file 1 two 3 4 five six 7 >one &&
+ git rerere &&
+ cat one >expect &&
+
+ # should fail to apply
+ git reset --hard &&
+ git checkout master^0 &&
+ test_must_fail git apply --index --3way P.diff &&
+
+ # but rerere should have replayed the recorded resolution
+ test_cmp expect one
+'
+
+test_expect_success 'apply -3 with add/add conflict setup' '
+ git reset --hard &&
+
+ git checkout -b adder &&
+ create_file 1 2 3 4 5 6 7 >three &&
+ create_file 1 2 3 4 5 6 7 >four &&
+ git add three four &&
+ git commit -m "add three and four" &&
+
+ git checkout -b another adder^ &&
+ create_file 1 2 3 4 5 6 7 >three &&
+ create_file 1 2 3 four 5 6 7 >four &&
+ git add three four &&
+ git commit -m "add three and four" &&
+
+ # Merging another should be similar to applying this patch
+ git diff adder...another >P.diff &&
+
+ git checkout adder^0 &&
+ test_must_fail git merge --no-commit another &&
+ git ls-files -s >expect.ls &&
+ git diff HEAD | sanitize_conflicted_diff >expect.diff
+'
+
+test_expect_success 'apply -3 with add/add conflict' '
+ # should fail to apply ...
+ git reset --hard &&
+ git checkout adder^0 &&
+ test_must_fail git apply --index --3way P.diff &&
+ # ... and leave conflicts in the index and in the working tree
+ git ls-files -s >actual.ls &&
+ git diff HEAD | sanitize_conflicted_diff >actual.diff &&
+
+ # The result should resemble the corresponding merge
+ test_cmp expect.ls actual.ls &&
+ test_cmp expect.diff actual.diff
+'
+
+test_expect_success 'apply -3 with add/add conflict (dirty working tree)' '
+ # should fail to apply ...
+ git reset --hard &&
+ git checkout adder^0 &&
+ echo >>four &&
+ cat four >four.save &&
+ cat three >three.save &&
+ git ls-files -s >expect.ls &&
+ test_must_fail git apply --index --3way P.diff &&
+ # ... and should not touch anything
+ git ls-files -s >actual.ls &&
+ test_cmp expect.ls actual.ls &&
+ test_cmp four.save four &&
+ test_cmp three.save three
+'
+
+test_done
cat file1 >saved.file1
'
+test_expect_success 'apply --reject is incompatible with --3way' '
+ test_when_finished "cat saved.file1 >file1" &&
+ git diff >patch.0 &&
+ git checkout file1 &&
+ test_must_fail git apply --reject --3way patch.0 &&
+ git diff --exit-code
+'
+
test_expect_success 'apply without --reject should fail' '
if git apply patch.1
test_expect_success 'setup' '
test_commit A &&
- test_commit B &&
+ GIT_COMMITTER_DATE="@0 +0000" GIT_AUTHOR_DATE="@0 +0000" &&
+ test_commit --notick B &&
git checkout -b branch B &&
test_commit D &&
mkdir dir &&
test_must_fail git --no-pager show foo-tag
'
+test_expect_success 'set up a bit of history' '
+ test_commit main1 &&
+ test_commit main2 &&
+ test_commit main3 &&
+ git tag -m "annotated tag" annotated &&
+ git checkout -b side HEAD^^ &&
+ test_commit side2 &&
+ test_commit side3
+'
+
+test_expect_success 'showing two commits' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main2)
+ commit $(git rev-parse main3)
+ EOF
+ git show main2 main3 >actual &&
+ grep ^commit actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing a range walks (linear)' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main3)
+ commit $(git rev-parse main2)
+ EOF
+ git show main1..main3 >actual &&
+ grep ^commit actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing a range walks (Y shape, ^ first)' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main3)
+ commit $(git rev-parse main2)
+ EOF
+ git show ^side3 main3 >actual &&
+ grep ^commit actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing a range walks (Y shape, ^ last)' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main3)
+ commit $(git rev-parse main2)
+ EOF
+ git show main3 ^side3 >actual &&
+ grep ^commit actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing with -N walks' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main3)
+ commit $(git rev-parse main2)
+ EOF
+ git show -2 main3 >actual &&
+ grep ^commit actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing annotated tag' '
+ cat >expect <<-EOF &&
+ tag annotated
+ commit $(git rev-parse annotated^{commit})
+ EOF
+ git show annotated >actual &&
+ grep -E "^(commit|tag)" actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing annotated tag plus commit' '
+ cat >expect <<-EOF &&
+ tag annotated
+ commit $(git rev-parse annotated^{commit})
+ commit $(git rev-parse side3)
+ EOF
+ git show annotated side3 >actual &&
+ grep -E "^(commit|tag)" actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
+test_expect_success 'showing range' '
+ cat >expect <<-EOF &&
+ commit $(git rev-parse main3)
+ commit $(git rev-parse main2)
+ EOF
+ git show ^side3 annotated >actual &&
+ grep -E "^(commit|tag)" actual >actual.filtered &&
+ test_cmp expect actual.filtered
+'
+
test_done
)
'
+test_expect_success SYMLINKS 'submodule update can handle symbolic links in pwd' '
+ mkdir -p linked/dir &&
+ ln -s linked/dir linkto &&
+ (
+ cd linkto &&
+ git clone "$TRASH_DIRECTORY"/super_update_r2 super &&
+ (
+ cd super &&
+ git submodule update --init --recursive
+ )
+ )
+'
+
test_done
EDITOR=./editor git commit --amend
'
+test_expect_success 'amend --only ignores staged contents' '
+ cp file file.expect &&
+ echo changed >file &&
+ git add file &&
+ git commit --no-edit --amend --only &&
+ git cat-file blob HEAD:file >file.actual &&
+ test_cmp file.expect file.actual &&
+ git diff --exit-code
+'
+
test_expect_success 'set up editor' '
cat >editor <<-\EOF &&
#!/bin/sh
test_cmp expect msg
'
+test_expect_success '--amend --edit of empty message' '
+ cat >replace <<-\EOF &&
+ #!/bin/sh
+ echo "amended" >"$1"
+ EOF
+ chmod 755 replace &&
+ git commit --allow-empty --allow-empty-message -m "" &&
+ echo more bongo >file &&
+ git add file &&
+ EDITOR=./replace git commit --edit --amend &&
+ git diff-tree -s --format=%s HEAD >msg &&
+ ./replace expect &&
+ test_cmp expect msg
+'
+
test_expect_success '-m --edit' '
echo amended >expect &&
git commit --allow-empty -m buffer &&
'
+test_expect_success 'commit a file whose name is a dash' '
+ git reset --hard &&
+ for i in 1 2 3 4 5
+ do
+ echo $i
+ done >./- &&
+ git add ./- &&
+ test_tick &&
+ git commit -m "add dash" >output </dev/null &&
+ test_i18ngrep " changed, 5 insertions" output
+'
+
test_done
)
'
-marshal_dump() {
- what=$1
- "$PYTHON_PATH" -c 'import marshal, sys; d = marshal.load(sys.stdin); print d["'$what'"]'
-}
-
# Sleep a bit so that the top-most p4 change did not happen "now". Then
# import the repo and make sure that the initial import has the same time
# as the top-most change.
)
'
+#
+# Converting git commit message to p4 change description, including
+# parsing out the optional Jobs: line.
+#
+test_expect_success 'simple one-line description' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo desc2 >desc2 &&
+ git add desc2 &&
+ cat >msg <<-EOF &&
+ One-line description line for desc2.
+ EOF
+ git commit -F - <msg &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit &&
+ change=$(p4 -G changes -m 1 //depot/... | \
+ marshal_dump change) &&
+ # marshal_dump always adds a newline
+ p4 -G describe $change | marshal_dump desc | sed \$d >pmsg &&
+ test_cmp msg pmsg
+ )
+'
+
+test_expect_success 'description with odd formatting' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo desc3 >desc3 &&
+ git add desc3 &&
+ (
+ printf "subject line\n\n\tExtra tab\nline.\n\n" &&
+ printf "Description:\n\tBogus description marker\n\n" &&
+ # git commit eats trailing newlines; only use one
+ printf "Files:\n\tBogus descs marker\n"
+ ) >msg &&
+ git commit -F - <msg &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit &&
+ change=$(p4 -G changes -m 1 //depot/... | \
+ marshal_dump change) &&
+ # marshal_dump always adds a newline
+ p4 -G describe $change | marshal_dump desc | sed \$d >pmsg &&
+ test_cmp msg pmsg
+ )
+'
+
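+# Create a p4 job named "$1" by taking the job template from
+# "p4 job -o", filling in the name and a dummy description, and
+# feeding the result back with "p4 job -i".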
+make_job() {
+ name="$1" &&
+ tab="$(printf \\t)" &&
+ p4 job -o | \
+ sed -e "/^Job:/s/.*/Job: $name/" \
+ -e "/^Description/{ n; s/.*/$tab job text/; }" | \
+ p4 job -i
+}
+
+test_expect_success 'description with Jobs section at end' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo desc4 >desc4 &&
+ git add desc4 &&
+ echo 6060842 >jobname &&
+ (
+ printf "subject line\n\n\tExtra tab\nline.\n\n" &&
+ printf "Files:\n\tBogus files marker\n" &&
+ printf "Junk: 3164175\n" &&
+ printf "Jobs: $(cat jobname)\n"
+ ) >msg &&
+ git commit -F - <msg &&
+ git config git-p4.skipSubmitEdit true &&
+ # build a job
+ make_job $(cat jobname) &&
+ git p4 submit &&
+ change=$(p4 -G changes -m 1 //depot/... | \
+ marshal_dump change) &&
+ # marshal_dump always adds a newline
+ p4 -G describe $change | marshal_dump desc | sed \$d >pmsg &&
+ # make sure Jobs line and all following is gone
+ sed "/^Jobs:/,\$d" msg >jmsg &&
+ test_cmp jmsg pmsg &&
+ # make sure p4 knows about job
+ p4 -G describe $change | marshal_dump job0 >job0 &&
+ test_cmp jobname job0
+ )
+'
+
+test_expect_success 'description with Jobs and values on separate lines' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo desc5 >desc5 &&
+ git add desc5 &&
+ echo PROJ-6060842 >jobname1 &&
+ echo PROJ-6060847 >jobname2 &&
+ (
+ printf "subject line\n\n\tExtra tab\nline.\n\n" &&
+ printf "Files:\n\tBogus files marker\n" &&
+ printf "Junk: 3164175\n" &&
+ printf "Jobs:\n" &&
+ printf "\t$(cat jobname1)\n" &&
+ printf "\t$(cat jobname2)\n"
+ ) >msg &&
+ git commit -F - <msg &&
+ git config git-p4.skipSubmitEdit true &&
+ # build two jobs
+ make_job $(cat jobname1) &&
+ make_job $(cat jobname2) &&
+ git p4 submit &&
+ change=$(p4 -G changes -m 1 //depot/... | \
+ marshal_dump change) &&
+ # marshal_dump always adds a newline
+ p4 -G describe $change | marshal_dump desc | sed \$d >pmsg &&
+ # make sure Jobs line and all following is gone
+ sed "/^Jobs:/,\$d" msg >jmsg &&
+ test_cmp jmsg pmsg &&
+ # make sure p4 knows about the two jobs
+ p4 -G describe $change >change &&
+ (
+ marshal_dump job0 <change &&
+ marshal_dump job1 <change
+ ) | sort >jobs &&
+ cat jobname1 jobname2 | sort >expected &&
+ test_cmp expected jobs
+ )
+'
+
+test_expect_success 'description with Jobs section and bogus following text' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ echo desc6 >desc6 &&
+ git add desc6 &&
+ echo 6060843 >jobname &&
+ (
+ printf "subject line\n\n\tExtra tab\nline.\n\n" &&
+ printf "Files:\n\tBogus files marker\n" &&
+ printf "Junk: 3164175\n" &&
+ printf "Jobs: $(cat jobname)\n" &&
+ printf "MoreJunk: 3711\n"
+ ) >msg &&
+ git commit -F - <msg &&
+ git config git-p4.skipSubmitEdit true &&
+ # build a job
+ make_job $(cat jobname) &&
+ test_must_fail git p4 submit 2>err &&
+ test_i18ngrep "Unknown field name" err
+ )
+'
+
test_expect_success 'kill p4d' '
kill_p4d
'
git commit -a -m "Rename file1 to file4" &&
git diff-tree -r -M HEAD &&
git p4 submit &&
- p4 filelog //depot/file4 &&
- p4 filelog //depot/file4 | test_must_fail grep -q "branch from" &&
+ p4 filelog //depot/file4 >filelog &&
+ ! grep " from //depot" filelog &&
git mv file4 file5 &&
git commit -a -m "Rename file4 to file5" &&
git diff-tree -r -M HEAD &&
git config git-p4.detectRenames true &&
git p4 submit &&
- p4 filelog //depot/file5 &&
- p4 filelog //depot/file5 | grep -q "branch from //depot/file4" &&
+ p4 filelog //depot/file5 >filelog &&
+ grep " from //depot/file4" filelog &&
git mv file5 file6 &&
echo update >>file6 &&
test -n "$level" && test "$level" -gt 0 && test "$level" -lt 98 &&
git config git-p4.detectRenames $(($level + 2)) &&
git p4 submit &&
- p4 filelog //depot/file6 &&
- p4 filelog //depot/file6 | test_must_fail grep -q "branch from" &&
+ p4 filelog //depot/file6 >filelog &&
+ ! grep " from //depot" filelog &&
git mv file6 file7 &&
echo update >>file7 &&
test -n "$level" && test "$level" -gt 2 && test "$level" -lt 100 &&
git config git-p4.detectRenames $(($level - 2)) &&
git p4 submit &&
- p4 filelog //depot/file7 &&
- p4 filelog //depot/file7 | grep -q "branch from //depot/file6"
+ p4 filelog //depot/file7 >filelog &&
+ grep " from //depot/file6" filelog
)
'
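The rewritten assertions fix two problems at once: test_must_fail is meant to wrap git commands only, and placing it after a pipe also meant the exit status of "p4 filelog" itself was never checked. Writing the output to a file keeps p4 inside the &&-chain, and a plain "! grep" expresses the negated match. The pattern in isolation (path and pattern are illustrative):

    p4 filelog //depot/file4 >filelog &&  # a p4 failure now breaks the chain
    ! grep " from //depot" filelog        # assert no rename source was recorded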
# Both <file> and <contents> default to <message>.
test_commit () {
- file=${2:-"$1.t"}
+ notick= &&
+ if test "z$1" = "z--notick"
+ then
+ notick=yes
+ shift
+ fi &&
+ file=${2:-"$1.t"} &&
echo "${3-$1}" > "$file" &&
git add "$file" &&
- test_tick &&
+ if test -z "$notick"
+ then
+ test_tick
+ fi &&
git commit -m "$1" &&
git tag "$1"
}
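With the new --notick option a test can create several commits that share the timestamp established by the most recent test_tick, e.g. to exercise ordering when dates tie. A hypothetical test body:

    test_commit first &&            # advances the test clock via test_tick
    test_commit --notick second     # commits with the clock left as-is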
die("input error");
if (ferror(stdout))
die("output error");
- buffer_reset(&stdin_buf);
return 0;
}
die_errno("cannot close preimage");
if (buffer_deinit(&delta))
die_errno("cannot close delta");
- buffer_reset(&preimage);
strbuf_release(&preimage_view.buf);
- buffer_reset(&delta);
return 0;
}
* First let's make sure we do not have a local modification
* in that directory.
*/
- namelen = strlen(ce->name);
+ namelen = ce_namelen(ce);
for (i = locate_in_src_index(ce, o);
i < o->src_index->cache_nr;
i++) {
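ce_namelen() spares a strlen() call by reading the name length cached in the entry flags; only names too long for the 12-bit field fall back to scanning the string. A plausible implementation, for illustration (the authoritative definition and CE_NAMEMASK live in cache.h):

    static inline size_t ce_namelen(const struct cache_entry *ce)
    {
        size_t len = ce->ce_flags & CE_NAMEMASK;  /* 12-bit length field */
        if (len < CE_NAMEMASK)
            return len;               /* short name: length was cached */
        return strlen(ce->name + CE_NAMEMASK) + CE_NAMEMASK;
    }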
#else
typedef char * iconv_ibp;
#endif
-char *reencode_string(const char *in, const char *out_encoding, const char *in_encoding)
+char *reencode_string_iconv(const char *in, size_t insz, iconv_t conv)
{
- iconv_t conv;
- size_t insz, outsz, outalloc;
+ size_t outsz, outalloc;
char *out, *outpos;
iconv_ibp cp;
- if (!in_encoding)
- return NULL;
- conv = iconv_open(out_encoding, in_encoding);
- if (conv == (iconv_t) -1)
- return NULL;
- insz = strlen(in);
outsz = insz;
outalloc = outsz + 1; /* for terminating NUL */
out = xmalloc(outalloc);
size_t sofar;
if (errno != E2BIG) {
free(out);
- iconv_close(conv);
return NULL;
}
/* insz has remaining number of bytes.
break;
}
}
+ return out;
+}
+
+char *reencode_string(const char *in, const char *out_encoding, const char *in_encoding)
+{
+ iconv_t conv;
+ char *out;
+
+ if (!in_encoding)
+ return NULL;
+ conv = iconv_open(out_encoding, in_encoding);
+ if (conv == (iconv_t) -1)
+ return NULL;
+ out = reencode_string_iconv(in, strlen(in), conv);
iconv_close(conv);
return out;
}
int indent, int indent2, int width);
#ifndef NO_ICONV
+char *reencode_string_iconv(const char *in, size_t insz, iconv_t conv);
char *reencode_string(const char *in, const char *out_encoding, const char *in_encoding);
#else
#define reencode_string(a,b,c) NULL
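Splitting the iconv-descriptor setup out of reencode_string() lets a caller that converts many strings between one pair of encodings pay the iconv_open()/iconv_close() cost once. A sketch under that assumption (the consumer callback is hypothetical):

    #include <string.h>
    #include <iconv.h>
    #include "utf8.h"

    static void reencode_many(const char **in, int n, void (*consume)(char *))
    {
        iconv_t conv = iconv_open("UTF-8", "ISO-8859-1");
        int i;

        if (conv == (iconv_t) -1)
            return;                /* conversion pair not supported */
        for (i = 0; i < n; i++) {
            char *out = reencode_string_iconv(in[i], strlen(in[i]), conv);
            if (out)
                consume(out);      /* consumer owns the malloc'd buffer */
        }
        iconv_close(conv);
    }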
die_errno("error closing fast-import feedback stream");
}
-void fast_export_reset(void)
-{
- buffer_reset(&report_buffer);
-}
-
void fast_export_delete(const char *path)
{
putchar('D');
if (ends_with(header, headerlen, " missing"))
return error("cat-blob reports missing blob: %s", header);
- type = memmem(header, headerlen, " blob ", strlen(" blob "));
+ type = strstr(header, " blob ");
if (!type)
return error("cat-blob header has wrong object type: %s", header);
n = strtoumax(type + strlen(" blob "), (char **) &end, 10);
}
/* Mode. */
- if (response_end - response < strlen("100644") ||
+ if (response_end - response < (signed) strlen("100644") ||
response[strlen("100644")] != ' ')
die("invalid ls response: missing mode: %s", response);
*mode = 0;
}
/* ' blob ' or ' tree ' */
- if (response_end - response < strlen(" blob ") ||
+ if (response_end - response < (signed) strlen(" blob ") ||
(response[1] != 'b' && response[1] != 't'))
die("unexpected ls response: not a tree or blob: %s", response);
response += strlen(" blob ");
void fast_export_init(int fd);
void fast_export_deinit(void);
-void fast_export_reset(void);
void fast_export_delete(const char *path);
void fast_export_modify(const char *path, uint32_t mode, const char *dataref);
}
return done;
}
-
-void buffer_reset(struct line_buffer *buf)
-{
-}
int buffer_init(struct line_buffer *buf, const char *filename);
int buffer_fdinit(struct line_buffer *buf, int fd);
int buffer_deinit(struct line_buffer *buf);
-void buffer_reset(struct line_buffer *buf);
int buffer_tmpfile_init(struct line_buffer *buf);
FILE *buffer_tmpfile_rewind(struct line_buffer *buf); /* prepare to write. */
return -1;
if (off < view->off || off + width < view->off + view->width)
return error("invalid delta: window slides left");
- if (view->max_off >= 0 && view->max_off < off + width)
+ if (view->max_off >= 0 && view->max_off < off + (off_t) width)
return error("delta preimage ends early");
file_offset = view->off + view->buf.len;
static int read_chunk(struct line_buffer *delta, off_t *delta_len,
struct strbuf *buf, size_t len)
{
+ assert(*delta_len >= 0);
strbuf_reset(buf);
- if (len > *delta_len ||
+ if (len > (uintmax_t) *delta_len ||
buffer_read_binary(delta, buf, len) != len)
return error_short_read(delta);
*delta_len -= buf->len;
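The added cast works around C's usual arithmetic conversions: comparing the unsigned size_t "len" against the signed off_t "*delta_len" would convert the signed side to unsigned, so a negative remaining length would pass the bounds check as a huge value. The new assert() records the invariant that makes the cast safe. The pitfall in isolation (hypothetical helper):

    #include <assert.h>
    #include <stdint.h>
    #include <sys/types.h>

    /* Does a read of "want" bytes fit within the "remaining" budget? */
    static int fits(size_t want, off_t remaining)
    {
        assert(remaining >= 0);               /* makes the cast safe */
        return want <= (uintmax_t) remaining; /* compare in one domain */
    }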
static int apply_one_window(struct line_buffer *delta, off_t *delta_len,
struct sliding_view *preimage, FILE *out)
{
+ int rv = -1;
struct window ctx = WINDOW_INIT(preimage);
size_t out_len;
size_t instructions_len;
if (apply_window_in_core(&ctx))
goto error_out;
if (ctx.out.len != out_len) {
- error("invalid delta: incorrect postimage length");
+ rv = error("invalid delta: incorrect postimage length");
goto error_out;
}
if (write_strbuf(&ctx.out, out))
goto error_out;
- window_release(&ctx);
- return 0;
+ rv = 0;
error_out:
window_release(&ctx);
- return -1;
+ return rv;
}
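apply_one_window() now follows the single-cleanup-path idiom: rv starts out pessimistic, error() (which returns -1) sets it on failure, and success merely flips it to 0, so every exit funnels through the one window_release() call. The shape of the idiom with hypothetical names:

    static int do_work(void)
    {
        int rv = -1;                    /* pessimistic default */
        struct resource res = RESOURCE_INIT;

        if (step_one(&res))
            goto out;                   /* rv stays -1 */
        if (step_two(&res)) {
            rv = error("step two failed");  /* error() returns -1 */
            goto out;
        }
        rv = 0;                         /* success */
    out:
        release(&res);                  /* shared cleanup on all paths */
        return rv;
    }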
int svndiff0_apply(struct line_buffer *delta, off_t delta_len,
struct sliding_view *preimage, FILE *postimage)
{
- assert(delta && preimage && postimage);
+ assert(delta && preimage && postimage && delta_len >= 0);
if (read_magic(delta, &delta_len))
return -1;
while (delta_len) { /* For each window: */
- off_t pre_off = pre_off; /* stupid GCC... */
+ off_t pre_off = -1;
size_t pre_len;
if (read_offset(delta, &pre_off, &delta_len) ||
#define NODE_CTX 2 /* node metadata */
#define INTERNODE_CTX 3 /* between nodes */
-#define LENGTH_UNKNOWN (~0)
#define DATE_RFC2822_LEN 31
static struct line_buffer input = LINE_BUFFER_INIT;
static struct {
- uint32_t action, propLength, srcRev, type;
- off_t text_length;
+ uint32_t action, srcRev, type;
+ off_t prop_length, text_length;
struct strbuf src, dst;
uint32_t text_delta, prop_delta;
} node_ctx;
{
node_ctx.type = 0;
node_ctx.action = NODEACT_UNKNOWN;
- node_ctx.propLength = LENGTH_UNKNOWN;
+ node_ctx.prop_length = -1;
node_ctx.text_length = -1;
strbuf_reset(&node_ctx.src);
node_ctx.srcRev = 0;
static void handle_node(void)
{
const uint32_t type = node_ctx.type;
- const int have_props = node_ctx.propLength != LENGTH_UNKNOWN;
+ const int have_props = node_ctx.prop_length != -1;
const int have_text = node_ctx.text_length != -1;
/*
* Old text for this node:
if (have_props) {
if (!node_ctx.prop_delta)
node_ctx.type = type;
- if (node_ctx.propLength)
+ if (node_ctx.prop_length)
read_props();
}
reset_rev_ctx(atoi(val));
break;
case sizeof("Node-path"):
- if (prefixcmp(t, "Node-"))
+ if (constcmp(t, "Node-"))
continue;
if (!constcmp(t + strlen("Node-"), "path")) {
if (active_ctx == NODE_CTX)
node_ctx.srcRev = atoi(val);
break;
case sizeof("Text-content-length"):
- if (!constcmp(t, "Text-content-length")) {
+ if (constcmp(t, "Text") && constcmp(t, "Prop"))
+ continue;
+ if (constcmp(t + 4, "-content-length"))
+ continue;
+ {
char *end;
- uintmax_t textlen;
+ uintmax_t len;
- textlen = strtoumax(val, &end, 10);
+ len = strtoumax(val, &end, 10);
if (!isdigit(*val) || *end)
die("invalid dump: non-numeric length %s", val);
- if (textlen > maximum_signed_value_of_type(off_t))
+ if (len > maximum_signed_value_of_type(off_t))
die("unrepresentable length in dump: %s", val);
- node_ctx.text_length = (off_t) textlen;
+
+ if (*t == 'T')
+ node_ctx.text_length = (off_t) len;
+ else
+ node_ctx.prop_length = (off_t) len;
break;
}
- if (constcmp(t, "Prop-content-length"))
- continue;
- node_ctx.propLength = atoi(val);
- break;
case sizeof("Text-delta"):
if (!constcmp(t, "Text-delta")) {
node_ctx.text_delta = !strcmp(val, "true");
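The merged case above relies on a small dispatch trick: the switch runs on the header key's length, and since sizeof("Text-content-length") equals sizeof("Prop-content-length"), both keys share one case; prefix comparisons then tell them apart and the first byte ('T' or 'P') selects the field to set. A standalone sketch of the pattern (names are hypothetical; constcmp mirrors svndump.c's prefix-comparison macro):

    #include <string.h>
    #include <inttypes.h>
    #include <sys/types.h>

    #define constcmp(s, lit) memcmp(s, lit, sizeof(lit) - 1)

    static void dispatch(const char *key, const char *val,
                         off_t *text_len, off_t *prop_len)
    {
        switch (strlen(key) + 1) {  /* +1: sizeof a literal counts the NUL */
        case sizeof("Text-content-length"): /* == sizeof("Prop-content-length") */
            if (constcmp(key, "Text") && constcmp(key, "Prop"))
                break;              /* some other key of the same length */
            if (constcmp(key + 4, "-content-length"))
                break;
            *(key[0] == 'T' ? text_len : prop_len) =
                (off_t) strtoumax(val, NULL, 10);
            break;
        }
    }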
void svndump_reset(void)
{
- fast_export_reset();
- buffer_reset(&input);
strbuf_release(&dump_ctx.uuid);
strbuf_release(&dump_ctx.url);
strbuf_release(&rev_ctx.log);
GIT_COLOR_GREEN, /* WT_STATUS_LOCAL_BRANCH */
GIT_COLOR_RED, /* WT_STATUS_REMOTE_BRANCH */
GIT_COLOR_NIL, /* WT_STATUS_ONBRANCH */
- GIT_COLOR_NORMAL, /* WT_STATUS_IN_PROGRESS */
};
static const char *color(int slot, struct wt_status *s)
static void wt_status_print_state(struct wt_status *s)
{
- const char *state_color = color(WT_STATUS_IN_PROGRESS, s);
+ const char *state_color = color(WT_STATUS_HEADER, s);
struct wt_status_state state;
struct stat st;
WT_STATUS_LOCAL_BRANCH,
WT_STATUS_REMOTE_BRANCH,
WT_STATUS_ONBRANCH,
- WT_STATUS_IN_PROGRESS,
WT_STATUS_MAXSLOT
};