#
# Define NO_MMAP if you want to avoid mmap.
#
- # Define WITH_OWN_SUBPROCESS_PY if you want to use with python 2.3.
+ # Define NO_FAST_WORKING_DIRECTORY if accessing objects in pack files is
+ # generally faster on your platform than accessing the working directory.
#
# Define NO_IPV6 if you lack IPv6 support and getaddrinfo().
#
#
# Define NO_ICONV if your libc does not properly support iconv.
#
- # Define NO_ACCURATE_DIFF if your diff program at least sometimes misses
- # a missing newline at the end of the file.
- #
- # Define COLLISION_CHECK below if you believe that SHA1's
- # 1461501637330902918203684832716283019655932542976 hashes do not give you
- # sufficient guarantee that no collisions between objects will ever happen.
- #
# Define USE_NSEC below if you want git to care about sub-second file mtimes
# and ctimes. Note that you need recent glibc (at least 2.2.4) for this, and
# it will BREAK YOUR LOCAL DIFFS! show-diff and anything using it will likely
# randomly break unless your underlying filesystem supports those sub-second
# times (my ext3 doesn't).
#
# Define USE_STDEV below if you want git to care about the underlying device
# change being considered an inode change from the update-cache perspective.
+ #
+ # Define NO_PERL_MAKEMAKER if you cannot use Makefiles generated by perl's
+ # MakeMaker (e.g. using ActiveState under Cygwin).
+ #
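
These knobs are ordinary make variables, so they can go in config.mak or on the make command line; a minimal sketch (the chosen knobs and prefix are only illustrative):

    make prefix=/usr/local NO_IPV6=YesPlease NO_PERL_MAKEMAKER=YesPlease all
    make prefix=/usr/local NO_IPV6=YesPlease NO_PERL_MAKEMAKER=YesPlease install
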
GIT-VERSION-FILE: .FORCE-GIT-VERSION-FILE
@$(SHELL_PATH) ./GIT-VERSION-GEN
bindir = $(prefix)/bin
gitexecdir = $(bindir)
template_dir = $(prefix)/share/git-core/templates/
- GIT_PYTHON_DIR = $(prefix)/share/git-core/python
# DESTDIR=
# default configuration for gitweb
GITWEB_SITE_HEADER =
GITWEB_SITE_FOOTER =
- export prefix bindir gitexecdir template_dir GIT_PYTHON_DIR
+ export prefix bindir gitexecdir template_dir
CC = gcc
AR = ar
git-lost-found.sh git-quiltimport.sh
SCRIPT_PERL = \
+ git-add--interactive.perl \
git-archimport.perl git-cvsimport.perl git-relink.perl \
- git-shortlog.perl git-rerere.perl \
git-cvsserver.perl \
git-svnimport.perl git-cvsexportcommit.perl \
git-send-email.perl git-svn.perl
- SCRIPT_PYTHON = \
- git-merge-recursive-old.py
-
SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.perl,%,$(SCRIPT_PERL)) \
- $(patsubst %.py,%,$(SCRIPT_PYTHON)) \
git-cherry-pick git-status git-instaweb
# ... and all the rest that could be moved out of bindir to gitexecdir
ifndef PERL_PATH
PERL_PATH = /usr/bin/perl
endif
- ifndef PYTHON_PATH
- PYTHON_PATH = /usr/bin/python
- endif
- PYMODULES = \
- gitMergeCommon.py
+ export PERL_PATH
LIB_FILE=libgit.a
XDIFF_LIB=xdiff/lib.a
archive.h blob.h cache.h commit.h csum-file.h delta.h grep.h \
diff.h object.h pack.h pkt-line.h quote.h refs.h list-objects.h sideband.h \
run-command.h strbuf.h tag.h tree.h git-compat-util.h revision.h \
- tree-walk.h log-tree.h dir.h path-list.h unpack-trees.h builtin.h
+ tree-walk.h log-tree.h dir.h path-list.h unpack-trees.h builtin.h \
+ utf8.h
DIFF_OBJS = \
diff.o diff-lib.o diffcore-break.o diffcore-order.o \
revision.o pager.o tree-walk.o xdiff-interface.o \
write_or_die.o trace.o list-objects.o grep.o \
alloc.o merge-file.o path-list.o help.o unpack-trees.o $(DIFF_OBJS) \
- color.o wt-status.o archive-zip.o archive-tar.o shallow.o
- color.o wt-status.o archive-zip.o archive-tar.o \
- utf8.o
++ color.o wt-status.o archive-zip.o archive-tar.o shallow.o utf8.o
BUILTIN_OBJS = \
builtin-add.o \
builtin-ls-tree.o \
builtin-mailinfo.o \
builtin-mailsplit.o \
+ builtin-merge-file.o \
builtin-mv.o \
builtin-name-rev.o \
builtin-pack-objects.o \
builtin-prune-packed.o \
builtin-push.o \
builtin-read-tree.o \
+ builtin-reflog.o \
builtin-repo-config.o \
+ builtin-rerere.o \
builtin-rev-list.o \
builtin-rev-parse.o \
builtin-rm.o \
builtin-runstatus.o \
+ builtin-shortlog.o \
builtin-show-branch.o \
builtin-stripspace.o \
builtin-symbolic-ref.o \
NEEDS_SSL_WITH_CRYPTO = YesPlease
NEEDS_LIBICONV = YesPlease
NO_STRLCPY = YesPlease
- ifndef NO_FINK
- ifeq ($(shell test -d /sw/lib && echo y),y)
- BASIC_CFLAGS += -I/sw/include
- BASIC_LDFLAGS += -L/sw/lib
- endif
- endif
- ifndef NO_DARWIN_PORTS
- ifeq ($(shell test -d /opt/local/lib && echo y),y)
- BASIC_CFLAGS += -I/opt/local/include
- BASIC_LDFLAGS += -L/opt/local/lib
- endif
- endif
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
NO_SYMLINK_HEAD = YesPlease
NEEDS_LIBICONV = YesPlease
NO_C99_FORMAT = YesPlease
+ NO_FAST_WORKING_DIRECTORY = UnfortunatelyYes
# There are conflicting reports about this.
# On some boxes NO_MMAP is needed, and not so elsewhere.
# Try uncommenting this if you see things break -- YMMV.
-include config.mak.autogen
-include config.mak
- ifdef WITH_OWN_SUBPROCESS_PY
- PYMODULES += compat/subprocess.py
- else
- ifeq ($(NO_PYTHON),)
- ifneq ($(shell $(PYTHON_PATH) -c 'import subprocess;print"OK"' 2>/dev/null),OK)
- PYMODULES += compat/subprocess.py
+ ifeq ($(uname_S),Darwin)
+ ifndef NO_FINK
+ ifeq ($(shell test -d /sw/lib && echo y),y)
+ BASIC_CFLAGS += -I/sw/include
+ BASIC_LDFLAGS += -L/sw/lib
+ endif
+ endif
+ ifndef NO_DARWIN_PORTS
+ ifeq ($(shell test -d /opt/local/lib && echo y),y)
+ BASIC_CFLAGS += -I/opt/local/include
+ BASIC_LDFLAGS += -L/opt/local/lib
endif
endif
endif
COMPAT_CFLAGS += -DNO_MMAP
COMPAT_OBJS += compat/mmap.o
endif
+ ifdef NO_FAST_WORKING_DIRECTORY
+ BASIC_CFLAGS += -DNO_FAST_WORKING_DIRECTORY
+ endif
ifdef NO_IPV6
BASIC_CFLAGS += -DNO_IPV6
endif
endif
endif
endif
- ifdef NO_ACCURATE_DIFF
- BASIC_CFLAGS += -DNO_ACCURATE_DIFF
+ ifdef NO_PERL_MAKEMAKER
+ export NO_PERL_MAKEMAKER
endif
# Shell quote (do not use $(call) to accommodate ancient setups);
SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
- PYTHON_PATH_SQ = $(subst ','\'',$(PYTHON_PATH))
- GIT_PYTHON_DIR_SQ = $(subst ','\'',$(GIT_PYTHON_DIR))
LIBS = $(GITLIBS) $(EXTLIBS)
all: $(ALL_PROGRAMS) $(BUILT_INS) git$X gitk gitweb/gitweb.cgi
- all: perl/Makefile
- $(MAKE) -C perl
+ all:
+ $(MAKE) -C perl PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' all
$(MAKE) -C templates
strip: $(PROGRAMS) git$X
-e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
- -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \
$@.sh >$@+
chmod +x $@+
mv $@+ $@
- $(patsubst %.perl,%,$(SCRIPT_PERL)): perl/Makefile
+ $(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak
+
+ perl/perl.mak: GIT-CFLAGS
+ $(MAKE) -C perl PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F)
+
$(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl
rm -f $@ $@+
INSTLIBDIR=`$(MAKE) -C perl -s --no-print-directory instlibdir` && \
chmod +x $@+
mv $@+ $@
- $(patsubst %.py,%,$(SCRIPT_PYTHON)) : % : %.py GIT-CFLAGS
- rm -f $@ $@+
- sed -e '1s|#!.*python|#!$(PYTHON_PATH_SQ)|' \
- -e 's|@@GIT_PYTHON_PATH@@|$(GIT_PYTHON_DIR_SQ)|g' \
- -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
- $@.py >$@+
- chmod +x $@+
- mv $@+ $@
-
git-cherry-pick: git-revert
cp $< $@+
mv $@+ $@
sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's/@@NO_CURL@@/$(NO_CURL)/g' \
- -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \
-e '/@@GITWEB_CGI@@/r gitweb/gitweb.cgi' \
-e '/@@GITWEB_CGI@@/d' \
-e '/@@GITWEB_CSS@@/r gitweb/gitweb.css' \
git$X git.spec \
$(patsubst %.sh,%,$(SCRIPT_SH)) \
$(patsubst %.perl,%,$(SCRIPT_PERL)) \
- $(patsubst %.py,%,$(SCRIPT_PYTHON)) \
: GIT-VERSION-FILE
%.o: %.c GIT-CFLAGS
$(LIB_FILE): $(LIB_OBJS)
rm -f $@ && $(AR) rcs $@ $(LIB_OBJS)
- XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o
+ XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o \
+ xdiff/xmerge.o
$(XDIFF_OBJS): xdiff/xinclude.h xdiff/xmacros.h xdiff/xdiff.h xdiff/xtypes.h \
xdiff/xutils.h xdiff/xprepare.h xdiff/xdiffi.h xdiff/xemit.h
find . -name '*.[hcS]' -print | xargs ctags -a
### Detect prefix changes
- TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):$(GIT_PYTHON_DIR_SQ):\
+ TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):\
$(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ)
GIT-CFLAGS: .FORCE-GIT-CFLAGS
# However, the environment gets quite big, and some programs have problems
# with that.
- export NO_PYTHON
export NO_SVN_TESTS
test: all
test-date$X: test-date.c date.o ctype.o
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) test-date.c date.o ctype.o
- test-delta$X: test-delta.c diff-delta.o patch-delta.o
- $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $^
+ test-delta$X: test-delta.o diff-delta.o patch-delta.o $(GITLIBS)
+ $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS)
test-dump-cache-tree$X: dump-cache-tree.o $(GITLIBS)
$(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS)
$(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexecdir_SQ)'
$(INSTALL) git$X gitk '$(DESTDIR_SQ)$(bindir_SQ)'
$(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install
- $(MAKE) -C perl install
- $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
- $(INSTALL) $(PYMODULES) '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)'
+ $(MAKE) -C perl prefix='$(prefix_SQ)' install
if test 'z$(bindir_SQ)' != 'z$(gitexecdir_SQ)'; \
then \
ln -f '$(DESTDIR_SQ)$(bindir_SQ)/git$X' \
install-doc:
$(MAKE) -C Documentation install
+ quick-install-doc:
+ $(MAKE) -C Documentation quick-install
rm -f $(htmldocs).tar.gz $(manpages).tar.gz
rm -f gitweb/gitweb.cgi
$(MAKE) -C Documentation/ clean
- [ ! -f perl/Makefile ] || $(MAKE) -C perl/ clean || $(MAKE) -C perl/ clean
- rm -f perl/ppport.h perl/Makefile.old
+ $(MAKE) -C perl clean
$(MAKE) -C templates/ clean
$(MAKE) -C t/ clean
rm -f GIT-VERSION-FILE GIT-CFLAGS
case "$$v" in \
git-merge-octopus | git-merge-ours | git-merge-recursive | \
git-merge-resolve | git-merge-stupid | git-merge-recur | \
- git-merge-recursive-old | \
git-ssh-pull | git-ssh-push ) continue ;; \
esac ; \
test -f "Documentation/$$v.txt" || \
#include "cache.h"
#include "tag.h"
#include "commit.h"
+#include "pkt-line.h"
int save_commit_buffer = 1;
return;
graft_file = get_graft_file();
read_graft_file(graft_file);
+ /* make sure shallows are read */
+ is_repository_shallow();
commit_graft_prepared = 1;
}
return commit_graft[pos];
}
+int write_shallow_commits(int fd, int use_pack_protocol)
+{
+ int i, count = 0;
+ for (i = 0; i < commit_graft_nr; i++)
+ if (commit_graft[i]->nr_parent < 0) {
+ const char *hex =
+ sha1_to_hex(commit_graft[i]->sha1);
+ count++;
+ if (use_pack_protocol)
+ packet_write(fd, "shallow %s", hex);
+ else {
+ write(fd, hex, 40);
+ write(fd, "\n", 1);
+ }
+ }
+ return count;
+}
+
+int unregister_shallow(const unsigned char *sha1)
+{
+ int pos = commit_graft_pos(sha1);
+ if (pos < 0)
+ return -1;
+ if (pos + 1 < commit_graft_nr)
+ memcpy(commit_graft + pos, commit_graft + pos + 1,
+ sizeof(struct commit_graft *)
+ * (commit_graft_nr - pos - 1));
+ commit_graft_nr--;
+ return 0;
+}
+
int parse_commit_buffer(struct commit *item, void *buffer, unsigned long size)
{
char *tail = buffer;
/* merge-base stuff */
- /* bits #0..7 in revision.h */
- #define PARENT1 (1u<< 8)
- #define PARENT2 (1u<< 9)
- #define STALE (1u<<10)
- #define RESULT (1u<<11)
+ /* bits #0..15 in revision.h */
+ #define PARENT1 (1u<<16)
+ #define PARENT2 (1u<<17)
+ #define STALE (1u<<18)
+ #define RESULT (1u<<19)
static struct commit *interesting(struct commit_list *list)
{
free(rslt);
return result;
}
+
+ int in_merge_bases(struct commit *rev1, struct commit *rev2)
+ {
+ struct commit_list *bases, *b;
+ int ret = 0;
+
+ bases = get_merge_bases(rev1, rev2, 1);
+ for (b = bases; b; b = b->next) {
+ if (!hashcmp(rev1->object.sha1, b->item->object.sha1)) {
+ ret = 1;
+ break;
+ }
+ }
+
+ free_commit_list(bases);
+ return ret;
+ }
struct commit_graft {
unsigned char sha1[20];
- int nr_parent;
+ int nr_parent; /* < 0 if shallow commit */
unsigned char parent[FLEX_ARRAY][20]; /* more */
};
extern struct commit_list *get_merge_bases(struct commit *rev1, struct commit *rev2, int cleanup);
+extern int register_shallow(const unsigned char *sha1);
+extern int unregister_shallow(const unsigned char *sha1);
+extern int write_shallow_commits(int fd, int use_pack_protocol);
+extern int is_repository_shallow(void);
+extern struct commit_list *get_shallow_commits(struct object_array *heads,
+ int depth, int shallow_flag, int not_shallow_flag);
+
+ int in_merge_bases(struct commit *rev1, struct commit *rev2);
#endif /* COMMIT_H */
#include "tag.h"
#include "exec_cmd.h"
#include "sideband.h"
- #include <sys/wait.h>
static int keep_pack;
static int quiet;
static int verbose;
static int fetch_all;
+static int depth;
static const char fetch_pack_usage[] =
-"git-fetch-pack [--all] [-q] [-v] [-k] [--thin] [--exec=upload-pack] [host:]directory <refs>...";
+"git-fetch-pack [--all] [-q] [-v] [-k] [--thin] [--exec=upload-pack] [--depth=<n>] [host:]directory <refs>...";
static const char *exec = "git-upload-pack";
#define COMPLETE (1U << 0)
packet_write(fd[1], "want %s\n", sha1_to_hex(remote));
fetching++;
}
+ if (is_repository_shallow())
+ write_shallow_commits(fd[1], 1);
+ if (depth > 0)
+ packet_write(fd[1], "deepen %d", depth);
packet_flush(fd[1]);
if (!fetching)
return 1;
+ if (depth > 0) {
+ char line[1024];
+ unsigned char sha1[20];
+ int len;
+
+ while ((len = packet_read_line(fd[0], line, sizeof(line)))) {
+ if (!strncmp("shallow ", line, 8)) {
+ if (get_sha1_hex(line + 8, sha1))
+ die("invalid shallow line: %s", line);
+ register_shallow(sha1);
+ continue;
+ }
+ if (!strncmp("unshallow ", line, 10)) {
+ if (get_sha1_hex(line + 10, sha1))
+ die("invalid unshallow line: %s", line);
+ if (!lookup_object(sha1))
+ die("object not found: %s", line);
+ /* make sure that it is parsed as shallow */
+ parse_object(sha1);
+ if (unregister_shallow(sha1))
+ die("no shallow found: %s", line);
+ continue;
+ }
+ die("expected shallow/unshallow, got %s", line);
+ }
+ }
+
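Putting the client side above together with the receive_needs() changes to upload-pack.c further down, the extended negotiation looks roughly like this (object names abbreviated; a sketch of this patch's exchange, not a formal protocol description):

    C: want <sha1> ...        one packet per requested ref
    C: shallow <sha1>         one per commit already recorded in $GIT_DIR/shallow
    C: deepen <n>             only when --depth=<n> was given
    C: 0000                   flush
    S: shallow <sha1>         new history boundaries for the client to record
    S: unshallow <sha1>       former boundaries that are now complete
    S: 0000                   flush
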
flushes = 0;
retval = -1;
while ((sha1 = get_rev())) {
if (!memcmp(ref->name, "refs/", 5) &&
check_ref_format(ref->name + 5))
; /* trash */
- else if (fetch_all) {
+ else if (fetch_all &&
+ (!depth || strncmp(ref->name, "refs/tags/", 10) )) {
*newtail = ref;
ref->next = NULL;
newtail = &ref->next;
}
}
- for_each_ref(mark_complete, NULL);
- if (cutoff)
- mark_recent_complete_commits(cutoff);
+ if (!depth) {
+ for_each_ref(mark_complete, NULL);
+ if (cutoff)
+ mark_recent_complete_commits(cutoff);
+ }
/*
* Mark all complete remote refs as common refs.
int status;
get_remote_heads(fd[0], &ref, 0, NULL, 0);
+ if (is_repository_shallow() && !server_supports("shallow"))
+ die("Server does not support shallow clients");
if (server_supports("multi_ack")) {
if (verbose)
fprintf(stderr, "Server supports multi_ack\n");
return 0;
}
+ static int remove_duplicates(int nr_heads, char **heads)
+ {
+ int src, dst;
+
+ for (src = dst = 0; src < nr_heads; src++) {
+ /* If heads[src] is different from any of
+ * heads[0..dst], push it in.
+ */
+ int i;
+ for (i = 0; i < dst; i++) {
+ if (!strcmp(heads[i], heads[src]))
+ break;
+ }
+ if (i < dst)
+ continue;
+ if (src != dst)
+ heads[dst] = heads[src];
+ dst++;
+ }
+ heads[dst] = 0;
+ return dst;
+ }
+
int main(int argc, char **argv)
{
int i, ret, nr_heads;
char *dest = NULL, **heads;
int fd[2];
pid_t pid;
+ struct stat st;
+ struct lock_file lock;
setup_git_directory();
verbose = 1;
continue;
}
+ if (!strncmp("--depth=", arg, 8)) {
+ depth = strtol(arg + 8, NULL, 0);
+ if (stat(git_path("shallow"), &st))
+ st.st_mtime = 0;
+ continue;
+ }
usage(fetch_pack_usage);
}
dest = arg;
pid = git_connect(fd, dest, exec);
if (pid < 0)
return 1;
+ if (heads && nr_heads)
+ nr_heads = remove_duplicates(nr_heads, heads);
ret = fetch_pack(fd, nr_heads, heads);
close(fd[0]);
close(fd[1]);
}
}
+ if (!ret && depth > 0) {
+ struct cache_time mtime;
+ char *shallow = git_path("shallow");
+ int fd;
+
+ mtime.sec = st.st_mtime;
+#ifdef USE_NSEC
+ mtime.usec = st.st_mtim.usec;
+#endif
+ if (stat(shallow, &st)) {
+ if (mtime.sec)
+ die("shallow file was removed during fetch");
+ } else if (st.st_mtime != mtime.sec
+#ifdef USE_NSEC
+ || st.st_mtim.usec != mtime.usec
+#endif
+ )
+ die("shallow file was changed during fetch");
+
+ fd = hold_lock_file_for_update(&lock, shallow, 1);
+ if (!write_shallow_commits(fd, 0)) {
+ unlink(shallow);
+ rollback_lock_file(&lock);
+ } else {
+ close(fd);
+ commit_lock_file(&lock);
+ }
+ }
+
return !!ret;
}
}
usage() {
- die "Usage: $0 [--template=<template_directory>] [--use-immingled-remote] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [--depth <n>] [-n] <repo> [<dir>]"
- die "Usage: $0 [--template=<template_directory>] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [-n] <repo> [<dir>]"
++ die "Usage: $0 [--template=<template_directory>] [--reference <reference-repo>] [--bare] [-l [-s]] [-q] [-u <upload-pack>] [--origin <name>] [--depth <n>] [-n] <repo> [<dir>]"
}
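
A hedged usage sketch of the new clone option (URL and target directory are made up):

    git-clone --depth 2 git://git.example.com/project.git project

The resulting repository keeps roughly the two most recent commits of each fetched branch, and the cut-off commits are recorded in $GIT_DIR/shallow by git-fetch-pack.
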
get_repo_base() {
case "$name" in
*^*) continue;;
esac
+ case "$bare,$name" in
+ yes,* | ,heads/* | ,tags/*) ;;
+ *) continue ;;
+ esac
if test -n "$use_separate_remote" &&
branch_name=`expr "z$name" : 'zheads/\(.*\)'`
then
origin=
origin_override=
use_separate_remote=t
+depth=
while
case "$#,$1" in
0,*) break ;;
*,--template=*)
template="$1" ;;
*,-q|*,--quiet) quiet=-q ;;
- *,--use-separate-remote)
- # default
- use_separate_remote=t ;;
- *,--use-immingled-remote)
- use_separate_remote= ;;
+ *,--use-separate-remote) ;;
+ *,--no-separate-remote)
+ die "clones are always made with separate-remote layout" ;;
1,--reference) usage ;;
*,--reference)
shift; reference="$1" ;;
*,-u|*,--upload-pack)
shift
upload_pack="--exec=$1" ;;
+ 1,--depth) usage;;
+ *,--depth)
+ shift
+ depth="--depth=$1";;
*,-*) usage ;;
*) break ;;
esac
test -n "$repo" ||
die 'you must specify a repository to clone.'
- # --bare implies --no-checkout and --use-immingled-remote
+ # --bare implies --no-checkout and --no-separate-remote
if test yes = "$bare"
then
if test yes = "$origin_override"
*)
case "$repo" in
rsync://*)
+ case "$depth" in
+ "") ;;
+ *) die "shallow over rsync not supported" ;;
+ esac
rsync $quiet -av --ignore-existing \
--exclude info "$repo/objects/" "$GIT_DIR/objects/" ||
exit
git-ls-remote "$repo" >"$GIT_DIR/CLONE_HEAD" || exit 1
;;
https://*|http://*|ftp://*)
+ case "$depth" in
+ "") ;;
+ *) die "shallow over http or ftp not supported" ;;
+ esac
if test -z "@@NO_CURL@@"
then
clone_dumb_http "$repo" "$D"
;;
*)
case "$upload_pack" in
- '') git-fetch-pack --all -k $quiet "$repo" ;;
- *) git-fetch-pack --all -k $quiet "$upload_pack" "$repo" ;;
+ '') git-fetch-pack --all -k $quiet $depth "$repo" ;;
+ *) git-fetch-pack --all -k $quiet "$upload_pack" $depth "$repo" ;;
esac >"$GIT_DIR/CLONE_HEAD" ||
die "fetch-pack from '$repo' failed."
;;
if test -z "$bare" && test -f "$GIT_DIR/REMOTE_HEAD"
then
- # Figure out which remote branch HEAD points at.
- case "$use_separate_remote" in
- '') remote_top=refs/heads ;;
- *) remote_top="refs/remotes/$origin" ;;
- esac
-
+ # a non-bare repository is always in separate-remote layout
+ remote_top="refs/remotes/$origin"
head_sha1=`cat "$GIT_DIR/REMOTE_HEAD"`
case "$head_sha1" in
'ref: refs/'*)
)
)
- # Write out remotes/$origin file, and update our "$head_points_at".
+ # Write out remote.$origin config, and update our "$head_points_at".
case "$head_points_at" in
?*)
- mkdir -p "$GIT_DIR/remotes" &&
+ # Local default branch
git-symbolic-ref HEAD "refs/heads/$head_points_at" &&
- case "$use_separate_remote" in
- t) origin_track="$remote_top/$head_points_at"
- git-update-ref HEAD "$head_sha1" ;;
- *) origin_track="$remote_top/$origin"
- git-update-ref "refs/heads/$origin" "$head_sha1" ;;
- esac &&
- echo >"$GIT_DIR/remotes/$origin" \
- "URL: $repo
- Pull: refs/heads/$head_points_at:$origin_track" &&
- (cd "$GIT_DIR/$remote_top" && find . -type f -print) |
- while read dotslref
- do
- name=`expr "$dotslref" : './\(.*\)'`
- if test "z$head_points_at" = "z$name"
- then
- continue
- fi
- if test "$use_separate_remote" = '' &&
- test "z$origin" = "z$name"
- then
- continue
- fi
- echo "Pull: refs/heads/${name}:$remote_top/${name}"
- done >>"$GIT_DIR/remotes/$origin" &&
- case "$use_separate_remote" in
- t)
- rm -f "refs/remotes/$origin/HEAD"
- git-symbolic-ref "refs/remotes/$origin/HEAD" \
- "refs/remotes/$origin/$head_points_at"
- esac
+
+ # Tracking branch for the primary branch at the remote.
+ origin_track="$remote_top/$head_points_at" &&
+ git-update-ref HEAD "$head_sha1" &&
+
+ # Upstream URL
+ git-repo-config remote."$origin".url "$repo" &&
+
+ # Set up the mappings to track the remote branches.
+ git-repo-config remote."$origin".fetch \
+ "refs/heads/*:$remote_top/*" '^$' &&
+ rm -f "refs/remotes/$origin/HEAD"
+ git-symbolic-ref "refs/remotes/$origin/HEAD" \
+ "refs/remotes/$origin/$head_points_at" &&
+
+ git-repo-config branch."$head_points_at".remote "$origin" &&
+ git-repo-config branch."$head_points_at".merge "refs/heads/$head_points_at"
esac
case "$no_checkout" in
#
USAGE='<fetch-options> <repository> <refspec>...'
+ SUBDIRECTORY_OK=Yes
. git-sh-setup
+ TOP=$(git-rev-parse --show-cdup)
+ if test ! -z "$TOP"
+ then
+ cd "$TOP"
+ fi
. git-parse-remote
_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]'
_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
exec=
upload_pack=
keep=
+shallow_depth=
while case "$#" in 0) break ;; esac
do
case "$1" in
--reflog-action=*)
rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'`
;;
+ --depth=*)
+ shallow_depth="--depth=`expr "z$1" : 'z-[^=]*=\(.*\)'`"
+ ;;
+ --depth)
+ shift
+ shallow_depth="--depth=$1"
+ ;;
-*)
usage
;;
: >"$GIT_DIR/FETCH_HEAD"
fi
+ # Global that is reused later
+ ls_remote_result=$(git ls-remote $upload_pack "$remote") ||
+ die "Cannot get the repository state from $remote"
+
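The cached ls_remote_result is plain `git ls-remote` output, i.e. one "<sha1> TAB <refname>" line per ref (annotated tags also appear with a peeled "^{}" entry), which is what the `while read sha1 name` loops below pick apart:

    <40-hex object name>	refs/heads/master
    <40-hex object name>	refs/tags/v1.0
    <40-hex object name>	refs/tags/v1.0^{}
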
append_fetch_head () {
head_="$1"
remote_="$2"
reflist=$(get_remote_refs_for_fetch "$@")
if test "$tags"
then
- taglist=`IFS=" " &&
- (
- git-ls-remote $upload_pack --tags "$remote" ||
- echo fail ouch
- ) |
+ taglist=`IFS=' ' &&
+ echo "$ls_remote_result" |
while read sha1 name
do
case "$sha1" in
esac
case "$name" in
*^*) continue ;;
+ refs/tags/*) ;;
+ *) continue ;;
esac
if git-check-ref-format "$name"
then
# There are transports that can fetch only one head at a time...
case "$remote" in
http://* | https://* | ftp://*)
+ test -n "$shallow_depth" &&
+ die "shallow clone with http not supported"
proto=`expr "$remote" : '\([^:]*\):'`
if [ -n "$GIT_SSL_NO_VERIFY" ]; then
curl_extra_args="-k"
"`git-repo-config --bool http.noEPSV`" = true ]; then
noepsv_opt="--disable-epsv"
fi
- max_depth=5
- depth=0
- head="ref: $remote_name"
- while (expr "z$head" : "zref:" && expr $depth \< $max_depth) >/dev/null
- do
- remote_name_quoted=$(@@PERL@@ -e '
- my $u = $ARGV[0];
- $u =~ s/^ref:\s*//;
- $u =~ s{([^-a-zA-Z0-9/.])}{sprintf"%%%02x",ord($1)}eg;
- print "$u";
- ' "$head")
- head=$(curl -nsfL $curl_extra_args $noepsv_opt "$remote/$remote_name_quoted")
- depth=$( expr \( $depth + 1 \) )
- done
+
+ # Find $remote_name from ls-remote output.
+ head=$(
+ IFS=' '
+ echo "$ls_remote_result" |
+ while read sha1 name
+ do
+ test "z$name" = "z$remote_name" || continue
+ echo "$sha1"
+ break
+ done
+ )
expr "z$head" : "z$_x40\$" >/dev/null ||
- die "Failed to fetch $remote_name from $remote"
+ die "No such ref $remote_name at $remote"
echo >&2 "Fetching $remote_name from $remote using $proto"
git-http-fetch -v -a "$head" "$remote/" || exit
;;
rsync://*)
+ test -n "$shallow_depth" &&
+ die "shallow clone with rsync not supported"
TMP_HEAD="$GIT_DIR/TMP_HEAD"
rsync -L -q "$remote/$remote_name" "$TMP_HEAD" || exit 1
head=$(git-rev-parse --verify TMP_HEAD)
esac
append_fetch_head "$head" "$remote" \
- "$remote_name" "$remote_nick" "$local_name" "$not_for_merge"
+ "$remote_name" "$remote_nick" "$local_name" "$not_for_merge" || exit
done
pack_lockfile=
IFS=" $LF"
(
- git-fetch-pack --thin $exec $keep "$remote" $rref || echo failed "$remote"
+ git-fetch-pack --thin $exec $keep $shallow_depth "$remote" $rref || echo failed "$remote"
) |
while read sha1 remote_name
do
done
local_name=$(expr "z$found" : 'z[^:]*:\(.*\)')
append_fetch_head "$sha1" "$remote" \
- "$remote_name" "$remote_nick" "$local_name" "$not_for_merge"
- done
+ "$remote_name" "$remote_nick" "$local_name" \
+ "$not_for_merge" || exit
+ done &&
if [ "$pack_lockfile" ]; then rm -f "$pack_lockfile"; fi
) || exit ;;
esac
}
- fetch_main "$reflist"
+ fetch_main "$reflist" || exit
# automated tag following
case "$no_tags$tags" in
*:refs/*)
# effective only when we are following a remote branch
# using a local tracking branch.
- taglist=$(IFS=" " &&
- git-ls-remote $upload_pack --tags "$remote" |
- sed -n -e 's|^\('"$_x40"'\) \(refs/tags/.*\)^{}$|\1 \2|p' \
- -e 's|^\('"$_x40"'\) \(refs/tags/.*\)$|\1 \2|p' |
+ taglist=$(IFS=' ' &&
+ echo "$ls_remote_result" |
+ git-show-ref --exclude-existing=refs/tags/ |
while read sha1 name
do
- git-show-ref --verify --quiet -- "$name" && continue
- git-check-ref-format "$name" || {
- echo >&2 "warning: tag ${name} ignored"
- continue
- }
git-cat-file -t "$sha1" >/dev/null 2>&1 || continue
echo >&2 "Auto-following $name"
echo ".${name}:${name}"
case "$taglist" in
'') ;;
?*)
- fetch_main "$taglist" ;;
+ # do not deepen a shallow tree when following tags
+ shallow_depth=
+ fetch_main "$taglist" || exit ;;
esac
esac
pull_to_client 3rd "A" $((1*3)) # old fails
- test_expect_success "clone shallow object count (part 2)" \
- "test -z \"$(grep -v in-pack count.shallow | sed "s/^.*: 0//")\""
+test_expect_success "clone shallow" "git-clone --depth 2 . shallow"
+
+(cd shallow; git-count-objects -v) > count.shallow
+
+test_expect_success "clone shallow object count" \
+ "test \"in-pack: 18\" = \"$(grep in-pack count.shallow)\""
+
++count_output () {
++ sed -e '/^in-pack:/d' -e '/^packs:/d' -e '/: 0$/d' "$1"
++}
++
++test_expect_success "clone shallow object count (part 2)" '
++ test -z "$(count_output count.shallow)"
++'
+
+test_expect_success "fsck in shallow repo" \
+ "(cd shallow; git-fsck-objects --full)"
+
+#test_done; exit
+
+add B66 $B65
+add B67 $B66
+
+test_expect_success "pull in shallow repo" \
+ "(cd shallow; git pull .. B)"
+
+(cd shallow; git-count-objects -v) > count.shallow
+test_expect_success "clone shallow object count" \
+ "test \"count: 6\" = \"$(grep count count.shallow)\""
+
+add B68 $B67
+add B69 $B68
+
+test_expect_success "deepening pull in shallow repo" \
+ "(cd shallow; git pull --depth 4 .. B)"
+
+(cd shallow; git-count-objects -v) > count.shallow
+test_expect_success "clone shallow object count" \
+ "test \"count: 12\" = \"$(grep count count.shallow)\""
+
+test_expect_success "deepening fetch in shallow repo" \
+ "(cd shallow; git fetch --depth 4 .. A:A)"
+
+(cd shallow; git-count-objects -v) > count.shallow
+test_expect_success "clone shallow object count" \
+ "test \"count: 18\" = \"$(grep count count.shallow)\""
+
+test_expect_failure "pull in shallow repo with missing merge base" \
+ "(cd shallow; git pull --depth 4 .. A)"
+
test_done
- #include <signal.h>
- #include <sys/wait.h>
- #include <sys/poll.h>
#include "cache.h"
#include "refs.h"
#include "pkt-line.h"
#include "object.h"
#include "commit.h"
#include "exec_cmd.h"
+#include "diff.h"
+#include "revision.h"
+#include "list-objects.h"
static const char upload_pack_usage[] = "git-upload-pack [--strict] [--timeout=nn] <dir>";
#define COMMON_KNOWN (1u << 14)
#define REACHABLE (1u << 15)
+#define SHALLOW (1u << 16)
+#define NOT_SHALLOW (1u << 17)
+#define CLIENT_SHALLOW (1u << 18)
+
static unsigned long oldest_have;
static int multi_ack, nr_our_refs;
return safe_write(fd, data, sz);
}
+FILE *pack_pipe = NULL;
+static void show_commit(struct commit *commit)
+{
+ if (commit->object.flags & BOUNDARY)
+ fputc('-', pack_pipe);
+ if (fputs(sha1_to_hex(commit->object.sha1), pack_pipe) < 0)
+ die("broken output pipe");
+ fputc('\n', pack_pipe);
+ fflush(pack_pipe);
+ free(commit->buffer);
+ commit->buffer = NULL;
+}
+
+static void show_object(struct object_array_entry *p)
+{
+ /* An object with name "foo\n0000000..." can be used to
+ * confuse downstream git-pack-objects very badly.
+ */
+ const char *ep = strchr(p->name, '\n');
+ if (ep) {
+ fprintf(pack_pipe, "%s %.*s\n", sha1_to_hex(p->item->sha1),
+ (int) (ep - p->name),
+ p->name);
+ }
+ else
+ fprintf(pack_pipe, "%s %s\n",
+ sha1_to_hex(p->item->sha1), p->name);
+}
+
+static void show_edge(struct commit *commit)
+{
+ fprintf(pack_pipe, "-%s\n", sha1_to_hex(commit->object.sha1));
+}
+
static void create_pack_file(void)
{
/* Pipes between rev-list to pack-objects, pack-objects to us
if (!pid_rev_list) {
int i;
- int args;
- const char **argv;
- const char **p;
- char *buf;
+ struct rev_info revs;
- if (create_full_pack) {
- args = 10;
- use_thin_pack = 0; /* no point doing it */
- }
- else
- args = have_obj.nr + want_obj.nr + 5;
- p = xmalloc(args * sizeof(char *));
- argv = (const char **) p;
- buf = xmalloc(args * 45);
+ pack_pipe = fdopen(lp_pipe[1], "w");
- dup2(lp_pipe[1], 1);
- close(0);
- close(lp_pipe[0]);
- close(lp_pipe[1]);
- *p++ = "rev-list";
- *p++ = use_thin_pack ? "--objects-edge" : "--objects";
if (create_full_pack)
- *p++ = "--all";
- else {
+ use_thin_pack = 0; /* no point doing it */
+ init_revisions(&revs, NULL);
+ revs.tag_objects = 1;
+ revs.tree_objects = 1;
+ revs.blob_objects = 1;
+ if (use_thin_pack)
+ revs.edge_hint = 1;
+
+ if (create_full_pack) {
+ const char *args[] = {"rev-list", "--all", NULL};
+ setup_revisions(2, args, &revs, NULL);
+ } else {
for (i = 0; i < want_obj.nr; i++) {
struct object *o = want_obj.objects[i].item;
- *p++ = buf;
- memcpy(buf, sha1_to_hex(o->sha1), 41);
- buf += 41;
+ /* why??? */
+ o->flags &= ~UNINTERESTING;
+ add_pending_object(&revs, o, NULL);
}
- }
- if (!create_full_pack)
for (i = 0; i < have_obj.nr; i++) {
struct object *o = have_obj.objects[i].item;
- *p++ = buf;
- *buf++ = '^';
- memcpy(buf, sha1_to_hex(o->sha1), 41);
- buf += 41;
+ o->flags |= UNINTERESTING;
+ add_pending_object(&revs, o, NULL);
}
- *p++ = NULL;
- execv_git_cmd(argv);
- die("git-upload-pack: unable to exec git-rev-list");
+ setup_revisions(0, NULL, &revs, NULL);
+ }
+ prepare_revision_walk(&revs);
+ mark_edges_uninteresting(revs.commits, &revs, show_edge);
+ traverse_commit_list(&revs, show_commit, show_object);
+ exit(0);
}
if (pipe(pu_pipe) < 0)
static void receive_needs(void)
{
+ struct object_array shallows = {0, 0, NULL};
static char line[1000];
- int len;
+ int len, depth = 0;
for (;;) {
struct object *o;
len = packet_read_line(0, line, sizeof(line));
reset_timeout();
if (!len)
- return;
+ break;
+ if (!strncmp("shallow ", line, 8)) {
+ unsigned char sha1[20];
+ struct object *object;
+ use_thin_pack = 0;
+ if (get_sha1(line + 8, sha1))
+ die("invalid shallow line: %s", line);
+ object = parse_object(sha1);
+ if (!object)
+ die("did not find object for %s", line);
+ object->flags |= CLIENT_SHALLOW;
+ add_object_array(object, NULL, &shallows);
+ continue;
+ }
+ if (!strncmp("deepen ", line, 7)) {
+ char *end;
+ use_thin_pack = 0;
+ depth = strtol(line + 7, &end, 0);
+ if (end == line + 7 || depth <= 0)
+ die("Invalid deepen: %s", line);
+ continue;
+ }
if (strncmp("want ", line, 5) ||
get_sha1_hex(line+5, sha1_buf))
die("git-upload-pack: protocol error, "
add_object_array(o, NULL, &want_obj);
}
}
+ if (depth == 0 && shallows.nr == 0)
+ return;
+ if (depth > 0) {
+ struct commit_list *result, *backup;
+ int i;
+ backup = result = get_shallow_commits(&want_obj, depth,
+ SHALLOW, NOT_SHALLOW);
+ while (result) {
+ struct object *object = &result->item->object;
+ if (!(object->flags & (CLIENT_SHALLOW|NOT_SHALLOW))) {
+ packet_write(1, "shallow %s",
+ sha1_to_hex(object->sha1));
+ register_shallow(object->sha1);
+ }
+ result = result->next;
+ }
+ free_commit_list(backup);
+ for (i = 0; i < shallows.nr; i++) {
+ struct object *object = shallows.objects[i].item;
+ if (object->flags & NOT_SHALLOW) {
+ struct commit_list *parents;
+ packet_write(1, "unshallow %s",
+ sha1_to_hex(object->sha1));
+ object->flags &= ~CLIENT_SHALLOW;
+ /* make sure the real parents are parsed */
+ unregister_shallow(object->sha1);
+ object->parsed = 0;
+ parse_commit((struct commit *)object);
+ parents = ((struct commit *)object)->parents;
+ while (parents) {
+ add_object_array(&parents->item->object,
+ NULL, &want_obj);
+ parents = parents->next;
+ }
+ }
+ /* make sure commit traversal conforms to client */
+ register_shallow(object->sha1);
+ }
+ packet_flush(1);
+ } else
+ if (shallows.nr > 0) {
+ int i;
+ for (i = 0; i < shallows.nr; i++)
+ register_shallow(shallows.objects[i].item->sha1);
+ }
+ free(shallows.objects);
}
static int send_ref(const char *refname, const unsigned char *sha1, int flag, void *cb_data)
{
- static const char *capabilities = "multi_ack thin-pack side-band side-band-64k ofs-delta";
+ static const char *capabilities = "multi_ack thin-pack side-band"
+ " side-band-64k ofs-delta shallow";
struct object *o = parse_object(sha1);
if (!o)