Merge branch 'jc/autogc' into js/rebase-i
author Junio C Hamano <gitster@pobox.com>
Wed, 26 Sep 2007 07:42:12 +0000 (00:42 -0700)
committer Junio C Hamano <gitster@pobox.com>
Wed, 26 Sep 2007 07:42:12 +0000 (00:42 -0700)
* jc/autogc:
git-gc --auto: run "repack -A -d -l" as necessary.
git-gc --auto: restructure the way "repack" command line is built.
git-gc --auto: protect ourselves from accumulated cruft
git-gc --auto: add documentation.
git-gc --auto: move threshold check to need_to_gc() function.
repack -A -d: use --keep-unreachable when repacking
pack-objects --keep-unreachable
Export matches_pack_name() and fix its return value
Invoke "git gc --auto" from commit, merge, am and rebase.
Implement git gc --auto

Conflicts:

builtin-pack-objects.c

Documentation/config.txt
builtin-pack-objects.c
cache.h
git-am.sh
git-commit.sh
git-merge.sh
git-rebase--interactive.sh
git-repack.sh
diff --combined Documentation/config.txt
index 015910f27a450cdaec80f3bfc2679243126736c0,b0390f82b85b17287b9e90c5ea57839517c040cf..2f04226988cb070a8fede8db757949ea7cab09c7
@@@ -439,6 -439,19 +439,19 @@@ gc.aggressiveWindow:
        algorithm used by 'git gc --aggressive'.  This defaults
        to 10.
  
+ gc.auto::
+       When there are approximately more than this many loose
+       objects in the repository, `git gc --auto` will pack them.
+       Some Porcelain commands use this command to perform a
+       light-weight garbage collection from time to time.  Setting
+       this to 0 disables it.
+
+ gc.autopacklimit::
+       When there are more than this many packs that are not
+       marked with `*.keep` file in the repository, `git gc
+       --auto` consolidates them into one larger pack.  Setting
+       this to 0 disables this.
+
  gc.packrefs::
        `git gc` does not run `git pack-refs` in a bare repository by
        default so that older dumb-transport clients can still fetch
@@@ -630,17 -643,9 +643,17 @@@ pack.deltaCacheSize:
        A value of 0 means no limit. Defaults to 0.
  
  pack.deltaCacheLimit::
 -      The maxium size of a delta, that is cached in
 +      The maximum size of a delta, that is cached in
        gitlink:git-pack-objects[1]. Defaults to 1000.
  
 +pack.threads::
 +      Specifies the number of threads to spawn when searching for best
 +      delta matches.  This requires that gitlink:git-pack-objects[1]
 +      be compiled with pthreads otherwise this option is ignored with a
 +      warning. This is meant to reduce packing time on multiprocessor
 +      machines. The required amount of memory for the delta search window
 +      is however multiplied by the number of threads.
 +
  pull.octopus::
        The default merge strategy to use when pulling multiple branches
        at once.
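
As a quick illustration of the settings documented above, the new knobs can be set per repository roughly like this (the values are only examples for this note, not defaults taken from the patch):

    $ git config gc.auto 6700          # pack loose objects once about this many have accumulated; 0 disables auto-gc
    $ git config gc.autopacklimit 20   # consolidate packs not marked with a *.keep file once more than this many exist; 0 disables
    $ git config pack.threads 4        # honored only when git-pack-objects is built with pthreads support
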
diff --combined builtin-pack-objects.c
index a15906bdb2021e68a014344cad4e73e9de3367ca,ba7c8da5bf0b91c5ddab3fa692ca49d025d85e29..0be539ed7fd9bf95bb40515b560c7615ed318f37
  #include "list-objects.h"
  #include "progress.h"
  
 +#ifdef THREADED_DELTA_SEARCH
 +#include <pthread.h>
 +#endif
 +
  static const char pack_usage[] = "\
  git-pack-objects [{ -q | --progress | --all-progress }] \n\
        [--max-pack-size=N] [--local] [--incremental] \n\
        [--window=N] [--window-memory=N] [--depth=N] \n\
        [--no-reuse-delta] [--no-reuse-object] [--delta-base-offset] \n\
 -      [--non-empty] [--revs [--unpacked | --all]*] [--reflog] \n\
 +      [--threads=N] [--non-empty] [--revs [--unpacked | --all]*] [--reflog] \n\
-       [--stdout | base-name] [<ref-list | <object-list]";
+       [--stdout | base-name] [--keep-unreachable] [<ref-list | <object-list]";
  
  struct object_entry {
        struct pack_idx_entry idx;
@@@ -61,7 -57,7 +61,7 @@@ static struct object_entry **written_li
  static uint32_t nr_objects, nr_alloc, nr_result, nr_written;
  
  static int non_empty;
- static int no_reuse_delta, no_reuse_object;
+ static int no_reuse_delta, no_reuse_object, keep_unreachable;
  static int local;
  static int incremental;
  static int allow_ofs_delta;
@@@ -72,7 -68,6 +72,7 @@@ static int progress = 1
  static int window = 10;
  static uint32_t pack_size_limit;
  static int depth = 50;
 +static int delta_search_threads = 1;
  static int pack_to_stdout;
  static int num_preferred_base;
  static struct progress progress_state;
@@@ -83,6 -78,7 +83,6 @@@ static unsigned long delta_cache_size 
  static unsigned long max_delta_cache_size = 0;
  static unsigned long cache_max_small_delta_size = 1000;
  
 -static unsigned long window_memory_usage = 0;
  static unsigned long window_memory_limit = 0;
  
  /*
@@@ -1295,31 -1291,6 +1295,31 @@@ static int delta_cacheable(unsigned lon
        return 0;
  }
  
 +#ifdef THREADED_DELTA_SEARCH
 +
 +static pthread_mutex_t read_mutex = PTHREAD_MUTEX_INITIALIZER;
 +#define read_lock()           pthread_mutex_lock(&read_mutex)
 +#define read_unlock()         pthread_mutex_unlock(&read_mutex)
 +
 +static pthread_mutex_t cache_mutex = PTHREAD_MUTEX_INITIALIZER;
 +#define cache_lock()          pthread_mutex_lock(&cache_mutex)
 +#define cache_unlock()                pthread_mutex_unlock(&cache_mutex)
 +
 +static pthread_mutex_t progress_mutex = PTHREAD_MUTEX_INITIALIZER;
 +#define progress_lock()               pthread_mutex_lock(&progress_mutex)
 +#define progress_unlock()     pthread_mutex_unlock(&progress_mutex)
 +
 +#else
 +
 +#define read_lock()           (void)0
 +#define read_unlock()         (void)0
 +#define cache_lock()          (void)0
 +#define cache_unlock()                (void)0
 +#define progress_lock()               (void)0
 +#define progress_unlock()     (void)0
 +
 +#endif
 +
  /*
   * We search for deltas _backwards_ in a list sorted by type and
   * by size, so that we see progressively smaller and smaller files.
   * one.
   */
  static int try_delta(struct unpacked *trg, struct unpacked *src,
 -                   unsigned max_depth)
 +                   unsigned max_depth, unsigned long *mem_usage)
  {
        struct object_entry *trg_entry = trg->entry;
        struct object_entry *src_entry = src->entry;
        if (trg_entry->type != src_entry->type)
                return -1;
  
 -      /* We do not compute delta to *create* objects we are not
 -       * going to pack.
 -       */
 -      if (trg_entry->preferred_base)
 -              return -1;
 -
        /*
         * We do not bother to try a delta that we discarded
         * on an earlier try, but only when reusing delta data.
  
        /* Load data if not already done */
        if (!trg->data) {
 +              read_lock();
                trg->data = read_sha1_file(trg_entry->idx.sha1, &type, &sz);
 +              read_unlock();
                if (!trg->data)
                        die("object %s cannot be read",
                            sha1_to_hex(trg_entry->idx.sha1));
                if (sz != trg_size)
                        die("object %s inconsistent object length (%lu vs %lu)",
                            sha1_to_hex(trg_entry->idx.sha1), sz, trg_size);
 -              window_memory_usage += sz;
 +              *mem_usage += sz;
        }
        if (!src->data) {
 +              read_lock();
                src->data = read_sha1_file(src_entry->idx.sha1, &type, &sz);
 +              read_unlock();
                if (!src->data)
                        die("object %s cannot be read",
                            sha1_to_hex(src_entry->idx.sha1));
                if (sz != src_size)
                        die("object %s inconsistent object length (%lu vs %lu)",
                            sha1_to_hex(src_entry->idx.sha1), sz, src_size);
 -              window_memory_usage += sz;
 +              *mem_usage += sz;
        }
        if (!src->index) {
                src->index = create_delta_index(src->data, src_size);
                                warning("suboptimal pack - out of memory");
                        return 0;
                }
 -              window_memory_usage += sizeof_delta_index(src->index);
 +              *mem_usage += sizeof_delta_index(src->index);
        }
  
        delta_buf = create_delta(src->index, trg->data, trg_size, &delta_size, max_size);
        trg_entry->delta_size = delta_size;
        trg->depth = src->depth + 1;
  
 +      /*
 +       * Handle memory allocation outside of the cache
 +       * accounting lock.  Compiler will optimize the strangeness
 +       * away when THREADED_DELTA_SEARCH is not defined.
 +       */
 +      if (trg_entry->delta_data)
 +              free(trg_entry->delta_data);
 +      cache_lock();
        if (trg_entry->delta_data) {
                delta_cache_size -= trg_entry->delta_size;
 -              free(trg_entry->delta_data);
                trg_entry->delta_data = NULL;
        }
 -
        if (delta_cacheable(src_size, trg_size, delta_size)) {
 -              trg_entry->delta_data = xrealloc(delta_buf, delta_size);
                delta_cache_size += trg_entry->delta_size;
 -      } else
 +              cache_unlock();
 +              trg_entry->delta_data = xrealloc(delta_buf, delta_size);
 +      } else {
 +              cache_unlock();
                free(delta_buf);
 +      }
 +
        return 1;
  }
  
@@@ -1466,60 -1429,68 +1466,60 @@@ static unsigned int check_delta_limit(s
        return m;
  }
  
 -static void free_unpacked(struct unpacked *n)
 +static unsigned long free_unpacked(struct unpacked *n)
  {
 -      window_memory_usage -= sizeof_delta_index(n->index);
 +      unsigned long freed_mem = sizeof_delta_index(n->index);
        free_delta_index(n->index);
        n->index = NULL;
        if (n->data) {
 +              freed_mem += n->entry->size;
                free(n->data);
                n->data = NULL;
 -              window_memory_usage -= n->entry->size;
        }
        n->entry = NULL;
        n->depth = 0;
 +      return freed_mem;
  }
  
 -static void find_deltas(struct object_entry **list, int window, int depth)
 +static void find_deltas(struct object_entry **list, unsigned list_size,
 +                      int window, int depth, unsigned *processed)
  {
 -      uint32_t i = nr_objects, idx = 0, count = 0, processed = 0;
 +      uint32_t i = list_size, idx = 0, count = 0;
        unsigned int array_size = window * sizeof(struct unpacked);
        struct unpacked *array;
 -      int max_depth;
 +      unsigned long mem_usage = 0;
  
 -      if (!nr_objects)
 -              return;
        array = xmalloc(array_size);
        memset(array, 0, array_size);
 -      if (progress)
 -              start_progress(&progress_state, "Deltifying %u objects...", "", nr_result);
  
        do {
                struct object_entry *entry = list[--i];
                struct unpacked *n = array + idx;
 -              int j;
 -
 -              if (!entry->preferred_base)
 -                      processed++;
 -
 -              if (progress)
 -                      display_progress(&progress_state, processed);
 +              int j, max_depth, best_base = -1;
  
 -              if (entry->delta)
 -                      /* This happens if we decided to reuse existing
 -                       * delta from a pack.  "!no_reuse_delta &&" is implied.
 -                       */
 -                      continue;
 -
 -              if (entry->size < 50)
 -                      continue;
 -
 -              if (entry->no_try_delta)
 -                      continue;
 -
 -              free_unpacked(n);
 +              mem_usage -= free_unpacked(n);
                n->entry = entry;
  
                while (window_memory_limit &&
 -                     window_memory_usage > window_memory_limit &&
 +                     mem_usage > window_memory_limit &&
                       count > 1) {
                        uint32_t tail = (idx + window - count) % window;
 -                      free_unpacked(array + tail);
 +                      mem_usage -= free_unpacked(array + tail);
                        count--;
                }
  
 +              /* We do not compute delta to *create* objects we are not
 +               * going to pack.
 +               */
 +              if (entry->preferred_base)
 +                      goto next;
 +
 +              progress_lock();
 +              (*processed)++;
 +              if (progress)
 +                      display_progress(&progress_state, *processed);
 +              progress_unlock();
 +
                /*
                 * If the current object is at pack edge, take the depth the
                 * objects that depend on the current object into account
  
                j = window;
                while (--j > 0) {
 +                      int ret;
                        uint32_t other_idx = idx + j;
                        struct unpacked *m;
                        if (other_idx >= window)
                        m = array + other_idx;
                        if (!m->entry)
                                break;
 -                      if (try_delta(n, m, max_depth) < 0)
 +                      ret = try_delta(n, m, max_depth, &mem_usage);
 +                      if (ret < 0)
                                break;
 +                      else if (ret > 0)
 +                              best_base = other_idx;
                }
  
                /* if we made n a delta, and if n is already at max
                if (entry->delta && depth <= n->depth)
                        continue;
  
 +              /*
 +               * Move the best delta base up in the window, after the
 +               * currently deltified object, to keep it longer.  It will
 +               * be the first base object to be attempted next.
 +               */
 +              if (entry->delta) {
 +                      struct unpacked swap = array[best_base];
 +                      int dist = (window + idx - best_base) % window;
 +                      int dst = best_base;
 +                      while (dist--) {
 +                              int src = (dst + 1) % window;
 +                              array[dst] = array[src];
 +                              dst = src;
 +                      }
 +                      array[dst] = swap;
 +              }
 +
                next:
                idx++;
                if (count + 1 < window)
                        idx = 0;
        } while (i > 0);
  
 -      if (progress)
 -              stop_progress(&progress_state);
 -
        for (i = 0; i < window; ++i) {
                free_delta_index(array[i].index);
                free(array[i].data);
        free(array);
  }
  
 +#ifdef THREADED_DELTA_SEARCH
 +
 +struct thread_params {
 +      pthread_t thread;
 +      struct object_entry **list;
 +      unsigned list_size;
 +      int window;
 +      int depth;
 +      unsigned *processed;
 +};
 +
 +static pthread_mutex_t data_request  = PTHREAD_MUTEX_INITIALIZER;
 +static pthread_mutex_t data_ready    = PTHREAD_MUTEX_INITIALIZER;
 +static pthread_mutex_t data_provider = PTHREAD_MUTEX_INITIALIZER;
 +static struct thread_params *data_requester;
 +
 +static void *threaded_find_deltas(void *arg)
 +{
 +      struct thread_params *me = arg;
 +
 +      for (;;) {
 +              pthread_mutex_lock(&data_request);
 +              data_requester = me;
 +              pthread_mutex_unlock(&data_provider);
 +              pthread_mutex_lock(&data_ready);
 +              pthread_mutex_unlock(&data_request);
 +
 +              if (!me->list_size)
 +                      return NULL;
 +
 +              find_deltas(me->list, me->list_size,
 +                          me->window, me->depth, me->processed);
 +      }
 +}
 +
 +static void ll_find_deltas(struct object_entry **list, unsigned list_size,
 +                         int window, int depth, unsigned *processed)
 +{
 +      struct thread_params *target, p[delta_search_threads];
 +      int i, ret;
 +      unsigned chunk_size;
 +
 +      if (delta_search_threads <= 1) {
 +              find_deltas(list, list_size, window, depth, processed);
 +              return;
 +      }
 +
 +      pthread_mutex_lock(&data_provider);
 +      pthread_mutex_lock(&data_ready);
 +
 +      for (i = 0; i < delta_search_threads; i++) {
 +              p[i].window = window;
 +              p[i].depth = depth;
 +              p[i].processed = processed;
 +              ret = pthread_create(&p[i].thread, NULL,
 +                                   threaded_find_deltas, &p[i]);
 +              if (ret)
 +                      die("unable to create thread: %s", strerror(ret));
 +      }
 +
 +      /* this should be auto-tuned somehow */
 +      chunk_size = window * 1000;
 +
 +      do {
 +              unsigned sublist_size = chunk_size;
 +              if (sublist_size > list_size)
 +                      sublist_size = list_size;
 +
 +              /* try to split chunks on "path" boundaries */
 +              while (sublist_size < list_size && list[sublist_size]->hash &&
 +                     list[sublist_size]->hash == list[sublist_size-1]->hash)
 +                      sublist_size++;
 +
 +              pthread_mutex_lock(&data_provider);
 +              target = data_requester;
 +              target->list = list;
 +              target->list_size = sublist_size;
 +              pthread_mutex_unlock(&data_ready);
 +
 +              list += sublist_size;
 +              list_size -= sublist_size;
 +              if (!sublist_size) {
 +                      pthread_join(target->thread, NULL);
 +                      i--;
 +              }
 +      } while (i);
 +}
 +
 +#else
 +#define ll_find_deltas find_deltas
 +#endif
 +
  static void prepare_pack(int window, int depth)
  {
        struct object_entry **delta_list;
 -      uint32_t i;
 +      uint32_t i, n, nr_deltas;
  
        get_object_details();
  
 -      if (!window || !depth)
 +      if (!nr_objects || !window || !depth)
                return;
  
        delta_list = xmalloc(nr_objects * sizeof(*delta_list));
 -      for (i = 0; i < nr_objects; i++)
 -              delta_list[i] = objects + i;
 -      qsort(delta_list, nr_objects, sizeof(*delta_list), type_size_sort);
 -      find_deltas(delta_list, window+1, depth);
 +      nr_deltas = n = 0;
 +
 +      for (i = 0; i < nr_objects; i++) {
 +              struct object_entry *entry = objects + i;
 +
 +              if (entry->delta)
 +                      /* This happens if we decided to reuse existing
 +                       * delta from a pack.  "!no_reuse_delta &&" is implied.
 +                       */
 +                      continue;
 +
 +              if (entry->size < 50)
 +                      continue;
 +
 +              if (entry->no_try_delta)
 +                      continue;
 +
 +              if (!entry->preferred_base)
 +                      nr_deltas++;
 +
 +              delta_list[n++] = entry;
 +      }
 +
 +      if (nr_deltas) {
 +              unsigned nr_done = 0;
 +              if (progress)
 +                      start_progress(&progress_state,
 +                                     "Deltifying %u objects...", "",
 +                                     nr_deltas);
 +              qsort(delta_list, n, sizeof(*delta_list), type_size_sort);
 +              ll_find_deltas(delta_list, n, window+1, depth, &nr_done);
 +              if (progress)
 +                      stop_progress(&progress_state);
 +              if (nr_done != nr_deltas)
 +                      die("inconsistency with delta count");
 +      }
        free(delta_list);
  }
  
@@@ -1762,17 -1591,6 +1762,17 @@@ static int git_pack_config(const char *
                cache_max_small_delta_size = git_config_int(k, v);
                return 0;
        }
 +      if (!strcmp(k, "pack.threads")) {
 +              delta_search_threads = git_config_int(k, v);
 +              if (delta_search_threads < 1)
 +                      die("invalid number of threads specified (%d)",
 +                          delta_search_threads);
 +#ifndef THREADED_DELTA_SEARCH
 +              if (delta_search_threads > 1)
 +                      warning("no threads support, ignoring %s", k);
 +#endif
 +              return 0;
 +      }
        return git_default_config(k, v);
  }
  
@@@ -1807,15 -1625,19 +1807,19 @@@ static void read_object_list_from_stdin
        }
  }
  
+ #define OBJECT_ADDED (1u<<20)
+
  static void show_commit(struct commit *commit)
  {
        add_object_entry(commit->object.sha1, OBJ_COMMIT, NULL, 0);
+       commit->object.flags |= OBJECT_ADDED;
  }
  
  static void show_object(struct object_array_entry *p)
  {
        add_preferred_base_object(p->name);
        add_object_entry(p->item->sha1, p->item->type, p->name, 0);
+       p->item->flags |= OBJECT_ADDED;
  }
  
  static void show_edge(struct commit *commit)
        add_preferred_base(commit->object.sha1);
  }
  
+ struct in_pack_object {
+       off_t offset;
+       struct object *object;
+ };
+
+ struct in_pack {
+       int alloc;
+       int nr;
+       struct in_pack_object *array;
+ };
+
+ static void mark_in_pack_object(struct object *object, struct packed_git *p, struct in_pack *in_pack)
+ {
+       in_pack->array[in_pack->nr].offset = find_pack_entry_one(object->sha1, p);
+       in_pack->array[in_pack->nr].object = object;
+       in_pack->nr++;
+ }
+
+ /*
+  * Compare the objects in the offset order, in order to emulate the
+  * "git-rev-list --objects" output that produced the pack originally.
+  */
+ static int ofscmp(const void *a_, const void *b_)
+ {
+       struct in_pack_object *a = (struct in_pack_object *)a_;
+       struct in_pack_object *b = (struct in_pack_object *)b_;
+       if (a->offset < b->offset)
+               return -1;
+       else if (a->offset > b->offset)
+               return 1;
+       else
+               return hashcmp(a->object->sha1, b->object->sha1);
+ }
+
+ static void add_objects_in_unpacked_packs(struct rev_info *revs)
+ {
+       struct packed_git *p;
+       struct in_pack in_pack;
+       uint32_t i;
+       memset(&in_pack, 0, sizeof(in_pack));
+       for (p = packed_git; p; p = p->next) {
+               const unsigned char *sha1;
+               struct object *o;
+               for (i = 0; i < revs->num_ignore_packed; i++) {
+                       if (matches_pack_name(p, revs->ignore_packed[i]))
+                               break;
+               }
+               if (revs->num_ignore_packed <= i)
+                       continue;
+               if (open_pack_index(p))
+                       die("cannot open pack index");
+               ALLOC_GROW(in_pack.array,
+                          in_pack.nr + p->num_objects,
+                          in_pack.alloc);
+               for (i = 0; i < p->num_objects; i++) {
+                       sha1 = nth_packed_object_sha1(p, i);
+                       o = lookup_unknown_object(sha1);
+                       if (!(o->flags & OBJECT_ADDED))
+                               mark_in_pack_object(o, p, &in_pack);
+                       o->flags |= OBJECT_ADDED;
+               }
+       }
+       if (in_pack.nr) {
+               qsort(in_pack.array, in_pack.nr, sizeof(in_pack.array[0]),
+                     ofscmp);
+               for (i = 0; i < in_pack.nr; i++) {
+                       struct object *o = in_pack.array[i].object;
+                       add_object_entry(o->sha1, o->type, "", 0);
+               }
+       }
+       free(in_pack.array);
+ }
+
  static void get_object_list(int ac, const char **av)
  {
        struct rev_info revs;
        prepare_revision_walk(&revs);
        mark_edges_uninteresting(revs.commits, &revs, show_edge);
        traverse_commit_list(&revs, show_commit, show_object);
+       if (keep_unreachable)
+               add_objects_in_unpacked_packs(&revs);
  }
  
  static int adjust_perm(const char *path, mode_t mode)
@@@ -1932,18 -1837,6 +2019,18 @@@ int cmd_pack_objects(int argc, const ch
                                usage(pack_usage);
                        continue;
                }
 +              if (!prefixcmp(arg, "--threads=")) {
 +                      char *end;
 +                      delta_search_threads = strtoul(arg+10, &end, 0);
 +                      if (!arg[10] || *end || delta_search_threads < 1)
 +                              usage(pack_usage);
 +#ifndef THREADED_DELTA_SEARCH
 +                      if (delta_search_threads > 1)
 +                              warning("no threads support, "
 +                                      "ignoring %s", arg);
 +#endif
 +                      continue;
 +              }
                if (!prefixcmp(arg, "--depth=")) {
                        char *end;
                        depth = strtoul(arg+8, &end, 0);
                        use_internal_rev_list = 1;
                        continue;
                }
+               if (!strcmp("--keep-unreachable", arg)) {
+                       keep_unreachable = 1;
+                       continue;
+               }
                if (!strcmp("--unpacked", arg) ||
                    !prefixcmp(arg, "--unpacked=") ||
                    !strcmp("--reflog", arg) ||
diff --combined cache.h
index 824650016677353cfa8c8a140eb3d904f56d60ee,3fa5b8e6a83bd8dc9d5e27e2b572e0e4a681d415..bb86fcce04f79bcd7857e346f44d38d3b7291004
+++ b/cache.h
@@@ -264,7 -264,6 +264,7 @@@ extern struct cache_entry *refresh_cach
  extern int remove_index_entry_at(struct index_state *, int pos);
  extern int remove_file_from_index(struct index_state *, const char *path);
  extern int add_file_to_index(struct index_state *, const char *path, int verbose);
 +extern struct cache_entry *make_cache_entry(unsigned int mode, const unsigned char *sha1, const char *path, int stage, int refresh);
  extern int ce_same_name(struct cache_entry *a, struct cache_entry *b);
  extern int ie_match_stat(struct index_state *, struct cache_entry *, struct stat *, int);
  extern int ie_modified(struct index_state *, struct cache_entry *, struct stat *, int);
@@@ -530,6 -529,7 +530,7 @@@ extern void *unpack_entry(struct packed
  extern unsigned long unpack_object_header_gently(const unsigned char *buf, unsigned long len, enum object_type *type, unsigned long *sizep);
  extern unsigned long get_size_from_delta(struct packed_git *, struct pack_window **, off_t);
  extern const char *packed_object_info_detail(struct packed_git *, off_t, unsigned long *, unsigned long *, unsigned int *, unsigned char *);
+ extern int matches_pack_name(struct packed_git *p, const char *name);
  
  /* Dumb servers support */
  extern int update_server_info(int);
@@@ -593,6 -593,7 +594,6 @@@ extern void trace_argv_printf(const cha
  /* convert.c */
  extern char *convert_to_git(const char *path, const char *src, unsigned long *sizep);
  extern char *convert_to_working_tree(const char *path, const char *src, unsigned long *sizep);
 -extern void *convert_sha1_file(const char *path, const unsigned char *sha1, unsigned int mode, enum object_type *type, unsigned long *size);
  
  /* diff.c */
  extern int diff_auto_refresh_index;
diff --combined git-am.sh
index 32c46d7ed4b26220f4c9e7fc778bb240c85dae1c,4db4701c9ec7a5f5fa85c3a6d0576903b5d2741a..b02ae6a76fa8265da1ea3d720df9dc29cc5d5519
+++ b/git-am.sh
@@@ -62,8 -62,10 +62,8 @@@ fall_back_3way () 
      mkdir "$dotest/patch-merge-tmp-dir"
  
      # First see if the patch records the index info that we can use.
 -    git apply -z --index-info "$dotest/patch" \
 -      >"$dotest/patch-merge-index-info" &&
 -    GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
 -    git update-index -z --index-info <"$dotest/patch-merge-index-info" &&
 +    git apply --build-fake-ancestor "$dotest/patch-merge-tmp-index" \
 +      "$dotest/patch" &&
      GIT_INDEX_FILE="$dotest/patch-merge-tmp-index" \
      git write-tree >"$dotest/patch-merge-base+" ||
      cannot_fallback "Repository lacks necessary blobs to fall back on 3-way merge."
@@@ -107,7 -109,7 +107,7 @@@ dotest=.dotest sign= utf8=t keep= skip
  resolvemsg= resume=
  git_apply_opt=
  
 -while case "$#" in 0) break;; esac
 +while test $# != 0
  do
        case "$1" in
        -d=*|--d=*|--do=*|--dot=*|--dote=*|--dotes=*|--dotest=*)
@@@ -464,6 -466,8 +464,8 @@@ d
                "$GIT_DIR"/hooks/post-applypatch
        fi
  
+       git gc --auto
+
        go_next
  done
  
diff --combined git-commit.sh
index cb14f0621651d2006b08d1eddf67ab3269df84d0,d22d35eadc7c2c3dcc4825ddbf034e81d376c2ec..44ccc4418e9891dc2027c1f6bbd0c692a08f5e4c
@@@ -89,7 -89,7 +89,7 @@@ force_author
  only_include_assumed=
  untracked_files=
  templatefile="`git config commit.template`"
 -while case "$#" in 0) break;; esac
 +while test $# != 0
  do
        case "$1" in
        -F|--F|-f|--f|--fi|--fil|--file)
                no_edit=t
                log_given=t$log_given
                logfile="$1"
 -              shift
                ;;
        -F*|-f*)
                no_edit=t
                log_given=t$log_given
 -              logfile=`expr "z$1" : 'z-[Ff]\(.*\)'`
 -              shift
 +              logfile="${1#-[Ff]}"
                ;;
        --F=*|--f=*|--fi=*|--fil=*|--file=*)
                no_edit=t
                log_given=t$log_given
 -              logfile=`expr "z$1" : 'z-[^=]*=\(.*\)'`
 -              shift
 +              logfile="${1#*=}"
                ;;
        -a|--a|--al|--all)
                all=t
 -              shift
                ;;
        --au=*|--aut=*|--auth=*|--autho=*|--author=*)
 -              force_author=`expr "z$1" : 'z-[^=]*=\(.*\)'`
 -              shift
 +              force_author="${1#*=}"
                ;;
        --au|--aut|--auth|--autho|--author)
                case "$#" in 1) usage ;; esac
                shift
                force_author="$1"
 -              shift
                ;;
        -e|--e|--ed|--edi|--edit)
                edit_flag=t
 -              shift
                ;;
        -i|--i|--in|--inc|--incl|--inclu|--includ|--include)
                also=t
 -              shift
                ;;
        --int|--inte|--inter|--intera|--interac|--interact|--interacti|\
        --interactiv|--interactive)
                interactive=t
 -              shift
                ;;
        -o|--o|--on|--onl|--only)
                only=t
 -              shift
                ;;
        -m|--m|--me|--mes|--mess|--messa|--messag|--message)
                case "$#" in 1) usage ;; esac
                shift
                log_given=m$log_given
 -              if test "$log_message" = ''
 -              then
 -                  log_message="$1"
 -              else
 -                  log_message="$log_message
 +              log_message="${log_message:+${log_message}
  
 -$1"
 -              fi
 +}$1"
                no_edit=t
 -              shift
                ;;
        -m*)
                log_given=m$log_given
 -              if test "$log_message" = ''
 -              then
 -                  log_message=`expr "z$1" : 'z-m\(.*\)'`
 -              else
 -                  log_message="$log_message
 +              log_message="${log_message:+${log_message}
  
 -`expr "z$1" : 'z-m\(.*\)'`"
 -              fi
 +}${1#-m}"
                no_edit=t
 -              shift
                ;;
        --m=*|--me=*|--mes=*|--mess=*|--messa=*|--messag=*|--message=*)
                log_given=m$log_given
 -              if test "$log_message" = ''
 -              then
 -                  log_message=`expr "z$1" : 'z-[^=]*=\(.*\)'`
 -              else
 -                  log_message="$log_message
 +              log_message="${log_message:+${log_message}
  
 -`expr "z$1" : 'zq-[^=]*=\(.*\)'`"
 -              fi
 +}${1#*=}"
                no_edit=t
 -              shift
                ;;
        -n|--n|--no|--no-|--no-v|--no-ve|--no-ver|--no-veri|--no-verif|\
        --no-verify)
                verify=
 -              shift
                ;;
        --a|--am|--ame|--amen|--amend)
                amend=t
                use_commit=HEAD
 -              shift
                ;;
        -c)
                case "$#" in 1) usage ;; esac
                log_given=t$log_given
                use_commit="$1"
                no_edit=
 -              shift
                ;;
        --ree=*|--reed=*|--reedi=*|--reedit=*|--reedit-=*|--reedit-m=*|\
        --reedit-me=*|--reedit-mes=*|--reedit-mess=*|--reedit-messa=*|\
        --reedit-messag=*|--reedit-message=*)
                log_given=t$log_given
 -              use_commit=`expr "z$1" : 'z-[^=]*=\(.*\)'`
 +              use_commit="${1#*=}"
                no_edit=
 -              shift
                ;;
        --ree|--reed|--reedi|--reedit|--reedit-|--reedit-m|--reedit-me|\
        --reedit-mes|--reedit-mess|--reedit-messa|--reedit-messag|\
                log_given=t$log_given
                use_commit="$1"
                no_edit=
 -              shift
                ;;
        -C)
                case "$#" in 1) usage ;; esac
                log_given=t$log_given
                use_commit="$1"
                no_edit=t
 -              shift
                ;;
        --reu=*|--reus=*|--reuse=*|--reuse-=*|--reuse-m=*|--reuse-me=*|\
        --reuse-mes=*|--reuse-mess=*|--reuse-messa=*|--reuse-messag=*|\
        --reuse-message=*)
                log_given=t$log_given
 -              use_commit=`expr "z$1" : 'z-[^=]*=\(.*\)'`
 +              use_commit="${1#*=}"
                no_edit=t
 -              shift
                ;;
        --reu|--reus|--reuse|--reuse-|--reuse-m|--reuse-me|--reuse-mes|\
        --reuse-mess|--reuse-messa|--reuse-messag|--reuse-message)
                log_given=t$log_given
                use_commit="$1"
                no_edit=t
 -              shift
                ;;
        -s|--s|--si|--sig|--sign|--signo|--signof|--signoff)
                signoff=t
 -              shift
                ;;
        -t|--t|--te|--tem|--temp|--templ|--templa|--templat|--template)
                case "$#" in 1) usage ;; esac
                shift
                templatefile="$1"
                no_edit=
 -              shift
                ;;
        -q|--q|--qu|--qui|--quie|--quiet)
                quiet=t
 -              shift
                ;;
        -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose)
                verbose=t
 -              shift
                ;;
        -u|--u|--un|--unt|--untr|--untra|--untrac|--untrack|--untracke|\
        --untracked|--untracked-|--untracked-f|--untracked-fi|--untracked-fil|\
        --untracked-file|--untracked-files)
                untracked_files=t
 -              shift
                ;;
        --)
                shift
                break
                ;;
        esac
 +      shift
  done
  case "$edit_flag" in t) no_edit= ;; esac
  
@@@ -339,11 -379,8 +339,11 @@@ t,
                then
                        refuse_partial "Cannot do a partial commit during a merge."
                fi
 +
                TMP_INDEX="$GIT_DIR/tmp-index$$"
 -              commit_only=`git ls-files --error-unmatch -- "$@"` || exit
 +              W=
 +              test -z "$initial_commit" && W=--with-tree=HEAD
 +              commit_only=`git ls-files --error-unmatch $W -- "$@"` || exit
  
                # Build a temporary index and update the real index
                # the same way.
                (
                        GIT_INDEX_FILE="$NEXT_INDEX"
                        export GIT_INDEX_FILE
 -                      git update-index --remove --stdin
 +                      git update-index --add --remove --stdin
                ) || exit
                ;;
        esac
@@@ -401,8 -438,12 +401,8 @@@ esa
  
  if test t = "$verify" && test -x "$GIT_DIR"/hooks/pre-commit
  then
 -      if test "$TMP_INDEX"
 -      then
 -              GIT_INDEX_FILE="$TMP_INDEX" "$GIT_DIR"/hooks/pre-commit
 -      else
 -              GIT_INDEX_FILE="$USE_INDEX" "$GIT_DIR"/hooks/pre-commit
 -      fi || exit
 +    GIT_INDEX_FILE="${TMP_INDEX:-${USE_INDEX}}" "$GIT_DIR"/hooks/pre-commit \
 +    || exit
  fi
  
  if test "$log_message" != ''
@@@ -513,7 -554,7 +513,7 @@@ els
        # we need to check if there is anything to commit
        run_status >/dev/null
  fi
 -if [ "$?" != "0" -a ! -f "$GIT_DIR/MERGE_HEAD" -a -z "$amend" ]
 +if [ "$?" != "0" -a ! -f "$GIT_DIR/MERGE_HEAD" ]
  then
        rm -f "$GIT_DIR/COMMIT_EDITMSG" "$GIT_DIR/SQUASH_MSG"
        use_status_color=t
@@@ -611,6 -652,7 +611,7 @@@ git rerer
  
  if test "$ret" = 0
  then
+       git gc --auto
        if test -x "$GIT_DIR"/hooks/post-commit
        then
                "$GIT_DIR"/hooks/post-commit
diff --combined git-merge.sh
index 6c513dcbdf44036b0207c276e765a87eceb7aa77,697bec24fa2da16fb1fa7f618a700c0110b5e5fa..bf18f582da53200fb422bf35c85c1f05c5f7c88d
@@@ -82,6 -82,7 +82,7 @@@ finish () 
                        ;;
                *)
                        git update-ref -m "$rlogm" HEAD "$1" "$head" || exit 1
+                       git gc --auto
                        ;;
                esac
                ;;
                fi
                ;;
        esac
 +
 +      # Run a post-merge hook
 +        if test -x "$GIT_DIR"/hooks/post-merge
 +        then
 +          case "$squash" in
 +          t)
 +                "$GIT_DIR"/hooks/post-merge 1
 +              ;;
 +          '')
 +                "$GIT_DIR"/hooks/post-merge 0
 +              ;;
 +          esac
 +        fi
  }
  
  merge_name () {
  case "$#" in 0) usage ;; esac
  
  have_message=
 -while case "$#" in 0) break ;; esac
 +while test $# != 0
  do
        case "$1" in
        -n|--n|--no|--no-|--no-s|--no-su|--no-sum|--no-summ|\
diff --combined git-rebase--interactive.sh
index 268a629c434c3cc1bad8a59861f3f093291ec540,8258b7adf97d2bc4378d3b92f66e83b383669414..8e6e9431e885660cd01ae95c4a13b8bbbf260063
@@@ -36,14 -36,14 +36,14 @@@ warn () 
  output () {
        case "$VERBOSE" in
        '')
 -              "$@" > "$DOTEST"/output 2>&1
 +              output=$("$@" 2>&1 )
                status=$?
 -              test $status != 0 &&
 -                      cat "$DOTEST"/output
 +              test $status != 0 && printf "%s\n" "$output"
                return $status
 -      ;;
 +              ;;
        *)
                "$@"
 +              ;;
        esac
  }
  
@@@ -63,7 -63,6 +63,7 @@@ comment_for_reflog () 
        ''|rebase*)
                GIT_REFLOG_ACTION="rebase -i ($1)"
                export GIT_REFLOG_ACTION
 +              ;;
        esac
  }
  
@@@ -71,23 -70,22 +71,23 @@@ mark_action_done () 
        sed -e 1q < "$TODO" >> "$DONE"
        sed -e 1d < "$TODO" >> "$TODO".new
        mv -f "$TODO".new "$TODO"
 -      count=$(($(wc -l < "$DONE")))
 -      total=$(($count+$(wc -l < "$TODO")))
 +      count=$(($(grep -ve '^$' -e '^#' < "$DONE" | wc -l)))
 +      total=$(($count+$(grep -ve '^$' -e '^#' < "$TODO" | wc -l)))
        printf "Rebasing (%d/%d)\r" $count $total
        test -z "$VERBOSE" || echo
  }
  
  make_patch () {
 -      parent_sha1=$(git rev-parse --verify "$1"^ 2> /dev/null)
 +      parent_sha1=$(git rev-parse --verify "$1"^) ||
 +              die "Cannot get patch for $1^"
        git diff "$parent_sha1".."$1" > "$DOTEST"/patch
 +      test -f "$DOTEST"/message ||
 +              git cat-file commit "$1" | sed "1,/^$/d" > "$DOTEST"/message
 +      test -f "$DOTEST"/author-script ||
 +              get_author_ident_from_commit "$1" > "$DOTEST"/author-script
  }
  
  die_with_patch () {
 -      test -f "$DOTEST"/message ||
 -              git cat-file commit $sha1 | sed "1,/^$/d" > "$DOTEST"/message
 -      test -f "$DOTEST"/author-script ||
 -              get_author_ident_from_commit $sha1 > "$DOTEST"/author-script
        make_patch "$1"
        die "$2"
  }
@@@ -97,18 -95,13 +97,18 @@@ die_abort () 
        die "$1"
  }
  
 +has_action () {
 +      grep -vqe '^$' -e '^#' "$1"
 +}
 +
  pick_one () {
        no_ff=
        case "$1" in -n) sha1=$2; no_ff=t ;; *) sha1=$1 ;; esac
        output git rev-parse --verify $sha1 || die "Invalid commit name: $sha1"
        test -d "$REWRITTEN" &&
                pick_one_preserving_merges "$@" && return
 -      parent_sha1=$(git rev-parse --verify $sha1^ 2>/dev/null)
 +      parent_sha1=$(git rev-parse --verify $sha1^) ||
 +              die "Could not get the parent of $sha1"
        current_sha1=$(git rev-parse --verify HEAD)
        if test $no_ff$current_sha1 = $parent_sha1; then
                output git reset --hard $sha1
@@@ -136,7 -129,7 +136,7 @@@ pick_one_preserving_merges () 
        fast_forward=t
        preserve=t
        new_parents=
 -      for p in $(git rev-list --parents -1 $sha1 | cut -d -f2-)
 +      for p in $(git rev-list --parents -1 $sha1 | cut -d' ' -f2-)
        do
                if test -f "$REWRITTEN"/$p
                then
                                ;; # do nothing; that parent is already there
                        *)
                                new_parents="$new_parents $new_p"
 +                              ;;
                        esac
                fi
        done
        case $fast_forward in
        t)
                output warn "Fast forward to $sha1"
 -              test $preserve=f && echo $sha1 > "$REWRITTEN"/$sha1
 +              test $preserve = f || echo $sha1 > "$REWRITTEN"/$sha1
                ;;
        f)
                test "a$1" = a-n && die "Refusing to squash a merge: $sha1"
  
 -              first_parent=$(expr "$new_parents" : " \([^ ]*\)")
 +              first_parent=$(expr "$new_parents" : ' \([^ ]*\)')
                # detach HEAD to current parent
                output git checkout $first_parent 2> /dev/null ||
                        die "Cannot move HEAD to $first_parent"
  
                echo $sha1 > "$DOTEST"/current-commit
                case "$new_parents" in
 -              \ *\ *)
 +              ' '*' '*)
                        # redo merge
                        author_script=$(get_author_ident_from_commit $sha1)
                        eval "$author_script"
 -                      msg="$(git cat-file commit $sha1 | \
 -                              sed -e '1,/^$/d' -e "s/[\"\\]/\\\\&/g")"
 +                      msg="$(git cat-file commit $sha1 | sed -e '1,/^$/d')"
                        # NEEDSWORK: give rerere a chance
 -                      if ! output git merge $STRATEGY -m "$msg" $new_parents
 +                      if ! GIT_AUTHOR_NAME="$GIT_AUTHOR_NAME" \
 +                              GIT_AUTHOR_EMAIL="$GIT_AUTHOR_EMAIL" \
 +                              GIT_AUTHOR_DATE="$GIT_AUTHOR_DATE" \
 +                              output git merge $STRATEGY -m "$msg" \
 +                                      $new_parents
                        then
 -                              echo "$msg" > "$GIT_DIR"/MERGE_MSG
 +                              printf "%s\n" "$msg" > "$GIT_DIR"/MERGE_MSG
                                die Error redoing merge $sha1
                        fi
                        ;;
                *)
                        output git cherry-pick $STRATEGY "$@" ||
                                die_with_patch $sha1 "Could not pick $sha1"
 +                      ;;
                esac
 +              ;;
        esac
  }
  
@@@ -225,11 -212,11 +225,11 @@@ peek_next_command () 
  }
  
  do_next () {
 -      test -f "$DOTEST"/message && rm "$DOTEST"/message
 -      test -f "$DOTEST"/author-script && rm "$DOTEST"/author-script
 +      rm -f "$DOTEST"/message "$DOTEST"/author-script \
 +              "$DOTEST"/amend || exit
        read command sha1 rest < "$TODO"
        case "$command" in
 -      \#|'')
 +      '#'*|'')
                mark_action_done
                ;;
        pick)
                pick_one $sha1 ||
                        die_with_patch $sha1 "Could not apply $sha1... $rest"
                make_patch $sha1
 +              : > "$DOTEST"/amend
                warn
                warn "You can amend the commit now, with"
                warn
        squash)
                comment_for_reflog squash
  
 -              test -z "$(grep -ve '^$' -e '^#' < $DONE)" &&
 +              has_action "$DONE" ||
                        die "Cannot 'squash' without a previous commit"
  
                mark_action_done
                        EDIT_COMMIT=
                        USE_OUTPUT=output
                        cp "$MSG" "$SQUASH_MSG"
 -              ;;
 +                      ;;
                *)
                        EDIT_COMMIT=-e
                        USE_OUTPUT=
 -                      test -f "$SQUASH_MSG" && rm "$SQUASH_MSG"
 +                      rm -f "$SQUASH_MSG" || exit
 +                      ;;
                esac
  
                failed=f
                f)
                        # This is like --amend, but with a different message
                        eval "$author_script"
 -                      export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL GIT_AUTHOR_DATE
 +                      GIT_AUTHOR_NAME="$GIT_AUTHOR_NAME" \
 +                      GIT_AUTHOR_EMAIL="$GIT_AUTHOR_EMAIL" \
 +                      GIT_AUTHOR_DATE="$GIT_AUTHOR_DATE" \
                        $USE_OUTPUT git commit -F "$MSG" $EDIT_COMMIT
                        ;;
                t)
                        warn
                        warn "Could not apply $sha1... $rest"
                        die_with_patch $sha1 ""
 +                      ;;
                esac
                ;;
        *)
                warn "Unknown command: $command $sha1 $rest"
                die_with_patch $sha1 "Please fix this in the file $TODO."
 +              ;;
        esac
        test -s "$TODO" && return
  
        rm -rf "$DOTEST" &&
        warn "Successfully rebased and updated $HEADNAME."
  
+       git gc --auto
        exit
  }
  
@@@ -336,7 -319,7 +338,7 @@@ do_rest () 
        done
  }
  
 -while case $# in 0) break ;; esac
 +while test $# != 0
  do
        case "$1" in
        --continue)
                git update-index --refresh &&
                git diff-files --quiet &&
                ! git diff-index --cached --quiet HEAD &&
 -              . "$DOTEST"/author-script &&
 +              . "$DOTEST"/author-script && {
 +                      test ! -f "$DOTEST"/amend || git reset --soft HEAD^
 +              } &&
                export GIT_AUTHOR_NAME GIT_AUTHOR_NAME GIT_AUTHOR_DATE &&
                git commit -F "$DOTEST"/message -e
  
  
                require_clean_work_tree
  
 -              mkdir "$DOTEST" || die "Could not create temporary $DOTEST"
                if test ! -z "$2"
                then
                        output git show-ref --verify --quiet "refs/heads/$2" ||
                HEAD=$(git rev-parse --verify HEAD) || die "No HEAD?"
                UPSTREAM=$(git rev-parse --verify "$1") || die "Invalid base"
  
 +              mkdir "$DOTEST" || die "Could not create temporary $DOTEST"
 +
                test -z "$ONTO" && ONTO=$UPSTREAM
  
                : > "$DOTEST"/interactive || die "Could not mark as interactive"
@@@ -490,18 -470,17 +492,18 @@@ EO
                        $UPSTREAM...$HEAD | \
                        sed -n "s/^>/pick /p" >> "$TODO"
  
 -              test -z "$(grep -ve '^$' -e '^#' < $TODO)" &&
 +              has_action "$TODO" ||
                        die_abort "Nothing to do"
  
                cp "$TODO" "$TODO".backup
                git_editor "$TODO" ||
                        die "Could not execute editor"
  
 -              test -z "$(grep -ve '^$' -e '^#' < $TODO)" &&
 +              has_action "$TODO" ||
                        die_abort "Nothing to do"
  
                output git checkout $ONTO && do_rest
 +              ;;
        esac
        shift
  done
diff --combined git-repack.sh
index 0aae1a3ed5571a010f80438f8e8a0fc7eb0dc285,633b902e26a7e527c7a80ccf6aed2da5e71fa802..e72adc4d91efb8eb6dc96c1f431c8863c408439b
@@@ -3,17 -3,19 +3,19 @@@
  # Copyright (c) 2005 Linus Torvalds
  #
  
- USAGE='[-a] [-d] [-f] [-l] [-n] [-q] [--max-pack-size=N] [--window=N] [--window-memory=N] [--depth=N]'
+ USAGE='[-a|-A] [-d] [-f] [-l] [-n] [-q] [--max-pack-size=N] [--window=N] [--window-memory=N] [--depth=N]'
  SUBDIRECTORY_OK='Yes'
  . git-sh-setup
  
- no_update_info= all_into_one= remove_redundant=
+ no_update_info= all_into_one= remove_redundant= keep_unreachable=
  local= quiet= no_reuse= extra=
 -while case "$#" in 0) break ;; esac
 +while test $# != 0
  do
        case "$1" in
        -n)     no_update_info=t ;;
        -a)     all_into_one=t ;;
+       -A)     all_into_one=t
+               keep_unreachable=--keep-unreachable ;;
        -d)     remove_redundant=t ;;
        -q)     quiet=-q ;;
        -f)     no_reuse=--no-reuse-object ;;
@@@ -59,7 -61,13 +61,13 @@@ case ",$all_into_one," i
                        fi
                done
        fi
-       [ -z "$args" ] && args='--unpacked --incremental'
+       if test -z "$args"
+       then
+               args='--unpacked --incremental'
+       elif test -n "$keep_unreachable"
+       then
+               args="$args $keep_unreachable"
+       fi
        ;;
  esac
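
Taken as a whole, the jc/autogc series wires light-weight maintenance into the porcelain commands; its effect can be approximated by hand with something like the following (a sketch based on the commands named in the shortlog above, not the exact invocations the scripts build):

    $ git gc --auto        # does nothing unless the gc.auto / gc.autopacklimit thresholds are exceeded
    $ git repack -A -d -l  # what gc --auto runs when packs pile up; -A makes repack pass --keep-unreachable to git-pack-objects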