static uint32_t reuse_packfile_objects;
static off_t reuse_packfile_offset;
-static int use_bitmap_index = 1;
+static int use_bitmap_index_default = 1;
+static int use_bitmap_index = -1;
static int write_bitmap_index;
static uint16_t write_bitmap_options;
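The split into use_bitmap_index_default and use_bitmap_index is the usual tri-state pattern: pack.usebitmaps and the "soft" conditions further down only adjust the default, while -1 marks "not set explicitly". A minimal sketch of how such a pair resolves, with illustrative names rather than the pack-objects code itself:

#include <stdio.h>

static int use_feature_default = 1; /* may be lowered by "soft" conditions */
static int use_feature = -1;        /* -1 until config or an option sets it */

static int resolve_use_feature(void)
{
	/* an explicit setting wins; otherwise fall back to the computed default */
	return use_feature < 0 ? use_feature_default : use_feature;
}

int main(void)
{
	printf("use_feature = %d\n", resolve_use_feature()); /* prints 1 here */
	return 0;
}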
}
/* Return 0 if we will bust the pack-size limit */
-static unsigned long write_reuse_object(struct sha1file *f, struct object_entry *entry,
- unsigned long limit, int usable_delta)
+static off_t write_reuse_object(struct sha1file *f, struct object_entry *entry,
+ unsigned long limit, int usable_delta)
{
struct packed_git *p = entry->in_pack;
struct pack_window *w_curs = NULL;
struct revindex_entry *revidx;
off_t offset;
enum object_type type = entry->type;
- unsigned long datalen;
+ off_t datalen;
unsigned char header[10], dheader[10];
unsigned hdrlen;
}
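Widening the returned length (and datalen) from unsigned long to off_t guards against truncation on platforms where unsigned long is 32 bits but pack data can exceed 4 GiB. A hedged sketch of the failure mode this avoids, assuming a 32-bit unsigned long and a large-file-aware off_t:

#define _FILE_OFFSET_BITS 64   /* make off_t 64-bit on 32-bit platforms */
#include <stdio.h>
#include <sys/types.h>

int main(void)
{
	off_t datalen = (off_t)5 << 30;             /* 5 GiB of packed data */
	unsigned long narrowed = (unsigned long)datalen;

	/* with a 32-bit unsigned long the value wraps around to 1 GiB */
	printf("off_t: %lld, unsigned long: %lu\n",
	       (long long)datalen, narrowed);
	return 0;
}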
/* Return 0 if we will bust the pack-size limit */
-static unsigned long write_object(struct sha1file *f,
- struct object_entry *entry,
- off_t write_offset)
+static off_t write_object(struct sha1file *f,
+ struct object_entry *entry,
+ off_t write_offset)
{
- unsigned long limit, len;
+ unsigned long limit;
+ off_t len;
int usable_delta, to_reuse;
if (!pack_to_stdout)
struct object_entry *e,
off_t *offset)
{
- unsigned long size;
+ off_t size;
int recursing;
/*
#define ll_find_deltas(l, s, w, d, p) find_deltas(l, &s, w, d, p)
#endif
+static void add_tag_chain(const struct object_id *oid)
+{
+ struct tag *tag;
+
+ /*
+ * We catch duplicates already in add_object_entry(), but we'd
+ * prefer to do this extra check to avoid having to parse the
+ * tag at all if we already know that it's being packed (e.g., if
+ * it was included via bitmaps, we would not have parsed it
+ * previously).
+ */
+ if (packlist_find(&to_pack, oid->hash, NULL))
+ return;
+
+ tag = lookup_tag(oid->hash);
+ while (1) {
+ if (!tag || parse_tag(tag) || !tag->tagged)
+ die("unable to pack objects reachable from tag %s",
+ oid_to_hex(oid));
+
+ add_object_entry(tag->object.oid.hash, OBJ_TAG, NULL, 0);
+
+ if (tag->tagged->type != OBJ_TAG)
+ return;
+
+ tag = (struct tag *)tag->tagged;
+ }
+}
+
static int add_ref_tag(const char *path, const struct object_id *oid, int flag, void *cb_data)
{
struct object_id peeled;
if (starts_with(path, "refs/tags/") && /* is a tag? */
!peel_ref(path, peeled.hash) && /* peelable? */
packlist_find(&to_pack, peeled.hash, NULL)) /* object packed? */
- add_object_entry(oid->hash, OBJ_TAG, NULL, 0);
+ add_tag_chain(oid);
return 0;
}
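add_ref_tag() now hands the outer tag to add_tag_chain() instead of adding only that one object, so nested tags (a tag pointing at another tag) also end up in the pack. A simplified model of the chain walk, using hypothetical types that stand in for git's struct tag:

#include <stddef.h>

enum obj_type { TYPE_COMMIT, TYPE_TAG };    /* stand-in for git's OBJ_* */

struct obj {                                /* hypothetical object */
	enum obj_type type;
	struct obj *tagged;                 /* what a tag points at */
};

static void record(struct obj *o)
{
	(void)o;                            /* e.g. add to the pack list */
}

/* record every tag in the chain; stop at the first non-tag target */
static void walk_tag_chain(struct obj *tag)
{
	while (tag) {
		record(tag);
		if (!tag->tagged || tag->tagged->type != TYPE_TAG)
			return;
		tag = tag->tagged;
	}
}

int main(void)
{
	struct obj commit = { TYPE_COMMIT, NULL };
	struct obj inner  = { TYPE_TAG, &commit };
	struct obj outer  = { TYPE_TAG, &inner };

	walk_tag_chain(&outer);    /* records outer and inner, stops at commit */
	return 0;
}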
write_bitmap_options &= ~BITMAP_OPT_HASH_CACHE;
}
if (!strcmp(k, "pack.usebitmaps")) {
- use_bitmap_index = git_config_bool(k, v);
+ use_bitmap_index_default = git_config_bool(k, v);
return 0;
}
if (!strcmp(k, "pack.threads")) {
}
/*
- * This tracks any options which a reader of the pack might
- * not understand, and which would therefore prevent blind reuse
- * of what we have on disk.
+ * This tracks any options which pack-reuse code expects to be on, or which a
+ * reader of the pack might not understand, and which would therefore prevent
+ * blind reuse of what we have on disk.
*/
static int pack_options_allow_reuse(void)
{
- return allow_ofs_delta;
+ return pack_to_stdout && allow_ofs_delta;
}
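For context on "blind reuse": the reuse path streams a verbatim region of an existing packfile into the output, which is why it is now restricted to pack_to_stdout in addition to allow_ofs_delta. A rough sketch of that kind of verbatim copy, illustrative only and not the actual reuse code:

#define _FILE_OFFSET_BITS 64
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>

/* copy `len` bytes starting at `offset` from an existing pack to the output */
void copy_reused_region(FILE *out, FILE *pack, off_t offset, off_t len)
{
	char buf[8192];

	if (fseeko(pack, offset, SEEK_SET) < 0) {
		perror("fseeko");
		exit(1);
	}
	while (len > 0) {
		size_t want = len < (off_t)sizeof(buf) ? (size_t)len : sizeof(buf);
		size_t got = fread(buf, 1, want, pack);

		if (!got) {
			fprintf(stderr, "short read on reused pack region\n");
			exit(1);
		}
		if (fwrite(buf, 1, got, out) != got) {
			perror("fwrite");
			exit(1);
		}
		len -= got;
	}
}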
static int get_object_list_from_bitmap(struct rev_info *revs)
if (!rev_list_all || !rev_list_reflog || !rev_list_index)
unpack_unreachable_expiration = 0;
- if (!use_internal_rev_list || !pack_to_stdout || is_repository_shallow())
+ /*
+ * "soft" reasons not to use bitmaps - for on-disk repack by default we want
+ *
+ * - to produce good pack (with bitmap index not-yet-packed objects are
+ * packed in suboptimal order).
+ *
+ * - to use more robust pack-generation codepath (avoiding possible
+ * bugs in bitmap code and possible bitmap index corruption).
+ */
+ if (!pack_to_stdout)
+ use_bitmap_index_default = 0;
+
+ if (use_bitmap_index < 0)
+ use_bitmap_index = use_bitmap_index_default;
+
+ /* "hard" reasons not to use bitmaps; these just won't work at all */
+ if (!use_internal_rev_list || (!pack_to_stdout && write_bitmap_index) || is_repository_shallow())
use_bitmap_index = 0;
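Taken together, this hunk gives a clear precedence: a "hard" reason disables bitmaps unconditionally, an explicit setting beats the soft default, and otherwise the soft default (off for on-disk packs) applies. Extending the earlier resolve sketch with that hard override, as an illustrative fragment rather than part of the patch:

/*
 * explicit > soft default, and a hard reason forces bitmaps off
 * no matter what was configured.
 */
static int resolve_bitmap_use(int explicit_setting /* -1 if unset */,
			      int soft_default, int hard_ok)
{
	int use = explicit_setting < 0 ? soft_default : explicit_setting;

	return hard_ok ? use : 0;
}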
if (pack_to_stdout || !rev_list_all)