+#define NO_THE_INDEX_COMPATIBILITY_MACROS
#include "cache.h"
#include "cache-tree.h"
#include "tree.h"
const char *tree_type = "tree";
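/*
 * Editor's note, not part of the patch: defining
 * NO_THE_INDEX_COMPATIBILITY_MACROS before including cache.h disables
 * the convenience wrappers that hard-code the global index; in this
 * era of cache.h they look roughly like
 *
 *	#define add_cache_entry(ce, option) \
 *		add_index_entry(&the_index, (ce), (option))
 *
 * With the wrappers gone, tree.c has to call the *_index_* functions
 * directly and pass an explicit struct index_state, which is what the
 * changes below do.
 */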
-static int read_one_entry_opt(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, int opt)
+static int read_one_entry_opt(struct index_state *istate,
+			      const struct object_id *oid,
+			      const char *base, int baselen,
+			      const char *pathname,
+			      unsigned mode, int stage, int opt)
{
	int len;
	unsigned int size;
	ce->ce_namelen = baselen + len;
	memcpy(ce->name, base, baselen);
	memcpy(ce->name + baselen, pathname, len+1);
-	hashcpy(ce->oid.hash, sha1);
-	return add_cache_entry(ce, opt);
+	oidcpy(&ce->oid, oid);
+	return add_index_entry(istate, ce, opt);
}
-static int read_one_entry(const unsigned char *sha1, struct strbuf *base,
+static int read_one_entry(const struct object_id *oid, struct strbuf *base,
			  const char *pathname, unsigned mode, int stage,
			  void *context)
{
-	return read_one_entry_opt(sha1, base->buf, base->len, pathname,
+	struct index_state *istate = context;
+	return read_one_entry_opt(istate, oid, base->buf, base->len, pathname,
				  mode, stage,
				  ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
}
 * This is used when the caller knows there is no existing entries at
 * the stage that will conflict with the entry being added.
 */
-static int read_one_entry_quick(const unsigned char *sha1, struct strbuf *base,
+static int read_one_entry_quick(const struct object_id *oid, struct strbuf *base,
				const char *pathname, unsigned mode, int stage,
				void *context)
{
-	return read_one_entry_opt(sha1, base->buf, base->len, pathname,
+	struct index_state *istate = context;
+	return read_one_entry_opt(istate, oid, base->buf, base->len, pathname,
				  mode, stage,
				  ADD_CACHE_JUST_APPEND);
}
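/*
 * Editor's note, not part of the patch: both callbacks above implement
 * the read_tree_fn_t callback type from tree.h, assumed here (it is
 * not part of this hunk) to look roughly like
 *
 *	typedef int (*read_tree_fn_t)(const struct object_id *,
 *				      struct strbuf *, const char *,
 *				      unsigned int, int, void *);
 *
 * The index being populated travels through the opaque void *context
 * argument, which is why each callback first recovers it as a
 * struct index_state pointer.
 */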
				continue;
		}
-		switch (fn(entry.oid->hash, base,
+		switch (fn(entry.oid, base,
			   entry.path, entry.mode, stage, context)) {
		case 0:
			continue;
				  ce2->name, ce2->ce_namelen, ce_stage(ce2));
}
-int read_tree(struct tree *tree, int stage, struct pathspec *match)
+int read_tree(struct tree *tree, int stage, struct pathspec *match,
+	      struct index_state *istate)
{
	read_tree_fn_t fn = NULL;
	int i, err;
	 * do it the original slow way, otherwise, append and then
	 * sort at the end.
	 */
-	for (i = 0; !fn && i < active_nr; i++) {
-		const struct cache_entry *ce = active_cache[i];
+	for (i = 0; !fn && i < istate->cache_nr; i++) {
+		const struct cache_entry *ce = istate->cache[i];
		if (ce_stage(ce) == stage)
			fn = read_one_entry;
	}
	if (!fn)
		fn = read_one_entry_quick;
-	err = read_tree_recursive(tree, "", 0, stage, match, fn, NULL);
+	err = read_tree_recursive(tree, "", 0, stage, match, fn, istate);
	if (fn == read_one_entry || err)
		return err;
	/*
	 * Sort the cache entry -- we need to nuke the cache tree, though.
	 */
-	cache_tree_free(&active_cache_tree);
-	QSORT(active_cache, active_nr, cmp_cache_name_compare);
+	cache_tree_free(&istate->cache_tree);
+	QSORT(istate->cache, istate->cache_nr, cmp_cache_name_compare);
	return 0;
}
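/*
 * Editor's note, an illustrative sketch rather than part of the patch:
 * with the added index_state parameter, a caller that previously
 * relied on the implicit the_index now passes the index explicitly,
 * along the lines of
 *
 *	if (read_tree(tree, 1, &pathspec, &the_index))
 *		die("unable to read tree");
 *
 * The real call sites are not shown in this hunk.
 */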
void free_tree_buffer(struct tree *tree)
{
-	free(tree->buffer);
-	tree->buffer = NULL;
+	FREE_AND_NULL(tree->buffer);
	tree->size = 0;
	tree->object.parsed = 0;
}
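/*
 * Editor's note, not part of the patch: FREE_AND_NULL is the helper
 * macro from git-compat-util.h, roughly
 *
 *	#define FREE_AND_NULL(p) do { free(p); (p) = NULL; } while (0)
 *
 * so the rewritten free_tree_buffer() behaves exactly like the
 * open-coded free() plus NULL assignment it replaces.
 */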