* another. But we never change the name, or the hash state!
*/
#define CE_STATE_MASK (CE_HASHED | CE_UNHASHED)
-static inline void copy_cache_entry(struct cache_entry *dst, struct cache_entry *src)
+static inline void copy_cache_entry(struct cache_entry *dst,
+				    const struct cache_entry *src)
{
	unsigned int state = dst->ce_flags & CE_STATE_MASK;
	return S_IFGITLINK;
	return S_IFREG | ce_permissions(mode);
}
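/*
 * Illustrative sketch, not part of this patch: with "src" now const,
 * a caller holding a read-only entry can still have its sha1/stat data
 * copied.  Per the comment above, the destination keeps its own name
 * and its CE_HASHED/CE_UNHASHED state bits.  The helper name below is
 * hypothetical.
 */
static void update_entry_from(struct cache_entry *dst,
			      const struct cache_entry *src)
{
	copy_cache_entry(dst, src);	/* never writes through "src" */
	/* dst->ce_flags & CE_STATE_MASK is preserved across the copy */
}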
-static inline unsigned int ce_mode_from_stat(struct cache_entry *ce, unsigned int mode)
+static inline unsigned int ce_mode_from_stat(const struct cache_entry *ce,
+					     unsigned int mode)
{
	extern int trust_executable_bit, has_symlinks;
	if (!has_symlinks && S_ISREG(mode) &&
#define CE_MATCH_RACY_IS_DIRTY 02
/* do stat comparison even if CE_SKIP_WORKTREE is true */
#define CE_MATCH_IGNORE_SKIP_WORKTREE 04
-extern int ie_match_stat(const struct index_state *, struct cache_entry *, struct stat *, unsigned int);
-extern int ie_modified(const struct index_state *, struct cache_entry *, struct stat *, unsigned int);
+extern int ie_match_stat(const struct index_state *, const struct cache_entry *, struct stat *, unsigned int);
+extern int ie_modified(const struct index_state *, const struct cache_entry *, struct stat *, unsigned int);
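/*
 * Illustrative sketch, not part of this patch: the constified
 * prototypes let read-only index walks ask whether an entry still
 * matches the working-tree stat data.  ie_match_stat() returns 0 when
 * nothing changed; the CE_MATCH_* bits relax individual checks (e.g.
 * treating racily-clean entries as dirty).  Hypothetical caller:
 */
static int entry_is_dirty(const struct index_state *istate,
			  const struct cache_entry *ce, struct stat *st)
{
	return ie_match_stat(istate, ce, st, CE_MATCH_RACY_IS_DIRTY) != 0;
}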
#define PATHSPEC_ONESTAR 1	/* the pathspec pattern satisfies GFNM_ONESTAR */
	unsigned int
		force:1,
		forced_update:1,
-		merge:1,
		deletion:1,
		matched:1;
+
+	/*
+	 * Order is important here, as we write to FETCH_HEAD
+	 * in numeric order. And the default NOT_FOR_MERGE
+	 * should be 0, so that xcalloc'd structures get it
+	 * by default.
+	 */
+	enum {
+		FETCH_HEAD_MERGE = -1,
+		FETCH_HEAD_NOT_FOR_MERGE = 0,
+		FETCH_HEAD_IGNORE = 1
+	} fetch_head_status;
+
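/*
 * Illustrative sketch, not part of this patch: FETCH_HEAD_NOT_FOR_MERGE
 * being 0 means an xcalloc'd ref already carries the right default, and
 * the -1/0/1 numbering lets the FETCH_HEAD writer emit merge candidates
 * first simply by walking the statuses in numeric order.  Hypothetical
 * helper:
 */
static void mark_fetch_head_status(struct ref *ref, int for_merge)
{
	/* xcalloc'd refs start out as FETCH_HEAD_NOT_FOR_MERGE (0) */
	if (for_merge)
		ref->fetch_head_status = FETCH_HEAD_MERGE;
}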
	enum {
		REF_STATUS_NONE = 0,
		REF_STATUS_OK,