#include "cache.h"
#include "cache-tree.h"
#include "tree.h"
#include "blob.h"
#include "commit.h"
#include "tag.h"
#include "tree-walk.h"

const char *tree_type = "tree";

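/*
 * Turn a tree entry into a cache entry at the given stage and add it
 * to the index, using "opt" as the add_cache_entry() flags.  Directory
 * entries are not added themselves; returning READ_TREE_RECURSIVE
 * tells the tree walker to descend into them instead.
 */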
static int read_one_entry_opt(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, int opt)
{
	int len;
	unsigned int size;
	struct cache_entry *ce;

	if (S_ISDIR(mode))
		return READ_TREE_RECURSIVE;

	len = strlen(pathname);
	size = cache_entry_size(baselen + len);
	ce = xcalloc(1, size);

	ce->ce_mode = create_ce_mode(mode);
	ce->ce_flags = create_ce_flags(baselen + len, stage);
	memcpy(ce->name, base, baselen);
	memcpy(ce->name + baselen, pathname, len+1);
	hashcpy(ce->sha1, sha1);
	return add_cache_entry(ce, opt);
}

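/*
 * Add the entry through the regular add_cache_entry() path
 * (ADD_CACHE_OK_TO_ADD), skipping the directory/file conflict check.
 */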
static int read_one_entry(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage)
{
	return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
				  ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
}

38/*
39 * This is used when the caller knows there is no existing entries at
40 * the stage that will conflict with the entry being added.
41 */
42static int read_one_entry_quick(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage)
43{
44 return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
45 ADD_CACHE_JUST_APPEND);
46}
47
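/*
 * Decide whether the entry "path" under "base" is covered by the
 * NULL-terminated list of pathspecs in "paths".  A NULL list matches
 * everything; a match on only a leading directory component counts
 * solely for directory entries.
 */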
static int match_tree_entry(const char *base, int baselen, const char *path, unsigned int mode, const char **paths)
{
	const char *match;
	int pathlen;

	if (!paths)
		return 1;
	pathlen = strlen(path);
	while ((match = *paths++) != NULL) {
		int matchlen = strlen(match);

		if (baselen >= matchlen) {
			/* If it doesn't match, move along... */
			if (strncmp(base, match, matchlen))
				continue;
			/* The base is a subdirectory of a path which was specified. */
			return 1;
		}

		/* Does the base match? */
		if (strncmp(base, match, baselen))
			continue;

		match += baselen;
		matchlen -= baselen;

		if (pathlen > matchlen)
			continue;

		if (matchlen > pathlen) {
			if (match[pathlen] != '/')
				continue;
			if (!S_ISDIR(mode))
				continue;
		}

		if (strncmp(path, match, pathlen))
			continue;

		return 1;
	}
	return 0;
}

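/*
 * Walk every entry of "tree" that passes match_tree_entry() and hand
 * it to "fn".  When "fn" returns READ_TREE_RECURSIVE for a directory
 * entry, recurse into the subtree with "base" extended by the entry
 * name and a trailing '/'.
 *
 * A minimal, hypothetical caller (assuming the index has already been
 * read) that pulls Documentation/ into stage 1 might look like:
 *
 *	const char *paths[] = { "Documentation/", NULL };
 *	read_tree_recursive(tree, "", 0, 1, paths, read_one_entry);
 */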
int read_tree_recursive(struct tree *tree,
			const char *base, int baselen,
			int stage, const char **match,
			read_tree_fn_t fn)
{
	struct tree_desc desc;
	struct name_entry entry;

	if (parse_tree(tree))
		return -1;

	init_tree_desc(&desc, tree->buffer, tree->size);

	while (tree_entry(&desc, &entry)) {
		if (!match_tree_entry(base, baselen, entry.path, entry.mode, match))
			continue;

		switch (fn(entry.sha1, base, baselen, entry.path, entry.mode, stage)) {
		case 0:
			continue;
		case READ_TREE_RECURSIVE:
			break;
		default:
			return -1;
		}
		if (S_ISDIR(entry.mode)) {
			int retval;
			char *newbase;
			unsigned int pathlen = tree_entry_len(entry.path, entry.sha1);

			newbase = xmalloc(baselen + 1 + pathlen);
			memcpy(newbase, base, baselen);
			memcpy(newbase + baselen, entry.path, pathlen);
			newbase[baselen + pathlen] = '/';
			retval = read_tree_recursive(lookup_tree(entry.sha1),
						     newbase,
						     baselen + pathlen + 1,
						     stage, match, fn);
			free(newbase);
			if (retval)
				return -1;
			continue;
		}
	}
	return 0;
}

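/*
 * qsort() comparator for an array of cache_entry pointers, ordering
 * them by name and stage the same way the index itself is sorted.
 */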
static int cmp_cache_name_compare(const void *a_, const void *b_)
{
	const struct cache_entry *ce1, *ce2;

	ce1 = *((const struct cache_entry **)a_);
	ce2 = *((const struct cache_entry **)b_);
	return cache_name_compare(ce1->name, ce1->ce_flags,
				  ce2->name, ce2->ce_flags);
}

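/*
 * Read the whole tree into the index at the given stage.  The fast
 * path appends entries and sorts once at the end; it is only safe
 * when nothing already occupies that stage (see below).
 */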
int read_tree(struct tree *tree, int stage, const char **match)
{
	read_tree_fn_t fn = NULL;
	int i, err;

	/*
	 * Currently the only existing callers of this function all
	 * call it with stage=1 and after making sure there is nothing
	 * at that stage; we could always use read_one_entry_quick().
	 *
	 * But when we decide to straighten out git-read-tree not to
	 * use unpack_trees() in some cases, this will probably start
	 * to matter.
	 */

	/*
	 * See if we have a cache entry at the stage.  If so,
	 * do it the original slow way, otherwise, append and then
	 * sort at the end.
	 */
	for (i = 0; !fn && i < active_nr; i++) {
		struct cache_entry *ce = active_cache[i];
		if (ce_stage(ce) == stage)
			fn = read_one_entry;
	}

	if (!fn)
		fn = read_one_entry_quick;
	err = read_tree_recursive(tree, "", 0, stage, match, fn);
	if (fn == read_one_entry || err)
		return err;

	/*
	 * Sort the cache entries -- we need to nuke the cache tree, though.
	 */
	cache_tree_free(&active_cache_tree);
	qsort(active_cache, active_nr, sizeof(active_cache[0]),
	      cmp_cache_name_compare);
	return 0;
}

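/*
 * Find or create the in-core object for this tree.  Returns NULL if
 * the sha1 is already known to be an object of a different type.
 */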
struct tree *lookup_tree(const unsigned char *sha1)
{
	struct object *obj = lookup_object(sha1);
	if (!obj)
		return create_object(sha1, OBJ_TREE, alloc_tree_node());
	if (!obj->type)
		obj->type = OBJ_TREE;
	if (obj->type != OBJ_TREE) {
		error("Object %s is a %s, not a tree",
		      sha1_to_hex(sha1), typename(obj->type));
		return NULL;
	}
	return (struct tree *) obj;
}

205/*
206 * NOTE! Tree refs to external git repositories
207 * (ie gitlinks) do not count as real references.
208 *
209 * You don't have to have those repositories
210 * available at all, much less have the objects
211 * accessible from the current repository.
212 */
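/*
 * Record the objects this tree refers to -- one blob or subtree per
 * entry -- so the object layer can track reachability.  Gitlink
 * entries are skipped for the reason described above.
 */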
static void track_tree_refs(struct tree *item)
{
	int n_refs = 0, i;
	struct object_refs *refs;
	struct tree_desc desc;
	struct name_entry entry;

	/* Count how many entries there are.. */
	init_tree_desc(&desc, item->buffer, item->size);
	while (tree_entry(&desc, &entry)) {
		if (S_ISGITLINK(entry.mode))
			continue;
		n_refs++;
	}

	/* Allocate object refs and walk it again.. */
	i = 0;
	refs = alloc_object_refs(n_refs);
	init_tree_desc(&desc, item->buffer, item->size);
	while (tree_entry(&desc, &entry)) {
		struct object *obj;

		if (S_ISGITLINK(entry.mode))
			continue;
		if (S_ISDIR(entry.mode))
			obj = &lookup_tree(entry.sha1)->object;
		else if (S_ISREG(entry.mode) || S_ISLNK(entry.mode))
			obj = &lookup_blob(entry.sha1)->object;
		else {
			warning("in tree %s: entry %s has bad mode %.6o\n",
				sha1_to_hex(item->object.sha1), entry.path, entry.mode);
			obj = lookup_unknown_object(entry.sha1);
		}
		refs->ref[i++] = obj;
	}
	set_object_refs(&item->object, refs);
}

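/*
 * Attach an already-loaded buffer to the tree object and mark it
 * parsed; the buffer is kept and referenced by later tree walks.
 */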
int parse_tree_buffer(struct tree *item, void *buffer, unsigned long size)
{
	if (item->object.parsed)
		return 0;
	item->object.parsed = 1;
	item->buffer = buffer;
	item->size = size;

	if (track_object_refs)
		track_tree_refs(item);
	return 0;
}

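/*
 * Read the tree object from the object database and parse it,
 * unless it has been parsed already.
 */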
int parse_tree(struct tree *item)
{
	enum object_type type;
	void *buffer;
	unsigned long size;

	if (item->object.parsed)
		return 0;
	buffer = read_sha1_file(item->object.sha1, &type, &size);
	if (!buffer)
		return error("Could not read %s",
			     sha1_to_hex(item->object.sha1));
	if (type != OBJ_TREE) {
		free(buffer);
		return error("Object %s not a tree",
			     sha1_to_hex(item->object.sha1));
	}
	return parse_tree_buffer(item, buffer, size);
}

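/*
 * Peel the given object until a tree is reached: a commit yields its
 * tree, a tag yields whatever it points at.  Returns NULL if no tree
 * can be reached.
 */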
struct tree *parse_tree_indirect(const unsigned char *sha1)
{
	struct object *obj = parse_object(sha1);
	do {
		if (!obj)
			return NULL;
		if (obj->type == OBJ_TREE)
			return (struct tree *) obj;
		else if (obj->type == OBJ_COMMIT)
			obj = &(((struct commit *) obj)->tree->object);
		else if (obj->type == OBJ_TAG)
			obj = ((struct tag *) obj)->tagged;
		else
			return NULL;
		if (!obj->parsed)
			parse_object(obj->sha1);
	} while (1);
}