/*
 * Generic reference iterator infrastructure. See refs-internal.h for
 * documentation about the design and use of reference iterators.
 */

#include "cache.h"
#include "refs.h"
#include "refs/refs-internal.h"
#include "iterator.h"

int ref_iterator_advance(struct ref_iterator *ref_iterator)
{
	return ref_iterator->vtable->advance(ref_iterator);
}

int ref_iterator_peel(struct ref_iterator *ref_iterator,
		      struct object_id *peeled)
{
	return ref_iterator->vtable->peel(ref_iterator, peeled);
}

int ref_iterator_abort(struct ref_iterator *ref_iterator)
{
	return ref_iterator->vtable->abort(ref_iterator);
}
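
/*
 * A minimal sketch of the calling convention, assuming an iterator
 * obtained from one of the *_begin() functions below (see
 * refs-internal.h for the authoritative documentation, and
 * do_for_each_ref_iterator() at the bottom of this file for a real
 * caller):
 *
 *	struct ref_iterator *iter = ...;
 *	int ok;
 *
 *	while ((ok = ref_iterator_advance(iter)) == ITER_OK) {
 *		... use iter->refname, iter->oid, and iter->flags ...
 *
 *		if (we_want_to_stop_early) {
 *			ref_iterator_abort(iter);
 *			return ...;
 *		}
 *	}
 *	if (ok == ITER_ERROR)
 *		... report the error ...
 *
 * ITER_DONE and ITER_ERROR both mean that the iterator has already
 * been cleaned up and must not be used again; "we_want_to_stop_early"
 * is a placeholder for whatever early-exit condition the caller has.
 */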

void base_ref_iterator_init(struct ref_iterator *iter,
			    struct ref_iterator_vtable *vtable)
{
	iter->vtable = vtable;
	iter->refname = NULL;
	iter->oid = NULL;
	iter->flags = 0;
}

void base_ref_iterator_free(struct ref_iterator *iter)
{
	/* Help make use-after-free bugs fail quickly: */
	iter->vtable = NULL;
	free(iter);
}

struct empty_ref_iterator {
	struct ref_iterator base;
};

static int empty_ref_iterator_advance(struct ref_iterator *ref_iterator)
{
	return ref_iterator_abort(ref_iterator);
}

static int empty_ref_iterator_peel(struct ref_iterator *ref_iterator,
				   struct object_id *peeled)
{
	die("BUG: peel called for empty iterator");
}

static int empty_ref_iterator_abort(struct ref_iterator *ref_iterator)
{
	base_ref_iterator_free(ref_iterator);
	return ITER_DONE;
}

static struct ref_iterator_vtable empty_ref_iterator_vtable = {
	empty_ref_iterator_advance,
	empty_ref_iterator_peel,
	empty_ref_iterator_abort
};

struct ref_iterator *empty_ref_iterator_begin(void)
{
	struct empty_ref_iterator *iter = xcalloc(1, sizeof(*iter));
	struct ref_iterator *ref_iterator = &iter->base;

	base_ref_iterator_init(ref_iterator, &empty_ref_iterator_vtable);
	return ref_iterator;
}

int is_empty_ref_iterator(struct ref_iterator *ref_iterator)
{
	return ref_iterator->vtable == &empty_ref_iterator_vtable;
}

struct merge_ref_iterator {
	struct ref_iterator base;

	struct ref_iterator *iter0, *iter1;

	ref_iterator_select_fn *select;
	void *cb_data;

	/*
	 * A pointer to iter0 or iter1 (whichever is supplying the
	 * current value), or NULL if advance has not yet been called.
	 */
	struct ref_iterator **current;
};

static int merge_ref_iterator_advance(struct ref_iterator *ref_iterator)
{
	struct merge_ref_iterator *iter =
		(struct merge_ref_iterator *)ref_iterator;
	int ok;

	if (!iter->current) {
		/* Initialize: advance both iterators to their first entries */
		if ((ok = ref_iterator_advance(iter->iter0)) != ITER_OK) {
			iter->iter0 = NULL;
			if (ok == ITER_ERROR)
				goto error;
		}
		if ((ok = ref_iterator_advance(iter->iter1)) != ITER_OK) {
			iter->iter1 = NULL;
			if (ok == ITER_ERROR)
				goto error;
		}
	} else {
		/*
		 * Advance the current iterator past the just-used
		 * entry:
		 */
		if ((ok = ref_iterator_advance(*iter->current)) != ITER_OK) {
			*iter->current = NULL;
			if (ok == ITER_ERROR)
				goto error;
		}
	}

	/* Loop until we find an entry that we can yield. */
	while (1) {
		struct ref_iterator **secondary;
		enum iterator_selection selection =
			iter->select(iter->iter0, iter->iter1, iter->cb_data);

		if (selection == ITER_SELECT_DONE) {
			return ref_iterator_abort(ref_iterator);
		} else if (selection == ITER_SELECT_ERROR) {
			ref_iterator_abort(ref_iterator);
			return ITER_ERROR;
		}

		if ((selection & ITER_CURRENT_SELECTION_MASK) == 0) {
			iter->current = &iter->iter0;
			secondary = &iter->iter1;
		} else {
			iter->current = &iter->iter1;
			secondary = &iter->iter0;
		}

		if (selection & ITER_SKIP_SECONDARY) {
			if ((ok = ref_iterator_advance(*secondary)) != ITER_OK) {
				*secondary = NULL;
				if (ok == ITER_ERROR)
					goto error;
			}
		}

		if (selection & ITER_YIELD_CURRENT) {
			iter->base.refname = (*iter->current)->refname;
			iter->base.oid = (*iter->current)->oid;
			iter->base.flags = (*iter->current)->flags;
			return ITER_OK;
		}
	}

error:
	ref_iterator_abort(ref_iterator);
	return ITER_ERROR;
}

static int merge_ref_iterator_peel(struct ref_iterator *ref_iterator,
				   struct object_id *peeled)
{
	struct merge_ref_iterator *iter =
		(struct merge_ref_iterator *)ref_iterator;

	if (!iter->current) {
		die("BUG: peel called before advance for merge iterator");
	}
	return ref_iterator_peel(*iter->current, peeled);
}

static int merge_ref_iterator_abort(struct ref_iterator *ref_iterator)
{
	struct merge_ref_iterator *iter =
		(struct merge_ref_iterator *)ref_iterator;
	int ok = ITER_DONE;

	if (iter->iter0) {
		if (ref_iterator_abort(iter->iter0) != ITER_DONE)
			ok = ITER_ERROR;
	}
	if (iter->iter1) {
		if (ref_iterator_abort(iter->iter1) != ITER_DONE)
			ok = ITER_ERROR;
	}
	base_ref_iterator_free(ref_iterator);
	return ok;
}

static struct ref_iterator_vtable merge_ref_iterator_vtable = {
	merge_ref_iterator_advance,
	merge_ref_iterator_peel,
	merge_ref_iterator_abort
};

struct ref_iterator *merge_ref_iterator_begin(
		struct ref_iterator *iter0, struct ref_iterator *iter1,
		ref_iterator_select_fn *select, void *cb_data)
{
	struct merge_ref_iterator *iter = xcalloc(1, sizeof(*iter));
	struct ref_iterator *ref_iterator = &iter->base;

	/*
	 * We can't do the same kind of is_empty_ref_iterator()-style
	 * optimization here as overlay_ref_iterator_begin() does,
	 * because we don't know the semantics of the select function.
	 * It might, for example, implement "intersect" by passing
	 * references through only if they exist in both iterators.
	 */

	base_ref_iterator_init(ref_iterator, &merge_ref_iterator_vtable);
	iter->iter0 = iter0;
	iter->iter1 = iter1;
	iter->select = select;
	iter->cb_data = cb_data;
	iter->current = NULL;
	return ref_iterator;
}
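
/*
 * Illustrative sketch (the names iter_a, iter_b, my_select, and
 * my_cb_data are hypothetical): the merge iterator takes ownership of
 * both sub-iterators and aborts whichever of them are still live when
 * it is itself exhausted or aborted.
 *
 *	struct ref_iterator *combined =
 *		merge_ref_iterator_begin(iter_a, iter_b,
 *					 my_select, my_cb_data);
 *
 *	while (ref_iterator_advance(combined) == ITER_OK)
 *		... use combined->refname, combined->oid, combined->flags ...
 */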

/*
 * A ref_iterator_select_fn that overlays the items from front on top
 * of those from back (like loose refs over packed refs). See
 * overlay_ref_iterator_begin().
 */
static enum iterator_selection overlay_iterator_select(
		struct ref_iterator *front, struct ref_iterator *back,
		void *cb_data)
{
	int cmp;

	if (!back)
		return front ? ITER_SELECT_0 : ITER_SELECT_DONE;
	else if (!front)
		return ITER_SELECT_1;

	cmp = strcmp(front->refname, back->refname);

	if (cmp < 0)
		return ITER_SELECT_0;
	else if (cmp > 0)
		return ITER_SELECT_1;
	else
		return ITER_SELECT_0_SKIP_1;
}
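
/*
 * For example (illustrative; both inputs are assumed to yield their
 * refnames in sorted order, which is what makes the strcmp() above
 * sufficient): if front and back both currently yield
 * "refs/heads/master", the function returns ITER_SELECT_0_SKIP_1, so
 * only front's entry is reported and back's entry for the same name
 * is discarded; refnames present on only one side are reported as-is.
 */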

struct ref_iterator *overlay_ref_iterator_begin(
		struct ref_iterator *front, struct ref_iterator *back)
{
	/*
	 * Optimization: if one of the iterators is empty, return the
	 * other one rather than incurring the overhead of wrapping
	 * them.
	 */
	if (is_empty_ref_iterator(front)) {
		ref_iterator_abort(front);
		return back;
	} else if (is_empty_ref_iterator(back)) {
		ref_iterator_abort(back);
		return front;
	}

	return merge_ref_iterator_begin(front, back,
					overlay_iterator_select, NULL);
}

struct prefix_ref_iterator {
	struct ref_iterator base;

	struct ref_iterator *iter0;
	char *prefix;
	int trim;
};

static int prefix_ref_iterator_advance(struct ref_iterator *ref_iterator)
{
	struct prefix_ref_iterator *iter =
		(struct prefix_ref_iterator *)ref_iterator;
	int ok;

	while ((ok = ref_iterator_advance(iter->iter0)) == ITER_OK) {
		if (!starts_with(iter->iter0->refname, iter->prefix))
			continue;

		iter->base.refname = iter->iter0->refname + iter->trim;
		iter->base.oid = iter->iter0->oid;
		iter->base.flags = iter->iter0->flags;
		return ITER_OK;
	}

	iter->iter0 = NULL;
	if (ref_iterator_abort(ref_iterator) != ITER_DONE)
		return ITER_ERROR;
	return ok;
}

static int prefix_ref_iterator_peel(struct ref_iterator *ref_iterator,
				    struct object_id *peeled)
{
	struct prefix_ref_iterator *iter =
		(struct prefix_ref_iterator *)ref_iterator;

	return ref_iterator_peel(iter->iter0, peeled);
}

static int prefix_ref_iterator_abort(struct ref_iterator *ref_iterator)
{
	struct prefix_ref_iterator *iter =
		(struct prefix_ref_iterator *)ref_iterator;
	int ok = ITER_DONE;

	if (iter->iter0)
		ok = ref_iterator_abort(iter->iter0);
	free(iter->prefix);
	base_ref_iterator_free(ref_iterator);
	return ok;
}

static struct ref_iterator_vtable prefix_ref_iterator_vtable = {
	prefix_ref_iterator_advance,
	prefix_ref_iterator_peel,
	prefix_ref_iterator_abort
};

struct ref_iterator *prefix_ref_iterator_begin(struct ref_iterator *iter0,
					       const char *prefix,
					       int trim)
{
	struct prefix_ref_iterator *iter;
	struct ref_iterator *ref_iterator;

	if (!*prefix && !trim)
		return iter0; /* optimization: no need to wrap iterator */

	iter = xcalloc(1, sizeof(*iter));
	ref_iterator = &iter->base;

	base_ref_iterator_init(ref_iterator, &prefix_ref_iterator_vtable);

	iter->iter0 = iter0;
	iter->prefix = xstrdup(prefix);
	iter->trim = trim;

	return ref_iterator;
}
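
/*
 * For example (an illustrative sketch, not taken from a real caller):
 * to report only the references under "refs/tags/", with that prefix
 * stripped from the refnames exposed to the consumer:
 *
 *	struct ref_iterator *tags =
 *		prefix_ref_iterator_begin(iter0, "refs/tags/",
 *					  strlen("refs/tags/"));
 *
 * Passing trim == 0 reports the full, untrimmed refnames instead.
 */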

struct ref_iterator *current_ref_iter = NULL;

int do_for_each_ref_iterator(struct ref_iterator *iter,
			     each_ref_fn fn, void *cb_data)
{
	int retval = 0, ok;
	struct ref_iterator *old_ref_iter = current_ref_iter;

	current_ref_iter = iter;
	while ((ok = ref_iterator_advance(iter)) == ITER_OK) {
		retval = fn(iter->refname, iter->oid, iter->flags, cb_data);
		if (retval) {
			/*
			 * If ref_iterator_abort() returns ITER_ERROR,
			 * we ignore that error in deference to the
			 * callback function's return value.
			 */
			ref_iterator_abort(iter);
			goto out;
		}
	}

out:
	current_ref_iter = old_ref_iter;
	if (ok == ITER_ERROR)
		return -1;
	return retval;
}
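
/*
 * A minimal sketch of a callback that could be passed as "fn" above
 * (the name count_refs and the use of cb_data as an int counter are
 * hypothetical; the parameter list mirrors how fn is invoked above):
 *
 *	static int count_refs(const char *refname,
 *			      const struct object_id *oid,
 *			      int flags, void *cb_data)
 *	{
 *		int *count = cb_data;
 *
 *		(*count)++;
 *		return 0;
 *	}
 *
 * A non-zero return value from the callback stops the iteration and is
 * returned by do_for_each_ref_iterator().
 */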