#include "commit.h"
#include "walker.h"
#include "http.h"
+#include "list.h"
struct alt_base {
char *base;
int got_indices;
struct packed_git *packs;
struct alt_base *next;
};
struct object_request {
struct walker *walker;
unsigned char sha1[20];
struct alt_base *repo;
enum object_request_state state;
struct http_object_request *req;
- struct object_request *next;
+ struct list_head node;
};
struct walker_data {
const char *url;
struct alt_base *alt;
};
-static struct object_request *object_queue_head;
+static LIST_HEAD(object_queue_head);
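The queue is now a kernel-style intrusive doubly-linked list: git's list.h mirrors the Linux kernel API, the payload embeds a struct list_head, LIST_HEAD() declares an empty list whose head points at itself, and list_entry() maps a node pointer back to the struct that contains it. Below is a minimal, self-contained sketch of that pattern, for illustration only; the simplified macros and helpers are re-implementations rather than git's actual list.h, and the reduced object_request is a stand-in for the real struct.

	#include <stddef.h>
	#include <stdio.h>

	struct list_head { struct list_head *next, *prev; };

	/* an empty list: the head points at itself in both directions */
	#define LIST_HEAD(name) struct list_head name = { &(name), &(name) }

	/* recover the containing struct from an embedded node pointer */
	#define list_entry(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	static void list_add_tail(struct list_head *n, struct list_head *head)
	{
		n->prev = head->prev;		/* old tail */
		n->next = head;
		head->prev->next = n;		/* O(1): no walk to find the tail */
		head->prev = n;
	}

	static void list_del(struct list_head *n)
	{
		n->prev->next = n->next;	/* O(1): the node knows both neighbours */
		n->next->prev = n->prev;
	}

	struct object_request {			/* stand-in payload, not the real struct */
		int id;
		struct list_head node;		/* list node embedded in the payload */
	};

	static LIST_HEAD(object_queue_head);

	int main(void)
	{
		struct object_request reqs[3];
		struct list_head *pos;
		int i;

		for (i = 0; i < 3; i++) {
			reqs[i].id = i;
			list_add_tail(&reqs[i].node, &object_queue_head);
		}
		list_del(&reqs[1].node);	/* unlink the middle entry directly */
		for (pos = object_queue_head.next; pos != &object_queue_head;
		     pos = pos->next) {
			struct object_request *r =
				list_entry(pos, struct object_request, node);
			printf("request %d\n", r->id);	/* prints 0, then 2 */
		}
		return 0;
	}

Because every element stores both neighbours, deletion and tail insertion no longer need the O(n) pointer chase the old singly-linked queue required, which is exactly what the hunks below exploit.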
static void fetch_alternates(struct walker *walker, const char *base);
static void release_object_request(struct object_request *obj_req)
{
- struct object_request *entry = object_queue_head;
-
if (obj_req->req !=NULL && obj_req->req->localfile != -1)
error("fd leakage in release: %d", obj_req->req->localfile);
- if (obj_req == object_queue_head) {
- object_queue_head = obj_req->next;
- } else {
- while (entry->next != NULL && entry->next != obj_req)
- entry = entry->next;
- if (entry->next == obj_req)
- entry->next = entry->next->next;
- }
+ list_del(&obj_req->node);
free(obj_req);
}
static int fill_active_slot(struct walker *walker)
{
struct object_request *obj_req;
+ struct list_head *pos, *tmp, *head = &object_queue_head;
- for (obj_req = object_queue_head; obj_req; obj_req = obj_req->next) {
+ list_for_each_safe(pos, tmp, head) {
+ obj_req = list_entry(pos, struct object_request, node);
if (obj_req->state == WAITING) {
if (has_sha1_file(obj_req->sha1))
obj_req->state = COMPLETE;
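fill_active_slot() now iterates with list_for_each_safe() instead of following ->next by hand. The "safe" form snapshots the successor in tmp before the body runs, so the walk stays valid even if the current entry is unlinked (release_object_request() now removes entries with list_del()). Conceptually the loop above expands to something like the following; this is a simplified paraphrase for illustration, not the literal macro text from list.h:

	for (pos = head->next, tmp = pos->next;
	     pos != head;
	     pos = tmp, tmp = pos->next) {
		obj_req = list_entry(pos, struct object_request, node);
		/*
		 * The body may unlink pos (e.g. via list_del) without
		 * breaking the walk, because tmp already holds the
		 * next node.
		 */
	}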
static void prefetch(struct walker *walker, unsigned char *sha1)
{
struct object_request *newreq;
- struct object_request *tail;
struct walker_data *data = walker->data;
newreq = xmalloc(sizeof(*newreq));
newreq->repo = data->alt;
newreq->state = WAITING;
newreq->req = NULL;
- newreq->next = NULL;
http_is_verbose = walker->get_verbosely;
-
- if (object_queue_head == NULL) {
- object_queue_head = newreq;
- } else {
- tail = object_queue_head;
- while (tail->next != NULL)
- tail = tail->next;
- tail->next = newreq;
- }
+ list_add_tail(&newreq->node, &object_queue_head);
#ifdef USE_CURL_MULTI
fill_active_slots();
step_active_slots();
#endif
}
release_object_request(obj_req);
}
-static int fetch_object(struct walker *walker, struct alt_base *repo, unsigned char *sha1)
+static int fetch_object(struct walker *walker, unsigned char *sha1)
{
char *hex = sha1_to_hex(sha1);
int ret = 0;
- struct object_request *obj_req = object_queue_head;
+ struct object_request *obj_req = NULL;
struct http_object_request *req;
+ struct list_head *pos, *head = &object_queue_head;
- while (obj_req != NULL && hashcmp(obj_req->sha1, sha1))
- obj_req = obj_req->next;
+ list_for_each(pos, head) {
+ obj_req = list_entry(pos, struct object_request, node);
+ if (!hashcmp(obj_req->sha1, sha1))
+ break;
+ }
if (obj_req == NULL)
return error("Couldn't find request for %s in the queue", hex);
if (req->localfile != -1) {
close(req->localfile);
req->localfile = -1;
}
+ /*
+ * we turned off CURLOPT_FAILONERROR to avoid losing a
+ * persistent connection and got CURLE_OK.
+ */
+ if (req->http_code >= 300 && req->curl_result == CURLE_OK &&
+ (starts_with(req->url, "http://") ||
+ starts_with(req->url, "https://"))) {
+ req->curl_result = CURLE_HTTP_RETURNED_ERROR;
+ xsnprintf(req->errorstr, sizeof(req->errorstr),
+ "HTTP request failed");
+ }
+
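The added block compensates for CURLOPT_FAILONERROR being turned off: with that option disabled, libcurl reports CURLE_OK even when the server answers with an error status, so the walker inspects req->http_code itself and promotes anything >= 300 on a dumb-HTTP URL to CURLE_HTTP_RETURNED_ERROR. The standalone sketch below shows the same idea against plain libcurl; it is illustration only, the URL is a placeholder, and none of it is git's http.c:

	#include <curl/curl.h>
	#include <stdio.h>

	int main(void)
	{
		CURL *h;
		CURLcode res;
		long code = 0;

		curl_global_init(CURL_GLOBAL_DEFAULT);
		h = curl_easy_init();
		if (!h)
			return 1;
		/* placeholder URL, for illustration only */
		curl_easy_setopt(h, CURLOPT_URL, "https://example.com/missing");
		curl_easy_setopt(h, CURLOPT_NOBODY, 1L);	/* HEAD request, skip the body */
		curl_easy_setopt(h, CURLOPT_FAILONERROR, 0L);	/* keep the connection on HTTP errors */
		res = curl_easy_perform(h);
		curl_easy_getinfo(h, CURLINFO_RESPONSE_CODE, &code);
		if (res == CURLE_OK && code >= 300)
			res = CURLE_HTTP_RETURNED_ERROR;	/* surface the failure to the caller */
		printf("curl: %s, HTTP status %ld\n", curl_easy_strerror(res), code);
		curl_easy_cleanup(h);
		curl_global_cleanup();
		return 0;
	}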
if (obj_req->state == ABORTED) {
ret = error("Request for %s aborted", hex);
} else if (req->curl_result != CURLE_OK &&
struct walker_data *data = walker->data;
struct alt_base *altbase = data->alt;
- if (!fetch_object(walker, altbase, sha1))
+ if (!fetch_object(walker, sha1))
return 0;
while (altbase) {
if (!http_fetch_pack(walker, altbase, sha1))