#
# This program is licensed under the GPLv2
+use 5.008;
use strict;
use warnings;
use CGI qw(:standard :escapeHTML -nosticky);
use CGI::Util qw(unescape);
-use CGI::Carp qw(fatalsToBrowser);
+use CGI::Carp qw(fatalsToBrowser set_message);
use Encode;
use Fcntl ':mode';
use File::Find qw();
use File::Basename qw(basename);
+use Time::HiRes qw(gettimeofday tv_interval);
binmode STDOUT, ':utf8';
-our $t0;
-if (eval { require Time::HiRes; 1; }) {
- $t0 = [Time::HiRes::gettimeofday()];
-}
+our $t0 = [ gettimeofday() ];
our $number_of_git_cmds = 0;
BEGIN {
CGI->compile() if $ENV{'MOD_PERL'};
}
-our $cgi = new CGI;
our $version = "++GIT_VERSION++";
-our $my_url = $cgi->url();
-our $my_uri = $cgi->url(-absolute => 1);
-# Base URL for relative URLs in gitweb ($logo, $favicon, ...),
-# needed and used only for URLs with nonempty PATH_INFO
-our $base_url = $my_url;
+our ($my_url, $my_uri, $base_url, $path_info, $home_link);
+sub evaluate_uri {
+ our $cgi;
-# When the script is used as DirectoryIndex, the URL does not contain the name
-# of the script file itself, and $cgi->url() fails to strip PATH_INFO, so we
-# have to do it ourselves. We make $path_info global because it's also used
-# later on.
-#
-# Another issue with the script being the DirectoryIndex is that the resulting
-# $my_url data is not the full script URL: this is good, because we want
-# generated links to keep implying the script name if it wasn't explicitly
-# indicated in the URL we're handling, but it means that $my_url cannot be used
-# as base URL.
-# Therefore, if we needed to strip PATH_INFO, then we know that we have
-# to build the base URL ourselves:
-our $path_info = $ENV{"PATH_INFO"};
-if ($path_info) {
- if ($my_url =~ s,\Q$path_info\E$,, &&
- $my_uri =~ s,\Q$path_info\E$,, &&
- defined $ENV{'SCRIPT_NAME'}) {
- $base_url = $cgi->url(-base => 1) . $ENV{'SCRIPT_NAME'};
+ our $my_url = $cgi->url();
+ our $my_uri = $cgi->url(-absolute => 1);
+
+ # Base URL for relative URLs in gitweb ($logo, $favicon, ...),
+ # needed and used only for URLs with nonempty PATH_INFO
+ our $base_url = $my_url;
+
+ # When the script is used as DirectoryIndex, the URL does not contain the name
+ # of the script file itself, and $cgi->url() fails to strip PATH_INFO, so we
+ # have to do it ourselves. We make $path_info global because it's also used
+ # later on.
+ #
+ # Another issue with the script being the DirectoryIndex is that the resulting
+ # $my_url data is not the full script URL: this is good, because we want
+ # generated links to keep implying the script name if it wasn't explicitly
+ # indicated in the URL we're handling, but it means that $my_url cannot be used
+ # as base URL.
+ # Therefore, if we needed to strip PATH_INFO, then we know that we have
+ # to build the base URL ourselves:
+ our $path_info = $ENV{"PATH_INFO"};
+ if ($path_info) {
+ if ($my_url =~ s,\Q$path_info\E$,, &&
+ $my_uri =~ s,\Q$path_info\E$,, &&
+ defined $ENV{'SCRIPT_NAME'}) {
+ $base_url = $cgi->url(-base => 1) . $ENV{'SCRIPT_NAME'};
+ }
}
+
+ # target of the home link on top of all pages
+ our $home_link = $my_uri || "/";
}
# core git executable to use
# the number is relative to the projectroot
our $project_maxdepth = "++GITWEB_PROJECT_MAXDEPTH++";
-# target of the home link on top of all pages
-our $home_link = $my_uri || "/";
-
# string of the home link on top of all pages
our $home_link_str = "++GITWEB_HOME_LINK_STR++";
# the gitweb domain.
our $prevent_xss = 0;
+# Path to the highlight executable to use (must be the one from
+# http://www.andre-simon.de due to assumptions about parameters and output).
+# Useful if highlight is not installed on your webserver's PATH.
+# [Default: highlight]
+our $highlight_bin = "++HIGHLIGHT_BIN++";
+
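+# For example, $GITWEB_CONFIG could point $highlight_bin at a locally
+# installed binary (the path below is illustrative only):
+#
+#   our $highlight_bin = "/usr/local/bin/highlight";
+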
# information about snapshot formats that gitweb is capable of serving
our %known_snapshot_formats = (
# name => {
# Leave it undefined (or set to 'undef') to turn off load checking.
our $maxload = 300;
+# configuration for 'highlight' (http://www.andre-simon.de/)
+# match by basename
+our %highlight_basename = (
+ #'Program' => 'py',
+ #'Library' => 'py',
+ 'SConstruct' => 'py', # SCons equivalent of Makefile
+ 'Makefile' => 'make',
+);
+# match by extension
+our %highlight_ext = (
+ # main extensions, defining name of syntax;
+ # see files in /usr/share/highlight/langDefs/ directory
+ map { $_ => $_ }
+ qw(py c cpp rb java css php sh pl js tex bib xml awk bat ini spec tcl sql make),
+ # alternate extensions, see /etc/highlight/filetypes.conf
+ 'h' => 'c',
+ map { $_ => 'sh' } qw(bash zsh ksh),
+ map { $_ => 'cpp' } qw(cxx c++ cc),
+ map { $_ => 'php' } qw(php3 php4 php5 phps),
+ map { $_ => 'pl' } qw(perl pm), # perhaps also 'cgi'
+ map { $_ => 'make'} qw(mak mk),
+ map { $_ => 'xml' } qw(xhtml html htm),
+);
+
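+# Both maps can be extended from $GITWEB_CONFIG, for example (illustrative
+# additions, not shipped defaults; the values must name langDefs known to
+# the 'highlight' program):
+#
+#   $highlight_basename{'Rakefile'} = 'rb';
+#   $highlight_ext{'hxx'} = 'cpp';
+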
# You define site-wide feature defaults here; override them with
# $GITWEB_CONFIG as necessary.
our %feature = (
# return value of feature-sub indicates if to enable specified feature
#
# if there is no 'sub' key (no feature-sub), then feature cannot be
- # overriden
+ # overridden
#
# use gitweb_get_feature(<feature>) to retrieve the <feature> value
# (an array) or gitweb_check_feature(<feature>) to check if <feature>
'javascript-actions' => {
'override' => 0,
'default' => [0]},
+
+ # Syntax highlighting support. This is based on Daniel Svensson's
+ # and Sham Chukoury's work in gitweb-xmms2.git.
+	# It requires the 'highlight' program to be present in $PATH,
+	# and is therefore disabled by default.
+
+	# To enable it system-wide, add to $GITWEB_CONFIG:
+ # $feature{'highlight'}{'default'} = [1];
+
+ 'highlight' => {
+ 'sub' => sub { feature_bool('highlight', @_) },
+ 'override' => 0,
+ 'default' => [0]},
+
+ # Enable displaying of remote heads in the heads list
+
+	# To enable it system-wide, add to $GITWEB_CONFIG:
+	# $feature{'remote_heads'}{'default'} = [1];
+	# To allow per-project overrides, also set:
+	# $feature{'remote_heads'}{'override'} = 1;
+	# and use gitweb.remote_heads = 0|1 in the project config.
+ 'remote_heads' => {
+ 'sub' => sub { feature_bool('remote_heads', @_) },
+ 'override' => 0,
+ 'default' => [0]},
);
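+
+# Site-wide example for $GITWEB_CONFIG (illustrative, not shipped defaults):
+# enable syntax highlighting everywhere and let individual repositories opt
+# in to remote heads display via gitweb.remote_heads in their config:
+#
+#   $feature{'highlight'}{'default'} = [1];
+#   $feature{'remote_heads'}{'override'} = 1;
+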
sub gitweb_get_feature {
!$known_snapshot_formats{$_}{'disabled'}} @fmts;
}
-our $GITWEB_CONFIG = $ENV{'GITWEB_CONFIG'} || "++GITWEB_CONFIG++";
-our $GITWEB_CONFIG_SYSTEM = $ENV{'GITWEB_CONFIG_SYSTEM'} || "++GITWEB_CONFIG_SYSTEM++";
-# die if there are errors parsing config file
-if (-e $GITWEB_CONFIG) {
- do $GITWEB_CONFIG;
- die $@ if $@;
-} elsif (-e $GITWEB_CONFIG_SYSTEM) {
- do $GITWEB_CONFIG_SYSTEM;
- die $@ if $@;
+# If $per_request_config is set to a code reference, that code is run once per
+# request, allowing configuration that changes with each request to be updated
+# while the rest of the config file is run only once.
+#
+# Otherwise, if it is false, gitweb processes the config file only once;
+# if it is true, the gitweb config is re-run for each request.
+our $per_request_config = 1;
+
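+# A minimal sketch of a per-request hook in $GITWEB_CONFIG (the path and the
+# use of SERVER_NAME are illustrative only):
+#
+#   our $per_request_config = sub {
+#       # pick a project root per virtual host
+#       $projectroot = "/srv/git/" . ($ENV{'SERVER_NAME'} || 'default');
+#   };
+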
+our ($GITWEB_CONFIG, $GITWEB_CONFIG_SYSTEM);
+sub evaluate_gitweb_config {
+ our $GITWEB_CONFIG = $ENV{'GITWEB_CONFIG'} || "++GITWEB_CONFIG++";
+ our $GITWEB_CONFIG_SYSTEM = $ENV{'GITWEB_CONFIG_SYSTEM'} || "++GITWEB_CONFIG_SYSTEM++";
+ # die if there are errors parsing config file
+ if (-e $GITWEB_CONFIG) {
+ do $GITWEB_CONFIG;
+ die $@ if $@;
+ } elsif (-e $GITWEB_CONFIG_SYSTEM) {
+ do $GITWEB_CONFIG_SYSTEM;
+ die $@ if $@;
+ }
}
# Get loadavg of system, to compare against $maxload.
}
# version of the core git binary
-our $git_version = qx("$GIT" --version) =~ m/git version (.*)$/ ? $1 : "unknown";
-$number_of_git_cmds++;
-
-$projects_list ||= $projectroot;
+our $git_version;
+sub evaluate_git_version {
+ our $git_version = qx("$GIT" --version) =~ m/git version (.*)$/ ? $1 : "unknown";
+ $number_of_git_cmds++;
+}
-if (defined $maxload && get_loadavg() > $maxload) {
- die_error(503, "The load average on the server is too high");
+sub check_loadavg {
+ if (defined $maxload && get_loadavg() > $maxload) {
+ die_error(503, "The load average on the server is too high");
+ }
}
# ======================================================================
"log" => \&git_log,
"patch" => \&git_patch,
"patches" => \&git_patches,
+ "remotes" => \&git_remotes,
"rss" => \&git_rss,
"atom" => \&git_atom,
"search" => \&git_search,
# should be single values, but opt can be an array. We should probably
# build an array of parameters that can be multi-valued, but since for the time
# being it's only this one, we just single it out
-while (my ($name, $symbol) = each %cgi_param_mapping) {
- if ($symbol eq 'opt') {
- $input_params{$name} = [ $cgi->param($symbol) ];
- } else {
- $input_params{$name} = $cgi->param($symbol);
+sub evaluate_query_params {
+ our $cgi;
+
+ while (my ($name, $symbol) = each %cgi_param_mapping) {
+ if ($symbol eq 'opt') {
+ $input_params{$name} = [ $cgi->param($symbol) ];
+ } else {
+ $input_params{$name} = $cgi->param($symbol);
+ }
}
}
'history',
);
- # we want to catch
+ # we want to catch, among others
# [$hash_parent_base[:$file_parent]..]$hash_parent[:$file_name]
my ($parentrefname, $parentpathname, $refname, $pathname) =
- ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?(.+?)(?::(.+))?$/);
+ ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?([^:]+?)?(?::(.+))?$/);
# first, analyze the 'current' part
if (defined $pathname) {
# hash_base instead. It should also be noted that hand-crafted
# links having 'history' as an action and no pathname or hash
# set will fail, but that happens regardless of PATH_INFO.
- $input_params{'action'} ||= "shortlog";
- if (grep { $_ eq $input_params{'action'} } @wants_base) {
+ if (defined $parentrefname) {
+ # if there is parent let the default be 'shortlog' action
+ # (for http://git.example.com/repo.git/A..B links); if there
+ # is no parent, dispatch will detect type of object and set
+ # action appropriately if required (if action is not set)
+ $input_params{'action'} ||= "shortlog";
+ }
+ if ($input_params{'action'} &&
+ grep { $_ eq $input_params{'action'} } @wants_base) {
$input_params{'hash_base'} ||= $refname;
} else {
$input_params{'hash'} ||= $refname;
}
}
}
-evaluate_path_info();
-our $action = $input_params{'action'};
-if (defined $action) {
- if (!validate_action($action)) {
- die_error(400, "Invalid action parameter");
+our ($action, $project, $file_name, $file_parent, $hash, $hash_parent, $hash_base,
+ $hash_parent_base, @extra_options, $page, $searchtype, $search_use_regexp,
+ $searchtext, $search_regexp);
+sub evaluate_and_validate_params {
+ our $action = $input_params{'action'};
+ if (defined $action) {
+ if (!validate_action($action)) {
+ die_error(400, "Invalid action parameter");
+ }
}
-}
-# parameters which are pathnames
-our $project = $input_params{'project'};
-if (defined $project) {
- if (!validate_project($project)) {
- undef $project;
- die_error(404, "No such project");
+ # parameters which are pathnames
+ our $project = $input_params{'project'};
+ if (defined $project) {
+ if (!validate_project($project)) {
+ undef $project;
+ die_error(404, "No such project");
+ }
}
-}
-our $file_name = $input_params{'file_name'};
-if (defined $file_name) {
- if (!validate_pathname($file_name)) {
- die_error(400, "Invalid file parameter");
+ our $file_name = $input_params{'file_name'};
+ if (defined $file_name) {
+ if (!validate_pathname($file_name)) {
+ die_error(400, "Invalid file parameter");
+ }
}
-}
-our $file_parent = $input_params{'file_parent'};
-if (defined $file_parent) {
- if (!validate_pathname($file_parent)) {
- die_error(400, "Invalid file parent parameter");
+ our $file_parent = $input_params{'file_parent'};
+ if (defined $file_parent) {
+ if (!validate_pathname($file_parent)) {
+ die_error(400, "Invalid file parent parameter");
+ }
}
-}
-# parameters which are refnames
-our $hash = $input_params{'hash'};
-if (defined $hash) {
- if (!validate_refname($hash)) {
- die_error(400, "Invalid hash parameter");
+ # parameters which are refnames
+ our $hash = $input_params{'hash'};
+ if (defined $hash) {
+ if (!validate_refname($hash)) {
+ die_error(400, "Invalid hash parameter");
+ }
}
-}
-our $hash_parent = $input_params{'hash_parent'};
-if (defined $hash_parent) {
- if (!validate_refname($hash_parent)) {
- die_error(400, "Invalid hash parent parameter");
+ our $hash_parent = $input_params{'hash_parent'};
+ if (defined $hash_parent) {
+ if (!validate_refname($hash_parent)) {
+ die_error(400, "Invalid hash parent parameter");
+ }
}
-}
-our $hash_base = $input_params{'hash_base'};
-if (defined $hash_base) {
- if (!validate_refname($hash_base)) {
- die_error(400, "Invalid hash base parameter");
+ our $hash_base = $input_params{'hash_base'};
+ if (defined $hash_base) {
+ if (!validate_refname($hash_base)) {
+ die_error(400, "Invalid hash base parameter");
+ }
}
-}
-our @extra_options = @{$input_params{'extra_options'}};
-# @extra_options is always defined, since it can only be (currently) set from
-# CGI, and $cgi->param() returns the empty array in array context if the param
-# is not set
-foreach my $opt (@extra_options) {
- if (not exists $allowed_options{$opt}) {
- die_error(400, "Invalid option parameter");
- }
- if (not grep(/^$action$/, @{$allowed_options{$opt}})) {
- die_error(400, "Invalid option parameter for this action");
+ our @extra_options = @{$input_params{'extra_options'}};
+ # @extra_options is always defined, since it can only be (currently) set from
+ # CGI, and $cgi->param() returns the empty array in array context if the param
+ # is not set
+ foreach my $opt (@extra_options) {
+ if (not exists $allowed_options{$opt}) {
+ die_error(400, "Invalid option parameter");
+ }
+ if (not grep(/^$action$/, @{$allowed_options{$opt}})) {
+ die_error(400, "Invalid option parameter for this action");
+ }
}
-}
-our $hash_parent_base = $input_params{'hash_parent_base'};
-if (defined $hash_parent_base) {
- if (!validate_refname($hash_parent_base)) {
- die_error(400, "Invalid hash parent base parameter");
+ our $hash_parent_base = $input_params{'hash_parent_base'};
+ if (defined $hash_parent_base) {
+ if (!validate_refname($hash_parent_base)) {
+ die_error(400, "Invalid hash parent base parameter");
+ }
}
-}
-# other parameters
-our $page = $input_params{'page'};
-if (defined $page) {
- if ($page =~ m/[^0-9]/) {
- die_error(400, "Invalid page parameter");
+ # other parameters
+ our $page = $input_params{'page'};
+ if (defined $page) {
+ if ($page =~ m/[^0-9]/) {
+ die_error(400, "Invalid page parameter");
+ }
}
-}
-our $searchtype = $input_params{'searchtype'};
-if (defined $searchtype) {
- if ($searchtype =~ m/[^a-z]/) {
- die_error(400, "Invalid searchtype parameter");
+ our $searchtype = $input_params{'searchtype'};
+ if (defined $searchtype) {
+ if ($searchtype =~ m/[^a-z]/) {
+ die_error(400, "Invalid searchtype parameter");
+ }
}
-}
-our $search_use_regexp = $input_params{'search_use_regexp'};
+ our $search_use_regexp = $input_params{'search_use_regexp'};
-our $searchtext = $input_params{'searchtext'};
-our $search_regexp;
-if (defined $searchtext) {
- if (length($searchtext) < 2) {
- die_error(403, "At least two characters are required for search parameter");
+ our $searchtext = $input_params{'searchtext'};
+ our $search_regexp;
+ if (defined $searchtext) {
+ if (length($searchtext) < 2) {
+ die_error(403, "At least two characters are required for search parameter");
+ }
+ $search_regexp = $search_use_regexp ? $searchtext : quotemeta $searchtext;
}
- $search_regexp = $search_use_regexp ? $searchtext : quotemeta $searchtext;
}
# path to the current git repository
our $git_dir;
-$git_dir = "$projectroot/$project" if $project;
-
-# list of supported snapshot formats
-our @snapshot_fmts = gitweb_get_feature('snapshot');
-@snapshot_fmts = filter_snapshot_fmts(@snapshot_fmts);
-
-# check that the avatar feature is set to a known provider name,
-# and for each provider check if the dependencies are satisfied.
-# if the provider name is invalid or the dependencies are not met,
-# reset $git_avatar to the empty string.
-our ($git_avatar) = gitweb_get_feature('avatar');
-if ($git_avatar eq 'gravatar') {
- $git_avatar = '' unless (eval { require Digest::MD5; 1; });
-} elsif ($git_avatar eq 'picon') {
- # no dependencies
-} else {
- $git_avatar = '';
+sub evaluate_git_dir {
+ our $git_dir = "$projectroot/$project" if $project;
}
-# dispatch
-if (!defined $action) {
- if (defined $hash) {
- $action = git_get_type($hash);
- } elsif (defined $hash_base && defined $file_name) {
- $action = git_get_type("$hash_base:$file_name");
- } elsif (defined $project) {
- $action = 'summary';
+our (@snapshot_fmts, $git_avatar);
+sub configure_gitweb_features {
+ # list of supported snapshot formats
+ our @snapshot_fmts = gitweb_get_feature('snapshot');
+ @snapshot_fmts = filter_snapshot_fmts(@snapshot_fmts);
+
+ # check that the avatar feature is set to a known provider name,
+ # and for each provider check if the dependencies are satisfied.
+ # if the provider name is invalid or the dependencies are not met,
+ # reset $git_avatar to the empty string.
+ our ($git_avatar) = gitweb_get_feature('avatar');
+ if ($git_avatar eq 'gravatar') {
+ $git_avatar = '' unless (eval { require Digest::MD5; 1; });
+ } elsif ($git_avatar eq 'picon') {
+ # no dependencies
} else {
- $action = 'project_list';
+ $git_avatar = '';
}
}
-if (!defined($actions{$action})) {
- die_error(400, "Unknown action");
+
+# custom error handler: 'die <message>' is Internal Server Error
+sub handle_errors_html {
+ my $msg = shift; # it is already HTML escaped
+
+ # to avoid infinite loop where error occurs in die_error,
+ # change handler to default handler, disabling handle_errors_html
+ set_message("Error occured when inside die_error:\n$msg");
+
+ # you cannot jump out of die_error when called as error handler;
+ # the subroutine set via CGI::Carp::set_message is called _after_
+ # HTTP headers are already written, so it cannot write them itself
+ die_error(undef, undef, $msg, -error_handler => 1, -no_http_header => 1);
+}
+set_message(\&handle_errors_html);
+
+# dispatch
+sub dispatch {
+ if (!defined $action) {
+ if (defined $hash) {
+ $action = git_get_type($hash);
+ } elsif (defined $hash_base && defined $file_name) {
+ $action = git_get_type("$hash_base:$file_name");
+ } elsif (defined $project) {
+ $action = 'summary';
+ } else {
+ $action = 'project_list';
+ }
+ }
+ if (!defined($actions{$action})) {
+ die_error(400, "Unknown action");
+ }
+ if ($action !~ m/^(?:opml|project_list|project_index)$/ &&
+ !$project) {
+ die_error(400, "Project needed");
+ }
+ $actions{$action}->();
}
-if ($action !~ m/^(?:opml|project_list|project_index)$/ &&
- !$project) {
- die_error(400, "Project needed");
+
+sub reset_timer {
+ our $t0 = [ gettimeofday() ]
+ if defined $t0;
+ our $number_of_git_cmds = 0;
+}
+
+our $first_request = 1;
+sub run_request {
+ reset_timer();
+
+ evaluate_uri();
+ if ($first_request) {
+ evaluate_gitweb_config();
+ evaluate_git_version();
+ }
+ if ($per_request_config) {
+ if (ref($per_request_config) eq 'CODE') {
+ $per_request_config->();
+ } elsif (!$first_request) {
+ evaluate_gitweb_config();
+ }
+ }
+ check_loadavg();
+
+ # $projectroot and $projects_list might be set in gitweb config file
+ $projects_list ||= $projectroot;
+
+ evaluate_query_params();
+ evaluate_path_info();
+ evaluate_and_validate_params();
+ evaluate_git_dir();
+
+ configure_gitweb_features();
+
+ dispatch();
+}
+
+our $is_last_request = sub { 1 };
+our ($pre_dispatch_hook, $post_dispatch_hook, $pre_listen_hook);
+our $CGI = 'CGI';
+our $cgi;
+sub configure_as_fcgi {
+ require CGI::Fast;
+ our $CGI = 'CGI::Fast';
+
+ my $request_number = 0;
+ # let each child service 100 requests
+ our $is_last_request = sub { ++$request_number > 100 };
+}
+sub evaluate_argv {
+ my $script_name = $ENV{'SCRIPT_NAME'} || $ENV{'SCRIPT_FILENAME'} || __FILE__;
+ configure_as_fcgi()
+ if $script_name =~ /\.fcgi$/;
+
+ return unless (@ARGV);
+
+ require Getopt::Long;
+ Getopt::Long::GetOptions(
+ 'fastcgi|fcgi|f' => \&configure_as_fcgi,
+ 'nproc|n=i' => sub {
+ my ($arg, $val) = @_;
+ return unless eval { require FCGI::ProcManager; 1; };
+ my $proc_manager = FCGI::ProcManager->new({
+ n_processes => $val,
+ });
+ our $pre_listen_hook = sub { $proc_manager->pm_manage() };
+ our $pre_dispatch_hook = sub { $proc_manager->pm_pre_dispatch() };
+ our $post_dispatch_hook = sub { $proc_manager->pm_post_dispatch() };
+ },
+ );
+}
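+
+# With the options parsed above, FastCGI mode can be requested explicitly,
+# e.g. (a FastCGI-aware web server or spawner still provides the listening
+# socket; --nproc only takes effect if FCGI::ProcManager is installed):
+#
+#   gitweb.cgi --fastcgi --nproc=4
+#
+# Installing the script under a name ending in '.fcgi' has the same effect
+# as passing --fastcgi.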
+
+sub run {
+ evaluate_argv();
+
+ $first_request = 1;
+ $pre_listen_hook->()
+ if $pre_listen_hook;
+
+ REQUEST:
+ while ($cgi = $CGI->new()) {
+ $pre_dispatch_hook->()
+ if $pre_dispatch_hook;
+
+ run_request();
+
+ $post_dispatch_hook->()
+ if $post_dispatch_hook;
+ $first_request = 0;
+
+ last REQUEST if ($is_last_request->());
+ }
+
+ DONE_GITWEB:
+ 1;
+}
+
+run();
+
+if (defined caller) {
+ # wrapped in a subroutine processing requests,
+ # e.g. mod_perl with ModPerl::Registry, or PSGI with Plack::App::WrapCGI
+ return;
+} else {
+ # pure CGI script, serving single request
+ exit;
}
-$actions{$action}->();
-exit;
## ======================================================================
## action links
+# possible values of extra options
+# -full => 0|1 - use absolute/full URL ($my_uri/$my_url as base)
+# -replay => 1 - start from a current view (replay with modifications)
+# -path_info => 0|1 - don't use/use path_info URL (if possible)
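+#
+# A couple of illustrative calls (the parameter values are made up):
+#   href(action => "blob", hash_base => "HEAD", file_name => "README")
+#   href(-replay => 1, page => ($page || 0) + 1)  # current view, next page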
sub href {
my %params = @_;
# default is to use -absolute url() i.e. $my_uri
}
my $use_pathinfo = gitweb_check_feature('pathinfo');
- if ($use_pathinfo and defined $params{'project'}) {
+ if (defined $params{'project'} &&
+ (exists $params{-path_info} ? $params{-path_info} : $use_pathinfo)) {
# try to put as many parameters as possible in PATH_INFO:
# - project name
# - action
$href =~ s,/$,,;
# Then add the project name, if present
- $href .= "/".esc_url($params{'project'});
+ $href .= "/".esc_path_info($params{'project'});
delete $params{'project'};
# since we destructively absorb parameters, we keep this
# Summary just uses the project path URL, any other action is
# added to the URL
if (defined $params{'action'}) {
- $href .= "/".esc_url($params{'action'}) unless $params{'action'} eq 'summary';
+ $href .= "/".esc_path_info($params{'action'})
+ unless $params{'action'} eq 'summary';
delete $params{'action'};
}
|| $params{'hash_parent'} || $params{'hash'});
if (defined $params{'hash_base'}) {
if (defined $params{'hash_parent_base'}) {
- $href .= esc_url($params{'hash_parent_base'});
+ $href .= esc_path_info($params{'hash_parent_base'});
# skip the file_parent if it's the same as the file_name
if (defined $params{'file_parent'}) {
if (defined $params{'file_name'} && $params{'file_parent'} eq $params{'file_name'}) {
delete $params{'file_parent'};
} elsif ($params{'file_parent'} !~ /\.\./) {
- $href .= ":/".esc_url($params{'file_parent'});
+ $href .= ":/".esc_path_info($params{'file_parent'});
delete $params{'file_parent'};
}
}
delete $params{'hash_parent'};
delete $params{'hash_parent_base'};
} elsif (defined $params{'hash_parent'}) {
- $href .= esc_url($params{'hash_parent'}). "..";
+ $href .= esc_path_info($params{'hash_parent'}). "..";
delete $params{'hash_parent'};
}
- $href .= esc_url($params{'hash_base'});
+ $href .= esc_path_info($params{'hash_base'});
if (defined $params{'file_name'} && $params{'file_name'} !~ /\.\./) {
- $href .= ":/".esc_url($params{'file_name'});
+ $href .= ":/".esc_path_info($params{'file_name'});
delete $params{'file_name'};
}
delete $params{'hash'};
delete $params{'hash_base'};
} elsif (defined $params{'hash'}) {
- $href .= esc_url($params{'hash'});
+ $href .= esc_path_info($params{'hash'});
delete $params{'hash'};
}
}
$href .= "?" . join(';', @result) if scalar @result;
+ # final transformation: trailing spaces must be escaped (URI-encoded)
+ $href =~ s/(\s+)$/CGI::escape($1)/e;
+
return $href;
}
# in utf-8 thanks to "binmode STDOUT, ':utf8'" at beginning
sub to_utf8 {
my $str = shift;
+ return undef unless defined $str;
if (utf8::valid($str)) {
utf8::decode($str);
return $str;
# correct, but quoted slashes look too horrible in bookmarks
sub esc_param {
my $str = shift;
+ return undef unless defined $str;
$str =~ s/([^A-Za-z0-9\-_.~()\/:@ ]+)/CGI::escape($1)/eg;
$str =~ s/ /\+/g;
return $str;
}
-# quote unsafe chars in whole URL, so some charactrs cannot be quoted
+# the quoting rules for path_info fragment are slightly different
+sub esc_path_info {
+ my $str = shift;
+ return undef unless defined $str;
+
+ # path_info doesn't treat '+' as space (specially), but '?' must be escaped
+ $str =~ s/([^A-Za-z0-9\-_.~();\/;:@&= +]+)/CGI::escape($1)/eg;
+
+ return $str;
+}
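+
+# For instance, esc_path_info("file name?.txt") yields "file name%3F.txt":
+# spaces and '+' are left alone here, while '?' is percent-encoded so that it
+# cannot start a query string.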
+
+# quote unsafe chars in whole URL, so some characters cannot be quoted
sub esc_url {
my $str = shift;
- $str =~ s/([^A-Za-z0-9\-_.~();\/;?:@&=])/sprintf("%%%02X", ord($1))/eg;
- $str =~ s/\+/%2B/g;
+ return undef unless defined $str;
+ $str =~ s/([^A-Za-z0-9\-_.~();\/;?:@&= ]+)/CGI::escape($1)/eg;
$str =~ s/ /\+/g;
return $str;
}
my $str = shift;
my %opts = @_;
+ return undef unless defined $str;
+
$str = to_utf8($str);
$str = $cgi->escapeHTML($str);
if ($opts{'-nbsp'}) {
my $str = shift;
my %opts = @_;
+ return undef unless defined $str;
+
$str = to_utf8($str);
$str = $cgi->escapeHTML($str);
if ($opts{'-nbsp'}) {
follow_skip => 2, # ignore duplicates
dangling_symlinks => 0, # ignore dangling symlinks, silently
wanted => sub {
+ # global variables
+ our $project_maxdepth;
+ our $projectroot;
# skip project-list toplevel, if we get it.
return if (m!^[/.]$!);
# only directories can be git repositories
return (undef, undef);
}
+# Implementation note: when a single remote is wanted, we cannot use 'git
+# remote show -n' because that command always succeeds (it assumes the argument
+# is a remote URL if no remote of that name is defined), and we cannot use
+# 'git remote show' because that would try to make a network round trip. So the
+# only way to find out whether a particular remote is defined is to walk the
+# list provided by 'git remote -v' and stop if and when we find what we want.
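+#
+# The parsing loop below expects 'git remote -v' lines of the form
+# "<name>\t<URL> (fetch)" or "<name>\t<URL> (push)", for example
+# (the URL is illustrative):
+#
+#   origin	https://git.example.com/repo.git (fetch)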
+sub git_get_remotes_list {
+ my $wanted = shift;
+ my %remotes = ();
+
+ open my $fd, '-|' , git_cmd(), 'remote', '-v';
+ return unless $fd;
+ while (my $remote = <$fd>) {
+ chomp $remote;
+ $remote =~ s!\t(.*?)\s+\((\w+)\)$!!;
+ next if $wanted and not $remote eq $wanted;
+ my ($url, $key) = ($1, $2);
+
+		$remotes{$remote} ||= { 'heads' => [] };
+ $remotes{$remote}{$key} = $url;
+ }
+ close $fd or return;
+ return wantarray ? %remotes : \%remotes;
+}
+
+# Takes a hash of remotes as first parameter and fills it by adding the
+# available remote heads for each of the indicated remotes.
+sub fill_remote_heads {
+ my $remotes = shift;
+ my @heads = map { "remotes/$_" } keys %$remotes;
+ my @remoteheads = git_get_heads_list(undef, @heads);
+ foreach my $remote (keys %$remotes) {
+ $remotes->{$remote}{'heads'} = [ grep {
+ $_->{'name'} =~ s!^$remote/!!
+ } @remoteheads ];
+ }
+}
+
sub git_get_references {
my $type = shift || "";
my %refs;
## parse to array of hashes functions
sub git_get_heads_list {
- my $limit = shift;
+ my ($limit, @classes) = @_;
+ @classes = ('heads') unless @classes;
+ my @patterns = map { "refs/$_" } @classes;
my @headslist;
open my $fd, '-|', git_cmd(), 'for-each-ref',
($limit ? '--count='.($limit+1) : ()), '--sort=-committerdate',
'--format=%(objectname) %(refname) %(subject)%00%(committer)',
- 'refs/heads'
+ @patterns
or return;
while (my $line = <$fd>) {
my %ref_item;
my ($committer, $epoch, $tz) =
($committerinfo =~ /^(.*) ([0-9]+) (.*)$/);
$ref_item{'fullname'} = $name;
- $name =~ s!^refs/heads/!!;
+ $name =~ s!^refs/(?:head|remote)s/!!;
$ref_item{'name'} = $name;
$ref_item{'id'} = $hash;
return $type;
}
+# guess file syntax for syntax highlighting; return undef if no highlighting
+# the name of syntax can (in the future) depend on syntax highlighter used
+sub guess_file_syntax {
+ my ($highlight, $mimetype, $file_name) = @_;
+ return undef unless ($highlight && defined $file_name);
+ my $basename = basename($file_name, '.in');
+ return $highlight_basename{$basename}
+ if exists $highlight_basename{$basename};
+
+ $basename =~ /\.([^.]*)$/;
+ my $ext = $1 or return undef;
+ return $highlight_ext{$ext}
+ if exists $highlight_ext{$ext};
+
+ return undef;
+}
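+
+# For illustration, with the default maps above guess_file_syntax(1, $mimetype,
+# 'Makefile.in') returns 'make' (the '.in' suffix is stripped first), and
+# 'module.cxx' maps to 'cpp' via %highlight_ext.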
+
+# run highlighter and return FD of its output,
+# or return original FD if no highlighting
+sub run_highlighter {
+ my ($fd, $highlight, $syntax) = @_;
+ return $fd unless ($highlight && defined $syntax);
+
+ close $fd;
+ open $fd, quote_command(git_cmd(), "cat-file", "blob", $hash)." | ".
+ quote_command($highlight_bin).
+ " --fragment --syntax $syntax |"
+ or die_error(500, "Couldn't open file or run syntax highlighter");
+ return $fd;
+}
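+
+# The resulting pipeline is roughly equivalent to (values are examples):
+#
+#   git --git-dir=$git_dir cat-file blob $hash | highlight --fragment --syntax pl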
+
## ======================================================================
## functions printing HTML: header, footer, error page
-sub git_header_html {
- my $status = shift || "200 OK";
- my $expires = shift;
+sub get_page_title {
+ my $title = to_utf8($site_name);
- my $title = "$site_name";
+ return $title unless (defined $project);
+ $title .= " - " . to_utf8($project);
+
+ return $title unless (defined $action);
+ $title .= "/$action"; # $action is US-ASCII (7bit ASCII)
+
+ return $title unless (defined $file_name);
+ $title .= " - " . esc_path($file_name);
+ if ($action eq "tree" && $file_name !~ m|/$|) {
+ $title .= "/";
+ }
+
+ return $title;
+}
+
+sub print_feed_meta {
if (defined $project) {
- $title .= " - " . to_utf8($project);
- if (defined $action) {
- $title .= "/$action";
- if (defined $file_name) {
- $title .= " - " . esc_path($file_name);
- if ($action eq "tree" && $file_name !~ m|/$|) {
- $title .= "/";
- }
- }
+ my %href_params = get_feed_info();
+ if (!exists $href_params{'-title'}) {
+ $href_params{'-title'} = 'log';
+ }
+
+ foreach my $format qw(RSS Atom) {
+ my $type = lc($format);
+ my %link_attr = (
+ '-rel' => 'alternate',
+ '-title' => esc_attr("$project - $href_params{'-title'} - $format feed"),
+ '-type' => "application/$type+xml"
+ );
+
+ $href_params{'action'} = $type;
+ $link_attr{'-href'} = href(%href_params);
+ print "<link ".
+ "rel=\"$link_attr{'-rel'}\" ".
+ "title=\"$link_attr{'-title'}\" ".
+ "href=\"$link_attr{'-href'}\" ".
+ "type=\"$link_attr{'-type'}\" ".
+ "/>\n";
+
+ $href_params{'extra_options'} = '--no-merges';
+ $link_attr{'-href'} = href(%href_params);
+ $link_attr{'-title'} .= ' (no merges)';
+ print "<link ".
+ "rel=\"$link_attr{'-rel'}\" ".
+ "title=\"$link_attr{'-title'}\" ".
+ "href=\"$link_attr{'-href'}\" ".
+ "type=\"$link_attr{'-type'}\" ".
+ "/>\n";
}
+
+ } else {
+ printf('<link rel="alternate" title="%s projects list" '.
+ 'href="%s" type="text/plain; charset=utf-8" />'."\n",
+ esc_attr($site_name), href(project=>undef, action=>"project_index"));
+ printf('<link rel="alternate" title="%s projects feeds" '.
+ 'href="%s" type="text/x-opml" />'."\n",
+ esc_attr($site_name), href(project=>undef, action=>"opml"));
}
+}
+
+sub git_header_html {
+ my $status = shift || "200 OK";
+ my $expires = shift;
+ my %opts = @_;
+
+ my $title = get_page_title();
my $content_type;
# require explicit support from the UA if we are to send the page as
# 'application/xhtml+xml', otherwise send it as plain old 'text/html'.
$content_type = 'text/html';
}
print $cgi->header(-type=>$content_type, -charset => 'utf-8',
- -status=> $status, -expires => $expires);
+ -status=> $status, -expires => $expires)
+ unless ($opts{'-no_http_header'});
my $mod_perl_version = $ENV{'MOD_PERL'} ? " $ENV{'MOD_PERL'}" : '';
print <<EOF;
<?xml version="1.0" encoding="utf-8"?>
print '<link rel="stylesheet" type="text/css" href="'.esc_url($stylesheet).'"/>'."\n";
}
}
- if (defined $project) {
- my %href_params = get_feed_info();
- if (!exists $href_params{'-title'}) {
- $href_params{'-title'} = 'log';
- }
-
- foreach my $format qw(RSS Atom) {
- my $type = lc($format);
- my %link_attr = (
- '-rel' => 'alternate',
- '-title' => esc_attr("$project - $href_params{'-title'} - $format feed"),
- '-type' => "application/$type+xml"
- );
-
- $href_params{'action'} = $type;
- $link_attr{'-href'} = href(%href_params);
- print "<link ".
- "rel=\"$link_attr{'-rel'}\" ".
- "title=\"$link_attr{'-title'}\" ".
- "href=\"$link_attr{'-href'}\" ".
- "type=\"$link_attr{'-type'}\" ".
- "/>\n";
-
- $href_params{'extra_options'} = '--no-merges';
- $link_attr{'-href'} = href(%href_params);
- $link_attr{'-title'} .= ' (no merges)';
- print "<link ".
- "rel=\"$link_attr{'-rel'}\" ".
- "title=\"$link_attr{'-title'}\" ".
- "href=\"$link_attr{'-href'}\" ".
- "type=\"$link_attr{'-type'}\" ".
- "/>\n";
- }
-
- } else {
- printf('<link rel="alternate" title="%s projects list" '.
- 'href="%s" type="text/plain; charset=utf-8" />'."\n",
- esc_attr($site_name), href(project=>undef, action=>"project_index"));
- printf('<link rel="alternate" title="%s projects feeds" '.
- 'href="%s" type="text/x-opml" />'."\n",
- esc_attr($site_name), href(project=>undef, action=>"opml"));
- }
+ print_feed_meta()
+ if ($status eq '200 OK');
if (defined $favicon) {
print qq(<link rel="shortcut icon" href=").esc_url($favicon).qq(" type="image/png" />\n);
}
insert_file($site_header);
}
- print "<div class=\"page_header\">\n" .
- $cgi->a({-href => esc_url($logo_url),
- -title => $logo_label},
- qq(<img src=").esc_url($logo).qq(" width="72" height="27" alt="git" class="logo"/>));
+ print "<div class=\"page_header\">\n";
+ if (defined $logo) {
+ print $cgi->a({-href => esc_url($logo_url),
+ -title => $logo_label},
+ $cgi->img({-src => esc_url($logo),
+ -width => 72, -height => 27,
+ -alt => "git",
+ -class => "logo"}));
+ }
print $cgi->a({-href => esc_url($home_link)}, $home_link_str) . " / ";
if (defined $project) {
print $cgi->a({-href => href(action=>"summary")}, esc_html($project));
if (defined $action) {
- print " / $action";
+			my $action_print = $action;
+ if (defined $opts{-action_extra}) {
+ $action_print = $cgi->a({-href => href(action=>$action)},
+ $action);
+ }
+ print " / $action_print";
+ }
+ if (defined $opts{-action_extra}) {
+ print " / $opts{-action_extra}";
}
print "\n";
}
print "<div id=\"generating_info\">\n";
print 'This page took '.
'<span id="generating_time" class="time_span">'.
- Time::HiRes::tv_interval($t0, [Time::HiRes::gettimeofday()]).
+ tv_interval($t0, [ gettimeofday() ]).
' seconds </span>'.
' and '.
'<span id="generating_cmd">'.
"</html>";
}
-# die_error(<http_status_code>, <error_message>)
+# die_error(<http_status_code>, <error_message>[, <detailed_html_description>])
# Example: die_error(404, 'Hash not found')
# By convention, use the following status codes (as defined in RFC 2616):
# 400: Invalid or missing CGI parameters, or
# or down for maintenance). Generally, this is a temporary state.
sub die_error {
my $status = shift || 500;
- my $error = shift || "Internal server error";
+ my $error = esc_html(shift) || "Internal Server Error";
my $extra = shift;
+ my %opts = @_;
my %http_responses = (
400 => '400 Bad Request',
500 => '500 Internal Server Error',
503 => '503 Service Unavailable',
);
- git_header_html($http_responses{$status});
+ git_header_html($http_responses{$status}, undef, %opts);
print <<EOF;
<div class="page_body">
<br /><br />
print "</div>\n";
git_footer_html();
- exit;
+ goto DONE_GITWEB
+ unless ($opts{'-error_handler'});
}
## ----------------------------------------------------------------------
"</div>\n";
}
+# returns a submenu for the navigation of the refs views (tags, heads,
+# remotes) with the current view disabled and the remotes view only
+# available if the feature is enabled
+sub format_ref_views {
+ my ($current) = @_;
+ my @ref_views = qw{tags heads};
+ push @ref_views, 'remotes' if gitweb_check_feature('remote_heads');
+ return join " | ", map {
+ $_ eq $current ? $_ :
+ $cgi->a({-href => href(action=>$_)}, $_)
+ } @ref_views
+}
+
sub format_paging_nav {
my ($action, $page, $has_next_link) = @_;
my $paging_nav;
"\n</div>\n";
}
+sub format_repo_url {
+ my ($name, $url) = @_;
+ return "<tr class=\"metadata_url\"><td>$name</td><td>$url</td></tr>\n";
+}
+
+# Group output by placing it in a DIV element and adding a header.
+# Options for start_div() can be provided by passing a hash reference as the
+# first parameter to the function.
+# Options to git_print_header_div() can be provided by passing an array
+# reference. This must follow the options to start_div if they are present.
+# The content can be a scalar, which is output as-is, a scalar reference, which
+# is output after html escaping, an IO handle passed either as *handle or
+# *handle{IO}, or a function reference. In the latter case all following
+# parameters will be taken as argument to the content function call.
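+#
+# For example, git_remotes_body() below uses it as:
+#   git_print_section({-class => "remote", -id => $remote},
+#                     ["remotes", $remote, $remote],
+#                     sub { git_remote_block($remote, $rdata, $limit, $head) });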
+sub git_print_section {
+ my ($div_args, $header_args, $content);
+ my $arg = shift;
+ if (ref($arg) eq 'HASH') {
+ $div_args = $arg;
+ $arg = shift;
+ }
+ if (ref($arg) eq 'ARRAY') {
+ $header_args = $arg;
+ $arg = shift;
+ }
+ $content = $arg;
+
+ print $cgi->start_div($div_args);
+ git_print_header_div(@$header_args);
+
+ if (ref($content) eq 'CODE') {
+ $content->(@_);
+ } elsif (ref($content) eq 'SCALAR') {
+ print esc_html($$content);
+ } elsif (ref($content) eq 'GLOB' or ref($content) eq 'IO::Handle') {
+ print <$content>;
+ } elsif (!ref($content) && defined($content)) {
+ print $content;
+ }
+
+ print $cgi->end_div;
+}
+
sub print_local_time {
print format_local_time(@_);
}
}
# Outputs table rows containing the full author or committer information,
-# in the format expected for 'commit' view (& similia).
+# in the format expected for 'commit' view (& similar).
# Parameters are a commit hash reference, followed by the list of people
-# to output information for. If the list is empty it defalts to both
+# to output information for. If the list is empty it defaults to both
# author and committer.
sub git_print_authorship_rows {
my $co = shift;
print "</div>\n"; # class="patch"
}
- # for compact combined (--cc) format, with chunk and patch simpliciaction
- # patchset might be empty, but there might be unprocessed raw lines
+ # for compact combined (--cc) format, with chunk and patch simplification
+ # the patchset might be empty, but there might be unprocessed raw lines
for (++$patch_idx if $patch_number > 0;
$patch_idx < @$difftree;
++$patch_idx) {
"<td class=\"link\">" .
$cgi->a({-href => href(action=>"shortlog", hash=>$ref{'fullname'})}, "shortlog") . " | " .
$cgi->a({-href => href(action=>"log", hash=>$ref{'fullname'})}, "log") . " | " .
- $cgi->a({-href => href(action=>"tree", hash=>$ref{'fullname'}, hash_base=>$ref{'name'})}, "tree") .
+ $cgi->a({-href => href(action=>"tree", hash=>$ref{'fullname'}, hash_base=>$ref{'fullname'})}, "tree") .
"</td>\n" .
"</tr>";
}
print "</table>\n";
}
+# Display a single remote block
+sub git_remote_block {
+ my ($remote, $rdata, $limit, $head) = @_;
+
+ my $heads = $rdata->{'heads'};
+ my $fetch = $rdata->{'fetch'};
+ my $push = $rdata->{'push'};
+
+	my $urls_table = "<table class=\"projects_list\">\n";
+
+ if (defined $fetch) {
+ if ($fetch eq $push) {
+ $urls_table .= format_repo_url("URL", $fetch);
+ } else {
+ $urls_table .= format_repo_url("Fetch URL", $fetch);
+ $urls_table .= format_repo_url("Push URL", $push) if defined $push;
+ }
+ } elsif (defined $push) {
+ $urls_table .= format_repo_url("Push URL", $push);
+ } else {
+ $urls_table .= format_repo_url("", "No remote URL");
+ }
+
+ $urls_table .= "</table>\n";
+
+ my $dots;
+ if (defined $limit && $limit < @$heads) {
+ $dots = $cgi->a({-href => href(action=>"remotes", hash=>$remote)}, "...");
+ }
+
+ print $urls_table;
+ git_heads_body($heads, $head, 0, $limit, $dots);
+}
+
+# Display a list of remote names with the respective fetch and push URLs
+sub git_remotes_list {
+ my ($remotedata, $limit) = @_;
+ print "<table class=\"heads\">\n";
+ my $alternate = 1;
+ my @remotes = sort keys %$remotedata;
+
+ my $limited = $limit && $limit < @remotes;
+
+ $#remotes = $limit - 1 if $limited;
+
+ while (my $remote = shift @remotes) {
+ my $rdata = $remotedata->{$remote};
+ my $fetch = $rdata->{'fetch'};
+ my $push = $rdata->{'push'};
+ if ($alternate) {
+ print "<tr class=\"dark\">\n";
+ } else {
+ print "<tr class=\"light\">\n";
+ }
+ $alternate ^= 1;
+ print "<td>" .
+ $cgi->a({-href=> href(action=>'remotes', hash=>$remote),
+ -class=> "list name"},esc_html($remote)) .
+ "</td>";
+ print "<td class=\"link\">" .
+ (defined $fetch ? $cgi->a({-href=> $fetch}, "fetch") : "fetch") .
+ " | " .
+ (defined $push ? $cgi->a({-href=> $push}, "push") : "push") .
+ "</td>";
+
+ print "</tr>\n";
+ }
+
+ if ($limited) {
+ print "<tr>\n" .
+ "<td colspan=\"3\">" .
+ $cgi->a({-href => href(action=>"remotes")}, "...") .
+ "</td>\n" . "</tr>\n";
+ }
+
+ print "</table>";
+}
+
+# Display remote heads grouped by remote, unless there are too many
+# remotes, in which case we only display the remote names
+sub git_remotes_body {
+ my ($remotedata, $limit, $head) = @_;
+ if ($limit and $limit < keys %$remotedata) {
+ git_remotes_list($remotedata, $limit);
+ } else {
+ fill_remote_heads($remotedata);
+ while (my ($remote, $rdata) = each %$remotedata) {
+ git_print_section({-class=>"remote", -id=>$remote},
+ ["remotes", $remote, $remote], sub {
+ git_remote_block($remote, $rdata, $limit, $head);
+ });
+ }
+ }
+}
+
sub git_search_grep_body {
my ($commitlist, $from, $to, $extra) = @_;
$from = 0 unless defined $from;
my %co = parse_commit("HEAD");
my %cd = %co ? parse_date($co{'committer_epoch'}, $co{'committer_tz'}) : ();
my $head = $co{'id'};
+ my $remote_heads = gitweb_check_feature('remote_heads');
my $owner = git_get_project_owner($project);
# there are more ...
my @taglist = git_get_tags_list(16);
my @headlist = git_get_heads_list(16);
+ my %remotedata = $remote_heads ? git_get_remotes_list() : ();
my @forklist;
my $check_forks = gitweb_check_feature('forks');
@url_list = map { "$_/$project" } @git_base_url_list unless @url_list;
foreach my $git_url (@url_list) {
next unless $git_url;
- print "<tr class=\"metadata_url\"><td>$url_tag</td><td>$git_url</td></tr>\n";
+ print format_repo_url($url_tag, $git_url);
$url_tag = "";
}
$cgi->a({-href => href(action=>"heads")}, "..."));
}
+ if (%remotedata) {
+ git_print_header_div('remotes');
+ git_remotes_body(\%remotedata, 15, $head);
+ }
+
if (@forklist) {
git_print_header_div('forks');
git_project_list_body(\@forklist, 'age', 0, 15,
}
sub git_tag {
- my $head = git_get_head_hash($project);
- git_header_html();
- git_print_page_nav('','', $head,undef,$head);
my %tag = parse_tag($hash);
if (! %tag) {
die_error(404, "Unknown tag object");
}
+ my $head = git_get_head_hash($project);
+ git_header_html();
+ git_print_page_nav('','', $head,undef,$head);
git_print_header_div('commit', esc_html($tag{'name'}), $hash);
print "<div class=\"title_text\">\n" .
"<table class=\"object_header\">\n" .
print 'END';
if (defined $t0 && gitweb_check_feature('timed')) {
print ' '.
- Time::HiRes::tv_interval($t0, [Time::HiRes::gettimeofday()]).
+ tv_interval($t0, [ gettimeofday() ]).
' '.$number_of_git_cmds;
}
print "\n";
sub git_tags {
my $head = git_get_head_hash($project);
git_header_html();
- git_print_page_nav('','', $head,undef,$head);
+ git_print_page_nav('','', $head,undef,$head,format_ref_views('tags'));
git_print_header_div('summary', $project);
my @tagslist = git_get_tags_list();
sub git_heads {
my $head = git_get_head_hash($project);
git_header_html();
- git_print_page_nav('','', $head,undef,$head);
+ git_print_page_nav('','', $head,undef,$head,format_ref_views('heads'));
git_print_header_div('summary', $project);
my @headslist = git_get_heads_list();
git_footer_html();
}
+# used both for single remote view and for list of all the remotes
+sub git_remotes {
+ gitweb_check_feature('remote_heads')
+ or die_error(403, "Remote heads view is disabled");
+
+ my $head = git_get_head_hash($project);
+ my $remote = $input_params{'hash'};
+
+ my $remotedata = git_get_remotes_list($remote);
+ die_error(500, "Unable to get remote information") unless defined $remotedata;
+
+ unless (%$remotedata) {
+ die_error(404, defined $remote ?
+ "Remote $remote not found" :
+ "No remotes found");
+ }
+
+ git_header_html(undef, undef, -action_extra => $remote);
+ git_print_page_nav('', '', $head, undef, $head,
+ format_ref_views($remote ? '' : 'remotes'));
+
+ fill_remote_heads($remotedata);
+ if (defined $remote) {
+ git_print_header_div('remotes', "$remote remote for $project");
+ git_remote_block($remote, $remotedata->{$remote}, undef, $head);
+ } else {
+ git_print_header_div('summary', "$project remotes");
+ git_remotes_body($remotedata, undef, $head);
+ }
+
+ git_footer_html();
+}
+
sub git_blob_plain {
my $type = shift;
my $expires;
open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash
or die_error(500, "Couldn't cat $file_name, $hash");
my $mimetype = blob_mimetype($fd, $file_name);
+ # use 'blob_plain' (aka 'raw') view for files that cannot be displayed
if ($mimetype !~ m!^(?:text/|image/(?:gif|png|jpeg)$)! && -B $fd) {
close $fd;
return git_blob_plain($mimetype);
# we can have blame only for text/* mimetype
$have_blame &&= ($mimetype =~ m!^text/!);
+ my $highlight = gitweb_check_feature('highlight');
+ my $syntax = guess_file_syntax($highlight, $mimetype, $file_name);
+ $fd = run_highlighter($fd, $highlight, $syntax)
+ if $syntax;
+
git_header_html(undef, $expires);
my $formats_nav = '';
if (defined $hash_base && (my %co = parse_commit($hash_base))) {
chomp $line;
$nr++;
$line = untabify($line);
- printf "<div class=\"pre\"><a id=\"l%i\" href=\""
- . esc_attr(href(-replay => 1))
- . "#l%i\" class=\"linenr\">%4i</a> %s</div>\n",
- $nr, $nr, $nr, esc_html($line, -nbsp=>1);
+ printf qq!<div class="pre"><a id="l%i" href="%s#l%i" class="linenr">%4i</a> %s</div>\n!,
+ $nr, esc_attr(href(-replay => 1)), $nr, $nr, $syntax ? $line : esc_html($line, -nbsp=>1);
}
}
close $fd
}
push @commit_spec, '--root', $hash;
}
- open $fd, "-|", git_cmd(), "format-patch", '--encoding=utf8',
- '--stdout', @commit_spec
+ open $fd, "-|", git_cmd(), "format-patch", @diff_opts,
+ '--encoding=utf8', '--stdout', @commit_spec
or die_error(500, "Open git-format-patch failed");
} else {
die_error(400, "Unknown commitdiff format");
$paging_nav .= " ⋅ next";
}
- if ($#commitlist >= 100) {
- }
-
git_print_page_nav('','', $hash,$co{'tree'},$hash, $paging_nav);
git_print_header_div('commit', esc_html($co{'title'}), $hash);
- git_search_grep_body(\@commitlist, 0, 99, $next_link);
+ if ($page == 0 && !@commitlist) {
+ print "<p>No match.</p>\n";
+ } else {
+ git_search_grep_body(\@commitlist, 0, 99, $next_link);
+ }
}
if ($searchtype eq 'pickaxe') {
if (defined $favicon) {
print "<icon>" . esc_url($favicon) . "</icon>\n";
}
- if (defined $logo_url) {
+ if (defined $logo) {
# not twice as wide as tall: 72 x 27 pixels
print "<logo>" . esc_url($logo) . "</logo>\n";
}