#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use Git;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
                                        EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';

use URI::Escape;

# It's not always possible to delete pages (this may require special
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

# Number of pages taken into account at once in the subroutine
# get_mw_page_list
use constant SLICE_SIZE => 50;

# Number of linked media files to get at once in get_linked_mediafiles.
# The query is split into small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;

if (@ARGV != 2) {
        exit_error_usage();
}

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated values and multiple keys in the config
# file. Spaces in page titles must be written as _ anyway, since the
# list is space-separated.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);

# Just like @tracked_categories, but for MediaWiki namespaces.
my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
for (@tracked_namespaces) { s/_/ /g; }
chomp(@tracked_namespaces);
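
# For example, to mirror only selected content, one could configure
# (illustrative values; the remote, page, category and namespace names
# below are hypothetical):
#   git config --add remote.origin.pages "Main_Page Another_Page"
#   git config --add remote.origin.categories "Some_Category"
#   git config --add remote.origin.namespaces "Talk"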

# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');
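
# Both switches are plain booleans, e.g. (illustrative):
#   git config --bool remote.origin.mediaimport true
#   git config --bool remote.origin.mediaexport false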

my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when the wiki has lots of pages and we fetch the
# revisions often enough that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
        $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
        $fetch_strategy = 'by_page';
}
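
# For instance, one could select the revision-based strategy with
# (illustrative):
#   git config remote.origin.fetchStrategy by_rev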

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and the mediawiki ref to reflect the
# last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with that information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
        $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If the URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove the
# user and the '@' sign, to avoid an author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;

# Command parser
while (<STDIN>) {
        chomp;

        if (!parse_command($_)) {
                last;
        }

        BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                         # command is fully processed.
}

########################## Functions ##############################

## error handling
sub exit_error_usage {
        die "ERROR: git-remote-mediawiki module was not called with the correct number\n" .
            "of parameters\n" .
            "You may have received this error because you attempted to run the\n" .
            "git-remote-mediawiki module directly.\n" .
            "This module can be used the following way:\n" .
            "\tgit clone mediawiki://<address of a mediawiki>\n" .
            "Then, use git commit, push and pull as with any normal git repository.\n";
}

sub parse_command {
        my ($line) = @_;
        my @cmd = split(/ /, $line);
        if (!defined $cmd[0]) {
                return 0;
        }
        if ($cmd[0] eq 'capabilities') {
                die("Too many arguments for capabilities\n")
                    if (defined($cmd[1]));
                mw_capabilities();
        } elsif ($cmd[0] eq 'list') {
                die("Too many arguments for list\n") if (defined($cmd[2]));
                mw_list($cmd[1]);
        } elsif ($cmd[0] eq 'import') {
                die("Invalid argument for import\n")
                    if ($cmd[1] eq EMPTY);
                die("Too many arguments for import\n")
                    if (defined($cmd[2]));
                mw_import($cmd[1]);
        } elsif ($cmd[0] eq 'option') {
                die("Invalid arguments for option\n")
                    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
                die("Too many arguments for option\n")
                    if (defined($cmd[3]));
                mw_option($cmd[1],$cmd[2]);
        } elsif ($cmd[0] eq 'push') {
                mw_push($cmd[1]);
        } else {
                print {*STDERR} "Unknown command. Aborting...\n";
                return 0;
        }
        return 1;
}
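
# A typical exchange with Git looks roughly like this (Git sends the
# left-hand commands on our stdin; we answer on stdout; see the
# gitremote-helpers documentation for the exact protocol):
#
#   capabilities             -> refspec ..., import, list, push, ""
#   list                     -> "? refs/heads/master", "@refs/heads/master HEAD", ""
#   import refs/heads/master -> a git fast-import stream, then "done"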

# MediaWiki API instance, created lazily.
my $mediawiki;

sub fatal_mw_error {
        my $action = shift;
        print STDERR "fatal: could not $action.\n";
        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
        if ($url =~ /^https/) {
                print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
                print STDERR "fatal: and the SSL certificate is correct.\n";
        } else {
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
        }
        print STDERR "fatal: (error " .
            $mediawiki->{error}->{code} . ': ' .
            $mediawiki->{error}->{details} . ")\n";
        exit 1;
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
        my $pages = shift;
        get_mw_page_list(\@tracked_pages, $pages);
        return;
}

sub get_mw_page_list {
        my $page_list = shift;
        my $pages = shift;
        my @some_pages = @{$page_list};
        while (@some_pages) {
                my $last_page = SLICE_SIZE;
                if ($#some_pages < $last_page) {
                        $last_page = $#some_pages;
                }
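                # Note: the slice covers indices 0..$last_page, i.e. up
                # to SLICE_SIZE + 1 titles per API query.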
                my @slice = @some_pages[0..$last_page];
                get_mw_first_pages(\@slice, $pages);
                @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
        }
        return;
}

sub get_mw_tracked_categories {
        my $pages = shift;
        foreach my $category (@tracked_categories) {
                if (index($category, ':') < 0) {
                        # MediaWiki requires the Category
                        # prefix, but let's not force the user
                        # to specify it.
                        $category = "Category:${category}";
                }
                my $mw_pages = $mediawiki->list( {
                        action => 'query',
                        list => 'categorymembers',
                        cmtitle => $category,
                        cmlimit => 'max' } )
                        || die $mediawiki->{error}->{code} . ': '
                                . $mediawiki->{error}->{details} . "\n";
                foreach my $page (@{$mw_pages}) {
                        $pages->{$page->{title}} = $page;
                }
        }
        return;
}

sub get_mw_tracked_namespaces {
        my $pages = shift;
        foreach my $local_namespace (@tracked_namespaces) {
                my $mw_pages = $mediawiki->list( {
                        action => 'query',
                        list => 'allpages',
                        apnamespace => get_mw_namespace_id($local_namespace),
                        aplimit => 'max' } )
                        || die $mediawiki->{error}->{code} . ': '
                                . $mediawiki->{error}->{details} . "\n";
                foreach my $page (@{$mw_pages}) {
                        $pages->{$page->{title}} = $page;
                }
        }
        return;
}

sub get_mw_all_pages {
        my $pages = shift;
        # No user-provided list, get the list of pages from the API.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                fatal_mw_error("get the list of wiki pages");
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
        return;
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of the page list.
sub get_mw_first_pages {
        my $some_pages = shift;
        my @some_pages = @{$some_pages};

        my $pages = shift;

        # pattern 'page1|page2|...' required by the API
        my $titles = join('|', @some_pages);

        my $mw_pages = $mediawiki->api({
                action => 'query',
                titles => $titles,
        });
        if (!defined($mw_pages)) {
                fatal_mw_error("query the list of wiki pages");
        }
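        # The API answer is a hash of page objects indexed by page id;
        # missing pages are reported with a negative id, roughly:
        #   { '42' => { title => 'Foo', ... },
        #     '-1' => { title => 'Bar', missing => '' } }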
        while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
                if ($id < 0) {
                        print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
                } else {
                        $pages->{$page->{title}} = $page;
                }
        }
        return;
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        print {*STDERR} "Listing pages on remote wiki...\n";

        my %pages; # hash on page titles to avoid duplicates
        my $user_defined;
        if (@tracked_pages) {
                $user_defined = 1;
                # The user provided a list of page titles, but we
                # still need to query the API to get the page IDs.
                get_mw_tracked_pages(\%pages);
        }
        if (@tracked_categories) {
                $user_defined = 1;
                get_mw_tracked_categories(\%pages);
        }
        if (@tracked_namespaces) {
                $user_defined = 1;
                get_mw_tracked_namespaces(\%pages);
        }
        if (!$user_defined) {
                get_mw_all_pages(\%pages);
        }
        if ($import_media) {
                print {*STDERR} "Getting media files for selected pages...\n";
                if ($user_defined) {
                        get_linked_mediafiles(\%pages);
                } else {
                        get_all_mediafiles(\%pages);
                }
        }
        print {*STDERR} (scalar keys %pages) . " pages found.\n";
        return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
        my $args = shift;
        my $encoding = (shift || 'encoding(UTF-8)');
        open(my $git, "-|:${encoding}", "git ${args}")
            or die "Unable to fork: $!\n";
        my $res = do {
                local $/ = undef;
                <$git>
        };
        close($git);

        return $res;
}


sub get_all_mediafiles {
        my $pages = shift;
        # Attach the list of all media-file pages from the API. They
        # live in a different namespace, and only one namespace can be
        # queried at a time.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                apnamespace => get_mw_namespace_id('File'),
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                print {*STDERR} "fatal: could not get the list of pages for media files.\n";
                print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
                print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
        return;
}

sub get_linked_mediafiles {
        my $pages = shift;
        my @titles = map { $_->{title} } values(%{$pages});

        my $batch = BATCH_SIZE;
        while (@titles) {
                if ($#titles < $batch) {
                        $batch = $#titles;
                }
                my @slice = @titles[0..$batch];

                # pattern 'page1|page2|...' required by the API
                my $mw_titles = join('|', @slice);

                # Media files can be included in or linked from a
                # page; get all of them.
                my $query = {
                        action => 'query',
                        prop => 'links|images',
                        titles => $mw_titles,
                        plnamespace => get_mw_namespace_id('File'),
                        pllimit => 'max'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
                        my @media_titles;
                        if (defined($page->{links})) {
                                my @link_titles
                                    = map { $_->{title} } @{$page->{links}};
                                push(@media_titles, @link_titles);
                        }
                        if (defined($page->{images})) {
                                my @image_titles
                                    = map { $_->{title} } @{$page->{images}};
                                push(@media_titles, @image_titles);
                        }
                        if (@media_titles) {
                                get_mw_page_list(\@media_titles, $pages);
                        }
                }

                @titles = @titles[($batch+1)..$#titles];
        }
        return;
}

sub get_mw_mediafile_for_page_revision {
        # Name of the file on the wiki, with the prefix.
        my $filename = shift;
        my $timestamp = shift;
        my %mediafile;

        # Check whether a version of the media file with the given
        # timestamp exists on MediaWiki. If so, download it.
        my $query = {
                action => 'query',
                prop => 'imageinfo',
                titles => "File:${filename}",
                iistart => $timestamp,
                iiend => $timestamp,
                iiprop => 'timestamp|archivename|url',
                iilimit => 1
        };
        my $result = $mediawiki->api($query);

        my ($fileid, $file) = each( %{$result->{query}->{pages}} );
        # If imageinfo is not defined, there is no revision of the file
        # for the given timestamp.
        if (defined($file->{imageinfo})) {
                $mediafile{title} = $filename;

                my $fileinfo = pop(@{$file->{imageinfo}});
                $mediafile{timestamp} = $fileinfo->{timestamp};
                # MediaWiki::API's download function doesn't support https URLs
                # and can't download old versions of files.
                print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
                $mediafile{content} = download_mw_mediafile($fileinfo->{url});
        }
        return %mediafile;
}

sub download_mw_mediafile {
        my $download_url = shift;

        my $response = $mediawiki->{ua}->get($download_url);
        if ($response->code == HTTP_CODE_OK) {
                # It is tempting to return
                # $response->decoded_content({charset => "none"}), but
                # when doing so, utf8::downgrade($content) fails with
                # "Wide character in subroutine entry".
                $response->decode();
                return $response->content();
        } else {
                print {*STDERR} "Error downloading media file from:\n";
                print {*STDERR} "URL: ${download_url}\n";
                print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
                exit 1;
        }
}

sub get_last_local_revision {
        # Get the note regarding the last mediawiki revision.
        my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
        my @note_info = split(/ /, $note);

        my $lastrevision_number;
        if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
                print {*STDERR} 'No previous mediawiki revision found';
                $lastrevision_number = 0;
        } else {
                # Notes are formatted as: mediawiki_revision: #number
                $lastrevision_number = $note_info[1];
                chomp($lastrevision_number);
                print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
        }
        return $lastrevision_number;
}

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $query = {
                action => 'query',
                list => 'recentchanges',
                prop => 'revisions',
                rclimit => '1',
                rcdir => 'older',
        };
        my $result = $mediawiki->api($query);
        return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my $max_rev_num = 0;

        print {*STDERR} "Getting last revision id on tracked pages...\n";

        foreach my $page (@pages) {
                my $id = $page->{pageid};

                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'ids|timestamp',
                        pageids => $id,
                };

                my $result = $mediawiki->api($query);

                my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

                $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

                $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
        }

        print {*STDERR} "Last remote revision found is $max_rev_num.\n";
        return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
        my $string = shift;
        my $page_created = shift;
        # MediaWiki does not allow whitespace at the end of a page, and
        # a page must end with a single \n. This function right-trims
        # the string and appends a \n to follow this rule.
        $string =~ s/\s+$//;
        if ($string eq EMPTY && $page_created) {
                # Creating empty pages is forbidden.
                $string = EMPTY_CONTENT;
        }
        return $string."\n";
}

# Filter applied to MediaWiki data before adding it to Git
sub mediawiki_smudge {
        my $string = shift;
        if ($string eq EMPTY_CONTENT) {
                $string = EMPTY;
        }
        # This \n is important. This is due to MediaWiki's way of
        # handling the end of files.
        return "${string}\n";
}
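
# Round-trip sketch: an empty file in Git is cleaned to EMPTY_CONTENT
# before being sent to the wiki, and a page whose content is exactly
# EMPTY_CONTENT is smudged back to an empty string (plus the trailing
# "\n") on import.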

sub literal_data {
        my ($content) = @_;
        print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
        return;
}
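
# The 'data' command follows the git fast-import format, e.g.:
#   data 12
#   Hello world
# for 12 bytes of content ("Hello world" plus a trailing newline).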

sub literal_data_raw {
        # Output possibly binary content.
        my ($content) = @_;
        # Avoid confusion between size in bytes and in characters
        utf8::downgrade($content);
        binmode STDOUT, ':raw';
        print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
        binmode STDOUT, ':encoding(UTF-8)';
        return;
}

sub mw_capabilities {
        # Revisions are imported to the private namespace
        # refs/mediawiki/$remotename/ by the helper and fetched into
        # refs/remotes/$remotename later by fetch.
        print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
        print {*STDOUT} "import\n";
        print {*STDOUT} "list\n";
        print {*STDOUT} "push\n";
        if ($dumb_push) {
                print {*STDOUT} "no-private-update\n";
        }
        print {*STDOUT} "\n";
        return;
}

sub mw_list {
        # MediaWiki does not have branches; we consider one branch
        # arbitrarily called master, with HEAD pointing to it.
        print {*STDOUT} "? refs/heads/master\n";
        print {*STDOUT} "\@refs/heads/master HEAD\n";
        print {*STDOUT} "\n";
        return;
}

sub mw_option {
        print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
        print {*STDOUT} "unsupported\n";
        return;
}

sub fetch_mw_revisions_for_page {
        my $page = shift;
        my $id = shift;
        my $fetch_from = shift;
        my @page_revs = ();
        my $query = {
                action => 'query',
                prop => 'revisions',
                rvprop => 'ids',
                rvdir => 'newer',
                rvstartid => $fetch_from,
                rvlimit => 500,
                pageids => $id,

                # Let MediaWiki know that we support the latest API.
                continue => '',
        };

        my $revnum = 0;
        # Get 500 revisions at a time due to the MediaWiki API limit.
        while (1) {
                my $result = $mediawiki->api($query);

                # Parse each of those 500 revisions.
                foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
                        my $page_rev_ids;
                        $page_rev_ids->{pageid} = $page->{pageid};
                        $page_rev_ids->{revid} = $revision->{revid};
                        push(@page_revs, $page_rev_ids);
                        $revnum++;
                }

                if ($result->{'query-continue'}) { # For legacy APIs
                        $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
                } elsif ($result->{continue}) { # For newer APIs
                        $query->{rvstartid} = $result->{continue}->{rvcontinue};
                        $query->{continue} = $result->{continue}->{continue};
                } else {
                        last;
                }
        }
        if ($shallow_import && @page_revs) {
                print {*STDERR} "  Found 1 revision (shallow import).\n";
                @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
                return $page_revs[0];
        }
        print {*STDERR} "  Found ${revnum} revision(s).\n";
        return @page_revs;
}

sub fetch_mw_revisions {
        my $pages = shift; my @pages = @{$pages};
        my $fetch_from = shift;

        my @revisions = ();
        my $n = 1;
        foreach my $page (@pages) {
                my $id = $page->{pageid};
                print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
                $n++;
                my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
                @revisions = (@page_revs, @revisions);
        }

        return ($n, @revisions);
}

sub fe_escape_path {
        my $path = shift;
        $path =~ s/\\/\\\\/g;
        $path =~ s/"/\\"/g;
        $path =~ s/\n/\\n/g;
        return qq("${path}");
}
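
# E.g. fe_escape_path('Foo "Bar".mw') yields "Foo \"Bar\".mw", with the
# surrounding double quotes included, as expected by fast-import.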

sub import_file_revision {
        my $commit = shift;
        my %commit = %{$commit};
        my $full_import = shift;
        my $n = shift;
        my $mediafile = shift;
        my %mediafile;
        if ($mediafile) {
                %mediafile = %{$mediafile};
        }

        my $title = $commit{title};
        my $comment = $commit{comment};
        my $content = $commit{content};
        my $author = $commit{author};
        my $date = $commit{date};

        print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
        print {*STDOUT} "mark :${n}\n";
        print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
        literal_data($comment);

        # If it's not a clone, we need to know where to start from.
        if (!$full_import && $n == 1) {
                print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
        }
        if ($content ne DELETED_CONTENT) {
                print {*STDOUT} 'M 644 inline ' .
                    fe_escape_path("${title}.mw") . "\n";
                literal_data($content);
                if (%mediafile) {
                        print {*STDOUT} 'M 644 inline '
                            . fe_escape_path($mediafile{title}) . "\n";
                        literal_data_raw($mediafile{content});
                }
                print {*STDOUT} "\n\n";
        } else {
                print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
        }

        # Store the mediawiki revision number in a git note.
        if ($full_import && $n == 1) {
                print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
        }
        print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
        print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
        literal_data('Note added by git-mediawiki during import');
        if (!$full_import && $n == 1) {
                print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
        }
        print {*STDOUT} "N inline :${n}\n";
        literal_data("mediawiki_revision: $commit{mw_revision}");
        print {*STDOUT} "\n\n";
        return;
}
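
# For a regular page, the emitted fast-import stream looks roughly like
# this (sketch; names and sizes are made up):
#
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer User <User@wiki.example.com> 1234567890 +0000
#   data 13
#   Edit comment.
#   M 644 inline "Main_Page.mw"
#   data 6
#   Hello
#
# followed by a matching commit on refs/notes/origin/mediawiki that
# carries the "mediawiki_revision:" note.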

# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of 'import' commands or a sequence of 'push'
# statements).
sub get_more_refs {
        my $cmd = shift;
        my @refs;
        while (1) {
                my $line = <STDIN>;
                if ($line =~ /^$cmd (.*)$/) {
                        push(@refs, $1);
                } elsif ($line eq "\n") {
                        return @refs;
                } else {
                        die("Invalid command in a '$cmd' batch: ${line}\n");
                }
        }
        return;
}
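
# For instance, Git may send an import batch on stdin as:
#
#   import refs/heads/master
#   import HEAD
#   <empty line>
#
# The first command is consumed by parse_command(); get_more_refs('import')
# then collects the remaining refs up to the empty line.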

sub mw_import {
        # Multiple import commands can follow each other.
        my @refs = (shift, get_more_refs('import'));
        foreach my $ref (@refs) {
                mw_import_ref($ref);
        }
        print {*STDOUT} "done\n";
        return;
}

sub mw_import_ref {
        my $ref = shift;
        # The remote helper will call "import HEAD" and
        # "import refs/heads/master".
        # Since HEAD is a symbolic ref to master (by convention,
        # followed by the output of the command "list" that we gave),
        # we don't need to do anything in this case.
        if ($ref eq 'HEAD') {
                return;
        }

        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        print {*STDERR} "Searching revisions...\n";
        my $last_local = get_last_local_revision();
        my $fetch_from = $last_local + 1;
        if ($fetch_from == 1) {
                print {*STDERR} ", fetching from beginning.\n";
        } else {
                print {*STDERR} ", fetching from here.\n";
        }

        my $n = 0;
        if ($fetch_strategy eq 'by_rev') {
                print {*STDERR} "Fetching & writing export data by revs...\n";
                $n = mw_import_ref_by_revs($fetch_from);
        } elsif ($fetch_strategy eq 'by_page') {
                print {*STDERR} "Fetching & writing export data by pages...\n";
                $n = mw_import_ref_by_pages($fetch_from);
        } else {
                print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
                print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
                exit 1;
        }

        if ($fetch_from == 1 && $n == 0) {
                print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
                # Something has to be done on the remote-helper side. If
                # nothing is done, an error is thrown saying that HEAD
                # refers to the unknown object 0000000000000000000 and
                # the clone fails.
        }
        return;
}

sub mw_import_ref_by_pages {
        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

        @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
        my @revision_ids = map { $_->{revid} } @revisions;

        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {
        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();

        my $last_remote = get_last_global_remote_rev();
        my @revision_ids = $fetch_from..$last_remote;
        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in the second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
        my $fetch_from = shift;
        my $revision_ids = shift;
        my $pages = shift;

        my $n = 0;
        my $n_actual = 0;
        my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

        foreach my $pagerevid (@{$revision_ids}) {
                # Count the page even if we skip it, since we display
                # $n/$total and $total includes skipped pages.
                $n++;

                # Fetch the content of the pages.
                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'content|timestamp|comment|user|ids',
                        revids => $pagerevid,
                };

                my $result = $mediawiki->api($query);

                if (!$result) {
                        die "Failed to retrieve modified page for revision $pagerevid\n";
                }

                if (defined($result->{query}->{badrevids}->{$pagerevid})) {
                        # The revision id does not exist on the remote wiki.
                        next;
                }

                if (!defined($result->{query}->{pages})) {
                        die "Invalid revision ${pagerevid}.\n";
                }

                my @result_pages = values(%{$result->{query}->{pages}});
                my $result_page = $result_pages[0];
                my $rev = $result_pages[0]->{revisions}->[0];

                my $page_title = $result_page->{title};

                if (!exists($pages->{$page_title})) {
                        print {*STDERR} "${n}/", scalar(@{$revision_ids}),
                                ": Skipping revision #$rev->{revid} of ${page_title}\n";
                        next;
                }

                $n_actual++;

                my %commit;
                $commit{author} = $rev->{user} || 'Anonymous';
                $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
                $commit{title} = smudge_filename($page_title);
                $commit{mw_revision} = $rev->{revid};
                $commit{content} = mediawiki_smudge($rev->{'*'});

                if (!defined($rev->{timestamp})) {
                        $last_timestamp++;
                } else {
                        $last_timestamp = $rev->{timestamp};
                }
                $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

                # Differentiate between regular pages and media files.
                my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
                my %mediafile;
                if ($namespace) {
                        my $id = get_mw_namespace_id($namespace);
                        if ($id && $id == get_mw_namespace_id('File')) {
                                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
                        }
                }
                # If this is a revision of the media page for a new
                # version of a file, do one common commit for both the
                # file and the media page. Otherwise, commit only the
                # page itself.
                print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
                import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
        }

        return $n_actual;
}

sub error_non_fast_forward {
        my $advice = run_git('config --bool advice.pushNonFastForward');
        chomp($advice);
        if ($advice ne 'false') {
                # Native git-push would show this after the summary.
                # We can't ask it to display it cleanly, so print it
                # ourselves before.
                print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
                print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
                print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
        }
        print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
        return 0;
}

sub mw_upload_file {
        my $complete_file_name = shift;
        my $new_sha1 = shift;
        my $extension = shift;
        my $file_deleted = shift;
        my $summary = shift;
        my $newrevid;
        my $path = "File:${complete_file_name}";
        my %hashFiles = get_allowed_file_extensions();
        if (!exists($hashFiles{$extension})) {
                print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
                print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
                return $newrevid;
        }
        # Deleting and uploading a file requires a privileged user
        if ($file_deleted) {
                $mediawiki = connect_maybe($mediawiki, $remotename, $url);
                my $query = {
                        action => 'delete',
                        title => $path,
                        reason => $summary
                };
                if (!$mediawiki->edit($query)) {
                        print {*STDERR} "Failed to delete file on remote wiki\n";
                        print {*STDERR} "Check your permissions on the remote site. Error code:\n";
                        print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
                        exit 1;
                }
        } else {
                # Don't let perl try to interpret file content as UTF-8 => use "raw"
                my $content = run_git("cat-file blob ${new_sha1}", 'raw');
                if ($content ne EMPTY) {
                        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
                        $mediawiki->{config}->{upload_url} =
                                "${url}/index.php/Special:Upload";
                        $mediawiki->edit({
                                action => 'upload',
                                filename => $complete_file_name,
                                comment => $summary,
                                file => [undef,
                                         $complete_file_name,
                                         Content => $content],
                                ignorewarnings => 1,
                        }, {
                                skip_encoding => 1
                        } ) || die $mediawiki->{error}->{code} . ':'
                                 . $mediawiki->{error}->{details} . "\n";
                        my $last_file_page = $mediawiki->get_page({title => $path});
                        $newrevid = $last_file_page->{revid};
                        print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
                } else {
                        print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
                }
        }
        return $newrevid;
}

sub mw_push_file {
        my $diff_info = shift;
        # $diff_info contains a string in this format:
        # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
        my @diff_info_split = split(/[ \t]/, $diff_info);

        # Filename, including .mw extension
        my $complete_file_name = shift;
        # Commit message
        my $summary = shift;
        # MediaWiki revision number. Keep the previous one by default,
        # in case there's no edit to perform.
        my $oldrevid = shift;
        my $newrevid;

        if ($summary eq EMPTY_MESSAGE) {
                $summary = EMPTY;
        }

        my $new_sha1 = $diff_info_split[3];
        my $old_sha1 = $diff_info_split[2];
        my $page_created = ($old_sha1 eq NULL_SHA1);
        my $page_deleted = ($new_sha1 eq NULL_SHA1);
        $complete_file_name = clean_filename($complete_file_name);

        my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
        if (!defined($extension)) {
                $extension = EMPTY;
        }
        if ($extension eq 'mw') {
                my $ns = get_mw_namespace_id_for_page($complete_file_name);
                if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
                        print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
                        return ($oldrevid, 'ok');
                }
                my $file_content;
                if ($page_deleted) {
                        # Deleting a page usually requires
                        # special privileges. A common
                        # convention is to replace the page
                        # with this content instead:
                        $file_content = DELETED_CONTENT;
                } else {
                        $file_content = run_git("cat-file blob ${new_sha1}");
                }

                $mediawiki = connect_maybe($mediawiki, $remotename, $url);

                my $result = $mediawiki->edit( {
                        action => 'edit',
                        summary => $summary,
                        title => $title,
                        basetimestamp => $basetimestamps{$oldrevid},
                        text => mediawiki_clean($file_content, $page_created),
                }, {
                        skip_encoding => 1 # Helps with names with accented characters
                });
                if (!$result) {
                        if ($mediawiki->{error}->{code} == 3) {
                                # edit conflicts, considered as non-fast-forward
                                print {*STDERR} 'Warning: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                                    ".\n";
                                return ($oldrevid, 'non-fast-forward');
                        } else {
                                # Other errors. Shouldn't happen => just die()
                                die 'Fatal: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
                        }
                }
                $newrevid = $result->{edit}->{newrevid};
                print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
        } elsif ($export_media) {
                $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                           $extension, $page_deleted,
                                           $summary);
        } else {
                print {*STDERR} "Ignoring media file ${title}\n";
        }
        $newrevid = ($newrevid or $oldrevid);
        return ($newrevid, 'ok');
}

sub mw_push {
        # Multiple push statements can follow each other.
        my @refspecs = (shift, get_more_refs('push'));
        my $pushed;
        for my $refspec (@refspecs) {
                my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
                    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
                if ($force) {
                        print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
                }
                if ($local eq EMPTY) {
                        print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
                        print {*STDOUT} "error ${remote} cannot delete\n";
                        next;
                }
                if ($remote ne 'refs/heads/master') {
                        print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
                        print {*STDOUT} "error ${remote} only master allowed\n";
                        next;
                }
                if (mw_push_revision($local, $remote)) {
                        $pushed = 1;
                }
        }

        # Notify Git that the push is done
        print {*STDOUT} "\n";

        if ($pushed && $dumb_push) {
                print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
                print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
                print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
                print {*STDERR} "\n";
                print {*STDERR} "  git pull --rebase\n";
                print {*STDERR} "\n";
        }
        return;
}

sub mw_push_revision {
        my $local = shift;
        my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
        my $last_local_revid = get_last_local_revision();
        print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
        my $last_remote_revid = get_last_remote_revision();
        my $mw_revision = $last_remote_revid;

        # Get the sha1 of the commit pointed to by the local HEAD
        my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
        chomp($HEAD_sha1);
        # Get the sha1 of the commit pointed to by remotes/$remotename/master
        my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
        chomp($remoteorigin_sha1);

        if ($last_local_revid > 0 &&
            $last_local_revid < $last_remote_revid) {
                return error_non_fast_forward($remote);
        }

        if ($HEAD_sha1 eq $remoteorigin_sha1) {
                # nothing to push
                return 0;
        }

        # Get every commit in between HEAD and refs/remotes/origin/master,
        # including HEAD and refs/remotes/origin/master
        my @commit_pairs = ();
        if ($last_local_revid > 0) {
                my $parsed_sha1 = $remoteorigin_sha1;
                # Find a path from the last MediaWiki commit to the pushed commit
                print {*STDERR} "Computing path from local to remote ...\n";
                my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
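                # Each output line is "<sha1> <parent sha1s...>", with a
                # leading '-' on boundary commits, roughly:
                #   aaaa... bbbb...
                #   -bbbb... cccc...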
                my %local_ancestry;
                foreach my $line (@local_ancestry) {
                        if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                                foreach my $parent (split(/ /, $parents)) {
                                        $local_ancestry{$parent} = $child;
                                }
                        } elsif ($line !~ /^-?[a-f0-9]+/) {
                                die "Unexpected output from git rev-list: ${line}\n";
                        }
                }
                while ($parsed_sha1 ne $HEAD_sha1) {
                        my $child = $local_ancestry{$parsed_sha1};
                        if (!$child) {
                                print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
                                return error_non_fast_forward($remote);
                        }
                        push(@commit_pairs, [$parsed_sha1, $child]);
                        $parsed_sha1 = $child;
                }
        } else {
                # No remote mediawiki revision. Export the whole
                # history (linearized with --first-parent)
                print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
                my $history = run_git("rev-list --first-parent --children ${local}");
                my @history = split(/\n/, $history);
                @history = @history[1..$#history];
                foreach my $line (reverse @history) {
                        my @commit_info_split = split(/[ \n]/, $line);
                        push(@commit_pairs, \@commit_info_split);
                }
        }

        foreach my $commit_info_split (@commit_pairs) {
                my $sha1_child = @{$commit_info_split}[0];
                my $sha1_commit = @{$commit_info_split}[1];
                my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
                # TODO: we could detect renames, and encode them with a #redirect on the wiki.
                # TODO: for now, it's just a delete+add
                my @diff_info_list = split(/\0/, $diff_infos);
                # Keep the subject line of the commit message as the mediawiki comment for the revision
                my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
                chomp($commit_msg);
                # Push every blob
                while (@diff_info_list) {
                        my $status;
                        # git diff-tree -z gives an output like
                        # <metadata>\0<filename1>\0
                        # <metadata>\0<filename2>\0
                        # and we've split on \0.
                        my $info = shift(@diff_info_list);
                        my $file = shift(@diff_info_list);
                        ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
                        if ($status eq 'non-fast-forward') {
                                # We may already have sent part of the
                                # commit to MediaWiki, but it's too
                                # late to cancel it. Stop the push in
                                # the middle, but still give an
                                # accurate error message.
                                return error_non_fast_forward($remote);
                        }
                        if ($status ne 'ok') {
                                die("Unknown error from mw_push_file()\n");
                        }
                }
                if (!$dumb_push) {
                        run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
                }
        }

        print {*STDOUT} "ok ${remote}\n";
        return 1;
}

sub get_allowed_file_extensions {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $query = {
                action => 'query',
                meta => 'siteinfo',
                siprop => 'fileextensions'
        };
        my $result = $mediawiki->api($query);
        my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
        my %hashFile = map { $_ => 1 } @file_extensions;

        return %hashFile;
}
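
# The siteinfo answer contains one entry per allowed extension, roughly:
#   { query => { fileextensions => [ { ext => 'png' }, { ext => 'jpg' }, ... ] } }
# (the actual list depends on the wiki configuration).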

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return the MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
        my $name = shift;

        if (!exists $namespace_id{$name}) {
                # Look at the configuration file to check if the record
                # for that namespace is already cached. Namespaces are
                # stored in the form "Name_of_namespace:Id_namespace",
                # ex.: "File:6".
                my @temp = split(/\n/,
                                 run_git("config --get-all remote.${remotename}.namespaceCache"));
                chomp(@temp);
                foreach my $ns (@temp) {
                        my ($n, $id) = split(/:/, $ns);
                        if ($id eq 'notANameSpace') {
                                $namespace_id{$n} = {is_namespace => 0};
                        } else {
                                $namespace_id{$n} = {is_namespace => 1, id => $id};
                        }
                        $cached_mw_namespace_id{$n} = 1;
                }
        }

        if (!exists $namespace_id{$name}) {
                print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
                # NS not found => get the namespace id from MW and store
                # it in the configuration file.
                my $query = {
                        action => 'query',
                        meta => 'siteinfo',
                        siprop => 'namespaces'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
                        if (defined($ns->{id}) && defined($ns->{canonical})) {
                                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                                if ($ns->{'*'}) {
                                        # alias (e.g. French Fichier: as an alias for the canonical File:)
                                        $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                                }
                        }
                }
        }

        my $ns = $namespace_id{$name};
        my $id;

        if (!defined $ns) {
                my @namespaces = map { s/ /_/g; $_; } sort keys %namespace_id;
                print {*STDERR} "No such namespace ${name} on MediaWiki, known namespaces: @namespaces\n";
                $ns = {is_namespace => 0};
                $namespace_id{$name} = $ns;
        }

        if ($ns->{is_namespace}) {
                $id = $ns->{id};
        }

        # Store "notANameSpace" as a special value for nonexistent namespaces
        my $store_id = ($id || 'notANameSpace');

        # Store explicitly requested namespaces on disk
        if (!exists $cached_mw_namespace_id{$name}) {
                run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
                $cached_mw_namespace_id{$name} = 1;
        }
        return $id;
}
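
# Usage sketch: on a wiki with default settings, get_mw_namespace_id('File')
# should return 6 (the id MediaWiki assigns to the File namespace), and the
# result gets cached as a config entry of the form
# remote.<remotename>.namespaceCache "File:6".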

sub get_mw_namespace_id_for_page {
        my $namespace = shift;
        if ($namespace =~ /^([^:]*):/) {
                # Pass only the captured namespace prefix, not the whole
                # page title, as expected by get_mw_namespace_id().
                return get_mw_namespace_id($1);
        } else {
                return;
        }
}