#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use Git;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
                                        EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';

use URI::Escape;

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

# Number of pages taken into account at once in the subroutine
# get_mw_page_list
use constant SLICE_SIZE => 50;

# Number of linked mediafiles to get at once in get_linked_mediafiles.
# The query is split into small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;

if (@ARGV != 2) {
        exit_error_usage();
}

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated lists and multiple keys in the config file.
# Spaces should be written as _ anyway because we'll use chomp.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
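
# A configuration sketch (illustrative; 'origin' and the page/category
# names are placeholders):
#   git config --add remote.origin.pages Main_Page
#   git config --add remote.origin.categories My_Category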

# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');

my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
        $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
        $fetch_strategy = 'by_page';
}
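
# For instance (assuming the remote is called 'origin'), fetching by
# revision can be selected with:
#   git config remote.origin.fetchStrategy by_rev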

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
        $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If the URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove the
# user and the '@' sign, to avoid authors like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
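
# For example, a URL like 'http://user:password@example.com/wiki'
# (illustrative host) yields the wiki name 'example.com/wiki'.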

# Commands parser
while (<STDIN>) {
        chomp;

        if (!parse_command($_)) {
                last;
        }

        BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                         # command is fully processed.
}
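
# The loop above implements a simple exchange with Git, a sketch of
# which looks like this (each input line is handled by parse_command()):
#   capabilities
#   list
#   import refs/heads/master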

########################## Functions ##############################

## error handling
sub exit_error_usage {
        die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
            "parameters\n" .
            "You may have obtained this error because you attempted to run the git-remote-mediawiki\n" .
            "module directly.\n" .
            "This module can be used as follows:\n" .
            "\tgit clone mediawiki://<address of a mediawiki>\n" .
            "Then, use git commit, push and pull as with every normal git repository.\n";
}

sub parse_command {
        my ($line) = @_;
        my @cmd = split(/ /, $line);
        if (!defined $cmd[0]) {
                return 0;
        }
        if ($cmd[0] eq 'capabilities') {
                die("Too many arguments for capabilities\n")
                    if (defined($cmd[1]));
                mw_capabilities();
        } elsif ($cmd[0] eq 'list') {
                die("Too many arguments for list\n") if (defined($cmd[2]));
                mw_list($cmd[1]);
        } elsif ($cmd[0] eq 'import') {
                die("Invalid argument for import\n")
                    if ($cmd[1] eq EMPTY);
                die("Too many arguments for import\n")
                    if (defined($cmd[2]));
                mw_import($cmd[1]);
        } elsif ($cmd[0] eq 'option') {
                die("Invalid arguments for option\n")
                    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
                die("Too many arguments for option\n")
                    if (defined($cmd[3]));
                mw_option($cmd[1], $cmd[2]);
        } elsif ($cmd[0] eq 'push') {
                mw_push($cmd[1]);
        } else {
                print {*STDERR} "Unknown command. Aborting...\n";
                return 0;
        }
        return 1;
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub fatal_mw_error {
        my $action = shift;
        print STDERR "fatal: could not $action.\n";
        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
        if ($url =~ /^https/) {
                print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
                print STDERR "fatal: and the SSL certificate is correct.\n";
        } else {
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
        }
        print STDERR "fatal: (error " .
            $mediawiki->{error}->{code} . ': ' .
            $mediawiki->{error}->{details} . ")\n";
        exit 1;
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
        my $pages = shift;
        get_mw_page_list(\@tracked_pages, $pages);
        return;
}

sub get_mw_page_list {
        my $page_list = shift;
        my $pages = shift;
        my @some_pages = @{$page_list};
        while (@some_pages) {
                my $last_page = SLICE_SIZE;
                if ($#some_pages < $last_page) {
                        $last_page = $#some_pages;
                }
                my @slice = @some_pages[0..$last_page];
                get_mw_first_pages(\@slice, $pages);
                @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
        }
        return;
}
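
# For example, a list of 120 page titles (an illustrative count) is sent
# to get_mw_first_pages() in successive slices of about SLICE_SIZE
# titles, rather than as one oversized 'titles=' parameter.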

sub get_mw_tracked_categories {
        my $pages = shift;
        foreach my $category (@tracked_categories) {
                if (index($category, ':') < 0) {
                        # MediaWiki requires the Category
                        # prefix, but let's not force the user
                        # to specify it.
                        $category = "Category:${category}";
                }
                my $mw_pages = $mediawiki->list( {
                        action => 'query',
                        list => 'categorymembers',
                        cmtitle => $category,
                        cmlimit => 'max' } )
                        || die $mediawiki->{error}->{code} . ': '
                                . $mediawiki->{error}->{details} . "\n";
                foreach my $page (@{$mw_pages}) {
                        $pages->{$page->{title}} = $page;
                }
        }
        return;
}

sub get_mw_all_pages {
        my $pages = shift;
        # No user-provided list, get the list of pages from the API.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                fatal_mw_error("get the list of wiki pages");
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
        return;
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of a page list.
sub get_mw_first_pages {
        my $some_pages = shift;
        my @some_pages = @{$some_pages};

        my $pages = shift;

        # pattern 'page1|page2|...' required by the API
        my $titles = join('|', @some_pages);

        my $mw_pages = $mediawiki->api({
                action => 'query',
                titles => $titles,
        });
        if (!defined($mw_pages)) {
                fatal_mw_error("query the list of wiki pages");
        }
        while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
                if ($id < 0) {
                        print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
                } else {
                        $pages->{$page->{title}} = $page;
                }
        }
        return;
}
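
# An illustrative query for one slice (hypothetical titles):
#   { action => 'query', titles => 'Main_Page|Help:Contents' }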

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        print {*STDERR} "Listing pages on remote wiki...\n";

        my %pages; # hash on page titles to avoid duplicates
        my $user_defined;
        if (@tracked_pages) {
                $user_defined = 1;
                # The user provided a list of page titles, but we
                # still need to query the API to get the page IDs.
                get_mw_tracked_pages(\%pages);
        }
        if (@tracked_categories) {
                $user_defined = 1;
                get_mw_tracked_categories(\%pages);
        }
        if (!$user_defined) {
                get_mw_all_pages(\%pages);
        }
        if ($import_media) {
                print {*STDERR} "Getting media files for selected pages...\n";
                if ($user_defined) {
                        get_linked_mediafiles(\%pages);
                } else {
                        get_all_mediafiles(\%pages);
                }
        }
        print {*STDERR} (scalar keys %pages) . " pages found.\n";
        return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
        my $args = shift;
        my $encoding = (shift || 'encoding(UTF-8)');
        open(my $git, "-|:${encoding}", "git ${args}")
            or die "Unable to fork: $!\n";
        my $res = do {
                local $/ = undef;
                <$git>
        };
        close($git);

        return $res;
}
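
# A usage sketch (hypothetical caller):
#   my $head = run_git('rev-parse HEAD');
#   chomp($head);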

sub get_all_mediafiles {
        my $pages = shift;
        # Add to the list all media file pages from the API; they are
        # in a different namespace, and only one namespace can be
        # queried at a time.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                apnamespace => get_mw_namespace_id('File'),
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                print {*STDERR} "fatal: could not get the list of pages for media files.\n";
                print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
                print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
        return;
}

sub get_linked_mediafiles {
        my $pages = shift;
        my @titles = map { $_->{title} } values(%{$pages});

        my $batch = BATCH_SIZE;
        while (@titles) {
                if ($#titles < $batch) {
                        $batch = $#titles;
                }
                my @slice = @titles[0..$batch];

                # pattern 'page1|page2|...' required by the API
                my $mw_titles = join('|', @slice);

                # Media files can be included in or linked from a page;
                # get all related files.
                my $query = {
                        action => 'query',
                        prop => 'links|images',
                        titles => $mw_titles,
                        plnamespace => get_mw_namespace_id('File'),
                        pllimit => 'max'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
                        my @media_titles;
                        if (defined($page->{links})) {
                                my @link_titles
                                    = map { $_->{title} } @{$page->{links}};
                                push(@media_titles, @link_titles);
                        }
                        if (defined($page->{images})) {
                                my @image_titles
                                    = map { $_->{title} } @{$page->{images}};
                                push(@media_titles, @image_titles);
                        }
                        if (@media_titles) {
                                get_mw_page_list(\@media_titles, $pages);
                        }
                }

                @titles = @titles[($batch+1)..$#titles];
        }
        return;
}

sub get_mw_mediafile_for_page_revision {
        # Name of the file on the wiki, with the prefix.
        my $filename = shift;
        my $timestamp = shift;
        my %mediafile;

        # Check whether a media file with the given timestamp exists on
        # MediaWiki. If so, download the file.
        my $query = {
                action => 'query',
                prop => 'imageinfo',
                titles => "File:${filename}",
                iistart => $timestamp,
                iiend => $timestamp,
                iiprop => 'timestamp|archivename|url',
                iilimit => 1
        };
        my $result = $mediawiki->api($query);

        my ($fileid, $file) = each( %{$result->{query}->{pages}} );
        # If not defined, there is no revision of the file for the
        # given timestamp.
        if (defined($file->{imageinfo})) {
                $mediafile{title} = $filename;

                my $fileinfo = pop(@{$file->{imageinfo}});
                $mediafile{timestamp} = $fileinfo->{timestamp};
                # MediaWiki::API's download function doesn't support https URLs
                # and can't download old versions of files.
                print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
                $mediafile{content} = download_mw_mediafile($fileinfo->{url});
        }
        return %mediafile;
}

sub download_mw_mediafile {
        my $download_url = shift;

        my $response = $mediawiki->{ua}->get($download_url);
        if ($response->code == HTTP_CODE_OK) {
                return $response->decoded_content;
        } else {
                print {*STDERR} "Error downloading mediafile from:\n";
                print {*STDERR} "URL: ${download_url}\n";
                print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
                exit 1;
        }
}

sub get_last_local_revision {
        # Get the note regarding the last mediawiki revision.
        my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
        my @note_info = split(/ /, $note);

        my $lastrevision_number;
        if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
                print {*STDERR} 'No previous mediawiki revision found';
                $lastrevision_number = 0;
        } else {
                # Notes are formatted as: mediawiki_revision: #number
                $lastrevision_number = $note_info[1];
                chomp($lastrevision_number);
                print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
        }
        return $lastrevision_number;
}
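
# The note parsed above looks like this (the revision number is
# illustrative):
#   mediawiki_revision: 312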

# Get the last remote revision without taking into account which pages
# are tracked. This function makes a single request to the wiki, thus
# avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $query = {
                action => 'query',
                list => 'recentchanges',
                prop => 'revisions',
                rclimit => '1',
                rcdir => 'older',
        };
        my $result = $mediawiki->api($query);
        return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my $max_rev_num = 0;

        print {*STDERR} "Getting last revision id on tracked pages...\n";

        foreach my $page (@pages) {
                my $id = $page->{pageid};

                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'ids|timestamp',
                        pageids => $id,
                };

                my $result = $mediawiki->api($query);

                my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

                $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

                $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
        }

        print {*STDERR} "Last remote revision found is $max_rev_num.\n";
        return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
        my $string = shift;
        my $page_created = shift;
        # MediaWiki does not allow trailing whitespace at the end of a
        # page, and pages must end with a single \n. This function
        # right-trims the string and appends a \n to follow this rule.
        $string =~ s/\s+$//;
        if ($string eq EMPTY && $page_created) {
                # Creating empty pages is forbidden.
                $string = EMPTY_CONTENT;
        }
        return $string."\n";
}

# Filter applied to MediaWiki data before adding it to Git
sub mediawiki_smudge {
        my $string = shift;
        if ($string eq EMPTY_CONTENT) {
                $string = EMPTY;
        }
        # This \n is important. This is due to MediaWiki's way of
        # handling the end of files.
        return "${string}\n";
}
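
# An illustrative round-trip: cleaning "foo  \n\n" yields "foo\n", and a
# newly created empty page is sent as EMPTY_CONTENT so that MediaWiki
# accepts it.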

sub literal_data {
        my ($content) = @_;
        print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
        return;
}

sub literal_data_raw {
        # Output possibly binary content.
        my ($content) = @_;
        # Avoid confusion between size in bytes and in characters
        utf8::downgrade($content);
        binmode STDOUT, ':raw';
        print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
        binmode STDOUT, ':encoding(UTF-8)';
        return;
}
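
# For example, literal_data('hello') emits the fast-import command
# "data 5" followed by the five bytes of content.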

sub mw_capabilities {
        # Revisions are imported to the private namespace
        # refs/mediawiki/$remotename/ by the helper and fetched into
        # refs/remotes/$remotename later by fetch.
        print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
        print {*STDOUT} "import\n";
        print {*STDOUT} "list\n";
        print {*STDOUT} "push\n";
        if ($dumb_push) {
                print {*STDOUT} "no-private-update\n";
        }
        print {*STDOUT} "\n";
        return;
}

sub mw_list {
        # MediaWiki does not have branches; we expose a single branch
        # arbitrarily called master, with HEAD pointing to it.
        print {*STDOUT} "? refs/heads/master\n";
        print {*STDOUT} "\@refs/heads/master HEAD\n";
        print {*STDOUT} "\n";
        return;
}

sub mw_option {
        print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
        print {*STDOUT} "unsupported\n";
        return;
}

sub fetch_mw_revisions_for_page {
        my $page = shift;
        my $id = shift;
        my $fetch_from = shift;
        my @page_revs = ();
        my $query = {
                action => 'query',
                prop => 'revisions',
                rvprop => 'ids',
                rvdir => 'newer',
                rvstartid => $fetch_from,
                rvlimit => 500,
                pageids => $id,

                # Let MediaWiki know that we support the latest API.
                continue => '',
        };

        my $revnum = 0;
        # Get 500 revisions at a time due to the MediaWiki API limit.
        while (1) {
                my $result = $mediawiki->api($query);

                # Parse each of those 500 revisions.
                foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
                        my $page_rev_ids;
                        $page_rev_ids->{pageid} = $page->{pageid};
                        $page_rev_ids->{revid} = $revision->{revid};
                        push(@page_revs, $page_rev_ids);
                        $revnum++;
                }

                if ($result->{'query-continue'}) { # For legacy APIs
                        $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
                } elsif ($result->{continue}) { # For newer APIs
                        $query->{rvstartid} = $result->{continue}->{rvcontinue};
                        $query->{continue} = $result->{continue}->{continue};
                } else {
                        last;
                }
        }
        if ($shallow_import && @page_revs) {
                print {*STDERR} "  Found 1 revision (shallow import).\n";
                @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
                return $page_revs[0];
        }
        print {*STDERR} "  Found ${revnum} revision(s).\n";
        return @page_revs;
}

sub fetch_mw_revisions {
        my $pages = shift; my @pages = @{$pages};
        my $fetch_from = shift;

        my @revisions = ();
        my $n = 1;
        foreach my $page (@pages) {
                my $id = $page->{pageid};
                print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
                $n++;
                my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
                @revisions = (@page_revs, @revisions);
        }

        return ($n, @revisions);
}

sub fe_escape_path {
        my $path = shift;
        $path =~ s/\\/\\\\/g;
        $path =~ s/"/\\"/g;
        $path =~ s/\n/\\n/g;
        return qq("${path}");
}
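
# For example, fe_escape_path(qq(dir/a"b.mw)) returns the quoted path
# "dir/a\"b.mw", as expected by the fast-import stream.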

sub import_file_revision {
        my $commit = shift;
        my %commit = %{$commit};
        my $full_import = shift;
        my $n = shift;
        my $mediafile = shift;
        my %mediafile;
        if ($mediafile) {
                %mediafile = %{$mediafile};
        }

        my $title = $commit{title};
        my $comment = $commit{comment};
        my $content = $commit{content};
        my $author = $commit{author};
        my $date = $commit{date};

        print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
        print {*STDOUT} "mark :${n}\n";
        print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
        literal_data($comment);

        # If it's not a clone, we need to know where to start from
        if (!$full_import && $n == 1) {
                print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
        }
        if ($content ne DELETED_CONTENT) {
                print {*STDOUT} 'M 644 inline ' .
                    fe_escape_path("${title}.mw") . "\n";
                literal_data($content);
                if (%mediafile) {
                        print {*STDOUT} 'M 644 inline '
                            . fe_escape_path($mediafile{title}) . "\n";
                        literal_data_raw($mediafile{content});
                }
                print {*STDOUT} "\n\n";
        } else {
                print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
        }

        # MediaWiki revision number in the Git note
        if ($full_import && $n == 1) {
                print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
        }
        print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
        print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
        literal_data('Note added by git-mediawiki during import');
        if (!$full_import && $n == 1) {
                print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
        }
        print {*STDOUT} "N inline :${n}\n";
        literal_data("mediawiki_revision: $commit{mw_revision}");
        print {*STDOUT} "\n\n";
        return;
}

# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like a batch sequence of 'import' statements, or a sequence of
# 'push' statements)
sub get_more_refs {
        my $cmd = shift;
        my @refs;
        while (1) {
                my $line = <STDIN>;
                if ($line =~ /^$cmd (.*)$/) {
                        push(@refs, $1);
                } elsif ($line eq "\n") {
                        return @refs;
                } else {
                        die("Invalid command in a '$cmd' batch: ${line}\n");
                }
        }
        return;
}

sub mw_import {
        # Multiple import commands can follow each other.
        my @refs = (shift, get_more_refs('import'));
        foreach my $ref (@refs) {
                mw_import_ref($ref);
        }
        print {*STDOUT} "done\n";
        return;
}

sub mw_import_ref {
        my $ref = shift;
        # The remote helper will call "import HEAD" and
        # "import refs/heads/master".
        # Since HEAD is a symbolic ref to master (by convention,
        # followed by the output of the command "list" that we gave),
        # we don't need to do anything in this case.
        if ($ref eq 'HEAD') {
                return;
        }

        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        print {*STDERR} "Searching revisions...\n";
        my $last_local = get_last_local_revision();
        my $fetch_from = $last_local + 1;
        if ($fetch_from == 1) {
                print {*STDERR} ", fetching from beginning.\n";
        } else {
                print {*STDERR} ", fetching from here.\n";
        }

        my $n = 0;
        if ($fetch_strategy eq 'by_rev') {
                print {*STDERR} "Fetching & writing export data by revs...\n";
                $n = mw_import_ref_by_revs($fetch_from);
        } elsif ($fetch_strategy eq 'by_page') {
                print {*STDERR} "Fetching & writing export data by pages...\n";
                $n = mw_import_ref_by_pages($fetch_from);
        } else {
                print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
                print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
                exit 1;
        }

        if ($fetch_from == 1 && $n == 0) {
                print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
                # Something has to be done remote-helper side. If nothing is done, an error is
                # thrown saying that HEAD is referring to unknown object 0000000000000000000
                # and the clone fails.
        }
        return;
}

sub mw_import_ref_by_pages {
        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

        @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
        my @revision_ids = map { $_->{revid} } @revisions;

        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {
        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();

        my $last_remote = get_last_global_remote_rev();
        my @revision_ids = $fetch_from..$last_remote;
        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import the revisions given in the second argument (an array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
        my $fetch_from = shift;
        my $revision_ids = shift;
        my $pages = shift;

        my $n = 0;
        my $n_actual = 0;
        my $last_timestamp = 0; # Placeholder in case $rev->{timestamp} is undefined

        foreach my $pagerevid (@{$revision_ids}) {
                # Count page even if we skip it, since we display
                # $n/$total and $total includes skipped pages.
                $n++;

                # Fetch the content of the pages.
                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'content|timestamp|comment|user|ids',
                        revids => $pagerevid,
                };

                my $result = $mediawiki->api($query);

                if (!$result) {
                        die "Failed to retrieve modified page for revision $pagerevid\n";
                }

                if (defined($result->{query}->{badrevids}->{$pagerevid})) {
                        # The revision id does not exist on the remote wiki.
                        next;
                }

                if (!defined($result->{query}->{pages})) {
                        die "Invalid revision ${pagerevid}.\n";
                }

                my @result_pages = values(%{$result->{query}->{pages}});
                my $result_page = $result_pages[0];
                my $rev = $result_pages[0]->{revisions}->[0];

                my $page_title = $result_page->{title};

                if (!exists($pages->{$page_title})) {
                        print {*STDERR} "${n}/", scalar(@{$revision_ids}),
                                ": Skipping revision #$rev->{revid} of ${page_title}\n";
                        next;
                }

                $n_actual++;

                my %commit;
                $commit{author} = $rev->{user} || 'Anonymous';
                $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
                $commit{title} = smudge_filename($page_title);
                $commit{mw_revision} = $rev->{revid};
                $commit{content} = mediawiki_smudge($rev->{'*'});

                if (!defined($rev->{timestamp})) {
                        $last_timestamp++;
                } else {
                        $last_timestamp = $rev->{timestamp};
                }
                $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

                # Differentiate between regular pages and media files.
                my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
                my %mediafile;
                if ($namespace) {
                        my $id = get_mw_namespace_id($namespace);
                        if ($id && $id == get_mw_namespace_id('File')) {
                                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
                        }
                }
                # If this revision is a media page for a new version of a
                # file, make one common commit for both the file and the
                # media page; otherwise, commit only the page.
                print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
                import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
        }

        return $n_actual;
}

sub error_non_fast_forward {
        my $advice = run_git('config --bool advice.pushNonFastForward');
        chomp($advice);
        if ($advice ne 'false') {
                # Native git-push would show this after the summary.
                # We can't ask it to display it cleanly, so print it
                # ourselves before.
                print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
                print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
                print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
        }
        print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
        return 0;
}

sub mw_upload_file {
        my $complete_file_name = shift;
        my $new_sha1 = shift;
        my $extension = shift;
        my $file_deleted = shift;
        my $summary = shift;
        my $newrevid;
        my $path = "File:${complete_file_name}";
        my %hashFiles = get_allowed_file_extensions();
        if (!exists($hashFiles{$extension})) {
                print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
                print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
                return $newrevid;
        }
        # Deleting or uploading a file requires a privileged user.
        if ($file_deleted) {
                $mediawiki = connect_maybe($mediawiki, $remotename, $url);
                my $query = {
                        action => 'delete',
                        title => $path,
                        reason => $summary
                };
                if (!$mediawiki->edit($query)) {
                        print {*STDERR} "Failed to delete file on remote wiki\n";
                        print {*STDERR} "Check your permissions on the remote site. Error code:\n";
                        print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
                        exit 1;
                }
        } else {
                # Don't let perl try to interpret file content as UTF-8 => use "raw"
                my $content = run_git("cat-file blob ${new_sha1}", 'raw');
                if ($content ne EMPTY) {
                        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
                        $mediawiki->{config}->{upload_url} =
                                "${url}/index.php/Special:Upload";
                        $mediawiki->edit({
                                action => 'upload',
                                filename => $complete_file_name,
                                comment => $summary,
                                file => [undef,
                                         $complete_file_name,
                                         Content => $content],
                                ignorewarnings => 1,
                        }, {
                                skip_encoding => 1
                        } ) || die $mediawiki->{error}->{code} . ':'
                                 . $mediawiki->{error}->{details} . "\n";
                        my $last_file_page = $mediawiki->get_page({title => $path});
                        $newrevid = $last_file_page->{revid};
                        print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
                } else {
                        print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
                }
        }
        return $newrevid;
}

sub mw_push_file {
        my $diff_info = shift;
        # $diff_info contains a string in this format:
        # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
        my @diff_info_split = split(/[ \t]/, $diff_info);

        # Filename, including the .mw extension
        my $complete_file_name = shift;
        # Commit message
        my $summary = shift;
        # MediaWiki revision number. Keep the previous one by default,
        # in case there's no edit to perform.
        my $oldrevid = shift;
        my $newrevid;

        if ($summary eq EMPTY_MESSAGE) {
                $summary = EMPTY;
        }

        my $new_sha1 = $diff_info_split[3];
        my $old_sha1 = $diff_info_split[2];
        my $page_created = ($old_sha1 eq NULL_SHA1);
        my $page_deleted = ($new_sha1 eq NULL_SHA1);
        $complete_file_name = clean_filename($complete_file_name);

        my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
        if (!defined($extension)) {
                $extension = EMPTY;
        }
        if ($extension eq 'mw') {
                my $ns = get_mw_namespace_id_for_page($complete_file_name);
                if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
                        print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
                        return ($oldrevid, 'ok');
                }
                my $file_content;
                if ($page_deleted) {
                        # Deleting a page usually requires
                        # special privileges. A common
                        # convention is to replace the page
                        # with this content instead:
                        $file_content = DELETED_CONTENT;
                } else {
                        $file_content = run_git("cat-file blob ${new_sha1}");
                }

                $mediawiki = connect_maybe($mediawiki, $remotename, $url);

                my $result = $mediawiki->edit( {
                        action => 'edit',
                        summary => $summary,
                        title => $title,
                        basetimestamp => $basetimestamps{$oldrevid},
                        text => mediawiki_clean($file_content, $page_created),
                                  }, {
                                          skip_encoding => 1 # Helps with names with accented characters
                                  });
                if (!$result) {
                        if ($mediawiki->{error}->{code} == 3) {
                                # An edit conflict, treated as a non-fast-forward
                                print {*STDERR} 'Warning: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                                    ".\n";
                                return ($oldrevid, 'non-fast-forward');
                        } else {
                                # Other errors. Shouldn't happen => just die()
                                die 'Fatal: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
                        }
                }
                $newrevid = $result->{edit}->{newrevid};
                print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
        } elsif ($export_media) {
                $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                           $extension, $page_deleted,
                                           $summary);
        } else {
                print {*STDERR} "Ignoring media file ${title}\n";
        }
        $newrevid = ($newrevid or $oldrevid);
        return ($newrevid, 'ok');
}

sub mw_push {
        # Multiple push statements can follow each other.
        my @refsspecs = (shift, get_more_refs('push'));
        my $pushed;
        for my $refspec (@refsspecs) {
                my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
                    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
                if ($force) {
                        print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
                }
                if ($local eq EMPTY) {
                        print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
                        print {*STDOUT} "error ${remote} cannot delete\n";
                        next;
                }
                if ($remote ne 'refs/heads/master') {
                        print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
                        print {*STDOUT} "error ${remote} only master allowed\n";
                        next;
                }
                if (mw_push_revision($local, $remote)) {
                        $pushed = 1;
                }
        }

        # Notify Git that the push is done
        print {*STDOUT} "\n";

        if ($pushed && $dumb_push) {
                print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
                print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
                print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
                print {*STDERR} "\n";
                print {*STDERR} "  git pull --rebase\n";
                print {*STDERR} "\n";
        }
        return;
}

sub mw_push_revision {
        my $local = shift;
        my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
        my $last_local_revid = get_last_local_revision();
        print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
        my $last_remote_revid = get_last_remote_revision();
        my $mw_revision = $last_remote_revid;

        # Get sha1 of commit pointed by local HEAD
        my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
        chomp($HEAD_sha1);
        # Get sha1 of commit pointed by remotes/$remotename/master
        my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
        chomp($remoteorigin_sha1);

        if ($last_local_revid > 0 &&
            $last_local_revid < $last_remote_revid) {
                return error_non_fast_forward($remote);
        }

        if ($HEAD_sha1 eq $remoteorigin_sha1) {
                # nothing to push
                return 0;
        }

        # Get every commit in between HEAD and refs/remotes/origin/master,
        # including HEAD and refs/remotes/origin/master
        my @commit_pairs = ();
        if ($last_local_revid > 0) {
                my $parsed_sha1 = $remoteorigin_sha1;
                # Find a path from the last MediaWiki commit to the pushed commit
                print {*STDERR} "Computing path from local to remote ...\n";
                my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
                my %local_ancestry;
                foreach my $line (@local_ancestry) {
                        if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                                foreach my $parent (split(/ /, $parents)) {
                                        $local_ancestry{$parent} = $child;
                                }
                        } elsif ($line !~ /^([a-f0-9]+)/) {
                                die "Unexpected output from git rev-list: ${line}\n";
                        }
                }
                while ($parsed_sha1 ne $HEAD_sha1) {
                        my $child = $local_ancestry{$parsed_sha1};
                        if (!$child) {
                                print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
                                return error_non_fast_forward($remote);
                        }
                        push(@commit_pairs, [$parsed_sha1, $child]);
                        $parsed_sha1 = $child;
                }
        } else {
                # No remote mediawiki revision. Export the whole
                # history (linearized with --first-parent)
                print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
                my $history = run_git("rev-list --first-parent --children ${local}");
                my @history = split(/\n/, $history);
                @history = @history[1..$#history];
                foreach my $line (reverse @history) {
                        my @commit_info_split = split(/[ \n]/, $line);
                        push(@commit_pairs, \@commit_info_split);
                }
        }

        foreach my $commit_info_split (@commit_pairs) {
                my $sha1_child = @{$commit_info_split}[0];
                my $sha1_commit = @{$commit_info_split}[1];
                my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
                # TODO: we could detect renames, and encode them with a #redirect on the wiki.
                # TODO: for now, it's just a delete+add
                my @diff_info_list = split(/\0/, $diff_infos);
                # Keep the subject line of the commit message as the mediawiki comment for the revision
                my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
                chomp($commit_msg);
                # Push every blob
                while (@diff_info_list) {
                        my $status;
                        # git diff-tree -z gives an output like
                        # <metadata>\0<filename1>\0
                        # <metadata>\0<filename2>\0
                        # and we've split on \0.
                        my $info = shift(@diff_info_list);
                        my $file = shift(@diff_info_list);
                        ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
                        if ($status eq 'non-fast-forward') {
                                # we may already have sent part of the
                                # commit to MediaWiki, but it's too
                                # late to cancel it. Stop the push in
                                # the middle, but still give an
                                # accurate error message.
                                return error_non_fast_forward($remote);
                        }
                        if ($status ne 'ok') {
                                die("Unknown error from mw_push_file()\n");
                        }
                }
                if (!$dumb_push) {
                        run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
                }
        }

        print {*STDOUT} "ok ${remote}\n";
        return 1;
}

sub get_allowed_file_extensions {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $query = {
                action => 'query',
                meta => 'siteinfo',
                siprop => 'fileextensions'
        };
        my $result = $mediawiki->api($query);
        my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
        my %hashFile = map { $_ => 1 } @file_extensions;

        return %hashFile;
}

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return the MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
        my $name = shift;

        if (!exists $namespace_id{$name}) {
                # Check the configuration file to see whether the record
                # for that namespace is already cached. Namespaces are
                # stored in the form "Name_of_namespace:Id_namespace",
                # e.g. "File:6".
                my @temp = split(/\n/,
                                 run_git("config --get-all remote.${remotename}.namespaceCache"));
                chomp(@temp);
                foreach my $ns (@temp) {
                        my ($n, $id) = split(/:/, $ns);
                        if ($id eq 'notANameSpace') {
                                $namespace_id{$n} = {is_namespace => 0};
                        } else {
                                $namespace_id{$n} = {is_namespace => 1, id => $id};
                        }
                        $cached_mw_namespace_id{$n} = 1;
                }
        }

        if (!exists $namespace_id{$name}) {
                print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
                # NS not found => get the namespace id from MW and store
                # it in the configuration file.
                my $query = {
                        action => 'query',
                        meta => 'siteinfo',
                        siprop => 'namespaces'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
                        if (defined($ns->{id}) && defined($ns->{canonical})) {
                                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                                if ($ns->{'*'}) {
                                        # alias (e.g. French Fichier: as an alias for the canonical File:)
                                        $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                                }
                        }
                }
        }

        my $ns = $namespace_id{$name};
        my $id;

        if (!defined $ns) {
                print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
                $ns = {is_namespace => 0};
                $namespace_id{$name} = $ns;
        }

        if ($ns->{is_namespace}) {
                $id = $ns->{id};
        }

        # Store "notANameSpace" as a special value for nonexistent namespaces
        my $store_id = ($id || 'notANameSpace');

        # Store explicitly requested namespaces on disk
        if (!exists $cached_mw_namespace_id{$name}) {
                run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
                $cached_mw_namespace_id{$name} = 1;
        }
        return $id;
}

sub get_mw_namespace_id_for_page {
        my $page = shift;
        if ($page =~ /^([^:]*):/) {
                # Pass only the namespace prefix: get_mw_namespace_id()
                # expects a namespace name such as "File", not a full
                # page title.
                return get_mw_namespace_id($1);
        } else {
                return;
        }
}