contrib/mw-to-git/git-remote-mediawiki.perl (as of commit ac1998d, "git-rebase: fix typo")
#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ":utf8";
binmode STDOUT, ":utf8";

use URI::Escape;
use IPC::Open2;

use warnings;

# MediaWiki filenames can contain forward slashes. This variable decides
# which pattern replaces them in filenames on the Git side.
use constant SLASH_REPLACEMENT => "%2F";

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';

my $remotename = $ARGV[0];
my $url = $ARGV[1];

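# A sketch of how this helper gets invoked (illustrative, with a
# placeholder URL): Git runs the helper itself for mediawiki:: URLs,
# passing the remote name and the URL as the two arguments above, e.g.
#
#   git clone mediawiki::http://example.com/wiki
#
# ends up running something like:
#
#   git-remote-mediawiki origin http://example.com/wiki
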
# Accept both space-separated and multiple keys in config file.
# Spaces in page names must be written as _ anyway, since the list is
# space-separated.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);

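# Example configuration (hypothetical remote "origin"): limit the import
# to a few pages and categories with
#
#   git config --add remote.origin.pages "Main_Page Another_Page"
#   git config --add remote.origin.categories "SomeCategory"
#
# Page names containing spaces must be written with underscores.
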
# Import media files on pull
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");

# Export media files on push
my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
chomp($export_media);
$export_media = !($export_media eq "false");

my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
unless ($fetch_strategy) {
        $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
}
chomp($fetch_strategy);
unless ($fetch_strategy) {
        $fetch_strategy = "by_page";
}

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
unless ($dumb_push) {
        $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq "true");

my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove the
# user name and the '@' sign, to avoid authors like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
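# For instance, "https://user:password@example.com/wiki" becomes
# "example.com/wiki", which is then used as the e-mail domain of
# imported commits.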

# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
        chomp;
        @cmd = split(/ /);
        if (defined($cmd[0])) {
                # Line not blank
                if ($cmd[0] eq "capabilities") {
                        die("Too many arguments for capabilities") unless (!defined($cmd[1]));
                        mw_capabilities();
                } elsif ($cmd[0] eq "list") {
                        die("Too many arguments for list") unless (!defined($cmd[2]));
                        mw_list($cmd[1]);
                } elsif ($cmd[0] eq "import") {
                        die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
                        mw_import($cmd[1]);
                } elsif ($cmd[0] eq "option") {
                        die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
                        mw_option($cmd[1],$cmd[2]);
                } elsif ($cmd[0] eq "push") {
                        mw_push($cmd[1]);
                } else {
                        print STDERR "Unknown command. Aborting...\n";
                        last;
                }
        } else {
                # Blank line: we should terminate
                last;
        }

        BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                         # command is fully processed.
}
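# A typical exchange with Git on stdin/stdout looks like this (a sketch
# of the remote-helper protocol, not a verbatim trace; ">" is Git,
# "<" is this helper):
#
#   > capabilities
#   < refspec refs/heads/*:refs/mediawiki/origin/*
#   < import
#   < list
#   < push
#   <
#   > list
#   < ? refs/heads/master
#   < @refs/heads/master HEAD
#   <
#   > import refs/heads/master
#   < ... fast-import stream ...
#   < done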

########################## Functions ##############################

## credential API management (generic functions)

sub credential_read {
        my %credential;
        my $reader = shift;
        my $op = shift;
        while (<$reader>) {
                my ($key, $value) = /([^=]*)=(.*)/;
                if (not defined $key) {
                        die "ERROR receiving response from git credential $op:\n$_\n";
                }
                $credential{$key} = $value;
        }
        return %credential;
}

sub credential_write {
        my $credential = shift;
        my $writer = shift;
        # url overwrites other fields, so it must come first
        print $writer "url=$credential->{url}\n" if exists $credential->{url};
        while (my ($key, $value) = each(%$credential) ) {
                if (length $value && $key ne 'url') {
                        print $writer "$key=$value\n";
                }
        }
}

sub credential_run {
        my $op = shift;
        my $credential = shift;
        my $pid = open2(my $reader, my $writer, "git credential $op");
        credential_write($credential, $writer);
        print $writer "\n";
        close($writer);

        if ($op eq "fill") {
                %$credential = credential_read($reader, $op);
        } else {
                if (<$reader>) {
                        die "ERROR while running git credential $op:\n$_";
                }
        }
        close($reader);
        waitpid($pid, 0);
        my $child_exit_status = $? >> 8;
        if ($child_exit_status != 0) {
                die "'git credential $op' failed with code $child_exit_status.";
        }
}
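# The "git credential" protocol exchanges key=value lines terminated by
# a blank line. For "fill", we might send (illustrative values):
#
#   url=http://example.com/wiki
#   username=alice
#
# and read back something like:
#
#   protocol=http
#   host=example.com
#   username=alice
#   password=s3cret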

# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
        if ($mediawiki) {
                return;
        }
        $mediawiki = MediaWiki::API->new;
        $mediawiki->{config}->{api_url} = "$url/api.php";
        if ($wiki_login) {
                my %credential = (url => $url);
                $credential{username} = $wiki_login;
                $credential{password} = $wiki_passwd;
                credential_run("fill", \%credential);
                my $request = {lgname => $credential{username},
                               lgpassword => $credential{password},
                               lgdomain => $wiki_domain};
                if ($mediawiki->login($request)) {
                        credential_run("approve", \%credential);
                        print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
                } else {
                        print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
                        print STDERR "  (error " .
                                $mediawiki->{error}->{code} . ': ' .
                                $mediawiki->{error}->{details} . ")\n";
                        credential_run("reject", \%credential);
                        exit 1;
                }
        }
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
        my $pages = shift;
        get_mw_page_list(\@tracked_pages, $pages);
}

sub get_mw_page_list {
        my $page_list = shift;
        my $pages = shift;
        my @some_pages = @$page_list;
        while (@some_pages) {
                # Query in slices of 50 pages, the limit the MediaWiki
                # API accepts per 'titles' query for most users.
                my $last = 49;
                if ($#some_pages < $last) {
                        $last = $#some_pages;
                }
                my @slice = @some_pages[0..$last];
                get_mw_first_pages(\@slice, $pages);
                @some_pages = @some_pages[50..$#some_pages];
        }
}

sub get_mw_tracked_categories {
        my $pages = shift;
        foreach my $category (@tracked_categories) {
                if (index($category, ':') < 0) {
                        # MediaWiki requires the Category
                        # prefix, but let's not force the user
                        # to specify it.
                        $category = "Category:" . $category;
                }
                my $mw_pages = $mediawiki->list( {
                        action => 'query',
                        list => 'categorymembers',
                        cmtitle => $category,
                        cmlimit => 'max' } )
                        || die $mediawiki->{error}->{code} . ': '
                                . $mediawiki->{error}->{details};
                foreach my $page (@{$mw_pages}) {
                        $pages->{$page->{title}} = $page;
                }
        }
}

sub get_mw_all_pages {
        my $pages = shift;
        # No user-provided list, get the list of pages from the API.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                print STDERR "fatal: could not get the list of wiki pages.\n";
                print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of the page list.
sub get_mw_first_pages {
        my $some_pages = shift;
        my @some_pages = @{$some_pages};

        my $pages = shift;

        # pattern 'page1|page2|...' required by the API
        my $titles = join('|', @some_pages);

        my $mw_pages = $mediawiki->api({
                action => 'query',
                titles => $titles,
        });
        if (!defined($mw_pages)) {
                print STDERR "fatal: could not query the list of wiki pages.\n";
                print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
                if ($id < 0) {
                        print STDERR "Warning: page $page->{title} not found on wiki\n";
                } else {
                        $pages->{$page->{title}} = $page;
                }
        }
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
        mw_connect_maybe();

        print STDERR "Listing pages on remote wiki...\n";

        my %pages; # hash on page titles to avoid duplicates
        my $user_defined;
        if (@tracked_pages) {
                $user_defined = 1;
                # The user provided a list of page titles, but we
                # still need to query the API to get the page IDs.
                get_mw_tracked_pages(\%pages);
        }
        if (@tracked_categories) {
                $user_defined = 1;
                get_mw_tracked_categories(\%pages);
        }
        if (!$user_defined) {
                get_mw_all_pages(\%pages);
        }
        if ($import_media) {
                print STDERR "Getting media files for selected pages...\n";
                if ($user_defined) {
                        get_linked_mediafiles(\%pages);
                } else {
                        get_all_mediafiles(\%pages);
                }
        }
        print STDERR (scalar keys %pages) . " pages found.\n";
        return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
        my $args = shift;
        my $encoding = (shift || "encoding(UTF-8)");
        open(my $git, "-|:$encoding", "git " . $args);
        my $res = do { local $/; <$git> };
        close($git);

        return $res;
}
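# Typical uses, both taken from elsewhere in this file: textual output
# is decoded as UTF-8 by default, binary content must ask for "raw":
#
#   my $head = run_git("rev-parse HEAD");
#   my $blob = run_git("cat-file blob $sha1", "raw");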


sub get_all_mediafiles {
        my $pages = shift;
        # Get the list of media file pages from the API. They live in a
        # separate namespace, and only one namespace can be queried at
        # a time.
        my $mw_pages = $mediawiki->list({
                action => 'query',
                list => 'allpages',
                apnamespace => get_mw_namespace_id("File"),
                aplimit => 'max'
        });
        if (!defined($mw_pages)) {
                print STDERR "fatal: could not get the list of pages for media files.\n";
                print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        foreach my $page (@{$mw_pages}) {
                $pages->{$page->{title}} = $page;
        }
}

sub get_linked_mediafiles {
        my $pages = shift;
        my @titles = map $_->{title}, values(%{$pages});

        # The query is split into small batches because of the MW API
        # limit on the number of links to be returned (500 links max).
        my $batch = 10;
        while (@titles) {
                if ($#titles < $batch) {
                        $batch = $#titles;
                }
                my @slice = @titles[0..$batch];

                # pattern 'page1|page2|...' required by the API
                my $mw_titles = join('|', @slice);

                # Media files can be included in or linked from a page;
                # get both.
                my $query = {
                        action => 'query',
                        prop => 'links|images',
                        titles => $mw_titles,
                        plnamespace => get_mw_namespace_id("File"),
                        pllimit => 'max'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
                        my @media_titles;
                        if (defined($page->{links})) {
                                my @link_titles = map $_->{title}, @{$page->{links}};
                                push(@media_titles, @link_titles);
                        }
                        if (defined($page->{images})) {
                                my @image_titles = map $_->{title}, @{$page->{images}};
                                push(@media_titles, @image_titles);
                        }
                        if (@media_titles) {
                                get_mw_page_list(\@media_titles, $pages);
                        }
                }

                @titles = @titles[($batch+1)..$#titles];
        }
}

sub get_mw_mediafile_for_page_revision {
        # Name of the file on Wiki, with the prefix.
        my $filename = shift;
        my $timestamp = shift;
        my %mediafile;

        # Search if a media file with the given timestamp exists on
        # MediaWiki. If so, download it.
        my $query = {
                action => 'query',
                prop => 'imageinfo',
                titles => "File:" . $filename,
                iistart => $timestamp,
                iiend => $timestamp,
                iiprop => 'timestamp|archivename|url',
                iilimit => 1
        };
        my $result = $mediawiki->api($query);

        my ($fileid, $file) = each( %{$result->{query}->{pages}} );
        # If not defined, it means there is no revision of the file for
        # the given timestamp.
        if (defined($file->{imageinfo})) {
                $mediafile{title} = $filename;

                my $fileinfo = pop(@{$file->{imageinfo}});
                $mediafile{timestamp} = $fileinfo->{timestamp};
                # MediaWiki::API's download function doesn't support https URLs
                # and can't download old versions of files.
                print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
                $mediafile{content} = download_mw_mediafile($fileinfo->{url});
        }
        return %mediafile;
}

sub download_mw_mediafile {
        my $url = shift;

        my $response = $mediawiki->{ua}->get($url);
        if ($response->code == 200) {
                return $response->decoded_content;
        } else {
                print STDERR "Error downloading media file:\n";
                print STDERR "URL: $url\n";
                print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
                exit 1;
        }
}

sub get_last_local_revision {
        # Get note regarding last mediawiki revision
        my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
        my @note_info = split(/ /, $note);

        my $lastrevision_number;
        if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
                print STDERR "No previous mediawiki revision found";
                $lastrevision_number = 0;
        } else {
                # Notes are formatted as: mediawiki_revision: #number
                $lastrevision_number = $note_info[1];
                chomp($lastrevision_number);
                print STDERR "Last local mediawiki revision found is $lastrevision_number";
        }
        return $lastrevision_number;
}
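# The note attached to the last imported commit can be inspected with,
# e.g. (assuming a remote called "origin"; "42" is an example id):
#
#   $ git notes --ref=origin/mediawiki show refs/mediawiki/origin/master
#   mediawiki_revision: 42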

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
        mw_connect_maybe();

        my $query = {
                action => 'query',
                list => 'recentchanges',
                prop => 'revisions',
                rclimit => '1',
                rcdir => 'older',
        };
        my $result = $mediawiki->api($query);
        return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
        mw_connect_maybe();

        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my $max_rev_num = 0;

        print STDERR "Getting last revision id on tracked pages...\n";

        foreach my $page (@pages) {
                my $id = $page->{pageid};

                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'ids|timestamp',
                        pageids => $id,
                };

                my $result = $mediawiki->api($query);

                my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

                $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

                $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
        }

        print STDERR "Last remote revision found is $max_rev_num.\n";
        return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
        my $string = shift;
        my $page_created = shift;
        # MediaWiki does not allow blank space at the end of a page; a page
        # always ends with a single \n. This function right-trims the string
        # and adds a \n at the end to follow this rule.
        $string =~ s/\s+$//;
        if ($string eq "" && $page_created) {
                # Creating empty pages is forbidden.
                $string = EMPTY_CONTENT;
        }
        return $string."\n";
}

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
        my $string = shift;
        if ($string eq EMPTY_CONTENT) {
                $string = "";
        }
        # This \n is important. This is due to MediaWiki's way of handling
        # the end of files.
        return $string."\n";
}

sub mediawiki_clean_filename {
        my $filename = shift;
        $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
        # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
        # Do a variant of URL-encoding, i.e. something that looks like
        # URL-encoding but with _ added to prevent MediaWiki from thinking
        # this is an actual special character.
        $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
        # If URI-escaping had been applied earlier, it would have to be
        # undone here, before anything else.

        return $filename;
}

sub mediawiki_smudge_filename {
        my $filename = shift;
        $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
        $filename =~ s/ /_/g;
        # Decode forbidden characters encoded in mediawiki_clean_filename
        $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
        return $filename;
}
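# Worked examples of the round-trip (a sketch; the ".mw" extension is
# appended elsewhere):
#   wiki title "Foo/Bar"    <->  Git filename "Foo%2FBar"
#   wiki title "Main Page"   ->  Git filename "Main_Page"
#       (MediaWiki treats '_' and ' ' as equivalent, so this is lossless)
#   a '|' in a Git filename  ->  "_%_7c" on the wiki (0x7c = ord('|')),
#       since MediaWiki forbids '|' even URL-encoded.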

sub literal_data {
        my ($content) = @_;
        print STDOUT "data ", bytes::length($content), "\n", $content;
}

sub literal_data_raw {
        # Output possibly binary content.
        my ($content) = @_;
        # Avoid confusion between size in bytes and in characters
        utf8::downgrade($content);
        binmode STDOUT, ":raw";
        print STDOUT "data ", bytes::length($content), "\n", $content;
        binmode STDOUT, ":utf8";
}
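# Both helpers emit fast-import's "data" command: the byte count on the
# "data" line, then the content itself. For example, "data 6\nhello\n"
# transmits the five letters plus the final newline.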

sub mw_capabilities {
        # Revisions are imported to the private namespace
        # refs/mediawiki/$remotename/ by the helper and fetched into
        # refs/remotes/$remotename later by fetch.
        print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
        print STDOUT "import\n";
        print STDOUT "list\n";
        print STDOUT "push\n";
        print STDOUT "\n";
}

sub mw_list {
        # MediaWiki does not have branches, so we arbitrarily consider a
        # single branch called master, with HEAD pointing to it.
        print STDOUT "? refs/heads/master\n";
        print STDOUT "\@refs/heads/master HEAD\n";
        print STDOUT "\n";
}

sub mw_option {
        print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
        print STDOUT "unsupported\n";
}

sub fetch_mw_revisions_for_page {
        my $page = shift;
        my $id = shift;
        my $fetch_from = shift;
        my @page_revs = ();
        my $query = {
                action => 'query',
                prop => 'revisions',
                rvprop => 'ids',
                rvdir => 'newer',
                rvstartid => $fetch_from,
                rvlimit => 500,
                pageids => $id,
        };

        my $revnum = 0;
        # Get 500 revisions at a time due to the MediaWiki API limit
        while (1) {
                my $result = $mediawiki->api($query);

                # Parse each of those 500 revisions
                foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
                        my $page_rev_ids;
                        $page_rev_ids->{pageid} = $page->{pageid};
                        $page_rev_ids->{revid} = $revision->{revid};
                        push(@page_revs, $page_rev_ids);
                        $revnum++;
                }
                last unless $result->{'query-continue'};
                $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
        }
        if ($shallow_import && @page_revs) {
                print STDERR "  Found 1 revision (shallow import).\n";
                @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
                return $page_revs[0];
        }
        print STDERR "  Found ", $revnum, " revision(s).\n";
        return @page_revs;
}

sub fetch_mw_revisions {
        my $pages = shift; my @pages = @{$pages};
        my $fetch_from = shift;

        my @revisions = ();
        my $n = 1;
        foreach my $page (@pages) {
                my $id = $page->{pageid};

                print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
                $n++;
                my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
                @revisions = (@page_revs, @revisions);
        }

        return ($n, @revisions);
}

sub fe_escape_path {
        my $path = shift;
        $path =~ s/\\/\\\\/g;
        $path =~ s/"/\\"/g;
        $path =~ s/\n/\\n/g;
        return '"' . $path . '"';
}
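# e.g. fe_escape_path('my "page".mw') returns '"my \"page\".mw"', the
# quoted form expected by fast-import.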

sub import_file_revision {
        my $commit = shift;
        my %commit = %{$commit};
        my $full_import = shift;
        my $n = shift;
        my $mediafile = shift;
        my %mediafile;
        if ($mediafile) {
                %mediafile = %{$mediafile};
        }

        my $title = $commit{title};
        my $comment = $commit{comment};
        my $content = $commit{content};
        my $author = $commit{author};
        my $date = $commit{date};

        print STDOUT "commit refs/mediawiki/$remotename/master\n";
        print STDOUT "mark :$n\n";
        print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
        literal_data($comment);

        # If it's not a clone, we need to know where to start from
        if (!$full_import && $n == 1) {
                print STDOUT "from refs/mediawiki/$remotename/master^0\n";
        }
        if ($content ne DELETED_CONTENT) {
                print STDOUT "M 644 inline " .
                    fe_escape_path($title . ".mw") . "\n";
                literal_data($content);
                if (%mediafile) {
                        print STDOUT "M 644 inline "
                            . fe_escape_path($mediafile{title}) . "\n";
                        literal_data_raw($mediafile{content});
                }
                print STDOUT "\n\n";
        } else {
                print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
        }

        # MediaWiki revision number in the git note
        if ($full_import && $n == 1) {
                print STDOUT "reset refs/notes/$remotename/mediawiki\n";
        }
        print STDOUT "commit refs/notes/$remotename/mediawiki\n";
        print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
        literal_data("Note added by git-mediawiki during import");
        if (!$full_import && $n == 1) {
                print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
        }
        print STDOUT "N inline :$n\n";
        literal_data("mediawiki_revision: " . $commit{mw_revision});
        print STDOUT "\n\n";
}
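# For one revision, the stream sent to fast-import looks roughly like
# this (illustrative values, byte counts included):
#
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer Alice <Alice@example.com> 1234567890 +0000
#   data 14
#   Initial import
#   M 644 inline "Main_Page.mw"
#   data 21
#   Hello from the wiki.
#
# followed by a similar commit on refs/notes/origin/mediawiki carrying
# "mediawiki_revision: <id>".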

# Parse a sequence of
# <cmd> <arg1>
# <cmd> <arg2>
# \n
# (like batch sequence of import and sequence of push statements)
sub get_more_refs {
        my $cmd = shift;
        my @refs;
        while (1) {
                my $line = <STDIN>;
                if ($line =~ m/^$cmd (.*)$/) {
                        push(@refs, $1);
                } elsif ($line eq "\n") {
                        return @refs;
                } else {
                        die("Invalid command in a '$cmd' batch: $line");
                }
        }
}

sub mw_import {
        # Multiple import commands can follow each other.
        my @refs = (shift, get_more_refs("import"));
        foreach my $ref (@refs) {
                mw_import_ref($ref);
        }
        print STDOUT "done\n";
}

sub mw_import_ref {
        my $ref = shift;
        # The remote helper will call "import HEAD" and
        # "import refs/heads/master".
        # Since HEAD is a symbolic ref to master (by convention,
        # followed by the output of the command "list" that we gave),
        # we don't need to do anything in this case.
        if ($ref eq "HEAD") {
                return;
        }

        mw_connect_maybe();

        print STDERR "Searching revisions...\n";
        my $last_local = get_last_local_revision();
        my $fetch_from = $last_local + 1;
        if ($fetch_from == 1) {
                print STDERR ", fetching from beginning.\n";
        } else {
                print STDERR ", fetching from here.\n";
        }

        my $n = 0;
        if ($fetch_strategy eq "by_rev") {
                print STDERR "Fetching & writing export data by revs...\n";
                $n = mw_import_ref_by_revs($fetch_from);
        } elsif ($fetch_strategy eq "by_page") {
                print STDERR "Fetching & writing export data by pages...\n";
                $n = mw_import_ref_by_pages($fetch_from);
        } else {
                print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
                print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
                exit 1;
        }

        if ($fetch_from == 1 && $n == 0) {
                print STDERR "You appear to have cloned an empty MediaWiki.\n";
                # Something has to be done remote-helper side. If nothing is done, an error is
                # thrown saying that HEAD is referring to unknown object 0000000000000000000
                # and the clone fails.
        }
}

sub mw_import_ref_by_pages {

        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();
        my @pages = values(%pages_hash);

        my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

        @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
        my @revision_ids = map $_->{revid}, @revisions;

        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

        my $fetch_from = shift;
        my %pages_hash = get_mw_pages();

        my $last_remote = get_last_global_remote_rev();
        my @revision_ids = $fetch_from..$last_remote;
        return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
        my $fetch_from = shift;
        my $revision_ids = shift;
        my $pages = shift;

        my $n = 0;
        my $n_actual = 0;
        my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

        foreach my $pagerevid (@$revision_ids) {
                # Count page even if we skip it, since we display
                # $n/$total and $total includes skipped pages.
                $n++;

                # fetch the content of the pages
                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'content|timestamp|comment|user|ids',
                        revids => $pagerevid,
                };

                my $result = $mediawiki->api($query);

                if (!$result) {
                        die "Failed to retrieve modified page for revision $pagerevid";
                }

                if (defined($result->{query}->{badrevids}->{$pagerevid})) {
                        # The revision id does not exist on the remote wiki.
                        next;
                }

                if (!defined($result->{query}->{pages})) {
                        die "Invalid revision $pagerevid.";
                }

                my @result_pages = values(%{$result->{query}->{pages}});
                my $result_page = $result_pages[0];
                my $rev = $result_pages[0]->{revisions}->[0];

                my $page_title = $result_page->{title};

                if (!exists($pages->{$page_title})) {
                        print STDERR "$n/", scalar(@$revision_ids),
                                ": Skipping revision #$rev->{revid} of $page_title\n";
                        next;
                }

                $n_actual++;

                my %commit;
                $commit{author} = $rev->{user} || 'Anonymous';
                $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
                $commit{title} = mediawiki_smudge_filename($page_title);
                $commit{mw_revision} = $rev->{revid};
                $commit{content} = mediawiki_smudge($rev->{'*'});

                if (!defined($rev->{timestamp})) {
                        $last_timestamp++;
                } else {
                        $last_timestamp = $rev->{timestamp};
                }
                $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

                # Differentiate classic pages from media files.
                my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
                my %mediafile;
                if ($namespace) {
                        my $id = get_mw_namespace_id($namespace);
                        if ($id && $id == get_mw_namespace_id("File")) {
                                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
                        }
                }
                # If this revision is the media description page for a new
                # version of a file, do one common commit for both the file
                # and the page. Otherwise, commit only the page.
                print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
                import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
        }

        return $n_actual;
}

sub error_non_fast_forward {
        my $advice = run_git("config --bool advice.pushNonFastForward");
        chomp($advice);
        if ($advice ne "false") {
                # Native git-push would show this after the summary.
                # We can't ask it to display it cleanly, so print it
                # ourselves before.
                print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
                print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
                print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
        }
        print STDOUT "error $_[0] \"non-fast-forward\"\n";
        return 0;
}

sub mw_upload_file {
        my $complete_file_name = shift;
        my $new_sha1 = shift;
        my $extension = shift;
        my $file_deleted = shift;
        my $summary = shift;
        my $newrevid;
        my $path = "File:" . $complete_file_name;
        my %hashFiles = get_allowed_file_extensions();
        if (!exists($hashFiles{$extension})) {
                print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
                print STDERR "Check the configuration of file uploads in your mediawiki.\n";
                return $newrevid;
        }
        # Deleting and uploading a file requires a privileged user
        if ($file_deleted) {
                mw_connect_maybe();
                my $query = {
                        action => 'delete',
                        title => $path,
                        reason => $summary
                };
                if (!$mediawiki->edit($query)) {
                        print STDERR "Failed to delete file on remote wiki\n";
                        print STDERR "Check your permissions on the remote site. Error code:\n";
                        print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
                        exit 1;
                }
        } else {
                # Don't let perl try to interpret file content as UTF-8 => use "raw"
                my $content = run_git("cat-file blob $new_sha1", "raw");
                if ($content ne "") {
                        mw_connect_maybe();
                        $mediawiki->{config}->{upload_url} =
                                "$url/index.php/Special:Upload";
                        $mediawiki->edit({
                                action => 'upload',
                                filename => $complete_file_name,
                                comment => $summary,
                                file => [undef,
                                         $complete_file_name,
                                         Content => $content],
                                ignorewarnings => 1,
                        }, {
                                skip_encoding => 1
                        } ) || die $mediawiki->{error}->{code} . ':'
                                 . $mediawiki->{error}->{details};
                        my $last_file_page = $mediawiki->get_page({title => $path});
                        $newrevid = $last_file_page->{revid};
                        print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
                } else {
                        print STDERR "Empty file $complete_file_name not pushed.\n";
                }
        }
        return $newrevid;
}

sub mw_push_file {
        my $diff_info = shift;
        # $diff_info contains a string in this format:
        # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
        my @diff_info_split = split(/[ \t]/, $diff_info);

        # Filename, including .mw extension
        my $complete_file_name = shift;
        # Commit message
        my $summary = shift;
        # MediaWiki revision number. Keep the previous one by default,
        # in case there's no edit to perform.
        my $oldrevid = shift;
        my $newrevid;

        if ($summary eq EMPTY_MESSAGE) {
                $summary = '';
        }

        my $new_sha1 = $diff_info_split[3];
        my $old_sha1 = $diff_info_split[2];
        my $page_created = ($old_sha1 eq NULL_SHA1);
        my $page_deleted = ($new_sha1 eq NULL_SHA1);
        $complete_file_name = mediawiki_clean_filename($complete_file_name);

        my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
        if (!defined($extension)) {
                $extension = "";
        }
        if ($extension eq "mw") {
                my $ns = get_mw_namespace_id_for_page($complete_file_name);
                if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
                        print STDERR "Ignoring media file related page: $complete_file_name\n";
                        return ($oldrevid, "ok");
                }
                my $file_content;
                if ($page_deleted) {
                        # Deleting a page usually requires
                        # special privileges. A common
                        # convention is to replace the page
                        # with this content instead:
                        $file_content = DELETED_CONTENT;
                } else {
                        $file_content = run_git("cat-file blob $new_sha1");
                }

                mw_connect_maybe();

                my $result = $mediawiki->edit( {
                        action => 'edit',
                        summary => $summary,
                        title => $title,
                        basetimestamp => $basetimestamps{$oldrevid},
                        text => mediawiki_clean($file_content, $page_created),
                }, {
                        skip_encoding => 1 # Helps with names with accentuated characters
                });
                if (!$result) {
                        if ($mediawiki->{error}->{code} == 3) {
                                # edit conflicts, considered as non-fast-forward
                                print STDERR 'Warning: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                                    ".\n";
                                return ($oldrevid, "non-fast-forward");
                        } else {
                                # Other errors. Shouldn't happen => just die()
                                die 'Fatal: Error ' .
                                    $mediawiki->{error}->{code} .
                                    ' from mediawiki: ' . $mediawiki->{error}->{details};
                        }
                }
                $newrevid = $result->{edit}->{newrevid};
                print STDERR "Pushed file: $new_sha1 - $title\n";
        } elsif ($export_media) {
                $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                           $extension, $page_deleted,
                                           $summary);
        } else {
                print STDERR "Ignoring media file $title\n";
        }
        $newrevid = ($newrevid or $oldrevid);
        return ($newrevid, "ok");
}

sub mw_push {
        # Multiple push statements can follow each other
        my @refsspecs = (shift, get_more_refs("push"));
        my $pushed;
        for my $refspec (@refsspecs) {
                my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
                    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
                if ($force) {
                        print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
                }
                if ($local eq "") {
                        print STDERR "Cannot delete remote branch on a MediaWiki\n";
                        print STDOUT "error $remote cannot delete\n";
                        next;
                }
                if ($remote ne "refs/heads/master") {
                        print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
                        print STDOUT "error $remote only master allowed\n";
                        next;
                }
                if (mw_push_revision($local, $remote)) {
                        $pushed = 1;
                }
        }

        # Notify Git that the push is done
        print STDOUT "\n";

        if ($pushed && $dumb_push) {
                print STDERR "Just pushed some revisions to MediaWiki.\n";
                print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
                print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
                print STDERR "\n";
                print STDERR "  git pull --rebase\n";
                print STDERR "\n";
        }
}

sub mw_push_revision {
        my $local = shift;
        my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
        my $last_local_revid = get_last_local_revision();
        print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
        my $last_remote_revid = get_last_remote_revision();
        my $mw_revision = $last_remote_revid;

        # Get sha1 of commit pointed by local HEAD
        my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
        # Get sha1 of commit pointed by remotes/$remotename/master
        my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
        chomp($remoteorigin_sha1);

        if ($last_local_revid > 0 &&
            $last_local_revid < $last_remote_revid) {
                return error_non_fast_forward($remote);
        }

        if ($HEAD_sha1 eq $remoteorigin_sha1) {
                # nothing to push
                return 0;
        }

        # Get every commit in between HEAD and refs/remotes/origin/master,
        # including HEAD and refs/remotes/origin/master
        my @commit_pairs = ();
        if ($last_local_revid > 0) {
                my $parsed_sha1 = $remoteorigin_sha1;
                # Find a path from last MediaWiki commit to pushed commit
                print STDERR "Computing path from local to remote ...\n";
                my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
                my %local_ancestry;
                foreach my $line (@local_ancestry) {
                        if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                                foreach my $parent (split(' ', $parents)) {
                                        $local_ancestry{$parent} = $child;
                                }
                        } elsif ($line !~ m/^([a-f0-9]+)/) {
                                die "Unexpected output from git rev-list: $line";
                        }
                }
                while ($parsed_sha1 ne $HEAD_sha1) {
                        my $child = $local_ancestry{$parsed_sha1};
                        if (!$child) {
                                print STDERR "Cannot find a path in history from remote commit to last commit\n";
                                return error_non_fast_forward($remote);
                        }
                        push(@commit_pairs, [$parsed_sha1, $child]);
                        $parsed_sha1 = $child;
                }
        } else {
                # No remote mediawiki revision. Export the whole
                # history (linearized with --first-parent)
                print STDERR "Warning: no common ancestor, pushing complete history\n";
                my $history = run_git("rev-list --first-parent --children $local");
                my @history = split('\n', $history);
                @history = @history[1..$#history];
                foreach my $line (reverse @history) {
                        my @commit_info_split = split(/ |\n/, $line);
                        push(@commit_pairs, \@commit_info_split);
                }
        }

        foreach my $commit_info_split (@commit_pairs) {
                my $sha1_child = @{$commit_info_split}[0];
                my $sha1_commit = @{$commit_info_split}[1];
                my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
                # TODO: we could detect renames, and encode them with a #redirect on the wiki.
                # TODO: for now, it's just a delete+add
                my @diff_info_list = split(/\0/, $diff_infos);
                # Keep the subject line of the commit message as the mediawiki comment for the revision
                my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
                chomp($commit_msg);
                # Push every blob
                while (@diff_info_list) {
                        my $status;
                        # git diff-tree -z gives an output like
                        # <metadata>\0<filename1>\0
                        # <metadata>\0<filename2>\0
                        # and we've split on \0.
                        my $info = shift(@diff_info_list);
                        my $file = shift(@diff_info_list);
                        ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
                        if ($status eq "non-fast-forward") {
                                # we may already have sent part of the
                                # commit to MediaWiki, but it's too
                                # late to cancel it. Stop the push in
                                # the middle, but still give an
                                # accurate error message.
                                return error_non_fast_forward($remote);
                        }
                        if ($status ne "ok") {
                                die("Unknown error from mw_push_file()");
                        }
                }
                unless ($dumb_push) {
                        run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
                        run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
                }
        }

        print STDOUT "ok $remote\n";
        return 1;
}

sub get_allowed_file_extensions {
        mw_connect_maybe();

        my $query = {
                action => 'query',
                meta => 'siteinfo',
                siprop => 'fileextensions'
                };
        my $result = $mediawiki->api($query);
        my @file_extensions = map $_->{ext}, @{$result->{query}->{fileextensions}};
        my %hashFile = map {$_ => 1} @file_extensions;

        return %hashFile;
}

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
        mw_connect_maybe();
        my $name = shift;

        if (!exists $namespace_id{$name}) {
                # Check the configuration file to see whether the id for that
                # namespace is already cached. Namespaces are stored in the
                # form "Name_of_namespace:Id_namespace", e.g. "File:6".
                my @temp = split(/[\n]/, run_git("config --get-all remote."
                                                . $remotename .".namespaceCache"));
                chomp(@temp);
                foreach my $ns (@temp) {
                        my ($n, $id) = split(/:/, $ns);
                        if ($id eq 'notANameSpace') {
                                $namespace_id{$n} = {is_namespace => 0};
                        } else {
                                $namespace_id{$n} = {is_namespace => 1, id => $id};
                        }
                        $cached_mw_namespace_id{$n} = 1;
                }
        }

        if (!exists $namespace_id{$name}) {
                print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
                # NS not found => get namespace id from MW and store it in
                # the configuration file.
                my $query = {
                        action => 'query',
                        meta => 'siteinfo',
                        siprop => 'namespaces'
                };
                my $result = $mediawiki->api($query);

                while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
                        if (defined($ns->{id}) && defined($ns->{canonical})) {
                                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                                if ($ns->{'*'}) {
                                        # alias (e.g. french Fichier: as alias for canonical File:)
                                        $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                                }
                        }
                }
        }

        my $ns = $namespace_id{$name};
        my $id;

        unless (defined $ns) {
                print STDERR "No such namespace $name on MediaWiki.\n";
                $ns = {is_namespace => 0};
                $namespace_id{$name} = $ns;
        }

        if ($ns->{is_namespace}) {
                $id = $ns->{id};
        }

        # Store "notANameSpace" as a special value for nonexistent namespaces
        my $store_id = ($id || 'notANameSpace');

        # Store explicitly requested namespaces on disk
        if (!exists $cached_mw_namespace_id{$name}) {
                run_git("config --add remote.". $remotename
                        .".namespaceCache \"". $name .":". $store_id ."\"");
                $cached_mw_namespace_id{$name} = 1;
        }
        return $id;
}
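# The on-disk cache ends up as config entries like these (hypothetical
# values; "File" is namespace 6 and "Talk" namespace 1 in a standard
# MediaWiki):
#
#   [remote "origin"]
#           namespaceCache = File:6
#           namespaceCache = Talk:1
#           namespaceCache = Foo:notANameSpace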

sub get_mw_namespace_id_for_page {
        if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
                return get_mw_namespace_id($namespace);
        } else {
                return;
        }
}