contrib/mw-to-git/git-remote-mediawiki
#! /usr/bin/perl

# Copyright (C) 2011
#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
#     Claire Fousse <claire.fousse@ensimag.imag.fr>
#     David Amouyal <david.amouyal@ensimag.imag.fr>
#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
#   https://github.com/Bibzball/Git-Mediawiki/wiki
#
# Known limitations:
#
# - Only wiki pages are managed, no support for [[File:...]]
#   attachments.
#
# - Poor performance in the best case: it takes forever to check
#   whether we're up to date (on fetch or push) or to fetch a few
#   revisions from a large wiki, because we use exclusively a
#   page-based synchronization. We could switch to a wiki-wide
#   synchronization when the synchronization involves few revisions
#   but the wiki is large.
#
# - Git renames could be turned into MediaWiki renames (see TODO
#   below).
#
# - Login/password support requires the user to write the password
#   in cleartext in a file (see TODO below).
#
# - No way to import "one page, and all pages included in it".
#
# - Multiple remote MediaWikis have not been very well tested.
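
# Example use (the URL is a placeholder), assuming this script is
# installed as git-remote-mediawiki somewhere in your PATH:
#
#   git clone mediawiki::http://example.com/wiki
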
use strict;
use MediaWiki::API;
use DateTime::Format::ISO8601;
use encoding 'utf8';

# use encoding 'utf8' doesn't change STDERR,
# but we're going to output UTF-8 filenames to STDERR
binmode STDERR, ":utf8";

use URI::Escape;
use warnings;

# MediaWiki filenames can contain forward slashes. This constant
# defines the string they are replaced with in Git filenames.
use constant SLASH_REPLACEMENT => "%2F";
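# e.g. the wiki page "Foo/Bar" becomes the Git file "Foo%2FBar.mw".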

# It's not always possible to delete pages (it may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diffs.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";

my $remotename = $ARGV[0];
my $url = $ARGV[1];

# Accept both space-separated values and multiple keys in the config
# file. Spaces in page titles should be written as _ anyway, because
# the list is split on spaces.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);

my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# TODO: ideally, this should be able to read from the keyboard, but
# we're inside a remote helper, so our stdin is connected to Git, not
# to a terminal.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
chomp($wiki_login);
chomp($wiki_passwd);

# Import only the last revision of each page (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq "true");
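
# Example configuration for a remote named "origin" (page, category
# and login values are placeholders):
#
#   git config remote.origin.pages "Main_Page Sandbox"
#   git config remote.origin.categories "Foo"
#   git config remote.origin.mwLogin "WikiUser"
#   git config remote.origin.mwPassword "s3cret"
#   git config remote.origin.shallow true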

my $wiki_name = $url;
$wiki_name =~ s/[^\/]*:\/\///;

# Commands parser
my $entry;
my @cmd;
while (<STDIN>) {
        chomp;
        @cmd = split(/ /);
        if (defined($cmd[0])) {
                # Line not blank
                if ($cmd[0] eq "capabilities") {
                        die("Too many arguments for capabilities") unless (!defined($cmd[1]));
                        mw_capabilities();
                } elsif ($cmd[0] eq "list") {
                        die("Too many arguments for list") unless (!defined($cmd[2]));
                        mw_list($cmd[1]);
                } elsif ($cmd[0] eq "import") {
                        die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
                        mw_import($cmd[1]);
                } elsif ($cmd[0] eq "option") {
                        die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
                        mw_option($cmd[1],$cmd[2]);
                } elsif ($cmd[0] eq "push") {
                        mw_push($cmd[1]);
                } else {
                        print STDERR "Unknown command. Aborting...\n";
                        last;
                }
        } else {
                # blank line: we should terminate
                last;
        }

        BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                         # command is fully processed.
}

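# A typical exchange with Git looks like this ("origin" is just an
# example remote name; the lines we send back are indented):
#
#   capabilities
#           refspec refs/heads/*:refs/mediawiki/origin/*
#           import
#           list
#           push
#   list
#           ? refs/heads/master
#           @refs/heads/master HEAD
#   import refs/heads/master
#   ...
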
########################## Functions ##############################

# MediaWiki API instance, created lazily.
my $mediawiki;

sub mw_connect_maybe {
        if ($mediawiki) {
                return;
        }
        $mediawiki = MediaWiki::API->new;
        $mediawiki->{config}->{api_url} = "$url/api.php";
        if ($wiki_login) {
                if (!$mediawiki->login({
                        lgname => $wiki_login,
                        lgpassword => $wiki_passwd,
                })) {
                        print STDERR "Failed to log in mediawiki user \"$wiki_login\" on $url\n";
                        print STDERR "(error " .
                            $mediawiki->{error}->{code} . ': ' .
                            $mediawiki->{error}->{details} . ")\n";
                        exit 1;
                } else {
                        print STDERR "Logged in with user \"$wiki_login\".\n";
                }
        }
}

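# Query the wiki for the pages named in the array ref $some_pages (a
# small slice of titles at a time) and store the resulting page
# objects in the hash ref $pages, keyed by title.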
sub get_mw_first_pages {
        my $some_pages = shift;
        my @some_pages = @{$some_pages};

        my $pages = shift;

        # pattern 'page1|page2|...' required by the API
        my $titles = join('|', @some_pages);

        my $mw_pages = $mediawiki->api({
                action => 'query',
                titles => $titles,
        });
        if (!defined($mw_pages)) {
                print STDERR "fatal: could not query the list of wiki pages.\n";
                print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
                print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
                exit 1;
        }
        while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
                if ($id < 0) {
                        print STDERR "Warning: page $page->{title} not found on wiki\n";
                } else {
                        $pages->{$page->{title}} = $page;
                }
        }
}

sub get_mw_pages {
        mw_connect_maybe();

        my %pages; # hash on page titles to avoid duplicates
        my $user_defined;
        if (@tracked_pages) {
                $user_defined = 1;
                # The user provided a list of page titles, but we
                # still need to query the API to get the page IDs.

                my @some_pages = @tracked_pages;
                while (@some_pages) {
                        my $last = 50;
                        if ($#some_pages < $last) {
                                $last = $#some_pages;
                        }
                        my @slice = @some_pages[0..$last];
                        get_mw_first_pages(\@slice, \%pages);
                        @some_pages = @some_pages[51..$#some_pages];
                }
        }
        if (@tracked_categories) {
                $user_defined = 1;
                foreach my $category (@tracked_categories) {
                        if (index($category, ':') < 0) {
                                # MediaWiki requires the Category
                                # prefix, but let's not force the user
                                # to specify it.
                                $category = "Category:" . $category;
                        }
                        my $mw_pages = $mediawiki->list( {
                                action => 'query',
                                list => 'categorymembers',
                                cmtitle => $category,
                                cmlimit => 'max' } )
                            || die $mediawiki->{error}->{code} . ': ' . $mediawiki->{error}->{details};
                        foreach my $page (@{$mw_pages}) {
                                $pages{$page->{title}} = $page;
                        }
                }
        }
        if (!$user_defined) {
                # No user-provided list, get the list of pages from
                # the API.
                my $mw_pages = $mediawiki->list({
                        action => 'query',
                        list => 'allpages',
                        aplimit => 500,
                });
                if (!defined($mw_pages)) {
                        print STDERR "fatal: could not get the list of wiki pages.\n";
                        print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
                        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
                        exit 1;
                }
                foreach my $page (@{$mw_pages}) {
                        $pages{$page->{title}} = $page;
                }
        }
        return values(%pages);
}

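# Run a git command (given as a single string of arguments) and return
# its entire standard output as one string.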
sub run_git {
        open(my $git, "-|:encoding(UTF-8)", "git " . $_[0]);
        my $res = do { local $/; <$git> };
        close($git);

        return $res;
}


sub get_last_local_revision {
        # Get note regarding last mediawiki revision
        my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
        my @note_info = split(/ /, $note);

        my $lastrevision_number;
        if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
                print STDERR "No previous mediawiki revision found";
                $lastrevision_number = 0;
        } else {
                # Notes are formatted: mediawiki_revision: <number>
                $lastrevision_number = $note_info[1];
                chomp($lastrevision_number);
                print STDERR "Last local mediawiki revision found is $lastrevision_number";
        }
        return $lastrevision_number;
}

sub get_last_remote_revision {
        mw_connect_maybe();

        my @pages = get_mw_pages();

        my $max_rev_num = 0;

        foreach my $page (@pages) {
                my $id = $page->{pageid};

                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'ids',
                        pageids => $id,
                };

                my $result = $mediawiki->api($query);

                my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

                $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
        }

        print STDERR "Last remote revision found is $max_rev_num.\n";
        return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
        my $string = shift;
        my $page_created = shift;
        # MediaWiki does not allow trailing whitespace at the end of a
        # page, and ends every page with a single \n. This function
        # right-trims the string and appends a \n to follow this rule.
        $string =~ s/\s+$//;
        if ($string eq "" && $page_created) {
                # Creating empty pages is forbidden.
                $string = EMPTY_CONTENT;
        }
        return $string."\n";
}
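
# e.g. mediawiki_clean("foo  \n", 0) returns "foo\n".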

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
        my $string = shift;
        if ($string eq EMPTY_CONTENT) {
                $string = "";
        }
        # This \n is important. This is due to mediawiki's way of
        # handling the end of files.
        return $string."\n";
}

sub mediawiki_clean_filename {
        my $filename = shift;
        $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
        # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
        # Do a variant of URL-encoding, i.e. something that looks like
        # URL-encoding, but with _ added to prevent MediaWiki from
        # thinking this is an actual special character.
        $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
        # If we had used URI::Escape on the filename earlier, we would
        # have to unescape it here, before anything else.

        return $filename;
}

sub mediawiki_smudge_filename {
        my $filename = shift;
        $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
        $filename =~ s/ /_/g;
        # Decode forbidden characters encoded in mediawiki_clean_filename
        $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
        return $filename;
}
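
# Illustrative round trip: the wiki page "Foo/Bar baz" is imported as
# the Git file "Foo%2FBar_baz.mw", while a Git file named "notes[1].mw"
# is pushed as the wiki page "notes_%_5b1_%_5d" (0x5b and 0x5d being
# the codes of '[' and ']').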

sub literal_data {
        my ($content) = @_;
        print STDOUT "data ", bytes::length($content), "\n", $content;
}
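
# e.g. literal_data("hello\n") emits the fast-import command:
#   data 6
#   hello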

sub mw_capabilities {
        # Revisions are imported to the private namespace
        # refs/mediawiki/$remotename/ by the helper and fetched into
        # refs/remotes/$remotename later by fetch.
        print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
        print STDOUT "import\n";
        print STDOUT "list\n";
        print STDOUT "push\n";
        print STDOUT "\n";
}

sub mw_list {
        # MediaWiki does not have branches; we expose a single branch
        # arbitrarily called master, with HEAD pointing to it.
        print STDOUT "? refs/heads/master\n";
        print STDOUT "\@refs/heads/master HEAD\n";
        print STDOUT "\n";
}

sub mw_option {
        print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
        print STDOUT "unsupported\n";
}

sub fetch_mw_revisions_for_page {
        my $page = shift;
        my $id = shift;
        my $fetch_from = shift;
        my @page_revs = ();
        my $query = {
                action => 'query',
                prop => 'revisions',
                rvprop => 'ids',
                rvdir => 'newer',
                rvstartid => $fetch_from,
                rvlimit => 500,
                pageids => $id,
        };

        my $revnum = 0;
        # Get 500 revisions at a time due to the mediawiki api limit
        while (1) {
                my $result = $mediawiki->api($query);

                # Parse each of those 500 revisions
                foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
                        my $page_rev_ids;
                        $page_rev_ids->{pageid} = $page->{pageid};
                        $page_rev_ids->{revid} = $revision->{revid};
                        push(@page_revs, $page_rev_ids);
                        $revnum++;
                }
                last unless $result->{'query-continue'};
                $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
        }
        if ($shallow_import && @page_revs) {
                print STDERR "  Found 1 revision (shallow import).\n";
                @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
                return $page_revs[0];
        }
        print STDERR "  Found ", $revnum, " revision(s).\n";
        return @page_revs;
}

sub fetch_mw_revisions {
        my $pages = shift; my @pages = @{$pages};
        my $fetch_from = shift;

        my @revisions = ();
        my $n = 1;
        foreach my $page (@pages) {
                my $id = $page->{pageid};

                print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
                $n++;
                my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
                @revisions = (@page_revs, @revisions);
        }

        return ($n, @revisions);
}

sub import_file_revision {
        my $commit = shift;
        my %commit = %{$commit};
        my $full_import = shift;
        my $n = shift;

        my $title = $commit{title};
        my $comment = $commit{comment};
        my $content = $commit{content};
        my $author = $commit{author};
        my $date = $commit{date};

        print STDOUT "commit refs/mediawiki/$remotename/master\n";
        print STDOUT "mark :$n\n";
        print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
        literal_data($comment);

        # If it's not a clone, we need to know where to start from
        if (!$full_import && $n == 1) {
                print STDOUT "from refs/mediawiki/$remotename/master^0\n";
        }
        if ($content ne DELETED_CONTENT) {
                print STDOUT "M 644 inline $title.mw\n";
                literal_data($content);
                print STDOUT "\n\n";
        } else {
                print STDOUT "D $title.mw\n";
        }

        # Store the MediaWiki revision number in a Git note
        if ($full_import && $n == 1) {
                print STDOUT "reset refs/notes/$remotename/mediawiki\n";
        }
        print STDOUT "commit refs/notes/$remotename/mediawiki\n";
        print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
        literal_data("Note added by git-mediawiki during import");
        if (!$full_import && $n == 1) {
                print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
        }
        print STDOUT "N inline :$n\n";
        literal_data("mediawiki_revision: " . $commit{mw_revision});
        print STDOUT "\n\n";
}
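
# For instance, importing revision 42 of a page "Foo" emits roughly
# (names, dates and lengths are illustrative):
#
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer Bob <Bob@wiki.example.com> 1300000000 +0000
#   data <len>
#   <edit summary>
#   M 644 inline Foo.mw
#   data <len>
#   <page content>
#
# followed by a commit on refs/notes/origin/mediawiki that attaches
# the note "mediawiki_revision: 42" to the mark :1.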

# Parse a sequence of
#   <cmd> <arg1>
#   <cmd> <arg2>
#   \n
# (like the batch sequences of 'import' and of 'push' statements)
sub get_more_refs {
        my $cmd = shift;
        my @refs;
        while (1) {
                my $line = <STDIN>;
                if ($line =~ m/^$cmd (.*)$/) {
                        push(@refs, $1);
                } elsif ($line eq "\n") {
                        return @refs;
                } else {
                        die("Invalid command in a '$cmd' batch: ". $line);
                }
        }
}
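
# e.g. a batch of import commands from Git looks like:
#
#   import refs/heads/master
#   import HEAD
#   <blank line>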

sub mw_import {
        # Multiple import commands can follow each other.
        my @refs = (shift, get_more_refs("import"));
        foreach my $ref (@refs) {
                mw_import_ref($ref);
        }
        print STDOUT "done\n";
}

sub mw_import_ref {
        my $ref = shift;
        # The remote helper will call "import HEAD" and
        # "import refs/heads/master".
        # Since HEAD is a symbolic ref to master (by convention,
        # followed by the output of the command "list" that we gave),
        # we don't need to do anything in this case.
        if ($ref eq "HEAD") {
                return;
        }

        mw_connect_maybe();

        my @pages = get_mw_pages();

        print STDERR "Searching revisions...\n";
        my $last_local = get_last_local_revision();
        my $fetch_from = $last_local + 1;
        if ($fetch_from == 1) {
                print STDERR ", fetching from beginning.\n";
        } else {
                print STDERR ", fetching from here.\n";
        }
        my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

        # Creation of the fast-import stream
        print STDERR "Fetching & writing export data...\n";

        $n = 0;
        my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

        foreach my $pagerevid (sort {$a->{revid} <=> $b->{revid}} @revisions) {
                # fetch the content of the pages
                my $query = {
                        action => 'query',
                        prop => 'revisions',
                        rvprop => 'content|timestamp|comment|user|ids',
                        revids => $pagerevid->{revid},
                };

                my $result = $mediawiki->api($query);

                my $rev = pop(@{$result->{query}->{pages}->{$pagerevid->{pageid}}->{revisions}});

                $n++;

                my %commit;
                $commit{author} = $rev->{user} || 'Anonymous';
                $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
                $commit{title} = mediawiki_smudge_filename(
                        $result->{query}->{pages}->{$pagerevid->{pageid}}->{title}
                    );
                $commit{mw_revision} = $pagerevid->{revid};
                $commit{content} = mediawiki_smudge($rev->{'*'});

                if (!defined($rev->{timestamp})) {
                        $last_timestamp++;
                } else {
                        $last_timestamp = $rev->{timestamp};
                }
                $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

                print STDERR "$n/", scalar(@revisions), ": Revision #$pagerevid->{revid} of $commit{title}\n";

                import_file_revision(\%commit, ($fetch_from == 1), $n);
        }

        if ($fetch_from == 1 && $n == 0) {
                print STDERR "You appear to have cloned an empty MediaWiki.\n";
                # Something has to be done remote-helper side. If nothing is
                # done, an error is thrown saying that HEAD is referring to an
                # unknown object 0000000000000000000 and the clone fails.
        }
}

sub error_non_fast_forward {
        # Native git-push would show this after the summary.
        # We can't ask it to display it cleanly, so print it
        # ourselves before.
        print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
        print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
        print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";

        print STDOUT "error $_[0] \"non-fast-forward\"\n";
        return 0;
}

sub mw_push_file {
        my $diff_info = shift;
        # $diff_info contains a string in this format:
        # :100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
        my @diff_info_split = split(/[ \t]/, $diff_info);

        # Filename, including .mw extension
        my $complete_file_name = shift;
        # Commit message
        my $summary = shift;

        my $new_sha1 = $diff_info_split[3];
        my $old_sha1 = $diff_info_split[2];
        my $page_created = ($old_sha1 eq NULL_SHA1);
        my $page_deleted = ($new_sha1 eq NULL_SHA1);
        $complete_file_name = mediawiki_clean_filename($complete_file_name);

        if (substr($complete_file_name,-3) eq ".mw") {
                my $title = substr($complete_file_name,0,-3);

                my $file_content;
                if ($page_deleted) {
                        # Deleting a page usually requires
                        # special privileges. A common
                        # convention is to replace the page
                        # with this content instead:
                        $file_content = DELETED_CONTENT;
                } else {
                        $file_content = run_git("cat-file blob $new_sha1");
                }

                mw_connect_maybe();

                my $result = $mediawiki->edit( {
                        action => 'edit',
                        summary => $summary,
                        title => $title,
                        text => mediawiki_clean($file_content, $page_created),
                }, {
                        skip_encoding => 1 # Helps with names with accented characters
                }) || die 'Fatal: Error ' .
                        $mediawiki->{error}->{code} .
                        ' from mediawiki: ' . $mediawiki->{error}->{details};
                print STDERR "Pushed file: $new_sha1 - $title\n";
        } else {
                print STDERR "$complete_file_name not a mediawiki file (not pushable on this version of git-remote-mediawiki).\n";
        }
}

sub mw_push {
        # Multiple push statements can follow each other
        my @refspecs = (shift, get_more_refs("push"));
        my %status;
        my $pushed;
        for my $refspec (@refspecs) {
                my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
                    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
                if ($force) {
                        print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
                }
                if ($local eq "") {
                        print STDERR "Cannot delete remote branch on a MediaWiki\n";
                        print STDOUT "error $remote cannot delete\n";
                        next;
                }
                if ($remote ne "refs/heads/master") {
                        print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
                        print STDOUT "error $remote only master allowed\n";
                        next;
                }
                if (mw_push_revision($local, $remote)) {
                        $pushed = 1;
                }
        }

        # Notify Git that the push is done
        print STDOUT "\n";

        if ($pushed) {
                print STDERR "Just pushed some revisions to MediaWiki.\n";
                print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
                print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
                print STDERR "\n";
                print STDERR "  git pull --rebase\n";
                print STDERR "\n";
        }
}

sub mw_push_revision {
        my $local = shift;
        my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
        my $last_local_revid = get_last_local_revision();
        print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
        my $last_remote_revid = get_last_remote_revision();

        # Get sha1 of commit pointed by local HEAD
        my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
        # Get sha1 of commit pointed by remotes/$remotename/master
        my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
        chomp($remoteorigin_sha1);

        if ($last_local_revid > 0 &&
            $last_local_revid < $last_remote_revid) {
                return error_non_fast_forward($remote);
        }

        if ($HEAD_sha1 eq $remoteorigin_sha1) {
                # nothing to push
                return 0;
        }

        # Get every commit in between HEAD and refs/remotes/$remotename/master,
        # including both endpoints
        my @commit_pairs = ();
        if ($last_local_revid > 0) {
                my $parsed_sha1 = $remoteorigin_sha1;
                # Find a path from last MediaWiki commit to pushed commit
                while ($parsed_sha1 ne $HEAD_sha1) {
                        my @commit_info =  grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
                        if (!@commit_info) {
                                return error_non_fast_forward($remote);
                        }
                        my @commit_info_split = split(/ |\n/, $commit_info[0]);
                        # $commit_info_split[1] is the sha1 of the commit to export
                        # $commit_info_split[0] is the sha1 of its direct child
                        push(@commit_pairs, \@commit_info_split);
                        $parsed_sha1 = $commit_info_split[1];
                }
        } else {
                # No remote mediawiki revision. Export the whole
                # history (linearized with --first-parent)
                print STDERR "Warning: no common ancestor, pushing complete history\n";
                my $history = run_git("rev-list --first-parent --children $local");
                my @history = split('\n', $history);
                @history = @history[1..$#history];
                foreach my $line (reverse @history) {
                        my @commit_info_split = split(/ |\n/, $line);
                        push(@commit_pairs, \@commit_info_split);
                }
        }

        foreach my $commit_info_split (@commit_pairs) {
                my $sha1_child = @{$commit_info_split}[0];
                my $sha1_commit = @{$commit_info_split}[1];
                my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
                # TODO: we could detect renames, and encode them with a #redirect on the wiki.
                # TODO: for now, it's just a delete+add
                my @diff_info_list = split(/\0/, $diff_infos);
                # Keep the first line of the commit message as the mediawiki comment for the revision
                my $commit_msg = (split(/\n/, run_git("show --pretty=format:\"%s\" $sha1_commit")))[0];
                chomp($commit_msg);
                # Push every blob
                while (@diff_info_list) {
                        # git diff-tree -z gives an output like
                        # <metadata>\0<filename1>\0
                        # <metadata>\0<filename2>\0
                        # and we've split on \0.
                        my $info = shift(@diff_info_list);
                        my $file = shift(@diff_info_list);
                        mw_push_file($info, $file, $commit_msg);
                }
        }

        print STDOUT "ok $remote\n";
        return 1;
}