# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
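+# q{} is the empty string; a named constant avoids bare quote literals scattered through the code.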
+use constant EMPTY => q{};
+
+# Number of pages handled at once by the subroutine get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked media files to fetch at once in get_linked_mediafiles.
+# The query is split into small batches because of the MW API limit on
+# the number of links that can be returned (500 links max).
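+# Ten titles per request should keep even link-heavy pages well under that cap.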
+use constant BATCH_SIZE => 10;
+
+use constant HTTP_CODE_OK => 200;
+
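+# Git normally invokes a remote helper with two arguments: the remote name (or URL) and the URL itself.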
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
-unless ($fetch_strategy) {
+if (!$fetch_strategy) {
$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
-unless ($fetch_strategy) {
+if (!$fetch_strategy) {
$fetch_strategy = 'by_page';
}
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
-unless ($dumb_push) {
+if (!$dumb_push) {
$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
########################## Functions ##############################
+## error handling
+sub exit_error_usage {
+ die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+ "parameters\n" .
+ "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+ "module directly.\n" .
+ "This module can be used the following way:\n" .
+ "\tgit clone mediawiki://<address of a mediawiki>\n" .
+ "Then, use git commit, push and pull as with every normal git repository.\n";
+}
+
sub parse_command {
my ($line) = @_;
my @cmd = split(/ /, $line);
die("Too many arguments for list\n") if (defined($cmd[2]));
mw_list($cmd[1]);
} elsif ($cmd[0] eq 'import') {
- die("Invalid arguments for import\n")
- if ($cmd[1] eq "" || defined($cmd[2]));
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
mw_import($cmd[1]);
} elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
die("Too many arguments for option\n")
- if ($cmd[1] eq "" || $cmd[2] eq "" || defined($cmd[3]));
+ if (defined($cmd[3]));
mw_option($cmd[1],$cmd[2]);
} elsif ($cmd[0] eq 'push') {
mw_push($cmd[1]);
} else {
- print STDERR "Unknown command. Aborting...\n";
+ print {*STDERR} "Unknown command. Aborting...\n";
return 0;
}
return 1;
lgdomain => $wiki_domain};
if ($mediawiki->login($request)) {
Git::credential(\%credential, 'approve');
- print STDERR qq(Logged in mediawiki user "$credential{username}".\n);
+ print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
} else {
- print STDERR qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
- print STDERR ' (error ' .
+ print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${url}\n);
+ print {*STDERR} ' (error ' .
$mediawiki->{error}->{code} . ': ' .
$mediawiki->{error}->{details} . ")\n";
Git::credential(\%credential, 'reject');
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
while (@some_pages) {
- my $last_page = 50;
+ my $last_page = SLICE_SIZE;
if ($#some_pages < $last_page) {
$last_page = $#some_pages;
}
my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[51..$#some_pages];
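+ # The slice above spans indices 0..SLICE_SIZE, i.e. SLICE_SIZE + 1 pages, so skip past all of them here.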
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
return;
}
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
- print STDERR "Warning: page $page->{title} not found on wiki\n";
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
sub get_mw_pages {
mw_connect_maybe();
- print STDERR "Listing pages on remote wiki...\n";
+ print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
get_mw_all_pages(\%pages);
}
if ($import_media) {
- print STDERR "Getting media files for selected pages...\n";
+ print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
- print STDERR (scalar keys %pages) . " pages found.\n";
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of pages for media files.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
my $pages = shift;
my @titles = map { $_->{title} } values(%{$pages});
- # The query is split in small batches because of the MW API limit of
- # the number of links to be returned (500 links max).
- my $batch = 10;
+ my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
$mediafile{timestamp} = $fileinfo->{timestamp};
# MediaWiki::API's download function doesn't support https URLs
# and can't download old versions of files.
- print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
my $download_url = shift;
my $response = $mediawiki->{ua}->get($download_url);
- if ($response->code == 200) {
+ if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
- print STDERR "Error downloading mediafile from :\n";
- print STDERR "URL: ${download_url}\n";
- print STDERR 'Server response: ' . $response->code . q{ } . $response->message . "\n";
+ print {*STDERR} "Error downloading mediafile from :\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
my $lastrevision_number;
if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
- print STDERR 'No previous mediawiki revision found';
+ print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted as: mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
- print STDERR "Last local mediawiki revision found is ${lastrevision_number}";
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
my $max_rev_num = 0;
- print STDERR "Getting last revision id on tracked pages...\n";
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
- print STDERR "Last remote revision found is $max_rev_num.\n";
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
# MediaWiki does not allow trailing whitespace at the end of a page; pages end with a single \n.
# This function right-trims a string and appends a \n to follow this rule.
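+# For example, a page body of "foo  \n\n" cleans to "foo\n"; an empty result on page creation becomes EMPTY_CONTENT instead.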
$string =~ s/\s+$//;
- if ($string eq "" && $page_created) {
+ if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
- $string = "";
+ $string = EMPTY;
}
# This \n is important: it matches MediaWiki's handling of end-of-file.
return "${string}\n";
sub literal_data {
my ($content) = @_;
- print STDOUT 'data ', bytes::length($content), "\n", $content;
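+ # fast-import expects a 'data <byte count>' header followed by the raw payload, which is what this prints.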
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
return;
}
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
binmode STDOUT, ':raw';
- print STDOUT 'data ', bytes::length($content), "\n", $content;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
binmode STDOUT, ':encoding(UTF-8)';
return;
}
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
- print STDOUT "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
- print STDOUT "import\n";
- print STDOUT "list\n";
- print STDOUT "push\n";
- print STDOUT "\n";
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ print {*STDOUT} "\n";
return;
}
sub mw_list {
# MediaWiki does not have branches; we arbitrarily expose one branch
# called master, with HEAD pointing to it.
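+# In the list output, '?' marks a ref whose value is unknown, and '@refs/heads/master HEAD'
+# declares HEAD as a symref to master (see gitremote-helpers(1)).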
- print STDOUT "? refs/heads/master\n";
- print STDOUT "\@refs/heads/master HEAD\n";
- print STDOUT "\n";
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
return;
}
sub mw_option {
- print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
- print STDOUT "unsupported\n";
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
return;
}
push(@page_revs, $page_rev_ids);
$revnum++;
}
- last unless $result->{'query-continue'};
+ last if (!$result->{'query-continue'});
$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
}
if ($shallow_import && @page_revs) {
- print STDERR " Found 1 revision (shallow import).\n";
+ print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
- print STDERR " Found ${revnum} revision(s).\n";
+ print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
- print STDERR "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
my $author = $commit{author};
my $date = $commit{date};
- print STDOUT "commit refs/mediawiki/${remotename}/master\n";
- print STDOUT "mark :${n}\n";
- print STDOUT "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
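+# fast-import's 'from' command parents the first new commit onto the existing branch tip.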
if (!$full_import && $n == 1) {
- print STDOUT "from refs/mediawiki/${remotename}/master^0\n";
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
- print STDOUT 'M 644 inline ' .
+ print {*STDOUT} 'M 644 inline ' .
fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
- print STDOUT 'M 644 inline '
+ print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
} else {
- print STDOUT 'D ' . fe_escape_path("${title}.mw") . "\n";
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# mediawiki revision number in the git note
if ($full_import && $n == 1) {
- print STDOUT "reset refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
- print STDOUT "commit refs/notes/${remotename}/mediawiki\n";
- print STDOUT "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
- print STDOUT "from refs/notes/${remotename}/mediawiki^0\n";
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
- print STDOUT "N inline :${n}\n";
+ print {*STDOUT} "N inline :${n}\n";
literal_data("mediawiki_revision: $commit{mw_revision}");
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
return;
}
foreach my $ref (@refs) {
mw_import_ref($ref);
}
- print STDOUT "done\n";
+ print {*STDOUT} "done\n";
return;
}
mw_connect_maybe();
- print STDERR "Searching revisions...\n";
+ print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
- print STDERR ", fetching from beginning.\n";
+ print {*STDERR} ", fetching from beginning.\n";
} else {
- print STDERR ", fetching from here.\n";
+ print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
if ($fetch_strategy eq 'by_rev') {
- print STDERR "Fetching & writing export data by revs...\n";
+ print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
} elsif ($fetch_strategy eq 'by_page') {
- print STDERR "Fetching & writing export data by pages...\n";
+ print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
- print STDERR qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
- print STDERR "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done remote-helper side. If nothing is done, an error is
# thrown saying that HEAD is referring to unknown object 0000000000000000000
# and the clone fails.
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print STDERR "${n}/", scalar(@$revision_ids),
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print STDERR "${n}/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
- print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
- print STDOUT qq(error $_[0] "non-fast-forward"\n);
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
- print STDERR "${complete_file_name} is not a permitted file on this wiki.\n";
- print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a privileged user
reason => $summary
};
if (!$mediawiki->edit($query)) {
- print STDERR "Failed to delete file on remote wiki\n";
- print STDERR "Check your permissions on the remote site. Error code:\n";
- print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
my $content = run_git("cat-file blob ${new_sha1}", 'raw');
- if ($content ne "") {
+ if ($content ne EMPTY) {
mw_connect_maybe();
$mediawiki->{config}->{upload_url} =
"${url}/index.php/Special:Upload";
. $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
- print STDERR "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
- print STDERR "Empty file ${complete_file_name} not pushed.\n";
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
- $summary = '';
+ $summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
- $extension = "";
+ $extension = EMPTY;
}
if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
- print STDERR "Ignoring media file related page: ${complete_file_name}\n";
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
return ($oldrevid, 'ok');
}
my $file_content;
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
- print STDERR 'Warning: Error ' .
+ print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} . "\n";
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
- print STDERR "Pushed file: ${new_sha1} - ${title}\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
- print STDERR "Ignoring media file ${title}\n";
+ print {*STDERR} "Ignoring media file ${title}\n";
}
- $newrevid = ($newrevid or $oldrevid);
+ $newrevid = ($newrevid || $oldrevid);
return ($newrevid, 'ok');
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
- print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
- if ($local eq "") {
- print STDERR "Cannot delete remote branch on a MediaWiki\n";
- print STDOUT "error ${remote} cannot delete\n";
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
if ($remote ne 'refs/heads/master') {
- print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
- print STDOUT "error ${remote} only master allowed\n";
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
}
# Notify Git that the push is done
- print STDOUT "\n";
+ print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
- print STDERR "Just pushed some revisions to MediaWiki.\n";
- print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
- print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
- print STDERR "\n";
- print STDERR " git pull --rebase\n";
- print STDERR "\n";
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
}
return;
}
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
- print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
- print STDERR "Computing path from local to remote ...\n";
+ print {*STDERR} "Computing path from local to remote ...\n";
my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
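+# %local_ancestry maps each parent sha1 to its child commit, so the walk below can step forward from the remote tip.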
foreach my $line (@local_ancestry) {
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
- printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
- print STDERR "Warning: no common ancestor, pushing complete history\n";
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
my $history = run_git("rev-list --first-parent --children ${local}");
my @history = split(/\n/, $history);
@history = @history[1..$#history];
die("Unknown error from mw_push_file()\n");
}
}
- unless ($dumb_push) {
+ if (!$dumb_push) {
run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
}
}
- print STDOUT "ok ${remote}\n";
+ print {*STDOUT} "ok ${remote}\n";
return 1;
}
}
if (!exists $namespace_id{$name}) {
- print STDERR "Namespace ${name} not found in cache, querying the wiki ...\n";
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
my $ns = $namespace_id{$name};
my $id;
- unless (defined $ns) {
- print STDERR "No such namespace ${name} on MediaWiki.\n";
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}