use constant HTTP_CODE_OK => 200;
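+# Git launches a remote helper with two arguments: the remote name and its URL.
+# Any other argument count means the script was most likely run by hand.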
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
########################## Functions ##############################
+## error handling
+sub exit_error_usage {
+ die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+ "parameters\n" .
+ "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+ "module directly.\n" .
+ "This module can be used the following way:\n" .
+ "\tgit clone mediawiki://<address of a mediawiki>\n" .
+ "Then, use git commit, push and pull as with every normal git repository.\n";
+}
+
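+# Parse one command of the remote-helper protocol sent by Git and dispatch it
+# to the matching mw_* handler.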
sub parse_command {
my ($line) = @_;
my @cmd = split(/ /, $line);
die("Too many arguments for list\n") if (defined($cmd[2]));
mw_list($cmd[1]);
} elsif ($cmd[0] eq 'import') {
- die("Invalid arguments for import\n")
- if ($cmd[1] eq EMPTY || defined($cmd[2]));
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
mw_import($cmd[1]);
} elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
die("Too many arguments for option\n")
- if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY || defined($cmd[3]));
+ if (defined($cmd[3]));
mw_option($cmd[1],$cmd[2]);
} elsif ($cmd[0] eq 'push') {
mw_push($cmd[1]);
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
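+ # Query the wiki in slices of SLICE_SIZE titles so that each MediaWiki API
+ # request stays reasonably small.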
while (@some_pages) {
my $last_page = SLICE_SIZE;
if ($#some_pages < $last_page) {
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
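+ # The fast-import 'data' command announces a length in bytes, so write the
+ # content with STDOUT in raw mode, then switch back to UTF-8.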
- binmode {*STDOUT}, ':raw';
+ binmode STDOUT, ':raw';
print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
- binmode {*STDOUT}, ':encoding(UTF-8)';
+ binmode STDOUT, ':encoding(UTF-8)';
return;
}
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print {*STDERR} "${n}/", scalar(@$revision_ids),
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print {*STDERR} "${n}/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
# edit conflicts, considered as non-fast-forward
print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} . "\n";
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};