use strict;
use vars qw/ $AUTHOR $VERSION
$sha1 $sha1_short $_revision $_repository
- $_q $_authors %users/;
+ $_q $_authors $_authors_prog %users/;
$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
$VERSION = '@@GIT_VERSION@@';
$Git::SVN::default_repo_id = 'svn';
$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
$Git::SVN::Ra::_log_window_size = 100;
+$Git::SVN::_minimize_url = 'unset';
+
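+# If only GIT_SSH is set, let the SVN libraries reuse it via SVN_SSH;
+# on msys the value needs its backslashes escaped and the whole command
+# quoted before Subversion can run it.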
+if (! exists $ENV{SVN_SSH}) {
+ if (exists $ENV{GIT_SSH}) {
+ $ENV{SVN_SSH} = $ENV{GIT_SSH};
+ if ($^O eq 'msys') {
+ $ENV{SVN_SSH} =~ s/\\/\\\\/g;
+ $ENV{SVN_SSH} =~ s/(.*)/"$1"/;
+ }
+ }
+}
$Git::SVN::Log::TZ = $ENV{TZ};
$ENV{TZ} = 'UTC';
if ($SVN::Core::VERSION lt '1.1.0') {
fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)";
}
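+# Compress::Zlib is optional: without it "git svn gc" leaves unhandled.log
+# uncompressed and mkemptydirs cannot read unhandled.log.gz.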
+my $can_compress = eval { require Compress::Zlib; 1};
push @Git::SVN::Ra::ISA, 'SVN::Ra';
push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor';
push @SVN::Git::Fetcher::ISA, 'SVN::Delta::Editor';
use IO::File qw//;
use File::Basename qw/dirname basename/;
use File::Path qw/mkpath/;
+use File::Spec;
+use File::Find;
use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
use IPC::Open3;
use Git;
# import functions from Git into our packages, en masse
no strict 'refs';
foreach (qw/command command_oneline command_noisy command_output_pipe
- command_input_pipe command_close_pipe/) {
+ command_input_pipe command_close_pipe
+ command_bidi_pipe command_close_bidi_pipe/) {
for my $package ( qw(SVN::Git::Editor SVN::Git::Fetcher
Git::SVN::Migration Git::SVN::Log Git::SVN),
__PACKAGE__) {
$sha1 = qr/[a-f\d]{40}/;
$sha1_short = qr/[a-f\d]{4,40}/;
my ($_stdin, $_help, $_edit,
- $_message, $_file,
+ $_message, $_file, $_branch_dest,
$_template, $_shared,
- $_version, $_fetch_all, $_no_rebase,
+ $_version, $_fetch_all, $_no_rebase, $_fetch_parent,
$_merge, $_strategy, $_dry_run, $_local,
$_prefix, $_no_checkout, $_url, $_verbose,
$_git_format, $_commit_url, $_tag);
$Git::SVN::_follow_parent = 1;
+$_q ||= 0;
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
'config-dir=s' => \$Git::SVN::Ra::config_dir,
'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache,
'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex );
my %fc_opts = ( 'follow-parent|follow!' => \$Git::SVN::_follow_parent,
'authors-file|A=s' => \$_authors,
+ 'authors-prog=s' => \$_authors_prog,
'repack:i' => \$Git::SVN::_repack,
'noMetadata' => \$Git::SVN::_no_metadata,
'useSvmProps' => \$Git::SVN::_use_svm_props,
'useSvnsyncProps' => \$Git::SVN::_use_svnsync_props,
'log-window-size=i' => \$Git::SVN::Ra::_log_window_size,
'no-checkout' => \$_no_checkout,
- 'quiet|q' => \$_q,
+ 'quiet|q+' => \$_q,
'repack-flags|repack-args|repack-opts=s' =>
\$Git::SVN::_repack_flags,
'use-log-author' => \$Git::SVN::_use_log_author,
'localtime' => \$Git::SVN::_localtime,
%remote_opts );
-my ($_trunk, $_tags, $_branches, $_stdlayout);
+my ($_trunk, @_tags, @_branches, $_stdlayout);
my %icv;
my %init_opts = ( 'template=s' => \$_template, 'shared:s' => \$_shared,
- 'trunk|T=s' => \$_trunk, 'tags|t=s' => \$_tags,
- 'branches|b=s' => \$_branches, 'prefix=s' => \$_prefix,
+ 'trunk|T=s' => \$_trunk, 'tags|t=s@' => \@_tags,
+ 'branches|b=s@' => \@_branches, 'prefix=s' => \$_prefix,
'stdlayout|s' => \$_stdlayout,
- 'minimize-url|m' => \$Git::SVN::_minimize_url,
+ 'minimize-url|m!' => \$Git::SVN::_minimize_url,
'no-metadata' => sub { $icv{noMetadata} = 1 },
'use-svm-props' => sub { $icv{useSvmProps} = 1 },
'use-svnsync-props' => sub { $icv{useSvnsyncProps} = 1 },
'rewrite-root=s' => sub { $icv{rewriteRoot} = $_[1] },
+ 'rewrite-uuid=s' => sub { $icv{rewriteUUID} = $_[1] },
%remote_opts );
my %cmt_opts = ( 'edit|e' => \$_edit,
'rmdir' => \$SVN::Git::Editor::_rmdir,
fetch => [ \&cmd_fetch, "Download new revisions from SVN",
{ 'revision|r=s' => \$_revision,
'fetch-all|all' => \$_fetch_all,
+ 'parent|p' => \$_fetch_parent,
%fc_opts } ],
clone => [ \&cmd_clone, "Initialize and fetch revisions",
{ 'revision|r=s' => \$_revision,
branch => [ \&cmd_branch,
'Create a branch in the SVN repository',
{ 'message|m=s' => \$_message,
+ 'destination|d=s' => \$_branch_dest,
'dry-run|n' => \$_dry_run,
- 'tag|t' => \$_tag } ],
+ 'tag|t' => \$_tag,
+ 'username=s' => \$Git::SVN::Prompt::_username,
+ 'commit-url=s' => \$_commit_url } ],
tag => [ sub { $_tag = 1; cmd_branch(@_) },
'Create a tag in the SVN repository',
{ 'message|m=s' => \$_message,
- 'dry-run|n' => \$_dry_run } ],
+ 'destination|d=s' => \$_branch_dest,
+ 'dry-run|n' => \$_dry_run,
+ 'username=s' => \$Git::SVN::Prompt::_username,
+ 'commit-url=s' => \$_commit_url } ],
'set-tree' => [ \&cmd_set_tree,
"Set an SVN repository to a git tree-ish",
{ 'stdin' => \$_stdin, %cmt_opts, %fc_opts, } ],
'Create a .gitignore per svn:ignore',
{ 'revision|r=i' => \$_revision
} ],
+ 'mkdirs' => [ \&cmd_mkdirs ,
+		"Recreate empty directories after a checkout",
+ { 'revision|r=i' => \$_revision } ],
'propget' => [ \&cmd_propget,
'Print the value of a property on a file or directory',
{ 'revision|r=i' => \$_revision } ],
'blame' => [ \&Git::SVN::Log::cmd_blame,
"Show what revision and author last modified each line of a file",
{ 'git-format' => \$_git_format } ],
+ 'reset' => [ \&cmd_reset,
+ "Undo fetches back to the specified SVN revision",
+ { 'revision|r=s' => \$_revision,
+ 'parent|p' => \$_fetch_parent } ],
+ 'gc' => [ \&cmd_gc,
+ "Compress unhandled.log files in .git/svn and remove " .
+ "index files in .git/svn",
+ {} ],
);
my $cmd;
$cmd = $ARGV[$i];
splice @ARGV, $i, 1;
last;
+ } elsif ($ARGV[$i] eq 'help') {
+ $cmd = $ARGV[$i+1];
+ usage(0);
}
};
my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd);
-read_repo_config(\%opts);
+read_git_config(\%opts);
if ($cmd && ($cmd eq 'log' || $cmd eq 'blame')) {
Getopt::Long::Configure('pass_through');
}
version() if $_version;
usage(1) unless defined $cmd;
load_authors() if $_authors;
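+# Store the --authors-prog path as an absolute, shell-quoted string so it
+# keeps working if git-svn later changes directory; it is run through the
+# shell in call_authors_prog().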
+if (defined $_authors_prog) {
+ $_authors_prog = "'" . File::Spec->rel2abs($_authors_prog) . "'";
+}
unless ($cmd =~ /^(?:clone|init|multi-init|commit-diff)$/) {
Git::SVN::Migration::migration_check();
command_noisy(@init_db);
$_repository = Git->repository(Repository => ".git");
}
+ command_noisy('config', 'core.autocrlf', 'false');
my $set;
my $pfx = "svn-remote.$Git::SVN::default_repo_id";
foreach my $i (keys %icv) {
command_noisy('config', "$pfx.$i", $icv{$i});
$set = $i;
}
+ my $ignore_regex = \$SVN::Git::Fetcher::_ignore_regex;
+ command_noisy('config', "$pfx.ignore-paths", $$ignore_regex)
+ if defined $$ignore_regex;
}
sub init_subdir {
sub cmd_clone {
my ($url, $path) = @_;
if (!defined $path &&
- (defined $_trunk || defined $_branches || defined $_tags ||
+ (defined $_trunk || @_branches || @_tags ||
defined $_stdlayout) &&
$url !~ m#^[a-z\+]+://#) {
$path = $url;
}
$path = basename($url) if !defined $path || !length $path;
+ my $authors_absolute = $_authors ? File::Spec->rel2abs($_authors) : "";
cmd_init($url, $path);
+ command_oneline('config', 'svn.authorsfile', $authors_absolute)
+ if $_authors;
Git::SVN::fetch_all($Git::SVN::default_repo_id);
}
sub cmd_init {
if (defined $_stdlayout) {
$_trunk = 'trunk' if (!defined $_trunk);
- $_tags = 'tags' if (!defined $_tags);
- $_branches = 'branches' if (!defined $_branches);
+ @_tags = 'tags' if (! @_tags);
+ @_branches = 'branches' if (! @_branches);
}
- if (defined $_trunk || defined $_branches || defined $_tags) {
+ if (defined $_trunk || @_branches || @_tags) {
return cmd_multi_init(@_);
}
my $url = shift or die "SVN repository location required ",
"as a command-line argument\n";
+ $url = canonicalize_url($url);
init_subdir(@_);
do_git_init_db();
+ if ($Git::SVN::_minimize_url eq 'unset') {
+ $Git::SVN::_minimize_url = 0;
+ }
+
Git::SVN->init($url);
}
}
my ($remote) = @_;
if (@_ > 1) {
- die "Usage: $0 fetch [--all] [svn-remote]\n";
+ die "Usage: $0 fetch [--all] [--parent] [svn-remote]\n";
}
- $remote ||= $Git::SVN::default_repo_id;
- if ($_fetch_all) {
+ $Git::SVN::no_reuse_existing = undef;
+ if ($_fetch_parent) {
+ my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
+ unless ($gs) {
+ die "Unable to determine upstream SVN information from ",
+ "working tree history\n";
+ }
+ # just fetch, don't checkout.
+ $_no_checkout = 'true';
+ $_fetch_all ? $gs->fetch_all : $gs->fetch;
+ } elsif ($_fetch_all) {
cmd_multi_fetch();
} else {
+ $remote ||= $Git::SVN::default_repo_id;
Git::SVN::fetch_all($remote, Git::SVN::read_all_remotes());
}
}
'Cannot dcommit with a dirty index. Commit your changes first, '
. "or stash them with `git stash'.\n";
$head ||= 'HEAD';
+
+ my $old_head;
+ if ($head ne 'HEAD') {
+ $old_head = eval {
+ command_oneline([qw/symbolic-ref -q HEAD/])
+ };
+ if ($old_head) {
+ $old_head =~ s{^refs/heads/}{};
+ } else {
+ $old_head = eval { command_oneline(qw/rev-parse HEAD/) };
+ }
+ command(['checkout', $head], STDERR => 0);
+ }
+
my @refs;
- my ($url, $rev, $uuid, $gs) = working_head_info($head, \@refs);
+ my ($url, $rev, $uuid, $gs) = working_head_info('HEAD', \@refs);
unless ($gs) {
die "Unable to determine upstream SVN information from ",
"$head history.\nPerhaps the repository is empty.";
if (@diff) {
@refs = ();
my ($url_, $rev_, $uuid_, $gs_) =
- working_head_info($head, \@refs);
+ working_head_info('HEAD', \@refs);
my ($linear_refs_, $parents_) =
linearize_history($gs_, \@refs);
if (scalar(@$linear_refs) !=
"\nBefore dcommitting";
}
if ($url_ ne $expect_url) {
- fatal "URL mismatch after rebase: ",
- "$url_ != $expect_url";
+ if ($url_ eq $gs->metadata_url) {
+ print
+ "Accepting rewritten URL:",
+ " $url_\n";
+ } else {
+ fatal
+ "URL mismatch after rebase:",
+ " $url_ != $expect_url";
+ }
}
if ($uuid_ ne $uuid) {
fatal "uuid mismatch after rebase: ",
}
}
}
+
+ if ($old_head) {
+ my $new_head = command_oneline(qw/rev-parse HEAD/);
+ my $new_is_symbolic = eval {
+ command_oneline(qw/symbolic-ref -q HEAD/);
+ };
+ if ($new_is_symbolic) {
+ print "dcommitted the branch ", $head, "\n";
+ } else {
+ print "dcommitted on a detached HEAD because you gave ",
+ "a revision argument.\n",
+ "The rewritten commit is: ", $new_head, "\n";
+ }
+ command(['checkout', $old_head], STDERR => 0);
+ }
+
unlink $gs->{index};
}
}
$head ||= 'HEAD';
- my ($src, $rev, undef, $gs) = working_head_info($head);
+ my (undef, $rev, undef, $gs) = working_head_info($head);
+ my $src = $gs->full_url;
my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
- my $glob = $remote->{ $_tag ? 'tags' : 'branches' };
+ my $allglobs = $remote->{ $_tag ? 'tags' : 'branches' };
+ my $glob;
+ if ($#{$allglobs} == 0) {
+ $glob = $allglobs->[0];
+ } else {
+		unless (defined $_branch_dest) {
+ die "Multiple ",
+ $_tag ? "tag" : "branch",
+ " paths defined for Subversion repository.\n",
+ "You must specify where you want to create the ",
+ $_tag ? "tag" : "branch",
+ " with the --destination argument.\n";
+ }
+ foreach my $g (@{$allglobs}) {
+ # SVN::Git::Editor could probably be moved to Git.pm..
+ my $re = SVN::Git::Editor::glob2pat($g->{path}->{left});
+ if ($_branch_dest =~ /$re/) {
+ $glob = $g;
+ last;
+ }
+ }
+ unless (defined $glob) {
+ my $dest_re = qr/\b\Q$_branch_dest\E\b/;
+ foreach my $g (@{$allglobs}) {
+ $g->{path}->{left} =~ /$dest_re/ or next;
+ if (defined $glob) {
+ die "Ambiguous destination: ",
+ $_branch_dest, "\nmatches both '",
+ $glob->{path}->{left}, "' and '",
+ $g->{path}->{left}, "'\n";
+ }
+ $glob = $g;
+ }
+ unless (defined $glob) {
+ die "Unknown ",
+ $_tag ? "tag" : "branch",
+ " destination $_branch_dest\n";
+ }
+ }
+ }
my ($lft, $rgt) = @{ $glob->{path} }{qw/left right/};
- my $dst = join '/', $remote->{url}, $lft, $branch_name, ($rgt || ());
+ my $url;
+ if (defined $_commit_url) {
+ $url = $_commit_url;
+ } else {
+ $url = eval { command_oneline('config', '--get',
+ "svn-remote.$gs->{repo_id}.commiturl") };
+ if (!$url) {
+ $url = $remote->{url};
+ }
+ }
+ my $dst = join '/', $url, $lft, $branch_name, ($rgt || ());
+
+ if ($dst =~ /^https:/ && $src =~ /^http:/) {
+		$src =~ s/^http:/https:/;
+ }
my $ctx = SVN::Client->new(
auth => Git::SVN::Ra::_auth_providers(),
$_fetch_all ? $gs->fetch_all : $gs->fetch;
}
command_noisy(rebase_cmd(), $gs->refname);
+ $gs->mkemptydirs;
}
sub cmd_show_ignore {
print STDOUT "\n# $path\n";
my $s = $props->{'svn:ignore'} or return;
$s =~ s/[\r\n]+/\n/g;
+ $s =~ s/^\n+//;
chomp $s;
$s =~ s#^#$path#gm;
print STDOUT "$s\n";
open(GITIGNORE, '>', $ignore)
or fatal("Failed to open `$ignore' for writing: $!");
$s =~ s/[\r\n]+/\n/g;
+ $s =~ s/^\n+//;
chomp $s;
# Prefix all patterns so that the ignore doesn't apply
# to sub-directories.
});
}
+sub cmd_mkdirs {
+ my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
+ $gs ||= Git::SVN->new;
+ $gs->mkemptydirs($_revision);
+}
+
sub canonicalize_path {
my ($path) = @_;
my $dot_slash_added = 0;
return $path;
}
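+# Like canonicalize_path(), but for a full URL: only the path component
+# after the scheme and host is canonicalized.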
+sub canonicalize_url {
+ my ($url) = @_;
+ $url =~ s#^([^:]+://[^/]*/)(.*)$#$1 . canonicalize_path($2)#e;
+ return $url;
+}
+
# get_svnprops(PATH)
# ------------------
# Helper for cmd_propget and cmd_proplist below.
sub cmd_multi_init {
my $url = shift;
- unless (defined $_trunk || defined $_branches || defined $_tags) {
+ unless (defined $_trunk || @_branches || @_tags) {
usage(1);
}
- # there are currently some bugs that prevent multi-init/multi-fetch
- # setups from working well without this.
- $Git::SVN::_minimize_url = 1;
-
$_prefix = '' unless defined $_prefix;
if (defined $url) {
- $url =~ s#/+$##;
+ $url = canonicalize_url($url);
init_subdir(@_);
}
do_git_init_db();
if (defined $_trunk) {
- my $trunk_ref = $_prefix . 'trunk';
+ my $trunk_ref = 'refs/remotes/' . $_prefix . 'trunk';
# try both old-style and new-style lookups:
my $gs_trunk = eval { Git::SVN->new($trunk_ref) };
unless ($gs_trunk) {
undef, $trunk_ref);
}
}
- return unless defined $_branches || defined $_tags;
+ return unless @_branches || @_tags;
my $ra = $url ? Git::SVN::Ra->new($url) : undef;
- complete_url_ls_init($ra, $_branches, '--branches/-b', $_prefix);
- complete_url_ls_init($ra, $_tags, '--tags/-t', $_prefix . 'tags/');
+ foreach my $path (@_branches) {
+ complete_url_ls_init($ra, $path, '--branches/-b', $_prefix);
+ }
+ foreach my $path (@_tags) {
+ complete_url_ls_init($ra, $path, '--tags/-t', $_prefix.'tags/');
+ }
}
sub cmd_multi_fetch {
+ $Git::SVN::no_reuse_existing = undef;
my $remotes = Git::SVN::read_all_remotes();
foreach my $repo_id (sort keys %$remotes) {
if ($remotes->{$repo_id}->{url}) {
print $result, "\n";
}
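+# "git svn reset": rewind the rev_map to the last mapped revision at or
+# before the target (strictly before it with --parent) so a later fetch
+# re-imports everything newer.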
+sub cmd_reset {
+ my $target = shift || $_revision or die "SVN revision required\n";
+ $target = $1 if $target =~ /^r(\d+)$/;
+ $target =~ /^\d+$/ or die "Numeric SVN revision expected\n";
+ my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
+ unless ($gs) {
+ die "Unable to determine upstream SVN information from ".
+ "history\n";
+ }
+ my ($r, $c) = $gs->find_rev_before($target, not $_fetch_parent);
+ $gs->rev_map_set($r, $c, 'reset', $uuid);
+ print "r$r = $c ($gs->{ref_id})\n";
+}
+
+sub cmd_gc {
+ if (!$can_compress) {
+ warn "Compress::Zlib could not be found; unhandled.log " .
+ "files will not be compressed.\n";
+ }
+ find({ wanted => \&gc_directory, no_chdir => 1}, "$ENV{GIT_DIR}/svn");
+}
+
########################### utility functions #########################
sub rebase_cmd {
my $gs = $Git::SVN::_head or return;
return if verify_ref('refs/heads/master^0');
+ # look for "trunk" ref if it exists
+ my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
+ my $fetch = $remote->{fetch};
+ if ($fetch) {
+ foreach my $p (keys %$fetch) {
+ basename($fetch->{$p}) eq 'trunk' or next;
+ $gs = Git::SVN->new($fetch->{$p}, $gs->{repo_id}, $p);
+ last;
+ }
+ }
+
my $valid_head = verify_ref('HEAD^0');
command_noisy(qw(update-ref refs/heads/master), $gs->refname);
return if ($valid_head || !verify_ref('HEAD^0'));
command_noisy(qw/read-tree -m -u -v HEAD HEAD/);
print STDERR "Checked out HEAD:\n ",
$gs->full_url, " r", $gs->last_rev, "\n";
+ $gs->mkemptydirs($gs->last_rev);
}
sub complete_svn_url {
"wanted to set to: $gs->{url}\n";
}
command_oneline('config', $k, $gs->{url}) unless $orig_url;
- my $remote_path = "$ra->{svn_path}/$repo_path";
+ my $remote_path = "$gs->{path}/$repo_path";
+ $remote_path =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
$remote_path =~ s#/+#/#g;
$remote_path =~ s#^/##g;
$remote_path .= "/*" if $remote_path !~ /\*/;
die "--prefix='$pfx' must have a trailing slash '/'\n";
}
command_noisy('config',
+ '--add',
"svn-remote.$gs->{repo_id}.$n",
"$remote_path:refs/remotes/$pfx*" .
('/*' x (($remote_path =~ tr/*/*/) - 1)) );
close $log_fh or croak $!;
if ($_edit || ($type eq 'tree')) {
- my $editor = $ENV{VISUAL} || $ENV{EDITOR} || 'vi';
- # TODO: strip out spaces, comments, like git-commit.sh
- system($editor, $commit_editmsg);
+ chomp(my $editor = command_oneline(qw(var GIT_EDITOR)));
+ system('sh', '-c', $editor.' "$@"', $editor, $commit_editmsg);
}
rename $commit_editmsg, $commit_msg or croak $!;
{
+ require Encode;
# SVN requires messages to be UTF-8 when entering the repo
local $/;
open $log_fh, '<', $commit_msg or croak $!;
binmode $log_fh;
chomp($log_entry{log} = <$log_fh>);
- if (my $enc = Git::config('i18n.commitencoding')) {
- require Encode;
- Encode::from_to($log_entry{log}, $enc, 'UTF-8');
+ my $enc = Git::config('i18n.commitencoding') || 'UTF-8';
+ my $msg = $log_entry{log};
+
+ eval { $msg = Encode::decode($enc, $msg, 1) };
+ if ($@) {
+ die "Could not decode as $enc:\n", $msg,
+ "\nPerhaps you need to set i18n.commitencoding\n";
}
+
+ eval { $msg = Encode::encode('UTF-8', $msg, 1) };
+ die "Could not encode as UTF-8:\n$msg\n" if $@;
+
+ $log_entry{log} = $msg;
+
close $log_fh or croak $!;
}
unlink $commit_msg;
}
# convert GetOpt::Long specs for use by git-config
-sub read_repo_config {
- return unless -d $ENV{GIT_DIR};
+sub read_git_config {
my $opts = shift;
my @config_only;
foreach my $o (keys %$opts) {
sub extract_metadata {
my $id = shift or return (undef, undef, undef);
my ($url, $rev, $uuid) = ($id =~ /^\s*git-svn-id:\s+(.*)\@(\d+)
- \s([a-f\d\-]+)$/x);
+ \s([a-f\d\-]+)$/ix);
if (!defined $rev || !$uuid || !$url) {
# some of the original repositories I made had
# identifiers like this:
- ($rev, $uuid) = ($id =~/^\s*git-svn-id:\s(\d+)\@([a-f\d\-]+)/);
+ ($rev, $uuid) = ($id =~/^\s*git-svn-id:\s(\d+)\@([a-f\d\-]+)/i);
}
return ($url, $rev, $uuid);
}
command(qw/cat-file commit/, shift)))[-1]);
}
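+# Map many commit sha1s to SVN revision numbers with a single
+# "cat-file --batch" process, scanning each commit body for its
+# git-svn-id: line.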
+sub cmt_sha2rev_batch {
+ my %s2r;
+ my ($pid, $in, $out, $ctx) = command_bidi_pipe(qw/cat-file --batch/);
+ my $list = shift;
+
+ foreach my $sha (@{$list}) {
+ my $first = 1;
+ my $size = 0;
+ print $out $sha, "\n";
+
+ while (my $line = <$in>) {
+ if ($first && $line =~ /^[[:xdigit:]]{40}\smissing$/) {
+ last;
+ } elsif ($first &&
+ $line =~ /^[[:xdigit:]]{40}\scommit\s(\d+)$/) {
+ $first = 0;
+ $size = $1;
+ next;
+ } elsif ($line =~ /^(git-svn-id: )/) {
+ my (undef, $rev, undef) =
+ extract_metadata($line);
+ $s2r{$sha} = $rev;
+ }
+
+ $size -= length($line);
+ last if ($size == 0);
+ }
+ }
+
+ command_close_bidi_pipe($pid, $in, $out, $ctx);
+
+ return \%s2r;
+}
+
sub working_head_info {
my ($head, $refs) = @_;
my @args = ('log', '--no-color', '--first-parent', '--pretty=medium');
return $md5->hexdigest();
}
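+# File::Find callback for "git svn gc": gzip-append each unhandled.log
+# into unhandled.log.gz and remove the original, and delete per-remote
+# "index" files (they are rebuilt on demand).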
+sub gc_directory {
+ if ($can_compress && -f $_ && basename($_) eq "unhandled.log") {
+ my $out_filename = $_ . ".gz";
+ open my $in_fh, "<", $_ or die "Unable to open $_: $!\n";
+ binmode $in_fh;
+ my $gz = Compress::Zlib::gzopen($out_filename, "ab") or
+ die "Unable to open $out_filename: $!\n";
+
+ my $res;
+ while ($res = sysread($in_fh, my $str, 1024)) {
+ $gz->gzwrite($str) or
+ die "Unable to write: ".$gz->gzerror()."!\n";
+ }
+ unlink $_ or die "unlink $File::Find::name: $!\n";
+ } elsif (-f $_ && basename($_) eq "index") {
+ unlink $_ or die "unlink $_: $!\n";
+ }
+}
+
package Git::SVN;
use strict;
use warnings;
use File::Path qw/mkpath/;
use File::Copy qw/copy/;
use IPC::Open3;
+use Memoize; # core since 5.8.0, Jul 2002
+use Memoize::Storable;
my ($_gc_nr, $_gc_period);
return unless defined $glob_spec;
my $ref = $glob_spec->{ref};
my $path = $glob_spec->{path};
- foreach (command(qw#for-each-ref --format=%(refname) refs/remotes#)) {
- next unless m#^refs/remotes/$ref->{regex}$#;
+ foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
+ next unless m#^$ref->{regex}$#;
my $p = $1;
my $pathname = desanitize_refname($path->full_path($p));
my $refname = desanitize_refname($ref->full_path($p));
if (my $existing = $fetch->{$pathname}) {
if ($existing ne $refname) {
die "Refspec conflict:\n",
- "existing: refs/remotes/$existing\n",
- " globbed: refs/remotes/$refname\n";
+ "existing: $existing\n",
+ " globbed: $refname\n";
}
- my $u = (::cmt_metadata("refs/remotes/$refname"))[0];
+ my $u = (::cmt_metadata("$refname"))[0];
$u =~ s!^\Q$url\E(/|$)!! or die
- "refs/remotes/$refname: '$url' not found in '$u'\n";
+ "$refname: '$url' not found in '$u'\n";
if ($pathname ne $u) {
warn "W: Refspec glob conflict ",
- "(ref: refs/remotes/$refname):\n",
+ "(ref: $refname):\n",
"expected path: $pathname\n",
" real path: $u\n",
"Continuing ahead with $u\n";
my $ra = Git::SVN::Ra->new($url);
my $uuid = $ra->get_uuid;
my $head = $ra->get_latest_revnum;
+
+ # ignore errors, $head revision may not even exist anymore
+ eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
+ warn "W: $@\n" if $@;
+
my $base = defined $fetch ? $head : 0;
# read the max revs for wildcard expansion (branches/*, tags/*)
foreach my $t (qw/branches tags/) {
defined $remote->{$t} or next;
- push @globs, $remote->{$t};
+ push @globs, @{$remote->{$t}};
+
my $max_rev = eval { tmp_config(qw/--int --get/,
"svn-remote.$repo_id.${t}-maxRev") };
if (defined $max_rev && ($max_rev < $base)) {
my $use_svm_props = eval { command_oneline(qw/config --bool
svn.useSvmProps/) };
$use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
+ my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
- if (m!^(.+)\.fetch=\s*(.*)\s*:\s*(.+)\s*$!) {
- my ($remote, $local_ref, $_remote_ref) = ($1, $2, $3);
- die("svn-remote.$remote: remote ref '$_remote_ref' "
- . "must start with 'refs/remotes/'\n")
- unless $_remote_ref =~ m{^refs/remotes/(.+)};
- my $remote_ref = $1;
- $local_ref =~ s{^/}{};
+ if (m!^(.+)\.fetch=$svn_refspec$!) {
+ my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
+ die("svn-remote.$remote: remote ref '$remote_ref' "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
$r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
$r->{$remote}->{svm} = {} if $use_svm_props;
} elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
$r->{$1}->{svm} = {};
} elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
$r->{$1}->{url} = $2;
- } elsif (m!^(.+)\.(branches|tags)=
- (.*):refs/remotes/(.+)\s*$/!x) {
- my ($p, $g) = ($3, $4);
- my $rs = $r->{$1}->{$2} = {
- t => $2,
- remote => $1,
- path => Git::SVN::GlobSpec->new($p),
- ref => Git::SVN::GlobSpec->new($g) };
+ } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
+ my ($remote, $t, $local_ref, $remote_ref) =
+ ($1, $2, $3, $4);
+ die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
+ my $rs = {
+ t => $t,
+ remote => $remote,
+ path => Git::SVN::GlobSpec->new($local_ref, 1),
+ ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
if (length($rs->{ref}->{right}) != 0) {
die "The '*' glob character must be the last ",
- "character of '$g'\n";
+ "character of '$remote_ref'\n";
}
+ push @{ $r->{$remote}->{$t} }, $rs;
}
}
}
}
my ($xrepo_id, $xpath) = find_ref($self->refname);
- if (defined $xpath) {
+ if (!$no_write && defined $xpath) {
die "svn-remote.$xrepo_id.fetch already set to track ",
- "$xpath:refs/remotes/", $self->refname, "\n";
+ "$xpath:", $self->refname, "\n";
}
unless ($no_write) {
command_noisy('config',
"svn-remote.$self->{repo_id}.url", $url);
$self->{path} =~ s{^/}{};
+ $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
command_noisy('config', '--add',
"svn-remote.$self->{repo_id}.fetch",
"$self->{path}:".$self->refname);
next if defined $repos_root && $repos_root ne $u;
my $fetch = $remotes->{$repo_id}->{fetch} || {};
- foreach (qw/branches tags/) {
- resolve_local_globs($u, $fetch,
- $remotes->{$repo_id}->{$_});
+ foreach my $t (qw/branches tags/) {
+ foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
+ resolve_local_globs($u, $fetch, $globspec);
+ }
}
my $p = $path;
my $rwr = rewrite_root({repo_id => $repo_id});
my ($ref_id) = @_;
foreach (command(qw/config -l/)) {
next unless m!^svn-remote\.(.+)\.fetch=
- \s*(.*)\s*:\s*refs/remotes/(.+)\s*$!x;
+ \s*(.*?)\s*:\s*(.+?)\s*$!x;
my ($repo_id, $path, $ref) = ($1, $2, $3);
if ($ref eq $ref_id) {
$path = '' if ($path =~ m#^\./?#);
if (!defined $repo_id) {
die "Could not find a \"svn-remote.*.fetch\" key ",
"in the repository configuration matching: ",
- "refs/remotes/$ref_id\n";
+ "$ref_id\n";
}
}
my $self = _new($class, $repo_id, $ref_id, $path);
if (!defined $self->{path} || !length $self->{path}) {
my $fetch = command_oneline('config', '--get',
"svn-remote.$repo_id.fetch",
- ":refs/remotes/$ref_id\$") or
+ ":$ref_id\$") or
die "Failed to read \"svn-remote.$repo_id.fetch\" ",
- "\":refs/remotes/$ref_id\$\" in config\n";
+ "\":$ref_id\$\" in config\n";
($self->{path}, undef) = split(/\s*:\s*/, $fetch);
}
$self->{url} = command_oneline('config', '--get',
}
sub refname {
- my ($refname) = "refs/remotes/$_[0]->{ref_id}" ;
+ my ($refname) = $_[0]->{ref_id} ;
# It cannot end with a slash /, we'll throw up on this because
# SVN can't have directories with a slash in their name, either:
chomp($src, $uuid);
- $uuid =~ m{^[0-9a-f\-]{30,}$}
+ $uuid =~ m{^[0-9a-f\-]{30,}$}i
or die "doesn't look right - svm:uuid is '$uuid'\n";
# the '!' is used to mark the repos_root!/relative/path
die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
"options set!\n";
}
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
my $svnsync;
# see if we have it in our config, first:
die "doesn't look right - svn:sync-from-url is '$url'\n";
my $uuid = tmp_config('--get', "$section.svnsync-uuid");
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}) or
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
$svnsync = { url => $url, uuid => $uuid }
die "doesn't look right - svn:sync-from-url is '$url'\n";
my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}) or
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
my $section = "svn-remote.$self->{repo_id}";
unless ($self->{ra_uuid}) {
my $key = "svn-remote.$self->{repo_id}.uuid";
my $uuid = eval { tmp_config('--get', $key) };
- if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/) {
+ if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
$self->{ra_uuid} = $uuid;
} else {
die "ra_uuid called without URL\n" unless $self->{url};
$ra;
}
-sub rel_path {
- my ($self) = @_;
- my $repos_root = $self->ra->{repos_root};
- return $self->{path} if ($self->{url} eq $repos_root);
- my $url = $self->{url} .
- (length $self->{path} ? "/$self->{path}" : $self->{path});
- $url =~ s!^\Q$repos_root\E(?:/+|$)!!g;
- $url;
-}
-
# prop_walk(PATH, REV, SUB)
# -------------------------
# Recursively traverse PATH at revision REV and invoke SUB for each
next if $seen{$p};
$seen{$p} = 1;
push @ret, $p;
- # MAXPARENT is defined to 16 in commit-tree.c:
- last if @ret >= 16;
- }
- if (@tmp) {
- die "r$log_entry->{revision}: No room for parents:\n\t",
- join("\n\t", @tmp), "\n";
}
@ret;
}
$self->{-rewrite_root} = $rwr;
}
+sub rewrite_uuid {
+ my ($self) = @_;
+ return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
+ my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
+ my $rwid = eval { command_oneline(qw/config --get/, $k) };
+ if ($rwid) {
+ $rwid =~ s#/+$##;
+ if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
+ die "$rwid is not a valid UUID (key: $k)\n";
+ }
+ }
+ $self->{-rewrite_uuid} = $rwid;
+}
+
sub metadata_url {
my ($self) = @_;
($self->rewrite_root || $self->{url}) .
$self->{last_rev} = $log_entry->{revision};
$self->{last_commit} = $commit;
- print "r$log_entry->{revision}";
+ print "r$log_entry->{revision}" unless $::_q > 1;
if (defined $log_entry->{svm_revision}) {
- print " (\@$log_entry->{svm_revision})";
+ print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
$self->rev_map_set($log_entry->{svm_revision}, $commit,
0, $self->svm_uuid);
}
- print " = $commit ($self->{ref_id})\n";
+ print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
if (--$_gc_nr == 0) {
$_gc_nr = $_gc_period;
gc();
unless (defined $paths) {
my $err_handler = $SVN::Error::handler;
$SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
- $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1, sub {
- $paths =
- Git::SVN::Ra::dup_changed_paths($_[0]) });
+ $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
+ sub { $paths = $_[0] });
$SVN::Error::handler = $err_handler;
}
return undef unless defined $paths;
# look for a parent from another branch:
- my @b_path_components = split m#/#, $self->rel_path;
+ my @b_path_components = split m#/#, $self->{path};
my @a_path_components;
my $i;
while (@b_path_components) {
my $r = $i->{copyfrom_rev};
my $repos_root = $self->ra->{repos_root};
my $url = $self->ra->{url};
- my $new_url = $repos_root . $branch_from;
+ my $new_url = $url . $branch_from;
print STDERR "Found possible branch point: ",
- "$new_url => ", $self->full_url, ", $r\n";
+ "$new_url => ", $self->full_url, ", $r\n"
+ unless $::_q > 1;
$branch_from =~ s#^/##;
- my $gs = $self->other_gs($new_url, $url, $repos_root,
+ my $gs = $self->other_gs($new_url, $url,
$branch_from, $r, $self->{ref_id});
my ($r0, $parent) = $gs->find_rev_before($r, 1);
{
($r0, $parent) = $gs->find_rev_before($r, 1);
}
if (defined $r0 && defined $parent) {
- print STDERR "Found branch parent: ($self->{ref_id}) $parent\n";
+ print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
+ unless $::_q > 1;
my $ed;
if ($self->ra->can_do_switch) {
$self->assert_index_clean($parent);
- print STDERR "Following parent with do_switch\n";
+ print STDERR "Following parent with do_switch\n"
+ unless $::_q > 1;
# do_switch works with svn/trunk >= r22312, but that
# is not included with SVN 1.4.3 (the latest version
# at the moment), so we can't rely on it
print STDERR "Trees match:\n",
" $new_url\@$r0\n",
" ${\$self->full_url}\@$rev\n",
- "Following parent with no changes\n";
+ "Following parent with no changes\n"
+ unless $::_q > 1;
$self->tmp_index_do(sub {
command_noisy('read-tree', $parent);
});
$self->{last_commit} = $parent;
} else {
- print STDERR "Following parent with do_update\n";
+ print STDERR "Following parent with do_update\n"
+ unless $::_q > 1;
$ed = SVN::Git::Fetcher->new($self);
$self->ra->gs_do_update($rev, $rev, $self, $ed)
or die "SVN connection failed somewhere...\n";
}
- print STDERR "Successfully followed parent\n";
+ print STDERR "Successfully followed parent\n" unless $::_q > 1;
return $self->make_log_entry($rev, [$parent], $ed);
}
return undef;
$self->make_log_entry($rev, \@parents, $ed);
}
+sub mkemptydirs {
+ my ($self, $r) = @_;
+
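+	# Parse one unhandled.log line: "rNNN" starts a new revision block
+	# (stop once past the requested revision), "+empty_dir:" records an
+	# empty directory and "-empty_dir:" drops it and everything below it.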
+ sub scan {
+ my ($r, $empty_dirs, $line) = @_;
+ if (defined $r && $line =~ /^r(\d+)$/) {
+ return 0 if $1 > $r;
+ } elsif ($line =~ /^ \+empty_dir: (.+)$/) {
+ $empty_dirs->{$1} = 1;
+ } elsif ($line =~ /^ \-empty_dir: (.+)$/) {
+ my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
+ delete @$empty_dirs{@d};
+ }
+ 1; # continue
+ };
+
+ my %empty_dirs = ();
+ my $gz_file = "$self->{dir}/unhandled.log.gz";
+ if (-f $gz_file) {
+ if (!$can_compress) {
+ warn "Compress::Zlib could not be found; ",
+ "empty directories in $gz_file will not be read\n";
+ } else {
+ my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
+ die "Unable to open $gz_file: $!\n";
+ my $line;
+ while ($gz->gzreadline($line) > 0) {
+ scan($r, \%empty_dirs, $line) or last;
+ }
+ $gz->gzclose;
+ }
+ }
+
+ if (open my $fh, '<', "$self->{dir}/unhandled.log") {
+ binmode $fh or croak "binmode: $!";
+ while (<$fh>) {
+ scan($r, \%empty_dirs, $_) or last;
+ }
+ close $fh;
+ }
+
+ my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
+ foreach my $d (sort keys %empty_dirs) {
+ $d = uri_decode($d);
+ $d =~ s/$strip//;
+ next if -d $d;
+ if (-e _) {
+ warn "$d exists but is not a directory\n";
+ } else {
+ print "creating empty directory: $d\n";
+ mkpath([$d]);
+ }
+ }
+}
+
sub get_untracked {
my ($self, $ed) = @_;
my @out;
}
sub other_gs {
- my ($self, $new_url, $url, $repos_root,
+ my ($self, $new_url, $url,
$branch_from, $r, $old_ref_id) = @_;
- my $gs = Git::SVN->find_by_url($new_url, $repos_root, $branch_from);
+ my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
unless ($gs) {
my $ref_id = $old_ref_id;
$ref_id =~ s/\@\d+$//;
$ref_id .= "\@$r";
# just grow a tail if we're not unique enough :x
$ref_id .= '-' while find_ref($ref_id);
- print STDERR "Initializing parent: $ref_id\n";
+ print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
if ($u =~ s#^\Q$url\E(/|$)##) {
$p = $u;
$gs
}
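+# Run the --authors-prog command with the SVN author name as its single,
+# shell-quoted argument; it is expected to print "Name <email>" on stdout.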
+sub call_authors_prog {
+ my ($orig_author) = @_;
+ $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
+ my $author = `$::_authors_prog $orig_author`;
+ if ($? != 0) {
+ die "$::_authors_prog failed with exit code $?\n"
+ }
+ if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
+ my ($name, $email) = ($1, $2);
+ $email = undef if length $2 == 0;
+ return [$name, $email];
+ } else {
+ die "Author: $orig_author: $::_authors_prog returned "
+ . "invalid author format: $author\n";
+ }
+}
+
sub check_author {
my ($author) = @_;
if (!defined $author || length $author == 0) {
$author = '(no author)';
- } elsif (defined $::_authors && ! defined $::users{$author}) {
- die "Author: $author not defined in $::_authors file\n";
+ }
+ if (!defined $::users{$author}) {
+ if (defined $::_authors_prog) {
+ $::users{$author} = call_authors_prog($author);
+ } elsif (defined $::_authors) {
+ die "Author: $author not defined in $::_authors file\n";
+ }
}
$author;
}
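+# An svk:merge property change lists merge tickets as "uuid:/path:rev";
+# for tickets from this repository, look up the merged tip in the rev_map
+# and add it as an extra commit parent if it is not already reachable
+# from the existing parents.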
+sub find_extra_svk_parents {
+ my ($self, $ed, $tickets, $parents) = @_;
+ # aha! svk:merge property changed...
+ my @tickets = split "\n", $tickets;
+ my @known_parents;
+ for my $ticket ( @tickets ) {
+ my ($uuid, $path, $rev) = split /:/, $ticket;
+ if ( $uuid eq $self->ra_uuid ) {
+ my $url = $self->rewrite_root || $self->{url};
+ my $repos_root = $url;
+ my $branch_from = $path;
+ $branch_from =~ s{^/}{};
+ my $gs = $self->other_gs($repos_root."/".$branch_from,
+ $url,
+ $branch_from,
+ $rev,
+ $self->{ref_id});
+ if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
+ # wahey! we found it, but it might be
+ # an old one (!)
+ push @known_parents, [ $rev, $commit ];
+ }
+ }
+ }
+ # Ordering matters; highest-numbered commit merge tickets
+ # first, as they may account for later merge ticket additions
+ # or changes.
+ @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
+ for my $parent ( @known_parents ) {
+ my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
+ my ($msg_fh, $ctx) = command_output_pipe(@cmd);
+ my $new;
+ while ( <$msg_fh> ) {
+			$new = 1; last;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ if ( $new ) {
+ print STDERR
+ "Found merge parent (svk:merge ticket): $parent\n";
+ push @$parents, $parent;
+ }
+ }
+}
+
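+# Translate one svn:mergeinfo entry ("/path:rev-ranges") into the git
+# commit of the highest merged revision plus the commit ranges covered
+# by those revisions.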
+sub lookup_svn_merge {
+ my $uuid = shift;
+ my $url = shift;
+ my $merge = shift;
+
+ my ($source, $revs) = split ":", $merge;
+ my $path = $source;
+ $path =~ s{^/}{};
+ my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
+ if ( !$gs ) {
+ warn "Couldn't find revmap for $url$source\n";
+ return;
+ }
+ my @ranges = split ",", $revs;
+ my ($tip, $tip_commit);
+ my @merged_commit_ranges;
+ # find the tip
+ for my $range ( @ranges ) {
+ my ($bottom, $top) = split "-", $range;
+ $top ||= $bottom;
+ my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
+ my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
+
+ unless ($top_commit and $bottom_commit) {
+ warn "W:unknown path/rev in svn:mergeinfo "
+ ."dirprop: $source:$range\n";
+ next;
+ }
+
+ push @merged_commit_ranges,
+ "$bottom_commit^..$top_commit";
+
+ if ( !defined $tip or $top > $tip ) {
+ $tip = $top;
+ $tip_commit = $top_commit;
+ }
+ }
+ return ($tip_commit, @merged_commit_ranges);
+}
+
+sub _rev_list {
+ my ($msg_fh, $ctx) = command_output_pipe(
+ "rev-list", @_,
+ );
+ my @rv;
+ while ( <$msg_fh> ) {
+ chomp;
+ push @rv, $_;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ @rv;
+}
+
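+# Return the non-merge commits reachable from $tip but not from $base
+# that are not covered by the merged ranges; a non-empty result means
+# the mergeinfo describes a partial (cherry-picked) merge.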
+sub check_cherry_pick {
+ my $base = shift;
+ my $tip = shift;
+ my @ranges = @_;
+ my %commits = map { $_ => 1 }
+ _rev_list("--no-merges", $tip, "--not", $base);
+ for my $range ( @ranges ) {
+ delete @commits{_rev_list($range)};
+ }
+ for my $commit (keys %commits) {
+ if (has_no_changes($commit)) {
+ delete $commits{$commit};
+ }
+ }
+ return (keys %commits);
+}
+
+sub has_no_changes {
+ my $commit = shift;
+
+ my @revs = split / /, command_oneline(
+ qw(rev-list --parents -1 -m), $commit);
+
+ # Commits with no parents, e.g. the start of a partial branch,
+ # have changes by definition.
+ return 1 if (@revs < 2);
+
+ # Commits with multiple parents, e.g a merge, have no changes
+ # by definition.
+ return 0 if (@revs > 2);
+
+ return (command_oneline("rev-parse", "$commit^{tree}") eq
+ command_oneline("rev-parse", "$commit~1^{tree}"));
+}
+
+# The GIT_DIR environment variable is not always set until after the command
+# line arguments are processed, so we can't memoize in a BEGIN block.
+{
+ my $memoized = 0;
+
+ sub memoize_svn_mergeinfo_functions {
+ return if $memoized;
+ $memoized = 1;
+
+ my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
+ mkpath([$cache_path]) unless -d $cache_path;
+
+ tie my %lookup_svn_merge_cache => 'Memoize::Storable',
+ "$cache_path/lookup_svn_merge.db", 'nstore';
+ memoize 'lookup_svn_merge',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
+ ;
+
+ tie my %check_cherry_pick_cache => 'Memoize::Storable',
+ "$cache_path/check_cherry_pick.db", 'nstore';
+ memoize 'check_cherry_pick',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
+ ;
+
+ tie my %has_no_changes_cache => 'Memoize::Storable',
+ "$cache_path/has_no_changes.db", 'nstore';
+ memoize 'has_no_changes',
+ SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
+ LIST_CACHE => 'FAULT',
+ ;
+ }
+}
+
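+# From @commits, return those not already reachable from @$parents,
+# peeling them off one "rev-list -1" query at a time.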
+sub parents_exclude {
+ my $parents = shift;
+ my @commits = @_;
+ return unless @commits;
+
+ my @excluded;
+ my $excluded;
+ do {
+ my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
+ $excluded = command_oneline(@cmd);
+ if ( $excluded ) {
+ my @new;
+ my $found;
+ for my $commit ( @commits ) {
+ if ( $commit eq $excluded ) {
+ push @excluded, $commit;
+ $found++;
+ last;
+ }
+ else {
+ push @new, $commit;
+ }
+ }
+ die "saw commit '$excluded' in rev-list output, "
+ ."but we didn't ask for that commit (wanted: @commits --not @$parents)"
+ unless $found;
+ @commits = @new;
+ }
+ }
+ while ($excluded and @commits);
+
+ return @excluded;
+}
+
+
+# note: this function should only be called if the various dirprops
+# have actually changed
+sub find_extra_svn_parents {
+ my ($self, $ed, $mergeinfo, $parents) = @_;
+	# aha! svn:mergeinfo property changed...
+
+ memoize_svn_mergeinfo_functions();
+
+ # We first search for merged tips which are not in our
+ # history. Then, we figure out which git revisions are in
+ # that tip, but not this revision. If all of those revisions
+	# are now marked as merged, we can add the tip as a parent.
+ my @merges = split "\n", $mergeinfo;
+ my @merge_tips;
+ my $url = $self->rewrite_root || $self->{url};
+ my $uuid = $self->ra_uuid;
+ my %ranges;
+ for my $merge ( @merges ) {
+ my ($tip_commit, @ranges) =
+ lookup_svn_merge( $uuid, $url, $merge );
+ unless (!$tip_commit or
+ grep { $_ eq $tip_commit } @$parents ) {
+ push @merge_tips, $tip_commit;
+ $ranges{$tip_commit} = \@ranges;
+ } else {
+ push @merge_tips, undef;
+ }
+ }
+
+ my %excluded = map { $_ => 1 }
+ parents_exclude($parents, grep { defined } @merge_tips);
+
+ # check merge tips for new parents
+ my @new_parents;
+ for my $merge_tip ( @merge_tips ) {
+ my $spec = shift @merges;
+ next unless $merge_tip and $excluded{$merge_tip};
+
+ my $ranges = $ranges{$merge_tip};
+
+ # check out 'new' tips
+ my $merge_base;
+ eval {
+ $merge_base = command_oneline(
+ "merge-base",
+ @$parents, $merge_tip,
+ );
+ };
+ if ($@) {
+ die "An error occurred during merge-base"
+ unless $@->isa("Git::Error::Command");
+
+ warn "W: Cannot find common ancestor between ".
+ "@$parents and $merge_tip. Ignoring merge info.\n";
+ next;
+ }
+
+ # double check that there are no missing non-merge commits
+ my (@incomplete) = check_cherry_pick(
+ $merge_base, $merge_tip,
+ @$ranges,
+ );
+
+ if ( @incomplete ) {
+ warn "W:svn cherry-pick ignored ($spec) - missing "
+ .@incomplete." commit(s) (eg $incomplete[0])\n";
+ } else {
+ warn
+ "Found merge parent (svn:mergeinfo prop): ",
+ $merge_tip, "\n";
+ push @new_parents, $merge_tip;
+ }
+ }
+
+ # cater for merges which merge commits from multiple branches
+ if ( @new_parents > 1 ) {
+ for ( my $i = 0; $i <= $#new_parents; $i++ ) {
+ for ( my $j = 0; $j <= $#new_parents; $j++ ) {
+ next if $i == $j;
+ next unless $new_parents[$i];
+ next unless $new_parents[$j];
+ my $revs = command_oneline(
+ "rev-list", "-1",
+ "$new_parents[$i]..$new_parents[$j]",
+ );
+ if ( !$revs ) {
+ undef($new_parents[$i]);
+ }
+ }
+ }
+ }
+ push @$parents, grep { defined } @new_parents;
+}
+
sub make_log_entry {
my ($self, $rev, $parents, $ed) = @_;
my $untracked = $self->get_untracked($ed);
+ my @parents = @$parents;
+ my $ps = $ed->{path_strip} || "";
+	for my $path ( grep { m/$ps/ } keys %{$ed->{dir_prop}} ) {
+ my $props = $ed->{dir_prop}{$path};
+ if ( $props->{"svk:merge"} ) {
+ $self->find_extra_svk_parents
+ ($ed, $props->{"svk:merge"}, \@parents);
+ }
+ if ( $props->{"svn:mergeinfo"} ) {
+ $self->find_extra_svn_parents
+ ($ed,
+ $props->{"svn:mergeinfo"},
+ \@parents);
+ }
+ }
+
open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
print $un "r$rev\n" or croak $!;
print $un $_, "\n" foreach @$untracked;
- my %log_entry = ( parents => $parents || [], revision => $rev,
+ my %log_entry = ( parents => \@parents, revision => $rev,
log => '');
my $headrev;
die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
"options set!\n";
}
- my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$};
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
+ my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
# we don't want "SVM: initializing mirror for junk" ...
return undef if $r == 0;
my $svm = $self->svm;
} else {
my $url = $self->metadata_url;
remove_username($url);
- $log_entry{metadata} = "$url\@$rev " .
- $self->ra->get_uuid;
- $email ||= "$author\@" . $self->ra->get_uuid;
- $commit_email ||= "$author\@" . $self->ra->get_uuid;
+ my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
+ $log_entry{metadata} = "$url\@$rev " . $uuid;
+ $email ||= "$author\@" . $uuid;
+ $commit_email ||= "$author\@" . $uuid;
}
$log_entry{name} = $name;
$log_entry{email} = $email;
'--');
my $metadata_url = $self->metadata_url;
remove_username($metadata_url);
- my $svn_uuid = $self->ra_uuid;
+ my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
my $c;
while (<$log>) {
if ( m{^commit ($::sha1)$} ) {
croak "write: $!";
}
+sub _rev_map_reset {
+ my ($fh, $rev, $commit) = @_;
+ my $c = _rev_map_get($fh, $rev);
+ $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
+ my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
+ truncate $fh, $offset or croak "truncate: $!";
+}
+
sub mkfile {
my ($path) = @_;
unless (-e $path) {
my $db = $self->map_path($uuid);
my $db_lock = "$db.lock";
my $sig;
+ $update_ref ||= 0;
if ($update_ref) {
$SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} =
$SIG{USR1} = $SIG{USR2} = sub { $sig = $_[0] };
sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
or croak "Couldn't open $db_lock: $!\n";
- _rev_map_set($fh, $rev, $commit);
+ $update_ref eq 'reset' ? _rev_map_reset($fh, $rev, $commit) :
+ _rev_map_set($fh, $rev, $commit);
if ($sync) {
$fh->flush or die "Couldn't flush $db_lock: $!\n";
$fh->sync or die "Couldn't sync $db_lock: $!\n";
close $fh or croak $!;
if ($update_ref) {
$_head = $self;
- command_noisy('update-ref', '-m', "r$rev",
+ my $note = "";
+ $note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
+ command_noisy('update-ref', '-m', "r$rev$note",
$self->refname, $commit);
}
rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
return undef unless -e $map_path;
sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
+ my $c = _rev_map_get($fh, $rev);
+ close($fh) or croak "close: $!";
+ $c
+}
+
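+# Binary-search the rev_map (24-byte records: 4-byte big-endian revision
+# followed by a 20-byte sha1) for $rev; an all-zero sha1 entry marks a
+# revision with no corresponding commit.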
+sub _rev_map_get {
+ my ($fh, $rev) = @_;
+
binmode $fh or croak "binmode: $!";
my $size = (stat($fh))[7];
($size % 24) == 0 or croak "inconsistent size: $size";
if ($size == 0) {
- close $fh or croak "close: $fh";
return undef;
}
my $i = int(($l/24 + $u/24) / 2) * 24;
sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
sysread($fh, my $buf, 24) == 24 or croak "read: $!";
- my ($r, $c) = unpack('NH40', $buf);
+ my ($r, $c) = unpack(rev_map_fmt, $buf);
if ($r < $rev) {
$l = $i + 24;
} elsif ($r > $rev) {
$u = $i - 24;
} else { # $r == $rev
- close($fh) or croak "close: $!";
return $c eq ('0' x 40) ? undef : $c;
}
}
- close($fh) or croak "close: $!";
undef;
}
my ($self, $rev, $eq_ok, $min_rev) = @_;
--$rev unless $eq_ok;
$min_rev ||= 1;
+ my $max_rev = $self->rev_map_max;
+ $rev = $max_rev if ($rev > $max_rev);
while ($rev >= $min_rev) {
if (my $c = $self->rev_map_get($rev)) {
return ($rev, $c);
$repo_id = $Git::SVN::default_repo_id;
}
unless (defined $ref_id && length $ref_id) {
- $_[2] = $ref_id = $Git::SVN::default_ref_id;
+ $_prefix = '' unless defined($_prefix);
+ $_[2] = $ref_id =
+ "refs/remotes/$_prefix$Git::SVN::default_ref_id";
}
$_[1] = $repo_id;
my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
+
+ # Older repos imported by us used $GIT_DIR/svn/foo instead of
+ # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
+ if ($ref_id =~ m{^refs/remotes/(.*)}) {
+ my $old_dir = "$ENV{GIT_DIR}/svn/$1";
+ if (-d $old_dir && ! -d $dir) {
+ $dir = $old_dir;
+ }
+ }
+
$_[3] = $path = '' unless (defined $path);
- mkpath(["$ENV{GIT_DIR}/svn"]);
+ mkpath([$dir]);
bless {
ref_id => $ref_id, dir => $dir, index => "$dir/index",
path => $path, config => "$ENV{GIT_DIR}/svn/config",
$f
}
+sub uri_decode {
+ my ($f) = @_;
+ $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
+ $f
+}
+
sub remove_username {
$_[0] =~ s{^([^:]*://)[^@]+@}{$1};
}
$self->{empty_symlinks} =
_mark_empty_symlinks($git_svn, $switch_path);
}
+ $self->{ignore_regex} = eval { command_oneline('config', '--get',
+ "svn-remote.$git_svn->{repo_id}.ignore-paths") };
$self->{empty} = {};
$self->{dir_prop} = {};
$self->{file_prop} = {};
# return value: 0 -- don't ignore, 1 -- ignore
sub is_path_ignored {
- my ($path) = @_;
+ my ($self, $path) = @_;
return 1 if in_dot_git($path);
+ return 1 if defined($self->{ignore_regex}) &&
+ $path =~ m!$self->{ignore_regex}!;
return 0 unless defined($_ignore_regex);
return 1 if $path =~ m!$_ignore_regex!o;
return 0;
sub delete_entry {
my ($self, $path, $rev, $pb) = @_;
- return undef if is_path_ignored($path);
+ return undef if $self->is_path_ignored($path);
my $gpath = $self->git_path($path);
return undef if ($gpath eq '');
}
print "\tD\t$gpath/\n" unless $::_q;
command_close_pipe($ls, $ctx);
- $self->{empty}->{$path} = 0
} else {
$self->{gii}->remove($gpath);
print "\tD\t$gpath\n" unless $::_q;
}
+ $self->{empty}->{$path} = 0;
undef;
}
my ($self, $path, $pb, $rev) = @_;
my ($mode, $blob);
- goto out if is_path_ignored($path);
+ goto out if $self->is_path_ignored($path);
my $gpath = $self->git_path($path);
($mode, $blob) = (command('ls-tree', '-z', $self->{c}, "./$gpath")
my ($self, $path, $pb, $cp_path, $cp_rev) = @_;
my $mode;
- if (!is_path_ignored($path)) {
+ if (!$self->is_path_ignored($path)) {
my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
delete $self->{empty}->{$dir};
$mode = '100644';
sub add_directory {
my ($self, $path, $cp_path, $cp_rev) = @_;
- goto out if is_path_ignored($path);
+ goto out if $self->is_path_ignored($path);
my $gpath = $self->git_path($path);
if ($gpath eq '') {
my ($ls, $ctx) = command_output_pipe(qw/ls-tree
sub change_dir_prop {
my ($self, $db, $prop, $value) = @_;
- return undef if is_path_ignored($db->{path});
+ return undef if $self->is_path_ignored($db->{path});
$self->{dir_prop}->{$db->{path}} ||= {};
$self->{dir_prop}->{$db->{path}}->{$prop} = $value;
undef;
sub absent_directory {
my ($self, $path, $pb) = @_;
- return undef if is_path_ignored($path);
+ return undef if $self->is_path_ignored($path);
$self->{absent_dir}->{$pb->{path}} ||= [];
push @{$self->{absent_dir}->{$pb->{path}}}, $path;
undef;
sub absent_file {
my ($self, $path, $pb) = @_;
- return undef if is_path_ignored($path);
+ return undef if $self->is_path_ignored($path);
$self->{absent_file}->{$pb->{path}} ||= [];
push @{$self->{absent_file}->{$pb->{path}}}, $path;
undef;
sub change_file_prop {
my ($self, $fb, $prop, $value) = @_;
- return undef if is_path_ignored($fb->{path});
+ return undef if $self->is_path_ignored($fb->{path});
if ($prop eq 'svn:executable') {
if ($fb->{mode_b} != 120000) {
$fb->{mode_b} = defined $value ? 100755 : 100644;
sub apply_textdelta {
my ($self, $fb, $exp) = @_;
- return undef if is_path_ignored($fb->{path});
+ return undef if $self->is_path_ignored($fb->{path});
my $fh = $::_repository->temp_acquire('svn_delta');
# $fh gets auto-closed() by SVN::TxDelta::apply(),
# (but $base does not,) so dup() it for reading in close_file
sub close_file {
my ($self, $fb, $exp) = @_;
- return undef if is_path_ignored($fb->{path});
+ return undef if $self->is_path_ignored($fb->{path});
my $hash;
my $path = $self->git_path($fb->{path});
sub url_path {
my ($self, $path) = @_;
if ($self->{url} =~ m#^https?://#) {
- $path =~ s/([^~a-zA-Z0-9_.-])/uc sprintf("%%%02x",ord($1))/eg;
+ $path =~ s!([^~a-zA-Z0-9_./-])!uc sprintf("%%%02x",ord($1))!eg;
}
$self->{url} . '/' . $self->repo_path($path);
}
my ($self, @args) = @_;
my $pool = SVN::Pool->new;
+ # svn_log_changed_path_t objects passed to get_log are likely to be
+ # overwritten even if only the refs are copied to an external variable,
+ # so we should dup the structures in their entirety. Using an
+ # externally passed pool (instead of our temporary and quickly cleared
+ # pool in Git::SVN::Ra) does not help matters at all...
+ my $receiver = pop @args;
+ my $prefix = "/".$self->{svn_path};
+ $prefix =~ s#/+($)##;
+ my $prefix_regex = qr#^\Q$prefix\E#;
+ push(@args, sub {
+ my ($paths) = $_[0];
+ return &$receiver(@_) unless $paths;
+ $_[0] = ();
+ foreach my $p (keys %$paths) {
+ my $i = $paths->{$p};
+ # Make path relative to our url, not repos_root
+ $p =~ s/$prefix_regex//;
+ my %s = map { $_ => $i->$_; }
+ qw/copyfrom_path copyfrom_rev action/;
+ if ($s{'copyfrom_path'}) {
+ $s{'copyfrom_path'} =~ s/$prefix_regex//;
+ }
+ $_[0]{$p} = \%s;
+ }
+ &$receiver(@_);
+ });
+
+
# the limit parameter was not supported in SVN 1.1.x, so we
# drop it. Therefore, the receiver callback passed to it
# is made aware of this limitation by being wrapped if
my $full_url = $self->{url};
my $old_url = $full_url;
- $full_url .= '/' . escape_uri_only($path) if length $path;
+ $full_url .= '/' . $path if length $path;
my ($ra, $reparented);
- if ($old_url =~ m#^svn(\+ssh)?://#) {
+ if ($old_url =~ m#^svn(\+ssh)?://# ||
+ ($full_url =~ m#^https?://# &&
+ escape_url($full_url) ne $full_url)) {
$_[0] = undef;
$self = undef;
$RA = undef;
my ($min, $max) = ($base, $head < $base + $inc ? $head : $base + $inc);
my $longest_path = longest_common_path($gsv, $globs);
my $ra_url = $self->{url};
+ my $find_trailing_edge;
while (1) {
my %revs;
my $err;
};
sub _cb {
my ($paths, $r, $author, $date, $log) = @_;
- [ dup_changed_paths($paths),
+ [ $paths,
{ author => $author, date => $date, log => $log } ];
}
$self->get_log([$longest_path], $min, $max, 0, 1, 1,
sub { $revs{$_[1]} = _cb(@_) });
if ($err) {
print "Checked through r$max\r";
+ } else {
+ $find_trailing_edge = 1;
}
- if ($err && $max >= $head) {
+ if ($err and $find_trailing_edge) {
print STDERR "Path '$longest_path' ",
"was probably deleted:\n",
$err->expanded_message,
my $ok;
$self->get_log([$longest_path], $min, $hi,
0, 1, 1, sub {
- $ok ||= $_[1];
+ $ok = $_[1];
$revs{$_[1]} = _cb(@_) });
if ($ok) {
print STDERR "r$min .. r$ok OK\n";
last;
}
}
+ $find_trailing_edge = 0;
}
$SVN::Error::handler = $err_handler;
next if (length $g->{path}->{right} &&
($self->check_path($p, $r) !=
$SVN::Node::dir));
+ next unless $p =~ /$g->{path}->{regex}/;
$exists->{$p} = Git::SVN->init($self->{url}, $p, undef,
$g->{ref}->full_path($de), 1);
}
my $c = '';
do {
$url .= "/$c" if length $c;
- eval { (ref $self)->new($url)->get_latest_revnum };
+ eval {
+ my $ra = (ref $self)->new($url);
+ my $latest = $ra->get_latest_revnum;
+ $ra->get_log("", $latest, 0, 1, 0, 1, sub {});
+ };
} while ($@ && ($c = shift @components));
$url;
}
die "Error from SVN, ($errno): ", $err->expanded_message,"\n";
}
-# svn_log_changed_path_t objects passed to get_log are likely to be
-# overwritten even if only the refs are copied to an external variable,
-# so we should dup the structures in their entirety. Using an externally
-# passed pool (instead of our temporary and quickly cleared pool in
-# Git::SVN::Ra) does not help matters at all...
-sub dup_changed_paths {
- my ($paths) = @_;
- return undef unless $paths;
- my %ret;
- foreach my $p (keys %$paths) {
- my $i = $paths->{$p};
- my %s = map { $_ => $i->$_ }
- qw/copyfrom_path copyfrom_rev action/;
- $ret{$p} = \%s;
- }
- \%ret;
-}
-
package Git::SVN::Log;
use strict;
use warnings;
# adapted from pager.c
sub config_pager {
- $pager ||= $ENV{GIT_PAGER} || $ENV{PAGER};
- if (!defined $pager) {
- $pager = 'less';
- } elsif (length $pager == 0 || $pager eq 'cat') {
+ chomp(my $pager = command_oneline(qw(var GIT_PAGER)));
+ if ($pager eq 'cat') {
$pager = undef;
}
$ENV{GIT_PAGER_IN_USE} = defined($pager);
'--', $path);
my ($sha1);
my %authors;
+ my @buffer;
+	my %dsha; # distinct sha keys
+
while (my $line = <$fh>) {
+ push @buffer, $line;
if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
- $sha1 = $1;
- (undef, $rev, undef) = ::cmt_metadata($1);
- $rev = '0' if (!$rev);
+ $dsha{$1} = 1;
+ }
+ }
+
+ my $s2r = ::cmt_sha2rev_batch([keys %dsha]);
+
+ foreach my $line (@buffer) {
+ if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
+ $rev = $s2r->{$1};
+			$rev = '0' if (!$rev);
}
elsif ($line =~ /^author (.*)/) {
$authors{$rev} = $1;
my $pfx = "svn-remote.$x->{old_repo_id}";
my $old_fetch = quotemeta("$x->{old_path}:".
- "refs/remotes/$x->{ref_id}");
+ "$x->{ref_id}");
command_noisy(qw/config --unset/,
"$pfx.fetch", '^'. $old_fetch . '$');
delete $r->{$x->{old_repo_id}}->
use warnings;
sub new {
- my ($class, $glob) = @_;
+ my ($class, $glob, $pattern_ok) = @_;
my $re = $glob;
$re =~ s!/+$!!g; # no need for trailing slashes
- $re =~ m!^([^*]*)(\*(?:/\*)*)([^*]*)$!;
- my $temp = $re;
- my ($left, $right) = ($1, $3);
- $re = $2;
- my $depth = $re =~ tr/*/*/;
- if ($depth != $temp =~ tr/*/*/) {
- die "Only one set of wildcard directories " .
- "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
+ my (@left, @right, @patterns);
+ my $state = "left";
+ my $die_msg = "Only one set of wildcard directories " .
+ "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
+ for my $part (split(m|/|, $glob)) {
+ if ($part =~ /\*/ && $part ne "*") {
+ die "Invalid pattern in '$glob': $part\n";
+ } elsif ($pattern_ok && $part =~ /[{}]/ &&
+ $part !~ /^\{[^{}]+\}/) {
+ die "Invalid pattern in '$glob': $part\n";
+ }
+ if ($part eq "*") {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ push(@patterns, "[^/]*");
+ } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ my $p = quotemeta($1);
+ $p =~ s/\\,/|/g;
+ push(@patterns, "(?:$p)");
+ } else {
+ if ($state eq "left") {
+ push(@left, $part);
+ } else {
+ push(@right, $part);
+ $state = "right";
+ }
+ }
}
+ my $depth = @patterns;
if ($depth == 0) {
- die "One '*' is needed for glob: '$glob'\n";
- }
- $re =~ s!\*!\[^/\]*!g;
- $re = quotemeta($left) . "($re)" . quotemeta($right);
- if (length $left && !($left =~ s!/+$!!g)) {
- die "Missing trailing '/' on left side of: '$glob' ($left)\n";
- }
- if (length $right && !($right =~ s!^/+!!g)) {
- die "Missing leading '/' on right side of: '$glob' ($right)\n";
+ die "One '*' is needed in glob: '$glob'\n";
}
+ my $left = join('/', @left);
+ my $right = join('/', @right);
+ $re = join('/', @patterns);
+ $re = join('\/',
+ grep(length, quotemeta($left), "($re)", quotemeta($right)));
my $left_re = qr/^\/\Q$left\E(\/|$)/;
bless { left => $left, right => $right, left_regex => $left_re,
regex => qr/$re/, glob => $glob, depth => $depth }, $class;