# For remote repositories a local clone is stored in
# "$GIT_DIR/hg/origin/clone/.hg/".
-from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions
+from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util
import re
import sys
def gitref(ref):
    return ref.replace(' ', '___')
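+# True when the local Mercurial is at least the given version tuple; when the
+# version could not be detected, optimistically assume it is recent enough.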
+def check_version(*check):
+    if not hg_version:
+        return True
+    return hg_version >= check
+
def get_config(config):
    cmd = ['git', 'config', '--get', config]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    return tagnode
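+# Refuse non-fast-forward pushes: every changeset being pushed must descend
+# from one of the heads the remote already has on the same named branch.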
+def checkheads(repo, remote, p_revs):
+
+    remotemap = remote.branchmap()
+    if not remotemap:
+        # empty repo
+        return
+
+    new = {}
+
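+    # group the revisions being pushed by the named branch they belong to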
+    for node in p_revs:
+        ctx = repo[node]
+        branch = ctx.branch()
+        if branch not in remotemap:
+            # new branch
+            continue
+        new.setdefault(branch, []).append(ctx.rev())
+
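+    # each pushed head must have at least one of the remote's existing heads
+    # for that branch among its ancestors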
+    for branch, heads in new.iteritems():
+        old = [repo.changelog.rev(x) for x in remotemap[branch]]
+        for rev in heads:
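+            # Mercurial 2.3 changed changelog.ancestors() to take a list of
+            # revs plus an optional stoprev bounding the walk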
+            if check_version(2, 3):
+                ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
+            else:
+                ancestors = repo.changelog.ancestors(rev)
+            found = False
+
+            for x in old:
+                if x in ancestors:
+                    found = True
+                    break
+
+            if found:
+                continue
+
+            raise Exception("non-fast-forward")
+
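+# Compute what the remote is missing, verify the push is a fast-forward unless
+# forced, and send the changegroup. Callers are responsible for any locking
+# (see push() below).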
+def push_unsafe(repo, remote, parsed_refs, p_revs):
+
+    force = force_push
+
+    fci = discovery.findcommonincoming
+    commoninc = fci(repo, remote, force=force)
+    common, _, remoteheads = commoninc
+
+    if not force:
+        checkheads(repo, remote, p_revs)
+
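+    # bundle everything between the common ancestry and the heads being pushed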
+    cg = repo.getbundle('push', heads=list(p_revs), common=common)
+
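+    # peers that advertise 'unbundle' receive the bundle over the wire;
+    # otherwise fall back to adding the changegroup to the peer directly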
+    unbundle = remote.capable('unbundle')
+    if unbundle:
+        if force:
+            remoteheads = ['force']
+        return remote.unbundle(cg, remoteheads, 'push')
+    else:
+        return remote.addchangegroup(cg, 'push', repo.url())
+
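+# Push entry point: skip the work when there is nothing to send, and hold the
+# remote lock during the push when the peer cannot accept a bundle.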
+def push(repo, remote, parsed_refs, p_revs):
+    if hasattr(remote, 'canpush') and not remote.canpush():
+        print "error cannot push"
+
+    if not p_revs:
+        # nothing to push
+        return
+
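+    # peers without the 'unbundle' capability are locked for the whole push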
+    lock = None
+    unbundle = remote.capable('unbundle')
+    if not unbundle:
+        lock = remote.lock()
+    try:
+        ret = push_unsafe(repo, remote, parsed_refs, p_revs)
+    finally:
+        if lock is not None:
+            lock.release()
+
+    return ret
+
def do_export(parser):
    global parsed_refs, bmarks, peer
    p_bmarks = []
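+    # every changeset referenced by a pushed ref is collected in p_revs and
+    # handed to push() as the set of heads to send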
+    p_revs = set()
    parser.next()
            if branch in branches and bnode in branches[branch]:
                # up to date
                continue
+            p_revs.add(bnode)
            print "ok %s" % ref
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
-            p_bmarks.append((bmark, node))
-            continue
+            new = node
+            old = bmarks[bmark].hex() if bmark in bmarks else ''
+
+            if old == new:
+                continue
+
+            print "ok %s" % ref
+            if bmark != fake_bmark and \
+                    not (bmark == 'master' and bmark not in parser.repo._bookmarks):
+                p_bmarks.append((ref, bmark, old, new))
+
+            p_revs.add(bnode)
        elif ref.startswith('refs/tags/'):
            tag = ref[len('refs/tags/'):]
            tag = hgref(tag)
            if mode == 'git':
                if not msg:
                    msg = 'Added tag %s for changeset %s' % (tag, node[:12]);
-                write_tag(parser.repo, tag, node, msg, author)
+                tagnode = write_tag(parser.repo, tag, node, msg, author)
+                p_revs.add(tagnode)
            else:
                fp = parser.repo.opener('localtags', 'a')
                fp.write('%s %s\n' % (node, tag))
                fp.close()
+                p_revs.add(bnode)
            print "ok %s" % ref
        else:
            # transport-helper/fast-export bugs
            continue
    if peer:
-        parser.repo.push(peer, force=force_push, newbranch=True)
-        remote_bmarks = peer.listkeys('bookmarks')
-
-    # handle bookmarks
-    for bmark, node in p_bmarks:
-        ref = 'refs/heads/' + bmark
-        new = node
+        push(parser.repo, peer, parsed_refs, p_revs)
-        if bmark in bmarks:
-            old = bmarks[bmark].hex()
-        else:
-            old = ''
-
-        if old == new:
-            continue
-
-        if bmark == fake_bmark or \
-                bmark == 'master' and 'master' not in parser.repo._bookmarks:
-            print "ok %s" % ref
-            continue
-        elif bookmarks.pushbookmark(parser.repo, bmark, old, new):
-            # updated locally
-            pass
-        else:
-            print "error %s" % ref
-            continue
-
-        if peer:
-            old = remote_bmarks.get(bmark, '')
+        # update remote bookmarks
+        remote_bmarks = peer.listkeys('bookmarks')
+        for ref, bmark, old, new in p_bmarks:
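+            # when forcing, treat whatever the remote currently has as the
+            # expected old value so the bookmark is overwritten unconditionally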
+            if force_push:
+                old = remote_bmarks.get(bmark, '')
            if not peer.pushkey('bookmarks', bmark, old, new):
                print "error %s" % ref
-                continue
-
- print "ok %s" % ref
+ else:
+ # update local bookmarks
+ for ref, bmark, old, new in p_bmarks:
+ if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
+ print "error %s" % ref
print
    global track_branches, force_push, is_tmp
    global parsed_tags
    global filenodes
-    global fake_bmark
+    global fake_bmark, hg_version
    alias = args[1]
    url = args[2]
    parsed_tags = {}
    filenodes = {}
    fake_bmark = None
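+    # record the Mercurial version as a tuple, e.g. (2, 3); if the version
+    # string cannot be parsed, leave it as None and let check_version()
+    # assume a recent Mercurial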
+    try:
+        hg_version = tuple(int(e) for e in util.version().split('.'))
+    except:
+        hg_version = None
    repo = get_repo(url, alias)
    prefix = 'refs/hg/%s' % alias