# For remote repositories, a local clone is stored in
# "$GIT_DIR/hg/origin/clone/.hg/".
-from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions
+from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util
import re
import sys
def gitref(ref):
return ref.replace(' ', '___')
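+# A version check succeeds when the detected Mercurial version is at least
+# 'check'; if the version could not be parsed, assume it is recent enough.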
+def check_version(*check):
+ if not hg_version:
+ return True
+ return hg_version >= check
+
def get_config(config):
cmd = ['git', 'config', '--get', config]
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
return '%s <%s>' % (name, mail)
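+# Copy the bookmarks advertised by the remote peer into the local clone,
+# using the bookmark store's own write() when available (newer Mercurial)
+# and falling back to bookmarks.write() otherwise.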
+def updatebookmarks(repo, peer):
+ remotemarks = peer.listkeys('bookmarks')
+ localmarks = repo._bookmarks
+
+ if not remotemarks:
+ return
+
+ for k, v in remotemarks.iteritems():
+ localmarks[k] = hgbin(v)
+
+ if hasattr(localmarks, 'write'):
+ localmarks.write()
+ else:
+ bookmarks.write(repo)
+
def get_repo(url, alias):
global dirname, peer
extensions.loadall(myui)
- if hg.islocal(url):
+ if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
repo = hg.repository(myui, url)
if not os.path.exists(dirname):
os.makedirs(dirname)
die('Repository error')
repo.pull(peer, heads=None, force=True)
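+        # pull() does not transfer bookmarks, so sync them explicitly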
+ updatebookmarks(repo, peer)
+
return repo
def rev_to_mark(rev):
print
-def branch_tip(repo, branch):
- # older versions of mercurial don't have this
- if hasattr(repo, 'branchtip'):
- return repo.branchtip(branch)
- else:
- return repo.branchtags()[branch]
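+# The tip of a branch is the last (tip-most) head recorded for it in the
+# global 'branches' map.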
+def branch_tip(branch):
+ return branches[branch][-1]
def get_branch_tip(repo, branch):
global branches
# verify there's only one head
if (len(heads) > 1):
warn("Branch '%s' has more than one head, consider merging" % branch)
- return branch_tip(repo, hgref(branch))
+ return branch_tip(hgref(branch))
return heads[0]
list_head(repo, cur)
if track_branches:
- for branch in repo.branchmap():
- heads = repo.branchheads(branch)
- if len(heads):
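+        # when a remote peer is available, list its branch heads rather than
+        # the local clone's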
+ orig = peer if peer else repo
+
+ for branch, heads in orig.branchmap().iteritems():
+ # only open heads
+ heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
+ if heads:
branches[branch] = heads
for branch in branches:
def write_tag(repo, tag, node, msg, author):
branch = repo[node].branch()
- tip = branch_tip(repo, branch)
+ tip = branch_tip(branch)
tip = repo[tip]
def getfilectx(repo, memctx, f):
encoding.encoding = tmp
- return tagnode
+ return (tagnode, branch)
+
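+# Refuse a bookmark update that does not fast-forward the bookmark's current
+# target, unless the push is forced.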
+def checkheads_bmark(repo, ref, ctx):
+ if force_push:
+ return True
+
+ bmark = ref[len('refs/heads/'):]
+    if bmark not in bmarks:
+ # new bmark
+ return True
+
+ ctx_old = bmarks[bmark]
+ ctx_new = ctx
+ if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
+        print "error %s non-fast-forward" % ref
+ return False
+
+ return True
+
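+# Manual head check: every pushed revision that lands on a branch the remote
+# already has must descend from one of that branch's current heads; bookmark
+# refs are validated separately through checkheads_bmark().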
+def checkheads(repo, remote, p_revs):
+
+ remotemap = remote.branchmap()
+ if not remotemap:
+ # empty repo
+ return True
+
+ new = {}
+ ret = True
+
+ for node, ref in p_revs.iteritems():
+ ctx = repo[node]
+ branch = ctx.branch()
+        if branch not in remotemap:
+ # new branch
+ continue
+ if not ref.startswith('refs/heads/branches'):
+ if ref.startswith('refs/heads/'):
+ if not checkheads_bmark(repo, ref, ctx):
+ ret = False
+
+ # only check branches
+ continue
+ new.setdefault(branch, []).append(ctx.rev())
+
+ for branch, heads in new.iteritems():
+ old = [repo.changelog.rev(x) for x in remotemap[branch]]
+ for rev in heads:
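+            # hg >= 2.3 takes a list of revisions and supports stoprev;
+            # older versions take a plain revision argument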
+ if check_version(2, 3):
+ ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
+ else:
+ ancestors = repo.changelog.ancestors(rev)
+ found = False
+
+ for x in old:
+ if x in ancestors:
+ found = True
+ break
+
+ if found:
+ continue
+
+ node = repo.changelog.node(rev)
+            print "error %s non-fast-forward" % p_revs[node]
+ ret = False
+
+ return ret
+
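+# Low-level push: find the changesets the remote is missing, bundle everything
+# up to the heads being pushed, and transfer the bundle with unbundle() when
+# the remote supports it, falling back to addchangegroup() otherwise.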
+def push_unsafe(repo, remote, parsed_refs, p_revs):
+
+ force = force_push
+
+ fci = discovery.findcommonincoming
+ commoninc = fci(repo, remote, force=force)
+ common, _, remoteheads = commoninc
+
+ if not force and not checkheads(repo, remote, p_revs):
+ return None
+
+ cg = repo.getbundle('push', heads=list(p_revs), common=common)
+
+ unbundle = remote.capable('unbundle')
+ if unbundle:
+ if force:
+ remoteheads = ['force']
+ return remote.unbundle(cg, remoteheads, 'push')
+ else:
+ return remote.addchangegroup(cg, 'push', repo.url())
+
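+# Push wrapper: takes the remote lock when the remote lacks the unbundle
+# capability and delegates the actual transfer to push_unsafe().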
+def push(repo, remote, parsed_refs, p_revs):
+ if hasattr(remote, 'canpush') and not remote.canpush():
+ print "error cannot push"
+
+ if not p_revs:
+ # nothing to push
+ return
+
+ lock = None
+ unbundle = remote.capable('unbundle')
+ if not unbundle:
+ lock = remote.lock()
+ try:
+ ret = push_unsafe(repo, remote, parsed_refs, p_revs)
+ finally:
+ if lock is not None:
+ lock.release()
+
+ return ret
def do_export(parser):
global parsed_refs, bmarks, peer
p_bmarks = []
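+    # hg nodes that need to be pushed, mapped to the git ref they came from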
+ p_revs = {}
parser.next()
if branch in branches and bnode in branches[branch]:
# up to date
continue
+ p_revs[bnode] = ref
print "ok %s" % ref
elif ref.startswith('refs/heads/'):
bmark = ref[len('refs/heads/'):]
- p_bmarks.append((bmark, node))
- continue
+ new = node
+ old = bmarks[bmark].hex() if bmark in bmarks else ''
+
+ if old == new:
+ continue
+
+ print "ok %s" % ref
+ if bmark != fake_bmark and \
+ not (bmark == 'master' and bmark not in parser.repo._bookmarks):
+ p_bmarks.append((ref, bmark, old, new))
+
+ p_revs[bnode] = ref
elif ref.startswith('refs/tags/'):
tag = ref[len('refs/tags/'):]
tag = hgref(tag)
if mode == 'git':
if not msg:
msg = 'Added tag %s for changeset %s' % (tag, node[:12]);
- write_tag(parser.repo, tag, node, msg, author)
+ tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
+ p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
else:
fp = parser.repo.opener('localtags', 'a')
fp.write('%s %s\n' % (node, tag))
fp.close()
+ p_revs[bnode] = ref
print "ok %s" % ref
else:
# transport-helper/fast-export bugs
continue
if peer:
- parser.repo.push(peer, force=force_push, newbranch=True)
- remote_bmarks = peer.listkeys('bookmarks')
-
- # handle bookmarks
- for bmark, node in p_bmarks:
- ref = 'refs/heads/' + bmark
- new = node
-
- if bmark in bmarks:
- old = bmarks[bmark].hex()
- else:
- old = ''
-
- if old == new:
- continue
-
- if bmark == fake_bmark or \
- bmark == 'master' and 'master' not in parser.repo._bookmarks:
- print "ok %s" % ref
- continue
- elif bookmarks.pushbookmark(parser.repo, bmark, old, new):
- # updated locally
- pass
- else:
- print "error %s" % ref
- continue
+ if not push(parser.repo, peer, parsed_refs, p_revs):
+ # do not update bookmarks
+ print
+ return
- if peer:
- old = remote_bmarks.get(bmark, '')
+ # update remote bookmarks
+ remote_bmarks = peer.listkeys('bookmarks')
+ for ref, bmark, old, new in p_bmarks:
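+            # a forced push overwrites whatever the remote bookmark points to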
+ if force_push:
+ old = remote_bmarks.get(bmark, '')
if not peer.pushkey('bookmarks', bmark, old, new):
print "error %s" % ref
- continue
-
- print "ok %s" % ref
+ else:
+ # update local bookmarks
+ for ref, bmark, old, new in p_bmarks:
+ if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
+ print "error %s" % ref
print
global track_branches, force_push, is_tmp
global parsed_tags
global filenodes
- global fake_bmark
+ global fake_bmark, hg_version
alias = args[1]
url = args[2]
parsed_tags = {}
filenodes = {}
fake_bmark = None
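+    # detect the Mercurial version for API checks; leave it unset when the
+    # version string cannot be parsed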
+ try:
+ hg_version = tuple(int(e) for e in util.version().split('.'))
+ except:
+ hg_version = None
repo = get_repo(url, alias)
prefix = 'refs/hg/%s' % alias