import sys
import bzrlib
-bzrlib.initialize()
+if hasattr(bzrlib, "initialize"):
+ bzrlib.initialize()
import bzrlib.plugin
bzrlib.plugin.load_plugins()
import bzrlib.generate_ids
+import bzrlib.transport
import sys
import os
final = []
for path, fid in files.iteritems():
+ kind = tree.kind(fid)
+
h = tree.get_file_sha1(fid)
- mode = '100644'
+ if kind == 'symlink':
+ d = tree.get_symlink_target(fid)
+ mode = '120000'
+ elif kind == 'file':
+
+ if tree.is_executable(fid):
+ mode = '100755'
+ else:
+ mode = '100644'
+
+ # is the blob already exported?
+ if h in filenodes:
+ mark = filenodes[h]
+ final.append((mode, mark, path))
+ continue
- # is the blob already exported?
- if h in filenodes:
- mark = filenodes[h]
- else:
d = tree.get_file_text(fid)
+ elif kind == 'directory':
+ continue
+ else:
+ die("Unhandled kind '%s' for path '%s'" % (kind, path))
- mark = marks.next_mark()
- filenodes[h] = mark
+ mark = marks.next_mark()
+ filenodes[h] = mark
- print "blob"
- print "mark :%u" % mark
- print "data %d" % len(d)
- print d
+ print "blob"
+ print "mark :%u" % mark
+ print "data %d" % len(d)
+ print d
final.append((mode, mark, path))
self.repo = repo
self.revid = revid
self.parents = parents
- self.updates = files
+ self.updates = {}
def copy_tree(revid):
files = files_cache[revid] = {}
self.files = files_cache[revid] = self.base_files.copy()
+ for path, f in files.iteritems():
+ fid = self.files.get(path, None)
+ if not fid:
+ fid = bzrlib.generate_ids.gen_file_id(path)
+ f['path'] = path
+ self.updates[fid] = f
+
def last_revision(self):
return self.base_id
return parent_fid
if basename == '':
return None
- d = add_entry(dirname, 'directory')
- return d[0]
+ fid = bzrlib.generate_ids.gen_file_id(path)
+ d = add_entry(fid, dirname, 'directory')
+ return fid
- def add_entry(path, kind):
+ def add_entry(fid, path, kind, mode = None):
dirname, basename = os.path.split(path)
parent_fid = get_parent(dirname, basename)
- fid = bzrlib.generate_ids.gen_file_id(path)
+
+ executable = False
+ if mode == '100755':
+ executable = True
+ elif mode == '120000':
+ kind = 'symlink'
+
change = (fid,
(None, path),
True,
(None, parent_fid),
(None, basename),
(None, kind),
- (None, False))
+ (None, executable))
self.files[path] = change[0]
changes.append(change)
return change
- def update_entry(path, kind):
+ def update_entry(fid, path, kind, mode = None):
dirname, basename = os.path.split(path)
- fid = self.base_files[path]
parent_fid = get_parent(dirname, basename)
+
+ executable = False
+ if mode == '100755':
+ executable = True
+ elif mode == '120000':
+ kind = 'symlink'
+
change = (fid,
(path, path),
True,
(None, parent_fid),
(None, basename),
(None, kind),
- (None, False))
+ (None, executable))
self.files[path] = change[0]
changes.append(change)
return change
- def remove_entry(path, kind):
+ def remove_entry(fid, path, kind):
dirname, basename = os.path.split(path)
- fid = self.base_files[path]
parent_fid = get_parent(dirname, basename)
change = (fid,
(path, None),
changes.append(change)
return change
- for path, f in self.updates.iteritems():
+ for fid, f in self.updates.iteritems():
+ path = f['path']
+
if 'deleted' in f:
- remove_entry(path, 'file')
- elif path in self.base_files:
- update_entry(path, 'file')
+ remove_entry(fid, path, 'file')
+ continue
+
+ if path in self.base_files:
+ update_entry(fid, path, 'file', f['mode'])
else:
- add_entry(path, 'file')
+ add_entry(fid, path, 'file', f['mode'])
return changes
def get_file_with_stat(self, file_id, path=None):
- return (StringIO.StringIO(self.updates[path]['data']), None)
+ return (StringIO.StringIO(self.updates[file_id]['data']), None)
+
+ def get_symlink_target(self, file_id):
+ return self.updates[file_id]['data']
def parse_commit(parser):
global marks, blob_marks, bmarks, parsed_refs
repo.lock_write()
try:
- builder = repo.get_commit_builder(parents, None, date, tz, committer, props, revid, False)
+ builder = repo.get_commit_builder(parents, None, date, tz, committer, props, revid)
try:
list(builder.record_iter_changes(mtree, mtree.last_revision(), changes))
builder.finish_inventory()
parsed_refs[ref] = mark_to_rev(from_mark)
def do_export(parser):
- global parsed_refs, dirname
+ global parsed_refs, dirname, peer
parser.next()
for ref, revid in parsed_refs.iteritems():
if ref == 'refs/heads/master':
repo.generate_revision_history(revid, marks.get_tip('master'))
+ revno, revid = repo.last_revision_info()
+ if peer:
+ if hasattr(peer, "import_last_revision_info_and_tags"):
+ peer.import_last_revision_info_and_tags(repo, revno, revid)
+ else:
+ peer.import_last_revision_info(repo.repository, revno, revid)
+ wt = peer.bzrdir.open_workingtree()
+ else:
+ wt = repo.bzrdir.open_workingtree()
+ wt.update()
print "ok %s" % ref
print
print
def get_repo(url, alias):
- origin = bzrlib.controldir.ControlDir.open(url)
- return origin.open_branch()
+ global dirname, peer
+
+ origin = bzrlib.bzrdir.BzrDir.open(url)
+ branch = origin.open_branch()
+
+ if not isinstance(origin.transport, bzrlib.transport.local.LocalTransport):
+ clone_path = os.path.join(dirname, 'clone')
+ remote_branch = branch
+ if os.path.exists(clone_path):
+ # pull
+ d = bzrlib.bzrdir.BzrDir.open(clone_path)
+ branch = d.open_branch()
+ result = branch.pull(remote_branch, [], None, False)
+ else:
+ # clone
+ d = origin.sprout(clone_path, None,
+ hardlink=True, create_tree_if_local=False,
+ source_branch=remote_branch)
+ branch = d.open_branch()
+ branch.bind(remote_branch)
+
+ peer = remote_branch
+ else:
+ peer = None
+
+ return branch
def main(args):
global marks, prefix, dirname