'option check-connectivity' \{'true'|'false'\}::
Request the helper to check connectivity of a clone.
+ 'option force' \{'true'|'false'\}::
+ Request the helper to perform a force update. Defaults to
+ 'false'.
+
+'option cloning' \{'true'|'false'\}::
+ Notify the helper this is a clone request (i.e. the current
+ repository is guaranteed empty).
+
+'option update-shallow' \{'true'|'false'\}::
+ Allow the helper to extend .git/shallow if the new refs require it.
+
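The replies to these options follow the usual 'option' convention: 'ok' when the option is accepted, 'unsupported' when it is unknown, or an 'error ...' line for a bad value. The following is a minimal, illustrative sketch (not part of the patch) of how a helper written in the Python 2 style of the in-tree helpers might handle them; the names 'opts' and 'handle_option' are hypothetical.

# Illustrative sketch only; 'opts' and 'handle_option' are hypothetical names.
opts = {'force': False, 'cloning': False, 'update-shallow': False}

def handle_option(name, value):
    # Reply with the standard remote-helper answers: 'ok' when the option
    # is accepted, 'unsupported' when unknown, 'error ...' for a bad value.
    if name not in opts:
        print 'unsupported'
    elif value not in ('true', 'false'):
        print "error '%s' is not a valid value for option '%s'" % (value, name)
    else:
        opts[name] = (value == 'true')
        print 'ok'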
SEE ALSO
--------
linkgit:git-remote[1]
import atexit, shutil, hashlib, urlparse, subprocess
NAME_RE = re.compile('^([^<>]+)')
-AUTHOR_RE = re.compile('^([^<>]+?)? ?<([^<>]*)>$')
-EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\t<>]+)')
+AUTHOR_RE = re.compile('^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
+EMAIL_RE = re.compile(r'([^ \t<>]+@[^ \t<>]+)')
RAW_AUTHOR_RE = re.compile('^(\w+) (.+)? <(.*)> (\d+) ([+-]\d+)')
def die(msg, *args):
else:
m = EMAIL_RE.match(user)
if m:
- name = m.group(1)
- mail = m.group(2)
+ mail = m.group(1)
else:
m = NAME_RE.match(user)
if m:
peer = bzrlib.branch.Branch.open(peers[name],
possible_transports=transports)
try:
- peer.bzrdir.push_branch(branch, revision_id=revid)
+ peer.bzrdir.push_branch(branch, revision_id=revid,
+ overwrite=force)
except bzrlib.errors.DivergedBranches:
print "error %s non-fast forward" % ref
continue
print "*import-marks %s" % path
print "*export-marks %s" % path
+ print "option"
print
+ class InvalidOptionValue(Exception):
+ pass
+
+ def get_bool_option(val):
+ if val == 'true':
+ return True
+ elif val == 'false':
+ return False
+ else:
+ raise InvalidOptionValue()
+
+ def do_option(parser):
+ global force
+ opt, val = parser[1:3]
+ try:
+ if opt == 'force':
+ force = get_bool_option(val)
+ print 'ok'
+ else:
+ print 'unsupported'
+ except InvalidOptionValue:
+ print "error '%s' is not a valid value for option '%s'" % (val, opt)
+
def ref_is_valid(name):
return not True in [c in name for c in '~^: \\']
global is_tmp
global branches, peers
global transports
+ global force
+ marks = None
+ is_tmp = False
+ gitdir = os.environ.get('GIT_DIR', None)
+
+ if len(args) < 3:
+ die('Not enough arguments.')
+
+ if not gitdir:
+ die('GIT_DIR not set')
+
alias = args[1]
url = args[2]
blob_marks = {}
parsed_refs = {}
files_cache = {}
- marks = None
branches = {}
peers = {}
transports = []
+ force = False
if alias[5:] == url:
is_tmp = True
alias = hashlib.sha1(alias).hexdigest()
- else:
- is_tmp = False
prefix = 'refs/bzr/%s' % alias
- gitdir = os.environ['GIT_DIR']
dirname = os.path.join(gitdir, 'bzr', alias)
if not is_tmp:
do_import(parser)
elif parser.check('export'):
do_export(parser)
+ elif parser.check('option'):
+ do_option(parser)
else:
die('unhandled command: %s' % line)
sys.stdout.flush()
test_description='Test remote-bzr'
-. ./test-lib.sh
+test -n "$TEST_DIRECTORY" || TEST_DIRECTORY=${0%/*}/../../t
+. "$TEST_DIRECTORY"/test-lib.sh
if ! test_have_prereq PYTHON
then
test_cmp expected actual
'
+ test_expect_success 'forced pushing' '
+ (
+ cd gitrepo &&
+ echo three-new >content &&
+ git commit -a --amend -m three-new &&
+ git push -f
+ ) &&
+
+ (
+ cd bzrrepo &&
+ # the forced update overwrites the bzr branch but not the bzr
+ # working directory (it tries to merge instead)
+ bzr revert
+ ) &&
+
+ echo three-new >expected &&
+ cat bzrrepo/content >actual &&
+ test_cmp expected actual
+ '
+
test_expect_success 'roundtrip' '
(
cd gitrepo &&
git pull &&
git log --format="%s" -1 origin/master >actual
) &&
- echo three >expected &&
+ echo three-new >expected &&
test_cmp expected actual &&
(cd gitrepo && git push && git pull) &&
git add content &&
git commit -m one &&
git remote add bzr "bzr::../bzrrepo" &&
- git push bzr
+ git push bzr master
) &&
(
test_description='Test remote-hg'
-. ./test-lib.sh
+test -n "$TEST_DIRECTORY" || TEST_DIRECTORY=${0%/*}/../../t
+. "$TEST_DIRECTORY"/test-lib.sh
if ! test_have_prereq PYTHON
then
}
check_push () {
- local expected_ret=$1 ret=0 ref_ret=0 IFS=':'
+ expected_ret=$1 ret=0 ref_ret=0
shift
git push origin "$@" 2>error
ret=$?
cat error
- while read branch kind
+ while IFS=':' read branch kind
do
case "$kind" in
'new')
test $ref_ret -ne 0 && echo "match for '$branch' failed" && break
done
- if test $expected_ret -ne $ret -o $ref_ret -ne 0
+ if test $expected_ret -ne $ret || test $ref_ret -ne 0
then
return 1
fi
>../expected &&
author_test alpha "" "H G Wells <wells@example.com>" &&
- author_test beta "test" "test <unknown>" &&
- author_test beta "test <test@example.com> (comment)" "test <test@example.com>" &&
- author_test gamma "<test@example.com>" "Unknown <test@example.com>" &&
- author_test delta "name<test@example.com>" "name <test@example.com>" &&
- author_test epsilon "name <test@example.com" "name <test@example.com>" &&
- author_test zeta " test " "test <unknown>" &&
- author_test eta "test < test@example.com >" "test <test@example.com>" &&
- author_test theta "test >test@example.com>" "test <test@example.com>" &&
- author_test iota "test < test <at> example <dot> com>" "test <unknown>" &&
- author_test kappa "test@example.com" "Unknown <test@example.com>"
+ author_test beta "beta" "beta <unknown>" &&
+ author_test gamma "gamma <test@example.com> (comment)" "gamma <test@example.com>" &&
+ author_test delta "<delta@example.com>" "Unknown <delta@example.com>" &&
+ author_test epsilon "epsilon<test@example.com>" "epsilon <test@example.com>" &&
+ author_test zeta "zeta <test@example.com" "zeta <test@example.com>" &&
+ author_test eta " eta " "eta <unknown>" &&
+ author_test theta "theta < test@example.com >" "theta <test@example.com>" &&
+ author_test iota "iota >test@example.com>" "iota <test@example.com>" &&
+ author_test kappa "kappa < test <at> example <dot> com>" "kappa <unknown>" &&
+ author_test lambda "lambda@example.com" "Unknown <lambda@example.com>" &&
+ author_test mu "mu.mu@example.com" "Unknown <mu.mu@example.com>"
) &&
git clone "hg::hgrepo" gitrepo &&
check gitrepo HEAD zero
'
+test_expect_success 'moving remote clone' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ mv gitrepo gitrepo2 &&
+ cd gitrepo2 &&
+ git fetch
+ )
+'
+
test_expect_success 'remote update bookmark' '
test_when_finished "rm -rf gitrepo*" &&
# cleanup previous stuff
rm -rf hgrepo
+test_expect_success 'fetch special filenames' '
+ test_when_finished "rm -rf hgrepo gitrepo && LC_ALL=C" &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ echo test >> "æ rø" &&
+ hg add "æ rø" &&
+ echo test >> "ø~?" &&
+ hg add "ø~?" &&
+ hg commit -m add-utf-8 &&
+ echo test >> "æ rø" &&
+ hg commit -m test-utf-8 &&
+ hg rm "ø~?" &&
+ hg mv "æ rø" "ø~?" &&
+ hg commit -m hg-mv-utf-8
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git -c core.quotepath=false ls-files > ../actual
+ ) &&
+ echo "ø~?" > expected &&
+ test_cmp expected actual
+'
+
+test_expect_success 'push special filenames' '
+ test_when_finished "rm -rf hgrepo gitrepo && LC_ALL=C" &&
+
+ mkdir -p tmp && cd tmp &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ echo one >> content &&
+ hg add content &&
+ hg commit -m one
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+
+ echo test >> "æ rø" &&
+ git add "æ rø" &&
+ git commit -m utf-8 &&
+
+ git push
+ ) &&
+
+ (cd hgrepo &&
+ hg update &&
+ hg manifest > ../actual
+ ) &&
+
+ printf "content\næ rø\n" > expected &&
+ test_cmp expected actual
+'
+
setup_big_push () {
(
hg init hgrepo &&
)
'
- test_expect_failure 'remote big push force' '
+ test_expect_success 'remote big push force' '
test_when_finished "rm -rf hgrepo gitrepo*" &&
setup_big_push
check_bookmark hgrepo new_bmark six
'
- test_expect_failure 'remote big push dry-run' '
+ test_expect_success 'remote big push dry-run' '
test_when_finished "rm -rf hgrepo gitrepo*" &&
setup_big_push
data->export = 1;
else if (!strcmp(capname, "check-connectivity"))
data->check_connectivity = 1;
- else if (!data->refspecs && !prefixcmp(capname, "refspec ")) {
+ else if (!data->refspecs && starts_with(capname, "refspec ")) {
ALLOC_GROW(refspecs,
refspec_nr + 1,
refspec_alloc);
data->connect = 1;
} else if (!strcmp(capname, "signed-tags")) {
data->signed_tags = 1;
- } else if (!prefixcmp(capname, "export-marks ")) {
+ } else if (starts_with(capname, "export-marks ")) {
struct strbuf arg = STRBUF_INIT;
strbuf_addstr(&arg, "--export-marks=");
strbuf_addstr(&arg, capname + strlen("export-marks "));
data->export_marks = strbuf_detach(&arg, NULL);
- } else if (!prefixcmp(capname, "import-marks")) {
+ } else if (starts_with(capname, "import-marks")) {
struct strbuf arg = STRBUF_INIT;
strbuf_addstr(&arg, "--import-marks=");
strbuf_addstr(&arg, capname + strlen("import-marks "));
data->import_marks = strbuf_detach(&arg, NULL);
- } else if (!prefixcmp(capname, "no-private-update")) {
+ } else if (starts_with(capname, "no-private-update")) {
data->no_private_update = 1;
} else if (mandatory) {
die("Unknown mandatory capability %s. This remote "
TRANS_OPT_THIN,
TRANS_OPT_KEEP
};
+
static const char *boolean_options[] = {
TRANS_OPT_THIN,
TRANS_OPT_KEEP,
if (!strcmp(buf.buf, "ok"))
ret = 0;
- else if (!prefixcmp(buf.buf, "error")) {
+ else if (starts_with(buf.buf, "error")) {
ret = -1;
} else if (!strcmp(buf.buf, "unsupported"))
ret = 1;
data->transport_options.check_self_contained_and_connected)
set_helper_option(transport, "check-connectivity", "true");
+ if (transport->cloning)
+ set_helper_option(transport, "cloning", "true");
+
+ if (data->transport_options.update_shallow)
+ set_helper_option(transport, "update-shallow", "true");
+
for (i = 0; i < nr_heads; i++) {
const struct ref *posn = to_fetch[i];
if (posn->status & REF_STATUS_UPTODATE)
while (1) {
recvline(data, &buf);
- if (!prefixcmp(buf.buf, "lock ")) {
+ if (starts_with(buf.buf, "lock ")) {
const char *name = buf.buf + 5;
if (transport->pack_lockfile)
warning("%s also locked %s", data->name, name);
struct ref *remote_refs)
{
char *refname, *msg;
- int status;
+ int status, forced = 0;
- if (!prefixcmp(buf->buf, "ok ")) {
+ if (starts_with(buf->buf, "ok ")) {
status = REF_STATUS_OK;
refname = buf->buf + 3;
- } else if (!prefixcmp(buf->buf, "error ")) {
+ } else if (starts_with(buf->buf, "error ")) {
status = REF_STATUS_REMOTE_REJECT;
refname = buf->buf + 6;
} else
free(msg);
msg = NULL;
}
+ else if (!strcmp(msg, "forced update")) {
+ forced = 1;
+ free(msg);
+ msg = NULL;
+ }
}
if (*ref)
}
(*ref)->status = status;
+ (*ref)->forced_update |= forced;
(*ref)->remote_status = msg;
return !(status == REF_STATUS_OK);
}
static void push_update_refs_status(struct helper_data *data,
- struct ref *remote_refs)
+ struct ref *remote_refs,
+ int flags)
{
struct strbuf buf = STRBUF_INIT;
struct ref *ref = remote_refs;
if (push_update_ref_status(&buf, &ref, remote_refs))
continue;
- if (!data->refspecs || data->no_private_update)
+ if (flags & TRANSPORT_PUSH_DRY_RUN || !data->refspecs || data->no_private_update)
continue;
/* propagate back the update to the remote namespace */
sendline(data, &buf);
strbuf_release(&buf);
- push_update_refs_status(data, remote_refs);
+ push_update_refs_status(data, remote_refs, flags);
return 0;
}
die("helper %s does not support dry-run", data->name);
}
+ if (flags & TRANSPORT_PUSH_FORCE) {
+ if (set_helper_option(transport, "force", "true") != 0)
+ warning("helper %s does not support 'force'", data->name);
+ }
+
helper = get_helper(transport);
write_constant(helper->in, "export\n");
}
free(private);
- if (ref->deletion)
- die("remote-helpers do not support ref deletion");
-
if (ref->peer_ref) {
if (strcmp(ref->peer_ref->name, ref->name))
die("remote-helpers do not support old:new syntax");
if (finish_command(&exporter))
die("Error while running fast-export");
- push_update_refs_status(data, remote_refs);
+ push_update_refs_status(data, remote_refs, flags);
return 0;
}
return 0; /* Nothing to write. */
transfer_debug("%s is writable", t->dest_name);
- bytes = write(t->dest, t->buf, t->bufuse);
- if (bytes < 0 && errno != EWOULDBLOCK && errno != EAGAIN &&
- errno != EINTR) {
+ bytes = xwrite(t->dest, t->buf, t->bufuse);
+ if (bytes < 0 && errno != EWOULDBLOCK) {
error("write(%s) failed: %s", t->dest_name, strerror(errno));
return -1;
} else if (bytes > 0) {