# The Linux build installs the defined dependency versions below.
# The OS X build installs the latest available versions. Keep that
# in mind when you encounter a broken OS X build!
- - LINUX_P4_VERSION="16.1"
- - LINUX_GIT_LFS_VERSION="1.2.0"
+ - LINUX_P4_VERSION="16.2"
+ - LINUX_GIT_LFS_VERSION="1.5.2"
- DEFAULT_TEST_TARGET=prove
- GIT_PROVE_OPTS="--timer --jobs 3 --state=failed,slow,save"
- GIT_TEST_OPTS="--verbose-log"
--- /dev/null
+Git v2.10.3 Release Notes
+=========================
+
+Fixes since v2.10.2
+-------------------
+
+ * Extract a small helper out of the function that reads the authors
+ script file "git am" internally uses.
+ This by itself is not useful until a second caller appears in the
+   future for the "rebase -i" helper.
+
+ * The command-line completion script (in contrib/) learned to
+   complete "git cmd ^mas<HT>" (i.e. the negative end of a
+   reference) to "git cmd ^master".
+
+ * "git send-email" attempts to pick up valid e-mails from the
+   trailers, but people in the real world write non-addresses there, like
+ "Cc: Stable <add@re.ss> # 4.8+", which broke the output depending
+ on the availability and vintage of Mail::Address perl module.
+
+ * The code that we have used for the past 10+ years to cycle
+   4-element ring buffers turns out not to be quite portable, at
+   least in theory.
+
+ * "git daemon" used fixed-length buffers to turn URL to the
+ repository the client asked for into the server side directory
+ path, using snprintf() to avoid overflowing these buffers, but
+ allowed possibly truncated paths to the directory. This has been
+ tightened to reject such a request that causes overlong path to be
+ required to serve.
+
+ * A recent update to git-sh-setup (a library of shell functions that
+ are used by our in-tree scripted Porcelain commands) included
+ another shell library git-sh-i18n without specifying where it is,
+   relying on $PATH. This has been fixed to be more explicit by
+   prefixing the output of $(git --exec-path) in front of it.
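+
+   For illustration, the sourcing pattern this boils down to (a
+   sketch, assuming git-sh-i18n is installed in the exec path):
+
+       . "$(git --exec-path)/git-sh-i18n"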
+
+ * Fix for a racy false-positive test failure.
+
+ * Portability update and workaround for builds on recent Mac OS X.
+
+ * An update to the test framework made in the 2.9 timeframe broke
+   running the tests under valgrind; this has been fixed.
+
+ * Improve the rule to convert "unsigned char [20]" into "struct
+   object_id *" in contrib/coccinelle/.
+
+Also contains minor documentation updates and code clean-ups.
--- /dev/null
+Git 2.12 Release Notes
+======================
+
+Backward compatibility notes.
+
+ * Use of an empty string to mean 'everything matches' is still
+   warned about, and Git asks users to use a more explicit '.' for that
+ instead. The hope is that existing users will not mind this
+ change, and eventually the warning can be turned into a hard error,
+ upgrading the deprecation into removal of this (mis)feature. That
+ is not scheduled to happen in the upcoming release (yet).
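+
+   For example, a command such as "git add ''" relies on this
+   (mis)feature; the more explicit spelling is "git add .".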
+
+ * The historical argument order "git merge <msg> HEAD <commit>..."
+ has been deprecated for quite some time, and will be removed in the
+ upcoming release.
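+
+   For example, instead of the historical "git merge <msg> HEAD
+   <commit>...", the message can be given explicitly (a sketch of the
+   supported form):
+
+       git merge -m "<msg>" <commit>...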
+
+
+Updates since v2.11
+-------------------
+
+UI, Workflows & Features
+
+ * Various updates to "git p4".
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ *
+
+
+Also contains various documentation updates and code clean-ups.
+
+ *
+
+
+Fixes since v2.10
+-----------------
+
+Unless otherwise noted, all the fixes since v2.9 in the maintenance
+track are contained in this release (see the maintenance releases'
+notes for details).
+
+ * We often decide if a session is interactive by checking if the
+ standard I/O streams are connected to a TTY, but isatty() that
+ comes with Windows incorrectly returned true if it is used on NUL
+ (i.e. an equivalent to /dev/null). This has been fixed.
+ (merge cbb3f3c9b1 js/mingw-isatty later to maint).
+
+ * "git svn" did not work well with path components that are "0", and
+   some configuration variables it uses were not documented.
+ (merge ea9a93dcc2 ew/svn-fixes later to maint).
+
+ * "git rev-parse --symbolic" failed with a more recent notation like
+ "HEAD^-1" and "HEAD^!".
+ (merge a2e7b04c44 jk/rev-parse-symbolic-parents-fix later to maint).
+
+ * An empty directory in a working tree that can simply be nuked used
+ to interfere while merging or cherry-picking a change to create a
+   submodule directory there, which has been fixed.
+ (merge 5423d2e700 dt/empty-submodule-in-merge later to maint).
+
+ * The code in "git push" to compute if any commit being pushed in the
+ superproject binds a commit in a submodule that hasn't been pushed
+ out was overly inefficient, making it unusable even for a small
+   project that does not have any submodules but has a reasonable
+ number of refs.
+ (merge 250ab24ab3 hv/submodule-not-yet-pushed-fix later to maint).
+
+ * "git push --dry-run --recurse-submodule=on-demand" wasn't
+ "--dry-run" in the submodules.
+ (merge 0301c821c5 bw/push-dry-run later to maint).
+
+ * The output from "git worktree list" was made in readdir() order,
+ and was unstable.
+ (merge 4df1d4d466 nd/worktree-list-fixup later to maint).
+
+ * The mergetool.<tool>.trustExitCode configuration variable did not apply
+ to built-in tools, but now it does.
+ (merge 2967284456 da/mergetool-trust-exit-code later to maint).
+
+ * "git p4" LFS support was broken when LFS stores an empty blob.
+ (merge d5eb3cf5e7 ls/p4-empty-file-on-lfs later to maint).
+
+ * Other minor doc, test and build updates and code cleanups.
+ (merge fa6ca11105 nd/qsort-in-merge-recursive later to maint).
+ (merge fa3142c919 ak/lazy-prereq-mktemp later to maint).
submit manually or revert. This option always stops after the
first (oldest) commit. Git tags are not exported to p4.
+--shelve::
+	Instead of submitting, create a series of shelved changelists.
+	After creating each shelve, the relevant files are reverted/deleted.
+	If you have multiple commits pending, multiple shelves will be created.
+
+--update-shelve CHANGELIST::
+ Update an existing shelved changelist with this commit. Implies
+ --shelve.
+
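+	For example, to shelve pending commits instead of submitting
+	them, and then update the resulting shelved changelist (the
+	changelist number below is only illustrative):
+
+		$ git p4 submit --shelve
+		$ git p4 submit --update-shelve 12345
+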
--conflict=(ask|skip|quit)::
Conflicts can occur when applying a commit to p4. When this
happens, the default behavior ("ask") is to prompt whether to
Client specified as an option to all p4 commands, with
'-c <client>', including the client spec.
+git-p4.retries::
+ Specifies the number of times to retry a p4 command (notably,
+ 'p4 sync') if the network times out. The default value is 3.
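+
+	For example, to allow up to five retries (an illustrative value):
+
+		git config git-p4.retries 5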
+
Clone and sync variables
~~~~~~~~~~~~~~~~~~~~~~~~
git-p4.syncFromOrigin::
When retrieving svn commits into Git (as part of 'fetch', 'rebase', or
'dcommit' operations), look for the first `From:` or `Signed-off-by:` line
in the log message and use that as the author string.
++
+[verse]
+config key: svn.useLogAuthor
+
--add-author-from::
When committing to svn from Git (as part of 'commit-diff', 'set-tree' or 'dcommit'
operations), if the existing log message doesn't already have a
`From:` or `Signed-off-by:` line, append a `From:` line based on the
Git commit's author string. If you use this, then `--use-log-author`
will retrieve a valid author string for all commits.
-
++
+[verse]
+config key: svn.addAuthorFrom
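+
+For example, both behaviors can be enabled permanently via their
+configuration keys (a sketch):
+
+	git config svn.useLogAuthor true
+	git config svn.addAuthorFrom true
+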
ADVANCED OPTIONS
----------------
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v2.11.0
+DEF_VER=v2.11.GIT
LF='
'
-Documentation/RelNotes/2.11.0.txt
\ No newline at end of file
+Documentation/RelNotes/2.12.0.txt
\ No newline at end of file
exit(3);
}
- fprintf(stderr, _("Some %s revs are not ancestor of the %s rev.\n"
+ fprintf(stderr, _("Some %s revs are not ancestors of the %s rev.\n"
"git bisect cannot work properly in this case.\n"
"Maybe you mistook %s and %s revs?\n"),
term_good, term_bad, term_good, term_bad);
int replace_each_worktree_head_symref(const char *oldref, const char *newref)
{
int ret = 0;
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(0);
int i;
for (i = 0; worktrees[i]; i++) {
static void reject_rebase_or_bisect_branch(const char *target)
{
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(0);
int i;
for (i = 0; worktrees[i]; i++) {
OPT_STRING(0, "shallow-since", &option_since, N_("time"),
N_("create a shallow clone since a specific time")),
OPT_STRING_LIST(0, "shallow-exclude", &option_not, N_("revision"),
- N_("deepen history of shallow clone by excluding rev")),
+ N_("deepen history of shallow clone, excluding rev")),
OPT_BOOL(0, "single-branch", &option_single_branch,
N_("clone only one branch, HEAD or --branch")),
OPT_BOOL(0, "shallow-submodules", &option_shallow_submodules,
strbuf_stripspace(&sb, 0);
if (signoff)
- append_signoff(&sb, ignore_non_trailer(&sb), 0);
+ append_signoff(&sb, ignore_non_trailer(sb.buf, sb.len), 0);
if (fwrite(sb.buf, 1, sb.len, s->fp) < sb.len)
die_errno(_("could not write commit template"));
OPT_STRING(0, "shallow-since", &deepen_since, N_("time"),
N_("deepen history of shallow repository based on time")),
OPT_STRING_LIST(0, "shallow-exclude", &deepen_not, N_("revision"),
- N_("deepen history of shallow clone by excluding rev")),
+ N_("deepen history of shallow clone, excluding rev")),
OPT_INTEGER(0, "deepen", &deepen_relative,
N_("deepen history of shallow clone")),
{ OPTION_SET_INT, 0, "unshallow", &unshallow, NULL,
"with what you pushed, and will require 'git reset --hard' to match\n"
"the work tree to HEAD.\n"
"\n"
- "You can set 'receive.denyCurrentBranch' configuration variable to\n"
- "'ignore' or 'warn' in the remote repository to allow pushing into\n"
+ "You can set the 'receive.denyCurrentBranch' configuration variable\n"
+ "to 'ignore' or 'warn' in the remote repository to allow pushing into\n"
"its current branch; however, this is not recommended unless you\n"
"arranged to update its work tree to match what you pushed in some\n"
"other way.\n"
for (parents = commit->parents, parent_number = 1;
parents;
parents = parents->next, parent_number++) {
+ char *name = NULL;
+
if (exclude_parent && parent_number != exclude_parent)
continue;
+ if (symbolic)
+ name = xstrfmt("%s^%d", arg, parent_number);
show_rev(include_parents ? NORMAL : REVERSED,
- parents->item->object.oid.hash, arg);
+ parents->item->object.oid.hash, name);
+ free(name);
}
*dotdot = '^';
printf("HEAD %s\n", sha1_to_hex(wt->head_sha1));
if (wt->is_detached)
printf("detached\n");
- else
+ else if (wt->head_ref)
printf("branch %s\n", wt->head_ref);
}
printf("\n");
else {
strbuf_addf(&sb, "%-*s ", abbrev_len,
find_unique_abbrev(wt->head_sha1, DEFAULT_ABBREV));
- if (!wt->is_detached)
+ if (wt->is_detached)
+ strbuf_addstr(&sb, "(detached HEAD)");
+ else if (wt->head_ref)
strbuf_addf(&sb, "[%s]", shorten_unambiguous_ref(wt->head_ref, 0));
else
- strbuf_addstr(&sb, "(detached HEAD)");
+ strbuf_addstr(&sb, "(error)");
}
printf("%s\n", sb.buf);
if (ac)
usage_with_options(worktree_usage, options);
else {
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(GWT_SORT_LINKED);
int path_maxlen = 0, abbrev = DEFAULT_ABBREV, i;
if (!porcelain)
if (ac != 1)
usage_with_options(worktree_usage, options);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
wt = find_worktree(worktrees, prefix, av[0]);
if (!wt)
die(_("'%s' is not a working tree"), av[0]);
if (ac != 1)
usage_with_options(worktree_usage, options);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
wt = find_worktree(worktrees, prefix, av[0]);
if (!wt)
die(_("'%s' is not a working tree"), av[0]);
}
/*
- * Inspect sb and determine the true "end" of the log message, in
+ * Inspect the given string and determine the true "end" of the log message, in
* order to find where to put a new Signed-off-by: line. Ignored are
* trailing comment lines and blank lines, and also the traditional
* "Conflicts:" block that is not commented out, so that we can use
* Returns the number of bytes from the tail to ignore, to be fed as
* the second parameter to append_signoff().
*/
-int ignore_non_trailer(struct strbuf *sb)
+int ignore_non_trailer(const char *buf, size_t len)
{
int boc = 0;
int bol = 0;
int in_old_conflicts_block = 0;
- while (bol < sb->len) {
- char *next_line;
+ while (bol < len) {
+ const char *next_line = memchr(buf + bol, '\n', len - bol);
- if (!(next_line = memchr(sb->buf + bol, '\n', sb->len - bol)))
- next_line = sb->buf + sb->len;
+ if (!next_line)
+ next_line = buf + len;
else
next_line++;
- if (sb->buf[bol] == comment_line_char || sb->buf[bol] == '\n') {
+ if (buf[bol] == comment_line_char || buf[bol] == '\n') {
/* is this the first of the run of comments? */
if (!boc)
boc = bol;
/* otherwise, it is just continuing */
- } else if (starts_with(sb->buf + bol, "Conflicts:\n")) {
+ } else if (starts_with(buf + bol, "Conflicts:\n")) {
in_old_conflicts_block = 1;
if (!boc)
boc = bol;
- } else if (in_old_conflicts_block && sb->buf[bol] == '\t') {
+ } else if (in_old_conflicts_block && buf[bol] == '\t') {
; /* a pathname in the conflicts block */
} else if (boc) {
/* the previous was not trailing comment */
boc = 0;
in_old_conflicts_block = 0;
}
- bol = next_line - sb->buf;
+ bol = next_line - buf;
}
- return boc ? sb->len - boc : 0;
+ return boc ? len - boc : 0;
}
size_t *out_len);
/* Find the end of the log message, the right place for a new trailer. */
-extern int ignore_non_trailer(struct strbuf *sb);
+extern int ignore_non_trailer(const char *buf, size_t len);
typedef void (*each_mergetag_fn)(struct commit *commit, struct commit_extra_header *extra,
void *cb_data);
* ANSI emulation wrappers
*/
+int winansi_isatty(int fd);
+#define isatty winansi_isatty
+
void winansi_init(void);
HANDLE winansi_get_osfhandle(int fd);
#include <wingdi.h>
#include <winreg.h>
+/* In this file, we actually want to use Windows' own isatty(). */
+#undef isatty
+
/*
ANSI codes used by git: m, K
#endif
+int winansi_isatty(int fd)
+{
+ int res = isatty(fd);
+
+ if (res) {
+ /*
+ * Make sure that /dev/null is not fooling Git into believing
+ * that we are connected to a terminal, as "_isatty() returns a
+ * nonzero value if the descriptor is associated with a
+ * character device."; for more information, see
+ *
+ * https://msdn.microsoft.com/en-us/library/f4s0ddew.aspx
+ */
+ HANDLE handle = (HANDLE)_get_osfhandle(fd);
+ if (fd == STDIN_FILENO) {
+ DWORD dummy;
+
+ if (!GetConsoleMode(handle, &dummy))
+ res = 0;
+ } else if (fd == STDOUT_FILENO || fd == STDERR_FILENO) {
+ CONSOLE_SCREEN_BUFFER_INFO dummy;
+
+ if (!GetConsoleScreenBufferInfo(handle, &dummy))
+ res = 0;
+ }
+ }
+
+ return res;
+}
+
void winansi_init(void)
{
int con1, con2;
if (convert_is_binary(len, &stats))
return 0;
/*
- * If the file in the index has any CR in it, do not convert.
- * This is the new safer autocrlf handling.
+ * If the file in the index has any CR in it, do not
+ * convert. This is the new safer autocrlf handling,
+ * unless we want to renormalize in a merge or
+ * cherry-pick.
*/
- if (checksafe == SAFE_CRLF_RENORMALIZE)
- checksafe = SAFE_CRLF_FALSE;
- else if (has_cr_in_index(path))
+ if ((checksafe != SAFE_CRLF_RENORMALIZE) && has_cr_in_index(path))
convert_crlf_into_lf = 0;
}
- if (checksafe && len) {
+ if ((checksafe == SAFE_CRLF_WARN ||
+ (checksafe == SAFE_CRLF_FAIL)) && len) {
struct text_stat new_stats;
memcpy(&new_stats, &stats, sizeof(new_stats));
/* simulate "git add" */
}
merge_cmd () {
- trust_exit_code=$(git config --bool \
- "mergetool.$1.trustExitCode" || echo false)
- if test "$trust_exit_code" = "false"
- then
- touch "$BACKUP"
- ( eval $merge_tool_cmd )
- check_unchanged
- else
- ( eval $merge_tool_cmd )
- fi
+ ( eval $merge_tool_cmd )
}
}
echo "$1"
}
+	# Most tools' exit codes cannot be trusted, so by default we ignore
+ # their exit code and check the merged file's modification time in
+ # check_unchanged() to determine whether or not the merge was
+ # successful. The return value from run_merge_cmd, by default, is
+ # determined by check_unchanged().
+ #
+ # When a tool's exit code can be trusted then the return value from
+ # run_merge_cmd is simply the tool's exit code, and check_unchanged()
+ # is not called.
+ #
+ # The return value of exit_code_trustable() tells us whether or not we
+ # can trust the tool's exit code.
+ #
+ # User-defined and built-in tools default to false.
+ # Built-in tools advertise that their exit code is trustable by
+ # redefining exit_code_trustable() to true.
+
+ exit_code_trustable () {
+ false
+ }
+
+
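+	# For illustration only: a built-in tool whose exit code is
+	# meaningful opts in by redefining exit_code_trustable in its
+	# mergetools/ scriptlet, for example:
+	#
+	#	exit_code_trustable () {
+	#		true
+	#	}
+	#
+	# and a user can still override the decision for any tool with
+	# "git config mergetool.<tool>.trustExitCode <true|false>".
+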
if ! test -f "$MERGE_TOOLS_DIR/$tool"
then
setup_user_tool
fi
}
+trust_exit_code () {
+ if git config --bool "mergetool.$1.trustExitCode"
+ then
+ :; # OK
+ elif exit_code_trustable
+ then
+ echo true
+ else
+ echo false
+ fi
+}
+
+
# Entry point for running tools
run_merge_tool () {
# If GIT_PREFIX is empty then we cannot use it in tools
# Run a either a configured or built-in merge tool
run_merge_cmd () {
- merge_cmd "$1"
+ mergetool_trust_exit_code=$(trust_exit_code "$1")
+ if test "$mergetool_trust_exit_code" = "true"
+ then
+ merge_cmd "$1"
+ else
+ touch "$BACKUP"
+ merge_cmd "$1"
+ check_unchanged
+ fi
}
list_merge_tool_candidates () {
if len(client) > 0:
real_cmd += ["-c", client]
+ retries = gitConfigInt("git-p4.retries")
+ if retries is None:
+ # Perform 3 retries by default
+ retries = 3
+ real_cmd += ["-r", str(retries)]
if isinstance(cmd,basestring):
real_cmd = ' '.join(real_cmd) + ' ' + cmd
def p4_reopen(type, f):
p4_system(["reopen", "-t", type, wildcard_encode(f)])
+def p4_reopen_in_change(changelist, files):
+ cmd = ["reopen", "-c", str(changelist)] + files
+ p4_system(cmd)
+
def p4_move(src, dest):
p4_system(["move", "-k", wildcard_encode(src), wildcard_encode(dest)])
steps."""
if self.exceedsLargeFileThreshold(relPath, contents) or self.hasLargeFileExtension(relPath):
contentTempFile = self.generateTempFile(contents)
- (git_mode, contents, localLargeFile) = self.generatePointer(contentTempFile)
-
- # Move temp file to final location in large file system
- largeFileDir = os.path.dirname(localLargeFile)
- if not os.path.isdir(largeFileDir):
- os.makedirs(largeFileDir)
- shutil.move(contentTempFile, localLargeFile)
- self.addLargeFile(relPath)
- if gitConfigBool('git-p4.largeFilePush'):
- self.pushFile(localLargeFile)
- if verbose:
- sys.stderr.write("%s moved to large file system (%s)\n" % (relPath, localLargeFile))
+ (pointer_git_mode, contents, localLargeFile) = self.generatePointer(contentTempFile)
+ if pointer_git_mode:
+ git_mode = pointer_git_mode
+ if localLargeFile:
+ # Move temp file to final location in large file system
+ largeFileDir = os.path.dirname(localLargeFile)
+ if not os.path.isdir(largeFileDir):
+ os.makedirs(largeFileDir)
+ shutil.move(contentTempFile, localLargeFile)
+ self.addLargeFile(relPath)
+ if gitConfigBool('git-p4.largeFilePush'):
+ self.pushFile(localLargeFile)
+ if verbose:
+ sys.stderr.write("%s moved to large file system (%s)\n" % (relPath, localLargeFile))
return (git_mode, contents)
class MockLFS(LargeFileSystem):
the actual content. Return also the new location of the actual
content.
"""
+ if os.path.getsize(contentFile) == 0:
+ return (None, '', None)
+
pointerProcess = subprocess.Popen(
['git', 'lfs', 'pointer', '--file=' + contentFile],
stdout=subprocess.PIPE
optparse.make_option("--conflict", dest="conflict_behavior",
choices=self.conflict_behavior_choices),
optparse.make_option("--branch", dest="branch"),
+ optparse.make_option("--shelve", dest="shelve", action="store_true",
+ help="Shelve instead of submit. Shelved files are reverted, "
+ "restoring the workspace to the state before the shelve"),
+ optparse.make_option("--update-shelve", dest="update_shelve", action="store", type="int",
+ metavar="CHANGELIST",
+ help="update an existing shelved changelist, implies --shelve")
]
self.description = "Submit changes from git to the perforce depot."
self.usage += " [name of git branch to submit into perforce depot]"
self.detectRenames = False
self.preserveUser = gitConfigBool("git-p4.preserveUser")
self.dry_run = False
+ self.shelve = False
+ self.update_shelve = None
self.prepare_p4_only = False
self.conflict_behavior = None
self.isWindows = (platform.system() == "Windows")
return 1
return 0
- def prepareSubmitTemplate(self):
+ def prepareSubmitTemplate(self, changelist=None):
"""Run "p4 change -o" to grab a change specification template.
This does not use "p4 -G", as it is nice to keep the submission
template in original order, since a human might edit it.
template = ""
inFilesSection = False
- for line in p4_read_pipe_lines(['change', '-o']):
+ args = ['change', '-o']
+ if changelist:
+ args.append(str(changelist))
+
+ for line in p4_read_pipe_lines(args):
if line.endswith("\r\n"):
line = line[:-2] + "\n"
if inFilesSection:
editedFiles = set()
pureRenameCopy = set()
filesToChangeExecBit = {}
+ all_files = list()
for line in diff:
diff = parseDiffTreeEntry(line)
modifier = diff['status']
path = diff['src']
+ all_files.append(path)
+
if modifier == "M":
p4_edit(path)
if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
mode = filesToChangeExecBit[f]
setP4ExecBit(f, mode)
+ if self.update_shelve:
+ print("all_files = %s" % str(all_files))
+ p4_reopen_in_change(self.update_shelve, all_files)
+
#
# Build p4 change description, starting with the contents
# of the git commit message.
logMessage = logMessage.strip()
(logMessage, jobs) = self.separate_jobs_from_description(logMessage)
- template = self.prepareSubmitTemplate()
+ template = self.prepareSubmitTemplate(self.update_shelve)
submitTemplate = self.prepareLogMessage(template, logMessage, jobs)
if self.preserveUser:
if self.isWindows:
message = message.replace("\r\n", "\n")
submitTemplate = message[:message.index(separatorLine)]
- p4_write_pipe(['submit', '-i'], submitTemplate)
+
+ if self.update_shelve:
+ p4_write_pipe(['shelve', '-r', '-i'], submitTemplate)
+ elif self.shelve:
+ p4_write_pipe(['shelve', '-i'], submitTemplate)
+ else:
+ p4_write_pipe(['submit', '-i'], submitTemplate)
+ # The rename/copy happened by applying a patch that created a
+ # new file. This leaves it writable, which confuses p4.
+ for f in pureRenameCopy:
+ p4_sync(f, "-f")
if self.preserveUser:
if p4User:
changelist = self.lastP4Changelist()
self.modifyChangelistUser(changelist, p4User)
- # The rename/copy happened by applying a patch that created a
- # new file. This leaves it writable, which confuses p4.
- for f in pureRenameCopy:
- p4_sync(f, "-f")
submitted = True
finally:
# skip this patch
- if not submitted:
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
+ if not submitted or self.shelve:
+ if self.shelve:
+ print ("Reverting shelved files.")
+ else:
+ print ("Submission cancelled, undoing p4 changes.")
+ for f in editedFiles | filesToDelete:
p4_revert(f)
for f in filesToAdd:
p4_revert(f)
os.remove(f)
- for f in filesToDelete:
- p4_revert(f)
os.remove(fileName)
return submitted
if len(self.origin) == 0:
self.origin = upstream
+ if self.update_shelve:
+ self.shelve = True
+
if self.preserveUser:
if not self.canChangeChangelists():
die("Cannot preserve user names without p4 super-user or admin permissions")
break
chdir(self.oldWorkingDirectory)
-
+ shelved_applied = "shelved" if self.shelve else "applied"
if self.dry_run:
pass
elif self.prepare_p4_only:
pass
elif len(commits) == len(applied):
- print "All commits applied!"
+ print ("All commits {0}!".format(shelved_applied))
sync = P4Sync()
if self.branch:
else:
if len(applied) == 0:
- print "No commits applied."
+ print ("No commits {0}.".format(shelved_applied))
else:
- print "Applied only the commits marked with '*':"
+ print ("{0} only the commits marked with '*':".format(shelved_applied.capitalize()))
for c in commits:
if c in applied:
star = "*"
struct cache_entry *nce;
nce = refresh_cache_entry(ce, CE_MATCH_REFRESH | CE_MATCH_IGNORE_MISSING);
+ if (!nce)
+ return err(o, _("addinfo_cache failed for path '%s'"), path);
if (nce != ce)
ret = add_cache_entry(nce, options);
}
return unmerged;
}
-static int string_list_df_name_compare(const void *a, const void *b)
+static int string_list_df_name_compare(const char *one, const char *two)
{
- const struct string_list_item *one = a;
- const struct string_list_item *two = b;
- int onelen = strlen(one->string);
- int twolen = strlen(two->string);
+ int onelen = strlen(one);
+ int twolen = strlen(two);
/*
* Here we only care that entries for D/F conflicts are
* adjacent, in particular with the file of the D/F conflict
* since in other cases any changes in their order due to
* sorting cause no problems for us.
*/
- int cmp = df_name_compare(one->string, onelen, S_IFDIR,
- two->string, twolen, S_IFDIR);
+ int cmp = df_name_compare(one, onelen, S_IFDIR,
+ two, twolen, S_IFDIR);
/*
* Now that 'foo' and 'foo/bar' compare equal, we have to make sure
* that 'foo' comes before 'foo/bar'.
string_list_append(&df_sorted_entries, next->string)->util =
next->util;
}
- qsort(df_sorted_entries.items, entries->nr, sizeof(*entries->items),
- string_list_df_name_compare);
+ df_sorted_entries.cmp = string_list_df_name_compare;
+ string_list_sort(&df_sorted_entries);
string_list_clear(&o->df_conflict_file_set, 1);
for (i = 0; i < df_sorted_entries.nr; i++) {
return strbuf_detach(&newpath, NULL);
}
-static int dir_in_way(const char *path, int check_working_copy)
+/**
+ * Check whether a directory in the index is in the way of an incoming
+ * file. Return 1 if so. If check_working_copy is non-zero, also
+ * check the working directory. If empty_ok is non-zero, also return
+ * 0 in the case where the working-tree dir exists but is empty.
+ */
+static int dir_in_way(const char *path, int check_working_copy, int empty_ok)
{
int pos;
struct strbuf dirpath = STRBUF_INIT;
}
strbuf_release(&dirpath);
- return check_working_copy && !lstat(path, &st) && S_ISDIR(st.st_mode);
+ return check_working_copy && !lstat(path, &st) && S_ISDIR(st.st_mode) &&
+ !(empty_ok && is_empty_dir(path));
}
static int was_tracked(const char *path)
{
char *renamed = NULL;
int ret = 0;
- if (dir_in_way(path, !o->call_depth)) {
+ if (dir_in_way(path, !o->call_depth, 0)) {
renamed = unique_path(o, path, a_oid ? o->branch1 : o->branch2);
}
remove_file(o, 0, rename->path, 0);
dst_name = unique_path(o, rename->path, cur_branch);
} else {
- if (dir_in_way(rename->path, !o->call_depth)) {
+ if (dir_in_way(rename->path, !o->call_depth, 0)) {
dst_name = unique_path(o, rename->path, cur_branch);
output(o, 1, _("%s is a directory in %s adding as %s instead"),
rename->path, other_branch, dst_name);
o->branch2 == rename_conflict_info->branch1) ?
pair1->two->path : pair1->one->path;
- if (dir_in_way(path, !o->call_depth))
+ if (dir_in_way(path, !o->call_depth,
+ S_ISGITLINK(pair1->two->mode)))
df_conflict_remains = 1;
}
if (merge_file_special_markers(o, &one, &a, &b,
oid = b_oid;
conf = _("directory/file");
}
- if (dir_in_way(path, !o->call_depth)) {
+ if (dir_in_way(path, !o->call_depth,
+ S_ISGITLINK(a_mode))) {
char *new_path = unique_path(o, path, add_branch);
clean_merge = 0;
output(o, 1, _("CONFLICT (%s): There is a directory with name %s in %s. "
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -wait -merge -3 -a1 \
"$merge_tool_path" -wait -2 \
"$LOCAL" "$REMOTE" "$MERGED" >/dev/null 2>&1
fi
- check_unchanged
}
translate_merge_tool_path() {
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$LOCAL" "$REMOTE" "$BASE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
-mergeoutput="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -MF="$LOCAL" -TF="$REMOTE" -BF="$BASE" \
"$merge_tool_path" -MF="$LOCAL" -TF="$REMOTE" \
-RF="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
fi >/dev/null 2>&1
}
-translate_merge_tool_path() {
+translate_merge_tool_path () {
echo DeltaWalker
}
+
+exit_code_trustable () {
+ true
+}
--result="$MERGED" "$LOCAL" "$REMOTE"
fi
}
+
+exit_code_trustable () {
+ true
+}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" \
"$merge_tool_path" \
"$LOCAL" "$MERGED" "$REMOTE" | cat
fi
- check_unchanged
}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$BASE" "$LOCAL" "$REMOTE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
--default --mode=merge2 --to="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
echo emacs
}
+
+exit_code_trustable () {
+ true
+}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -merge "$LOCAL" "$BASE" "$REMOTE" -o:"$MERGED" -nh
else
"$merge_tool_path" -merge "$LOCAL" "$REMOTE" -o:"$MERGED" -nh
fi
- check_unchanged
}
translate_merge_tool_path() {
>/dev/null 2>&1
fi
}
+
+exit_code_trustable () {
+ true
+}
diff_cmd () {
"$merge_tool_path" "$LOCAL" "$REMOTE"
}
+
+exit_code_trustable () {
+ true
+}
then
check_meld_for_output_version
fi
- touch "$BACKUP"
+
if test "$meld_has_output_option" = true
then
"$merge_tool_path" --output "$MERGED" \
else
"$merge_tool_path" "$LOCAL" "$MERGED" "$REMOTE"
fi
- check_unchanged
}
# Check whether we should use 'meld --output <file>'
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$LOCAL" "$REMOTE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
-merge "$MERGED" | cat
fi
- check_unchanged
}
}
merge_cmd () {
- touch "$BACKUP"
if ! $base_present
then
cp -- "$LOCAL" "$BASE"
create_virtual_base "$BASE" "$REMOTE"
fi
"$merge_tool_path" "$BASE" "$REMOTE" "$LOCAL" "$MERGED"
- check_unchanged
}
create_empty_file () {
"$merge_tool_path" -o "$MERGED" "$LOCAL" "$REMOTE"
fi
}
+
+exit_code_trustable () {
+ true
+}
merge_cmd () {
if $base_present
then
- touch "$BACKUP"
basename="$(basename "$merge_tool_path" .exe)"
if test "$basename" = "tortoisegitmerge"
then
-base:"$BASE" -mine:"$LOCAL" \
-theirs:"$REMOTE" -merged:"$MERGED"
fi
- check_unchanged
else
echo "$merge_tool_path cannot be used without a base" 1>&2
return 1
}
merge_cmd () {
- touch "$BACKUP"
case "$1" in
gvimdiff|vimdiff)
if $base_present
fi
;;
esac
- check_unchanged
}
translate_merge_tool_path() {
;;
esac
}
+
+exit_code_trustable () {
+ true
+}
merge_cmd () {
-	# mergetool.winmerge.trustExitCode is implicitly false.
-	# touch $BACKUP so that we can check_unchanged.
- touch "$BACKUP"
"$merge_tool_path" -u -e -dl Local -dr Remote \
"$LOCAL" "$REMOTE" "$MERGED"
- check_unchanged
}
translate_merge_tool_path() {
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -X --show-merged-pane \
-R 'Accel.SearchForward: "Ctrl-G"' \
--merged-file "$MERGED" "$LOCAL" "$REMOTE"
fi
- check_unchanged
}
my $latest = $ra->get_latest_revnum;
$ra->get_log("", $latest, 0, 1, 0, 1, sub {});
};
- } while ($@ && ($c = shift @components));
+ } while ($@ && defined($c = shift @components));
return canonicalize_url($url);
}
#include "refs.h"
#include "argv-array.h"
#include "quote.h"
+#include "trailer.h"
#define GIT_REFLOG_ACTION "GIT_REFLOG_ACTION"
return git_path_todo_file();
}
-static int is_rfc2822_line(const char *buf, int len)
-{
- int i;
-
- for (i = 0; i < len; i++) {
- int ch = buf[i];
- if (ch == ':')
- return 1;
- if (!isalnum(ch) && ch != '-')
- break;
- }
-
- return 0;
-}
-
-static int is_cherry_picked_from_line(const char *buf, int len)
-{
- /*
- * We only care that it looks roughly like (cherry picked from ...)
- */
- return len > strlen(cherry_picked_prefix) + 1 &&
- starts_with(buf, cherry_picked_prefix) && buf[len - 1] == ')';
-}
-
/*
* Returns 0 for non-conforming footer
* Returns 1 for conforming footer
static int has_conforming_footer(struct strbuf *sb, struct strbuf *sob,
int ignore_footer)
{
- char prev;
- int i, k;
- int len = sb->len - ignore_footer;
- const char *buf = sb->buf;
- int found_sob = 0;
-
- /* footer must end with newline */
- if (!len || buf[len - 1] != '\n')
- return 0;
+ struct trailer_info info;
+ int i;
+ int found_sob = 0, found_sob_last = 0;
- prev = '\0';
- for (i = len - 1; i > 0; i--) {
- char ch = buf[i];
- if (prev == '\n' && ch == '\n') /* paragraph break */
- break;
- prev = ch;
- }
+ trailer_info_get(&info, sb->buf);
- /* require at least one blank line */
- if (prev != '\n' || buf[i] != '\n')
+ if (info.trailer_start == info.trailer_end)
return 0;
- /* advance to start of last paragraph */
- while (i < len - 1 && buf[i] == '\n')
- i++;
-
- for (; i < len; i = k) {
- int found_rfc2822;
-
- for (k = i; k < len && buf[k] != '\n'; k++)
- ; /* do nothing */
- k++;
+ for (i = 0; i < info.trailer_nr; i++)
+ if (sob && !strncmp(info.trailers[i], sob->buf, sob->len)) {
+ found_sob = 1;
+ if (i == info.trailer_nr - 1)
+ found_sob_last = 1;
+ }
- found_rfc2822 = is_rfc2822_line(buf + i, k - i - 1);
- if (found_rfc2822 && sob &&
- !strncmp(buf + i, sob->buf, sob->len))
- found_sob = k;
+ trailer_info_release(&info);
- if (!(found_rfc2822 ||
- is_cherry_picked_from_line(buf + i, k - i - 1)))
- return 0;
- }
- if (found_sob == i)
+ if (found_sob_last)
return 3;
if (found_sob)
return 2;
return 1;
}
-static int submodule_needs_pushing(const char *path, const unsigned char sha1[20])
+static int append_sha1_to_argv(const unsigned char sha1[20], void *data)
{
- if (add_submodule_odb(path) || !lookup_commit_reference(sha1))
+ struct argv_array *argv = data;
+ argv_array_push(argv, sha1_to_hex(sha1));
+ return 0;
+}
+
+static int check_has_commit(const unsigned char sha1[20], void *data)
+{
+ int *has_commit = data;
+
+ if (!lookup_commit_reference(sha1))
+ *has_commit = 0;
+
+ return 0;
+}
+
+static int submodule_has_commits(const char *path, struct sha1_array *commits)
+{
+ int has_commit = 1;
+
+ if (add_submodule_odb(path))
+ return 0;
+
+ sha1_array_for_each_unique(commits, check_has_commit, &has_commit);
+ return has_commit;
+}
+
+static int submodule_needs_pushing(const char *path, struct sha1_array *commits)
+{
+ if (!submodule_has_commits(path, commits))
+ /*
+ * NOTE: We do consider it safe to return "no" here. The
+ * correct answer would be "We do not know" instead of
+ * "No push needed", but it is quite hard to change
+ * the submodule pointer without having the submodule
+ * around. If a user did however change the submodules
+ * without having the submodule around, this indicates
+ * an expert who knows what they are doing or a
+ * maintainer integrating work from other people. In
+ * both cases it should be safe to skip this check.
+ */
return 0;
if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
struct child_process cp = CHILD_PROCESS_INIT;
- const char *argv[] = {"rev-list", NULL, "--not", "--remotes", "-n", "1" , NULL};
struct strbuf buf = STRBUF_INIT;
int needs_pushing = 0;
- argv[1] = sha1_to_hex(sha1);
- cp.argv = argv;
+ argv_array_push(&cp.args, "rev-list");
+ sha1_array_for_each_unique(commits, append_sha1_to_argv, &cp.args);
+ argv_array_pushl(&cp.args, "--not", "--remotes", "-n", "1" , NULL);
+
prepare_submodule_repo_env(&cp.env_array);
cp.git_cmd = 1;
cp.no_stdin = 1;
cp.out = -1;
cp.dir = path;
if (start_command(&cp))
- die("Could not run 'git rev-list %s --not --remotes -n 1' command in submodule %s",
- sha1_to_hex(sha1), path);
+ die("Could not run 'git rev-list <commits> --not --remotes -n 1' command in submodule %s",
+ path);
if (strbuf_read(&buf, cp.out, 41))
needs_pushing = 1;
finish_command(&cp);
return 0;
}
+static struct sha1_array *submodule_commits(struct string_list *submodules,
+ const char *path)
+{
+ struct string_list_item *item;
+
+ item = string_list_insert(submodules, path);
+ if (item->util)
+ return (struct sha1_array *) item->util;
+
+ /* NEEDSWORK: should we have sha1_array_init()? */
+ item->util = xcalloc(1, sizeof(struct sha1_array));
+ return (struct sha1_array *) item->util;
+}
+
static void collect_submodules_from_diff(struct diff_queue_struct *q,
struct diff_options *options,
void *data)
{
int i;
- struct string_list *needs_pushing = data;
+ struct string_list *submodules = data;
for (i = 0; i < q->nr; i++) {
struct diff_filepair *p = q->queue[i];
+ struct sha1_array *commits;
if (!S_ISGITLINK(p->two->mode))
continue;
- if (submodule_needs_pushing(p->two->path, p->two->oid.hash))
- string_list_insert(needs_pushing, p->two->path);
+ commits = submodule_commits(submodules, p->two->path);
+ sha1_array_append(commits, p->two->oid.hash);
}
}
diff_tree_combined_merge(commit, 1, &rev);
}
-int find_unpushed_submodules(unsigned char new_sha1[20],
+static void free_submodules_sha1s(struct string_list *submodules)
+{
+ struct string_list_item *item;
+ for_each_string_list_item(item, submodules)
+ sha1_array_clear((struct sha1_array *) item->util);
+ string_list_clear(submodules, 1);
+}
+
+int find_unpushed_submodules(struct sha1_array *commits,
const char *remotes_name, struct string_list *needs_pushing)
{
struct rev_info rev;
struct commit *commit;
- const char *argv[] = {NULL, NULL, "--not", "NULL", NULL};
- int argc = ARRAY_SIZE(argv) - 1;
- char *sha1_copy;
-
- struct strbuf remotes_arg = STRBUF_INIT;
+ struct string_list submodules = STRING_LIST_INIT_DUP;
+ struct string_list_item *submodule;
+ struct argv_array argv = ARGV_ARRAY_INIT;
- strbuf_addf(&remotes_arg, "--remotes=%s", remotes_name);
init_revisions(&rev, NULL);
- sha1_copy = xstrdup(sha1_to_hex(new_sha1));
- argv[1] = sha1_copy;
- argv[3] = remotes_arg.buf;
- setup_revisions(argc, argv, &rev, NULL);
+
+ /* argv.argv[0] will be ignored by setup_revisions */
+ argv_array_push(&argv, "find_unpushed_submodules");
+ sha1_array_for_each_unique(commits, append_sha1_to_argv, &argv);
+ argv_array_push(&argv, "--not");
+ argv_array_pushf(&argv, "--remotes=%s", remotes_name);
+
+ setup_revisions(argv.argc, argv.argv, &rev, NULL);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
while ((commit = get_revision(&rev)) != NULL)
- find_unpushed_submodule_commits(commit, needs_pushing);
+ find_unpushed_submodule_commits(commit, &submodules);
reset_revision_walk();
- free(sha1_copy);
- strbuf_release(&remotes_arg);
+ argv_array_clear(&argv);
+
+ for_each_string_list_item(submodule, &submodules) {
+ struct sha1_array *commits = (struct sha1_array *) submodule->util;
+
+ if (submodule_needs_pushing(submodule->string, commits))
+ string_list_insert(needs_pushing, submodule->string);
+ }
+ free_submodules_sha1s(&submodules);
return needs_pushing->nr;
}
-static int push_submodule(const char *path)
+static int push_submodule(const char *path, int dry_run)
{
if (add_submodule_odb(path))
return 1;
if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
struct child_process cp = CHILD_PROCESS_INIT;
- const char *argv[] = {"push", NULL};
+ argv_array_push(&cp.args, "push");
+ if (dry_run)
+ argv_array_push(&cp.args, "--dry-run");
- cp.argv = argv;
prepare_submodule_repo_env(&cp.env_array);
cp.git_cmd = 1;
cp.no_stdin = 1;
return 1;
}
-int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name)
+int push_unpushed_submodules(struct sha1_array *commits,
+ const char *remotes_name,
+ int dry_run)
{
int i, ret = 1;
struct string_list needs_pushing = STRING_LIST_INIT_DUP;
- if (!find_unpushed_submodules(new_sha1, remotes_name, &needs_pushing))
+ if (!find_unpushed_submodules(commits, remotes_name, &needs_pushing))
return 1;
for (i = 0; i < needs_pushing.nr; i++) {
const char *path = needs_pushing.items[i].string;
fprintf(stderr, "Pushing submodule '%s'\n", path);
- if (!push_submodule(path)) {
+ if (!push_submodule(path, dry_run)) {
fprintf(stderr, "Unable to push submodule '%s'\n", path);
ret = 0;
}
struct diff_options;
struct argv_array;
+struct sha1_array;
enum {
RECURSE_SUBMODULES_CHECK = -4,
int ok_to_remove_submodule(const char *path);
int merge_submodule(unsigned char result[20], const char *path, const unsigned char base[20],
const unsigned char a[20], const unsigned char b[20], int search);
-int find_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name,
+int find_unpushed_submodules(struct sha1_array *commits, const char *remotes_name,
struct string_list *needs_pushing);
-int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name);
+extern int push_unpushed_submodules(struct sha1_array *commits,
+ const char *remotes_name,
+ int dry_run);
void connect_work_tree_and_git_dir(const char *work_tree, const char *git_dir);
int parallel_submodules(void);
cd repo &&
git init &&
- echo "git-stderr.log" >.gitignore &&
echo "*.r filter=protocol" >.gitattributes &&
git add . &&
- git commit . -m "test commit 1" &&
+ git commit -m "test commit 1" &&
git branch empty-branch &&
cp "$TEST_ROOT/test.o" test.r &&
EOF
test_cmp_count expected.log rot13-filter.log &&
- filter_git commit . -m "test commit 2" &&
+ filter_git commit -m "test commit 2" &&
cat >expected.log <<-EOF &&
START
init handshake complete
rm -rf bare1
'
+test_expect_success 'broken main worktree still at the top' '
+ git init broken-main &&
+ (
+ cd broken-main &&
+ test_commit new &&
+ git worktree add linked &&
+ cat >expected <<-EOF &&
+ worktree $(pwd)
+ HEAD $_z40
+
+ EOF
+ cd linked &&
+ echo "worktree $(pwd)" >expected &&
+ echo "ref: .broken" >../.git/HEAD &&
+ git worktree list --porcelain | head -n 3 >actual &&
+ test_cmp ../expected actual &&
+ git worktree list | head -n 1 >actual.2 &&
+ grep -F "(error)" actual.2
+ )
+'
+
+test_expect_success 'linked worktrees are sorted' '
+ mkdir sorted &&
+ git init sorted/main &&
+ (
+ cd sorted/main &&
+ test_tick &&
+ test_commit new &&
+ git worktree add ../first &&
+ git worktree add ../second &&
+ git worktree list --porcelain | grep ^worktree >actual
+ ) &&
+ cat >expected <<-EOF &&
+ worktree $(pwd)/sorted/main
+ worktree $(pwd)/sorted/first
+ worktree $(pwd)/sorted/second
+ EOF
+ test_cmp expected sorted/main/actual
+'
+
test_done
test_must_fail test -d d
'
-test_expect_failure 'merge-recursive simple w/submodule' '
+test_expect_success 'merge-recursive simple w/submodule' '
git checkout submod &&
git merge remove
'
-test_expect_failure 'merge-recursive simple w/submodule result' '
+test_expect_success 'merge-recursive simple w/submodule result' '
git ls-files -s >actual &&
(
git rebase -i "$1"
}
-KNOWN_FAILURE_NOFF_MERGE_DOESNT_CREATE_EMPTY_SUBMODULE_DIR=1
-# The real reason "replace directory with submodule" fails is because a
-# directory "sub1" exists, but we reuse the suppression added for merge here
test_submodule_switch "git_rebase_interactive"
test_done
test_cmp expect actual
'
+test_expect_success 'cherry-pick works with dirty renamed file' '
+ test_commit to-rename &&
+ git checkout -b unrelated &&
+ test_commit unrelated &&
+ git checkout @{-1} &&
+ git mv to-rename.t renamed &&
+ test_tick &&
+ git commit -m renamed &&
+ echo modified >renamed &&
+ git cherry-pick refs/heads/unrelated
+'
+
test_done
mesg_broken_footer="$mesg_no_footer
-The signed-off-by string should begin with the words Signed-off-by followed
-by a colon and space, and then the signers name and email address. e.g.
-Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+This is not recognized as a footer because Myfooter is not a recognized token.
+Myfooter: A.U. Thor <author@example.com>"
mesg_with_footer_sob="$mesg_with_footer
Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
test_cmp expect actual
'
+test_expect_success 'cherry-pick -s recognizes trailer config' '
+ pristine_detach initial &&
+ git -c "trailer.Myfooter.ifexists=add" cherry-pick -s mesg-broken-footer &&
+ cat <<-EOF >expect &&
+ $mesg_broken_footer
+ Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>
+ EOF
+ git log -1 --pretty=format:%B >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'cherry-pick -x inserts blank line when conforming footer not found' '
pristine_detach initial &&
sha1=$(git rev-parse mesg-no-footer^0) &&
4:Subject: [PATCH] subject
8:
10:Signed-off-by: example happens to be wrapped here.
-11:
-12:Signed-off-by: C O Mitter <committer@example.com>
+11:Signed-off-by: C O Mitter <committer@example.com>
EOF
test_cmp expected actual
'
test_cmp expected actual
'
-test_expect_success 'signoff: detect garbage in non-conforming footer' '
+test_expect_success 'signoff: tolerate garbage in conforming footer' '
append_signoff <<\EOF >actual &&
subject
8:
10:
13:Signed-off-by: C O Mitter <committer@example.com>
-14:
-15:Signed-off-by: C O Mitter <committer@example.com>
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'signoff: respect trailer config' '
+ append_signoff <<\EOF >actual &&
+subject
+
+Myfooter: x
+Some Trash
+EOF
+ cat >expected <<\EOF &&
+4:Subject: [PATCH] subject
+8:
+11:
+12:Signed-off-by: C O Mitter <committer@example.com>
+EOF
+ test_cmp expected actual &&
+
+ test_config trailer.Myfooter.ifexists add &&
+ append_signoff <<\EOF >actual &&
+subject
+
+Myfooter: x
+Some Trash
+EOF
+ cat >expected <<\EOF &&
+4:Subject: [PATCH] subject
+8:
+11:Signed-off-by: C O Mitter <committer@example.com>
EOF
test_cmp expected actual
'
cd submodule.git &&
git rev-parse master >../actual
) &&
+ test_when_finished git -C work reset --hard master^ &&
test_cmp expected actual
'
+test_expect_success 'push --dry-run does not recursively update submodules' '
+ (
+ cd work/gar/bage &&
+ git checkout master &&
+ git rev-parse master >../../../expected_submodule &&
+ > junk9 &&
+ git add junk9 &&
+ git commit -m "Ninth junk" &&
+
+ # Go up to 'work' directory
+ cd ../.. &&
+ git checkout master &&
+ git rev-parse master >../expected_pub &&
+ git add gar/bage &&
+ git commit -m "Ninth commit for gar/bage" &&
+ git push --dry-run --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ git -C submodule.git rev-parse master >actual_submodule &&
+ git -C pub.git rev-parse master >actual_pub &&
+ test_cmp expected_pub actual_pub &&
+ test_cmp expected_submodule actual_submodule
+'
+
test_done
test_cmp expect actual
'
+test_expect_success 'symbolic final^1^@ = final^1^1 final^1^2' '
+ git rev-parse --symbolic final^1^1 final^1^2 >expect &&
+ git rev-parse --symbolic final^1^@ >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'final^1^! = final^1 ^final^1^1 ^final^1^2' '
git rev-parse final^1 ^final^1^1 ^final^1^2 >expect &&
git rev-parse final^1^! >actual &&
test_cmp expect actual
'
+test_expect_success 'symbolic final^1^! = final^1 ^final^1^1 ^final^1^2' '
+ git rev-parse --symbolic final^1 ^final^1^1 ^final^1^2 >expect &&
+ git rev-parse --symbolic final^1^! >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'large graft octopus' '
test_cmp_rev_output b31 "git rev-parse --verify b1^30"
'
test_cmp expect actual
'
+test_expect_success 'symbolic merge^-1 = merge^1..merge' '
+ git rev-parse --symbolic merge^1..merge >expect &&
+ git rev-parse --symbolic merge^-1 >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'rev-parse merge^-0 (invalid parent)' '
test_must_fail git rev-parse merge^-0
'
test_cmp expected actual
'
+test_expect_success 'signoff respects trailer config' '
+
+ echo 5 >positive &&
+ git add positive &&
+ git commit -s -m "subject
+
+non-trailer line
+Myfooter: x" &&
+ git cat-file commit HEAD | sed -e "1,/^\$/d" > actual &&
+ (
+ echo subject
+ echo
+ echo non-trailer line
+ echo Myfooter: x
+ echo
+ echo "Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+ ) >expected &&
+ test_cmp expected actual &&
+
+ echo 6 >positive &&
+ git add positive &&
+ git -c "trailer.Myfooter.ifexists=add" commit -s -m "subject
+
+non-trailer line
+Myfooter: x" &&
+ git cat-file commit HEAD | sed -e "1,/^\$/d" > actual &&
+ (
+ echo subject
+ echo
+ echo non-trailer line
+ echo Myfooter: x
+ echo "Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+ ) >expected &&
+ test_cmp expected actual
+'
+
test_expect_success 'multiple -m' '
>negative &&
'
cat >expect <<\EOF
-error: Updating the following directories would lose untracked files in it:
+error: Updating the following directories would lose untracked files in them:
rep
rep2
test_lazy_prereq MKTEMP '
tempdir=$(mktemp -d -t foo.XXXXXX) &&
- test -d "$tempdir"
+ test -d "$tempdir" &&
+ rmdir "$tempdir"
'
test_expect_success MKTEMP 'temporary filenames are used with mergetool.writeToTemp' '
)
'
+test_expect_success 'submit --shelve' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$cli" &&
+ p4 revert ... &&
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ test_commit "shelveme1" &&
+ git p4 submit --origin=HEAD^ &&
+
+ echo 654321 >shelveme2.t &&
+ echo 123456 >>shelveme1.t &&
+ git add shelveme* &&
+ git commit -m"shelvetest" &&
+ git p4 submit --shelve --origin=HEAD^ &&
+
+ test_path_is_file shelveme1.t &&
+ test_path_is_file shelveme2.t
+ ) &&
+ (
+ cd "$cli" &&
+ change=$(p4 -G changes -s shelved -m 1 //depot/... | \
+ marshal_dump change) &&
+ p4 describe -S $change | grep shelveme2 &&
+ p4 describe -S $change | grep 123456 &&
+ test_path_is_file shelveme1.t &&
+ test_path_is_missing shelveme2.t
+ )
+'
+
+# Update an existing shelved changelist
+
+test_expect_success 'submit --update-shelve' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$cli" &&
+ p4 revert ... &&
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ test_commit "test-update-shelved-change" &&
+ git p4 submit --origin=HEAD^ --shelve &&
+
+ shelf_cl=$(p4 -G changes -s shelved -m 1 |\
+ marshal_dump change) &&
+ test -n $shelf_cl &&
+ echo "updating shelved change list $shelf_cl" &&
+
+ echo "updated-line" >>shelf.t &&
+ echo added-file.t >added-file.t &&
+ git add shelf.t added-file.t &&
+ git rm -f test-update-shelved-change.t &&
+ git commit --amend -C HEAD &&
+ git show --stat HEAD &&
+ git p4 submit -v --origin HEAD^ --update-shelve $shelf_cl &&
+ echo "done git p4 submit"
+ ) &&
+ (
+ cd "$cli" &&
+ change=$(p4 -G changes -s shelved -m 1 //depot/... | \
+ marshal_dump change) &&
+ p4 unshelve -c $change -s $change &&
+ grep -q updated-line shelf.t &&
+ p4 describe -S $change | grep added-file.t &&
+ test_path_is_missing test-update-shelved-change.t
+ )
+'
+
test_expect_success 'kill p4d' '
kill_p4d
'
(
cd "$cli" &&
+ >file0.dat &&
+ p4 add file0.dat &&
echo "content 1 txt 23 bytes" >file1.txt &&
p4 add file1.txt &&
echo "content 2-3 bin 25 bytes" >file2.dat &&
static char *separators = ":";
+static int configured;
+
#define TRAILER_ARG_STRING "$ARG"
static const char *git_generated_prefixes[] = {
return same_token(a, b) && same_value(a, b);
}
-static inline int contains_only_spaces(const char *str)
+static inline int is_blank_line(const char *str)
{
const char *s = str;
- while (*s && isspace(*s))
+ while (*s && *s != '\n' && isspace(*s))
s++;
- return !*s;
+ return !*s || *s == '\n';
}
static inline void strbuf_replace(struct strbuf *sb, const char *a, const char *b)
return 0;
}
+static void ensure_configured(void)
+{
+ if (configured)
+ return;
+
+ /* Default config must be setup first */
+ git_config(git_trailer_default_config, NULL);
+ git_config(git_trailer_config, NULL);
+ configured = 1;
+}
+
static const char *token_from_item(struct arg_item *item, char *tok)
{
if (item->conf.key)
}
/*
- * Return the location of the first separator in line, or -1 if there is no
- * separator.
+ * If the given line is of the form
+ * "<token><optional whitespace><separator>..." or "<separator>...", return the
+ * location of the separator. Otherwise, return -1. The optional whitespace
+ * is allowed there primarily to allow things like "Bug #43" where <token> is
+ * "Bug" and <separator> is "#".
+ *
+ * The separator-starts-line case (in which this function returns 0) is
+ * distinguished from the non-well-formed-line case (in which this function
+ * returns -1) because some callers of this function need such a distinction.
*/
static int find_separator(const char *line, const char *separators)
{
- int loc = strcspn(line, separators);
- if (!line[loc])
- return -1;
- return loc;
+ int whitespace_found = 0;
+ const char *c;
+ for (c = line; *c; c++) {
+ if (strchr(separators, *c))
+ return c - line;
+ if (!whitespace_found && (isalnum(*c) || *c == '-'))
+ continue;
+ if (c != line && (*c == ' ' || *c == '\t')) {
+ whitespace_found = 1;
+ continue;
+ }
+ break;
+ }
+ return -1;
}
/*
free(cl_separators);
}
-static struct strbuf **read_input_file(const char *file)
+static void read_input_file(struct strbuf *sb, const char *file)
{
- struct strbuf **lines;
- struct strbuf sb = STRBUF_INIT;
-
if (file) {
- if (strbuf_read_file(&sb, file, 0) < 0)
+ if (strbuf_read_file(sb, file, 0) < 0)
die_errno(_("could not read input file '%s'"), file);
} else {
- if (strbuf_read(&sb, fileno(stdin), 0) < 0)
+ if (strbuf_read(sb, fileno(stdin), 0) < 0)
die_errno(_("could not read from stdin"));
}
+}
- lines = strbuf_split(&sb, '\n');
+static const char *next_line(const char *str)
+{
+ const char *nl = strchrnul(str, '\n');
+ return nl + !!*nl;
+}
- strbuf_release(&sb);
+/*
+ * Return the position of the start of the last line. If len is 0, return -1.
+ */
+static int last_line(const char *buf, size_t len)
+{
+ int i;
+ if (len == 0)
+ return -1;
+ if (len == 1)
+ return 0;
+ /*
+ * Skip the last character (in addition to the null terminator),
+ * because if the last character is a newline, it is considered as part
+ * of the last line anyway.
+ */
+ i = len - 2;
- return lines;
+ for (; i >= 0; i--) {
+ if (buf[i] == '\n')
+ return i + 1;
+ }
+ return 0;
}
/*
- * Return the (0 based) index of the start of the patch or the line
- * count if there is no patch in the message.
+ * Return the position of the start of the patch or the length of str if there
+ * is no patch in the message.
*/
-static int find_patch_start(struct strbuf **lines, int count)
+static int find_patch_start(const char *str)
{
- int i;
+ const char *s;
- /* Get the start of the patch part if any */
- for (i = 0; i < count; i++) {
- if (starts_with(lines[i]->buf, "---"))
- return i;
+ for (s = str; *s; s = next_line(s)) {
+ if (starts_with(s, "---"))
+ return s - str;
}
- return count;
+ return s - str;
}
/*
- * Return the (0 based) index of the first trailer line or count if
- * there are no trailers. Trailers are searched only in the lines from
- * index (count - 1) down to index 0.
+ * Return the position of the first trailer line or len if there are no
+ * trailers.
*/
-static int find_trailer_start(struct strbuf **lines, int count)
+static int find_trailer_start(const char *buf, size_t len)
{
- int start, end_of_title, only_spaces = 1;
+ const char *s;
+ int end_of_title, l, only_spaces = 1;
int recognized_prefix = 0, trailer_lines = 0, non_trailer_lines = 0;
/*
* Number of possible continuation lines encountered. This will be
int possible_continuation_lines = 0;
/* The first paragraph is the title and cannot be trailers */
- for (start = 0; start < count; start++) {
- if (lines[start]->buf[0] == comment_line_char)
+ for (s = buf; s < buf + len; s = next_line(s)) {
+ if (s[0] == comment_line_char)
continue;
- if (contains_only_spaces(lines[start]->buf))
+ if (is_blank_line(s))
break;
}
- end_of_title = start;
+ end_of_title = s - buf;
/*
* Get the start of the trailers by looking starting from the end for a
* trailers, or (ii) contains at least one Git-generated trailer and
* consists of at least 25% trailers.
*/
- for (start = count - 1; start >= end_of_title; start--) {
+ for (l = last_line(buf, len);
+ l >= end_of_title;
+ l = last_line(buf, l)) {
+ const char *bol = buf + l;
const char **p;
int separator_pos;
- if (lines[start]->buf[0] == comment_line_char) {
+ if (bol[0] == comment_line_char) {
non_trailer_lines += possible_continuation_lines;
possible_continuation_lines = 0;
continue;
}
- if (contains_only_spaces(lines[start]->buf)) {
+ if (is_blank_line(bol)) {
if (only_spaces)
continue;
non_trailer_lines += possible_continuation_lines;
if (recognized_prefix &&
trailer_lines * 3 >= non_trailer_lines)
- return start + 1;
- if (trailer_lines && !non_trailer_lines)
- return start + 1;
- return count;
+ return next_line(bol) - buf;
+ else if (trailer_lines && !non_trailer_lines)
+ return next_line(bol) - buf;
+ return len;
}
only_spaces = 0;
for (p = git_generated_prefixes; *p; p++) {
- if (starts_with(lines[start]->buf, *p)) {
+ if (starts_with(bol, *p)) {
trailer_lines++;
possible_continuation_lines = 0;
recognized_prefix = 1;
}
}
- separator_pos = find_separator(lines[start]->buf, separators);
- if (separator_pos >= 1 && !isspace(lines[start]->buf[0])) {
+ separator_pos = find_separator(bol, separators);
+ if (separator_pos >= 1 && !isspace(bol[0])) {
struct list_head *pos;
trailer_lines++;
list_for_each(pos, &conf_head) {
struct arg_item *item;
item = list_entry(pos, struct arg_item, list);
- if (token_matches_item(lines[start]->buf, item,
+ if (token_matches_item(bol, item,
separator_pos)) {
recognized_prefix = 1;
break;
}
}
- } else if (isspace(lines[start]->buf[0]))
+ } else if (isspace(bol[0]))
possible_continuation_lines++;
else {
non_trailer_lines++;
;
}
- return count;
-}
-
-/* Get the index of the end of the trailers */
-static int find_trailer_end(struct strbuf **lines, int patch_start)
-{
- struct strbuf sb = STRBUF_INIT;
- int i, ignore_bytes;
-
- for (i = 0; i < patch_start; i++)
- strbuf_addbuf(&sb, lines[i]);
- ignore_bytes = ignore_non_trailer(&sb);
- strbuf_release(&sb);
- for (i = patch_start - 1; i >= 0 && ignore_bytes > 0; i--)
- ignore_bytes -= lines[i]->len;
-
- return i + 1;
+ return len;
}
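
The acceptance rule in find_trailer_start() is easy to misread: a block that is not purely trailers is still taken as a trailer block when it contains at least one Git-generated trailer and trailers make up at least 25% of its lines, which is what the trailer_lines * 3 >= non_trailer_lines test encodes. A small self-contained illustration with made-up counts (not git code):

#include <stdio.h>

int main(void)
{
	/*
	 * Hypothetical final block of 8 lines: 2 trailers (one of them
	 * Git-generated, e.g. "Signed-off-by:") and 6 free-form lines.
	 */
	int trailer_lines = 2, non_trailer_lines = 6, recognized_prefix = 1;

	/* 2 * 3 >= 6, so 25% trailers is just enough to accept the block. */
	if (recognized_prefix && trailer_lines * 3 >= non_trailer_lines)
		printf("block is treated as a trailer block\n");
	return 0;
}
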
-static int has_blank_line_before(struct strbuf **lines, int start)
+/* Return the position of the end of the trailers. */
+static int find_trailer_end(const char *buf, size_t len)
{
- for (;start >= 0; start--) {
- if (lines[start]->buf[0] == comment_line_char)
- continue;
- return contains_only_spaces(lines[start]->buf);
- }
- return 0;
+ return len - ignore_non_trailer(buf, len);
}
-static void print_lines(FILE *outfile, struct strbuf **lines, int start, int end)
+static int ends_with_blank_line(const char *buf, size_t len)
{
- int i;
- for (i = start; lines[i] && i < end; i++)
- fprintf(outfile, "%s", lines[i]->buf);
+ int ll = last_line(buf, len);
+ if (ll < 0)
+ return 0;
+ return is_blank_line(buf + ll);
}
static int process_input_file(FILE *outfile,
- struct strbuf **lines,
+ const char *str,
struct list_head *head)
{
- int count = 0;
- int patch_start, trailer_start, trailer_end, i;
+ struct trailer_info info;
struct strbuf tok = STRBUF_INIT;
struct strbuf val = STRBUF_INIT;
- struct trailer_item *last = NULL;
-
- /* Get the line count */
- while (lines[count])
- count++;
+ int i;
- patch_start = find_patch_start(lines, count);
- trailer_end = find_trailer_end(lines, patch_start);
- trailer_start = find_trailer_start(lines, trailer_end);
+ trailer_info_get(&info, str);
/* Print lines before the trailers as is */
- print_lines(outfile, lines, 0, trailer_start);
+ fwrite(str, 1, info.trailer_start - str, outfile);
- if (!has_blank_line_before(lines, trailer_start - 1))
+ if (!info.blank_line_before_trailer)
fprintf(outfile, "\n");
- /* Parse trailer lines */
- for (i = trailer_start; i < trailer_end; i++) {
+ for (i = 0; i < info.trailer_nr; i++) {
int separator_pos;
- if (lines[i]->buf[0] == comment_line_char)
+ char *trailer = info.trailers[i];
+ if (trailer[0] == comment_line_char)
continue;
- if (last && isspace(lines[i]->buf[0])) {
- struct strbuf sb = STRBUF_INIT;
- strbuf_addf(&sb, "%s\n%s", last->value, lines[i]->buf);
- strbuf_strip_suffix(&sb, "\n");
- free(last->value);
- last->value = strbuf_detach(&sb, NULL);
- continue;
- }
- separator_pos = find_separator(lines[i]->buf, separators);
+ separator_pos = find_separator(trailer, separators);
if (separator_pos >= 1) {
- parse_trailer(&tok, &val, NULL, lines[i]->buf,
+ parse_trailer(&tok, &val, NULL, trailer,
separator_pos);
- last = add_trailer_item(head,
- strbuf_detach(&tok, NULL),
- strbuf_detach(&val, NULL));
+ add_trailer_item(head,
+ strbuf_detach(&tok, NULL),
+ strbuf_detach(&val, NULL));
} else {
- strbuf_addbuf(&val, lines[i]);
+ strbuf_addstr(&val, trailer);
strbuf_strip_suffix(&val, "\n");
add_trailer_item(head,
NULL,
strbuf_detach(&val, NULL));
- last = NULL;
}
}
- return trailer_end;
+ trailer_info_release(&info);
+
+ return info.trailer_end - str;
}
static void free_all(struct list_head *head)
{
LIST_HEAD(head);
LIST_HEAD(arg_head);
- struct strbuf **lines;
+ struct strbuf sb = STRBUF_INIT;
int trailer_end;
FILE *outfile = stdout;
- /* Default config must be setup first */
- git_config(git_trailer_default_config, NULL);
- git_config(git_trailer_config, NULL);
+ ensure_configured();
- lines = read_input_file(file);
+ read_input_file(&sb, file);
if (in_place)
outfile = create_in_place_tempfile(file);
/* Print the lines before the trailers */
- trailer_end = process_input_file(outfile, lines, &head);
+ trailer_end = process_input_file(outfile, sb.buf, &head);
process_command_line_args(&arg_head, trailers);
free_all(&head);
/* Print the lines after the trailers as is */
- print_lines(outfile, lines, trailer_end, INT_MAX);
+ fwrite(sb.buf + trailer_end, 1, sb.len - trailer_end, outfile);
if (in_place)
if (rename_tempfile(&trailers_tempfile, file))
die_errno(_("could not rename temporary file to %s"), file);
- strbuf_list_free(lines);
+ strbuf_release(&sb);
+}
+
+void trailer_info_get(struct trailer_info *info, const char *str)
+{
+ int patch_start, trailer_end, trailer_start;
+ struct strbuf **trailer_lines, **ptr;
+ char **trailer_strings = NULL;
+ size_t nr = 0, alloc = 0;
+ char **last = NULL;
+
+ ensure_configured();
+
+ patch_start = find_patch_start(str);
+ trailer_end = find_trailer_end(str, patch_start);
+ trailer_start = find_trailer_start(str, trailer_end);
+
+ trailer_lines = strbuf_split_buf(str + trailer_start,
+ trailer_end - trailer_start,
+ '\n',
+ 0);
+ for (ptr = trailer_lines; *ptr; ptr++) {
+ if (last && isspace((*ptr)->buf[0])) {
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_attach(&sb, *last, strlen(*last), strlen(*last));
+ strbuf_addbuf(&sb, *ptr);
+ *last = strbuf_detach(&sb, NULL);
+ continue;
+ }
+ ALLOC_GROW(trailer_strings, nr + 1, alloc);
+ trailer_strings[nr] = strbuf_detach(*ptr, NULL);
+ last = find_separator(trailer_strings[nr], separators) >= 1
+ ? &trailer_strings[nr]
+ : NULL;
+ nr++;
+ }
+ strbuf_list_free(trailer_lines);
+
+ info->blank_line_before_trailer = ends_with_blank_line(str,
+ trailer_start);
+ info->trailer_start = str + trailer_start;
+ info->trailer_end = str + trailer_end;
+ info->trailers = trailer_strings;
+ info->trailer_nr = nr;
+}
+
+void trailer_info_release(struct trailer_info *info)
+{
+ int i;
+ for (i = 0; i < info->trailer_nr; i++)
+ free(info->trailers[i]);
+ free(info->trailers);
}
#ifndef TRAILER_H
#define TRAILER_H
+struct trailer_info {
+ /*
+ * True if there is a blank line before the location pointed to by
+ * trailer_start.
+ */
+ int blank_line_before_trailer;
+
+ /*
+ * Pointers to the start and end of the trailer block found. If there
+ * is no trailer block found, these 2 pointers point to the end of the
+ * input string.
+ */
+ const char *trailer_start, *trailer_end;
+
+ /*
+ * Array of trailers found.
+ */
+ char **trailers;
+ size_t trailer_nr;
+};
+
void process_trailers(const char *file, int in_place, int trim_empty,
struct string_list *trailers);
+void trailer_info_get(struct trailer_info *info, const char *str);
+
+void trailer_info_release(struct trailer_info *info);
+
#endif /* TRAILER_H */
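
The struct above, together with trailer_info_get() and trailer_info_release(), is enough to walk a message's trailer block without going through process_trailers(). A minimal sketch of a caller, assuming it is compiled inside the git tree (the show_trailers helper and its output format are illustrative, not part of this series):

#include "cache.h"
#include "trailer.h"

static void show_trailers(const char *msg)
{
	struct trailer_info info;
	size_t i;

	trailer_info_get(&info, msg);

	if (!info.blank_line_before_trailer)
		printf("note: no blank line before the trailer block\n");

	/*
	 * Each entry is one trailer; indented continuation lines have
	 * already been folded into the preceding entry by trailer_info_get().
	 */
	for (i = 0; i < info.trailer_nr; i++)
		printf("trailer: %s", info.trailers[i]);

	trailer_info_release(&info);
}

process_input_file() above is essentially this loop plus the parsing of each entry into trailer items.
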
if ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) && !is_bare_repository()) {
struct ref *ref = remote_refs;
+ struct sha1_array commits = SHA1_ARRAY_INIT;
+
for (; ref; ref = ref->next)
- if (!is_null_oid(&ref->new_oid) &&
- !push_unpushed_submodules(ref->new_oid.hash,
- transport->remote->name))
- die ("Failed to push all needed submodules!");
+ if (!is_null_oid(&ref->new_oid))
+ sha1_array_append(&commits, ref->new_oid.hash);
+
+ if (!push_unpushed_submodules(&commits,
+ transport->remote->name,
+ pretend)) {
+ sha1_array_clear(&commits);
+ die("Failed to push all needed submodules!");
+ }
+ sha1_array_clear(&commits);
}
- if ((flags & (TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND |
- TRANSPORT_RECURSE_SUBMODULES_CHECK)) && !is_bare_repository()) {
+ if (((flags & TRANSPORT_RECURSE_SUBMODULES_CHECK) ||
+ ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) &&
+ !pretend)) && !is_bare_repository()) {
struct ref *ref = remote_refs;
struct string_list needs_pushing = STRING_LIST_INIT_DUP;
+ struct sha1_array commits = SHA1_ARRAY_INIT;
for (; ref; ref = ref->next)
- if (!is_null_oid(&ref->new_oid) &&
- find_unpushed_submodules(ref->new_oid.hash,
- transport->remote->name, &needs_pushing))
- die_with_unpushed_submodules(&needs_pushing);
+ if (!is_null_oid(&ref->new_oid))
+ sha1_array_append(&commits, ref->new_oid.hash);
+
+ if (find_unpushed_submodules(&commits, transport->remote->name,
+ &needs_pushing)) {
+ sha1_array_clear(&commits);
+ die_with_unpushed_submodules(&needs_pushing);
+ }
+ string_list_clear(&needs_pushing, 0);
+ sha1_array_clear(&commits);
}
push_ret = transport->push_refs(transport, remote_refs, flags);
xstrfmt(msg, cmd, cmd);
msgs[ERROR_NOT_UPTODATE_DIR] =
- _("Updating the following directories would lose untracked files in it:\n%s");
+ _("Updating the following directories would lose untracked files in them:\n%s");
if (!strcmp(cmd, "checkout"))
msg = advice_commit_before_merge
strbuf_addf(&path, "%s/HEAD", get_git_common_dir());
- if (parse_ref(path.buf, &head_ref, &is_detached) < 0)
- goto done;
-
- worktree = xmalloc(sizeof(struct worktree));
+ worktree = xcalloc(1, sizeof(*worktree));
worktree->path = strbuf_detach(&worktree_path, NULL);
- worktree->id = NULL;
worktree->is_bare = is_bare;
- worktree->head_ref = NULL;
worktree->is_detached = is_detached;
- worktree->is_current = 0;
- add_head_info(&head_ref, worktree);
- worktree->lock_reason = NULL;
- worktree->lock_reason_valid = 0;
+ if (!parse_ref(path.buf, &head_ref, &is_detached))
+ add_head_info(&head_ref, worktree);
-done:
strbuf_release(&path);
strbuf_release(&worktree_path);
strbuf_release(&head_ref);
if (parse_ref(path.buf, &head_ref, &is_detached) < 0)
goto done;
- worktree = xmalloc(sizeof(struct worktree));
+ worktree = xcalloc(1, sizeof(*worktree));
worktree->path = strbuf_detach(&worktree_path, NULL);
worktree->id = xstrdup(id);
- worktree->is_bare = 0;
- worktree->head_ref = NULL;
worktree->is_detached = is_detached;
- worktree->is_current = 0;
add_head_info(&head_ref, worktree);
- worktree->lock_reason = NULL;
- worktree->lock_reason_valid = 0;
done:
strbuf_release(&path);
free(git_dir);
}
-struct worktree **get_worktrees(void)
+static int compare_worktree(const void *a_, const void *b_)
+{
+ const struct worktree *const *a = a_;
+ const struct worktree *const *b = b_;
+ return fspathcmp((*a)->path, (*b)->path);
+}
+
+struct worktree **get_worktrees(unsigned flags)
{
struct worktree **list = NULL;
struct strbuf path = STRBUF_INIT;
list = xmalloc(alloc * sizeof(struct worktree *));
- if ((list[counter] = get_main_worktree()))
- counter++;
+ list[counter++] = get_main_worktree();
strbuf_addf(&path, "%s/worktrees", get_git_common_dir());
dir = opendir(path.buf);
ALLOC_GROW(list, counter + 1, alloc);
list[counter] = NULL;
+ if (flags & GWT_SORT_LINKED)
+ /*
+ * don't sort the first item (main worktree), which will
+ * always be the first
+ */
+ QSORT(list + 1, counter - 1, compare_worktree);
+
mark_current_worktree(list);
return list;
}
if (worktrees)
free_worktrees(worktrees);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
for (i = 0; worktrees[i]; i++) {
struct worktree *wt = worktrees[i];
/* Functions for acting on the information about worktrees. */
+#define GWT_SORT_LINKED (1 << 0) /* keeps linked worktrees sorted */
+
/*
* Get the worktrees. The primary worktree will always be the first returned,
 * and linked worktrees will be pointed to by 'next' in each subsequent
 * worktree.  No specific ordering is done on the linked worktrees.
 *
 * The caller is responsible for freeing the memory from the returned
* worktree(s).
*/
-extern struct worktree **get_worktrees(void);
+extern struct worktree **get_worktrees(unsigned flags);
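
A minimal sketch of a caller using the new flag, assuming the usual in-tree headers; list_worktree_paths() is illustrative and not part of this change:

#include "cache.h"
#include "worktree.h"

static void list_worktree_paths(void)
{
	/*
	 * GWT_SORT_LINKED sorts the linked worktrees by path; the main
	 * worktree is always the first entry either way.
	 */
	struct worktree **worktrees = get_worktrees(GWT_SORT_LINKED);
	int i;

	for (i = 0; worktrees[i]; i++)
		printf("%s%s\n", worktrees[i]->path,
		       worktrees[i]->is_bare ? " (bare)" : "");

	free_worktrees(worktrees);
}

Callers that do not care about ordering keep passing 0, as the updated call site above does.
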
/*
* Return git dir of the worktree. Note that the path may be relative.