*.pl eol=lf diff=perl
*.pm eol=lf diff=perl
*.py eol=lf diff=python
+*.bat eol=crlf
/Documentation/**/*.txt eol=lf
/command-list.txt eol=lf
/GIT-VERSION-GEN eol=lf
/git-difftool
/git-difftool--helper
/git-describe
+/git-env--helper
/git-fast-export
/git-fast-import
/git-fetch
/git-range-diff
/git-read-tree
/git-rebase
-/git-rebase--am
-/git-rebase--common
-/git-rebase--interactive
/git-rebase--preserve-merges
/git-receive-pack
/git-reflog
/git-request-pull
/git-rerere
/git-reset
+/git-restore
/git-rev-list
/git-rev-parse
/git-revert
/git-submodule
/git-submodule--helper
/git-svn
+/git-switch
/git-symbolic-ref
/git-tag
/git-unpack-file
*.user
*.idb
*.pdb
-/Debug/
-/Release/
+*.ilk
+*.iobj
+*.ipdb
+*.dll
+.vs/
+*.manifest
+Debug/
+Release/
+/UpgradeLog*.htm
+/git.VC.VC.opendb
+/git.VC.db
*.dSYM
compiler:
addons:
before_install:
+ - env: jobname=linux-gcc-4.8
+ os: linux
+ dist: trusty
+ compiler:
- env: jobname=Linux32
os: linux
compiler:
API_DOCS = $(patsubst %.txt,%,$(filter-out technical/api-index-skel.txt technical/api-index.txt, $(wildcard technical/api-*.txt)))
SP_ARTICLES += $(API_DOCS)
+TECH_DOCS += MyFirstContribution
TECH_DOCS += SubmittingPatches
TECH_DOCS += technical/hash-function-transition
TECH_DOCS += technical/http-protocol
--- /dev/null
+My First Contribution to the Git Project
+========================================
+:sectanchors:
+
+[[summary]]
+== Summary
+
+This is a tutorial demonstrating the end-to-end workflow of creating a change to
+the Git tree, sending it for review, and making changes based on comments.
+
+[[prerequisites]]
+=== Prerequisites
+
+This tutorial assumes you're already fairly familiar with using Git to manage
+source code. The Git workflow steps will largely remain unexplained.
+
+[[related-reading]]
+=== Related Reading
+
+This tutorial aims to summarize the following documents, which the reader may
+also find useful for additional context:
+
+- `Documentation/SubmittingPatches`
+- `Documentation/howto/new-command.txt`
+
+[[getting-started]]
+== Getting Started
+
+[[cloning]]
+=== Clone the Git Repository
+
+Git is mirrored in a number of locations. Clone the repository from one of them;
+https://git-scm.com/downloads suggests one of the best places to clone from is
+the mirror on GitHub.
+
+----
+$ git clone https://github.com/git/git git
+$ cd git
+----
+
+[[identify-problem]]
+=== Identify Problem to Solve
+
+////
+Use + to indicate fixed-width here; couldn't get ` to work nicely with the
+quotes around "Pony Saying 'Um, Hello'".
+////
+In this tutorial, we will add a new command, +git psuh+, short for ``Pony Saying
+`Um, Hello''' - a feature which has gone unimplemented despite a high frequency
+of invocation during users' typical daily workflow.
+
+(We've seen some other effort in this space with the implementation of popular
+commands such as `sl`.)
+
+[[setup-workspace]]
+=== Set Up Your Workspace
+
+Let's start by making a development branch to work on our changes. Per
+`Documentation/SubmittingPatches`, since a brand new command is a new feature,
+it's fine to base your work on `master`. However, in the future for bugfixes,
+etc., you should check that document and base it on the appropriate branch.
+
+For the purposes of this document, we will base all our work on the `master`
+branch of the upstream project. Create the `psuh` branch you will use for
+development like so:
+
+----
+$ git checkout -b psuh origin/master
+----
+
+We'll make a number of commits here in order to demonstrate how to send a topic
+with multiple patches up for review simultaneously.
+
+[[code-it-up]]
+== Code It Up!
+
+NOTE: A reference implementation can be found at
+https://github.com/nasamuffin/git/tree/psuh.
+
+[[add-new-command]]
+=== Adding a New Command
+
+Lots of the subcommands are written as builtins, which means they are
+implemented in C and compiled into the main `git` executable. Implementing the
+very simple `psuh` command as a built-in will demonstrate the structure of the
+codebase, the internal API, and the process of working together as a contributor
+with the reviewers and maintainer to integrate this change into the system.
+
+Built-in subcommands are typically implemented in a function named "cmd_"
+followed by the name of the subcommand, in a source file named after the
+subcommand and contained within `builtin/`. So it makes sense to implement your
+command in `builtin/psuh.c`. Create that file, and within it, write the entry
+point for your command in a function matching the style and signature:
+
+----
+int cmd_psuh(int argc, const char **argv, const char *prefix)
+----
+
+We'll also need to add the declaration of psuh; open up `builtin.h`, find the
+declaration for `cmd_push`, and add a new line for `psuh` immediately before it,
+in order to keep the declarations sorted:
+
+----
+int cmd_psuh(int argc, const char **argv, const char *prefix);
+----
+
+Be sure to `#include "builtin.h"` in your `psuh.c`.
+
+Go ahead and add a throwaway `printf()` call to that function. This is a decent
+starting point as we can now add build rules and register the command.
+
+NOTE: Your throwaway text, as well as much of the text you will be adding over
+the course of this tutorial, is user-facing. That means it needs to be
+localizable. Take a look at `po/README` under "Marking strings for translation".
+Throughout the tutorial, we will mark strings for translation as necessary; you
+should also do so when writing your user-facing commands in the future.
+
+----
+int cmd_psuh(int argc, const char **argv, const char *prefix)
+{
+ printf(_("Pony saying hello goes here.\n"));
+ return 0;
+}
+----
+
+Let's try to build it. Open `Makefile`, find where `builtin/push.o` is added
+to `BUILTIN_OBJS`, and add `builtin/psuh.o` in the same way next to it in
+alphabetical order. Once you've done so, move to the top-level directory and
+build simply with `make`. Also add the `DEVELOPER=1` variable to turn on
+some additional warnings:
+
+----
+$ echo DEVELOPER=1 >config.mak
+$ make
+----
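+
+For reference, the `BUILTIN_OBJS` portion of your `Makefile` edit might end up
+looking something like this (an illustrative sketch; the neighboring entries
+will differ as the list changes over time):
+
+----
+...
+BUILTIN_OBJS += builtin/psuh.o
+BUILTIN_OBJS += builtin/pull.o
+BUILTIN_OBJS += builtin/push.o
+...
+----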
+
+NOTE: When you are developing the Git project, it's preferred that you use the
+`DEVELOPER` flag; if there's some reason it doesn't work for you, you can turn
+it off, but it's a good idea to mention the problem to the mailing list.
+
+NOTE: The Git build is parallelizable. `-j#` is not included above but you can
+use it as you prefer, here and elsewhere.
+
+Great, now your new command builds happily on its own. But nobody invokes it.
+Let's change that.
+
+The list of commands lives in `git.c`. We can register a new command by adding
+a `cmd_struct` to the `commands[]` array. `struct cmd_struct` takes a string
+with the command name, a function pointer to the command implementation, and a
+setup option flag. For now, let's keep mimicking `push`. Find the line where
+`cmd_push` is registered, copy it, and modify it for `cmd_psuh`, placing the new
+line in alphabetical order.
+
+The options are documented in `builtin.h` under "Adding a new built-in." Since
+we hope to print some data about the user's current workspace context later,
+we need a Git directory, so choose `RUN_SETUP` as your only option.
+
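+If you are mimicking the `push` entry, your new line in the `commands[]` array
+might look something like this (a sketch; match whatever formatting the
+surrounding entries use):
+
+----
+	{ "psuh", cmd_psuh, RUN_SETUP },
+----
+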
+Go ahead and build again. You should see a clean build, so let's kick the tires
+and see if it works. There's a binary you can use to test with in the
+`bin-wrappers` directory.
+
+----
+$ ./bin-wrappers/git psuh
+----
+
+Check it out! You've got a command! Nice work! Let's commit this.
+
+`git status` reveals modified `Makefile`, `builtin.h`, and `git.c` as well as
+untracked `builtin/psuh.c` and `git-psuh`. First, let's take care of the binary,
+which should be ignored. Open `.gitignore` in your editor, find `/git-push`, and
+add an entry for your new command in alphabetical order:
+
+----
+...
+/git-prune-packed
+/git-psuh
+/git-pull
+/git-push
+/git-quiltimport
+/git-range-diff
+...
+----
+
+Checking `git status` again should show that `git-psuh` has been removed from
+the untracked list and `.gitignore` has been added to the modified list. Now we
+can stage and commit:
+
+----
+$ git add Makefile builtin.h builtin/psuh.c git.c .gitignore
+$ git commit -s
+----
+
+You will be presented with your editor in order to write a commit message. Start
+the commit with a 50-column or less subject line, including the name of the
+component you're working on, followed by a blank line (always required) and then
+the body of your commit message, which should provide the bulk of the context.
+Remember to be explicit and provide the "Why" of your change, especially if it
+couldn't easily be understood from your diff. When editing your commit message,
+don't remove the Signed-off-by line which was added by `-s` above.
+
+----
+psuh: add a built-in by popular demand
+
+Internal metrics indicate this is a command many users expect to be
+present. So here's an implementation to help drive customer
+satisfaction and engagement: a pony which doubtfully greets the user,
+or, a Pony Saying "Um, Hello" (PSUH).
+
+This commit message is intentionally formatted to 72 columns per line,
+starts with a single line as "commit message subject" that is written as
+if to command the codebase to do something (add this, teach a command
+that). The body of the message is designed to add information about the
+commit that is not readily deduced from reading the associated diff,
+such as answering the question "why?".
+
+Signed-off-by: A U Thor <author@example.com>
+----
+
+Go ahead and inspect your new commit with `git show`. "psuh:" indicates you
+have modified mainly the `psuh` command. The subject line gives readers an idea
+of what you've changed. The sign-off line (`-s`) indicates that you agree to
+the Developer's Certificate of Origin 1.1 (see the
+`Documentation/SubmittingPatches` +++[[dco]]+++ header).
+
+For the remainder of the tutorial, only the subject line will be listed for the
+sake of brevity. However, fully-fleshed example commit messages are available
+on the reference implementation linked at the top of this document.
+
+[[implementation]]
+=== Implementation
+
+It's probably useful to do at least something besides printing out a string.
+Let's start by having a look at everything we get.
+
+Modify your `cmd_psuh` implementation to dump the args you're passed, keeping
+existing `printf()` calls in place:
+
+----
+ int i;
+
+ ...
+
+ printf(Q_("Your args (there is %d):\n",
+ "Your args (there are %d):\n",
+ argc),
+ argc);
+ for (i = 0; i < argc; i++)
+ printf("%d: %s\n", i, argv[i]);
+
+ printf(_("Your current working directory:\n<top-level>%s%s\n"),
+ prefix ? "/" : "", prefix ? prefix : "");
+
+----
+
+Build and try it. As you may expect, there's pretty much just whatever we give
+on the command line, including the name of our command. (If `prefix` is empty
+for you, try `cd Documentation/ && ../bin-wrappers/git psuh`). That's not so
+helpful. So what other context can we get?
+
+Add a line to `#include "config.h"`. Then, add the following bits to the
+function body:
+
+----
+ const char *cfg_name;
+
+...
+
+ git_config(git_default_config, NULL);
+ if (git_config_get_string_const("user.name", &cfg_name) > 0)
+ printf(_("No name is found in config\n"));
+ else
+ printf(_("Your name: %s\n"), cfg_name);
+----
+
+`git_config()` will grab the configuration from config files known to Git and
+apply standard precedence rules. `git_config_get_string_const()` will look up
+a specific key ("user.name") and give you the value. There are a number of
+single-key lookup functions like this one; you can see them all (and more info
+about how to use `git_config()`) in `Documentation/technical/api-config.txt`.
+
+You should see that the name printed matches the one you see when you run:
+
+----
+$ git config --get user.name
+----
+
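+As an aside, the other single-key helpers follow the same pattern. For example,
+a hypothetical boolean lookup (not needed for `psuh`, shown only for
+illustration) could look like this:
+
+----
+	int sign = 0;
+
+	if (!git_config_get_bool("commit.gpgsign", &sign))
+		printf(_("commit.gpgsign is %s\n"), sign ? "true" : "false");
+----
+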
+Great! Now we know how to check for values in the Git config. Let's commit this
+too, so we don't lose our progress.
+
+----
+$ git add builtin/psuh.c
+$ git commit -sm "psuh: show parameters & config opts"
+----
+
+NOTE: Again, the above is for sake of brevity in this tutorial. In a real change
+you should not use `-m` but instead use the editor to write a meaningful
+message.
+
+Still, it'd be nice to know what the user's working context is like. Let's see
+if we can print the name of the user's current branch. We can mimic the
+`git status` implementation; the printer is located in `wt-status.c` and we can
+see that the branch is held in a `struct wt_status`.
+
+`wt_status_print()` gets invoked by `cmd_status()` in `builtin/commit.c`.
+Looking at that implementation we see the status config being populated like so:
+
+----
+status_init_config(&s, git_status_config);
+----
+
+But as we drill down, we can find that `status_init_config()` wraps a call
+to `git_config()`. Let's modify the code we wrote in the previous commit.
+
+Be sure to include the header to allow you to use `struct wt_status`:
+----
+#include "wt-status.h"
+----
+
+Then modify your `cmd_psuh` implementation to declare your `struct wt_status`,
+prepare it, and print its contents:
+
+----
+ struct wt_status status;
+
+...
+
+ wt_status_prepare(the_repository, &status);
+ git_config(git_default_config, &status);
+
+...
+
+ printf(_("Your current branch: %s\n"), status.branch);
+----
+
+Run it again. Check it out - here's the (verbose) name of your current branch!
+
+Let's commit this as well.
+
+----
+$ git add builtin/psuh.c
+$ git commit -sm "psuh: print the current branch"
+----
+
+Now let's see if we can get some info about a specific commit.
+
+Luckily, there are some helpers for us here. `commit.h` has a function called
+`lookup_commit_reference_by_name` to which we can simply provide a hardcoded
+string; `pretty.h` has an extremely handy `pp_commit_easy()` call which doesn't
+require a full format object to be passed.
+
+Add the following includes:
+
+----
+#include "commit.h"
+#include "pretty.h"
+----
+
+Then, add the following lines within your implementation of `cmd_psuh()` near
+the declarations and the logic, respectively.
+
+----
+ struct commit *c = NULL;
+ struct strbuf commitline = STRBUF_INIT;
+
+...
+
+ c = lookup_commit_reference_by_name("origin/master");
+
+ if (c != NULL) {
+ pp_commit_easy(CMIT_FMT_ONELINE, c, &commitline);
+ printf(_("Current commit: %s\n"), commitline.buf);
+ }
+----
+
+The `struct strbuf` provides some safety belts to your basic `char*`, one of
+which is a length member to prevent buffer overruns. It needs to be initialized
+nicely with `STRBUF_INIT`. Keep it in mind when you need to pass around `char*`.
+
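+As a small aside, a typical `strbuf` round trip looks something like the sketch
+below (illustrative only; see `strbuf.h` for the full API):
+
+----
+	struct strbuf buf = STRBUF_INIT;
+
+	strbuf_addf(&buf, "%d ponies", 1);
+	printf("%s\n", buf.buf);
+	strbuf_release(&buf);	/* frees the heap-allocated buffer */
+----
+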
+`lookup_commit_reference_by_name` resolves the name you pass it, so you can play
+with the value there and see what kind of things you can come up with.
+
+`pp_commit_easy` is a convenience wrapper in `pretty.h` that takes a single
+format enum shorthand, rather than an entire format struct. It then
+pretty-prints the commit according to that shorthand. These are similar to the
+formats available with `--pretty=FOO` in many Git commands.
+
+Build it and run, and if you're using the same name in the example, you should
+see the subject line of the most recent commit in `origin/master` that you know
+about. Neat! Let's commit that as well.
+
+----
+$ git add builtin/psuh.c
+$ git commit -sm "psuh: display the top of origin/master"
+----
+
+[[add-documentation]]
+=== Adding Documentation
+
+Awesome! You've got a fantastic new command that you're ready to share with the
+community. But hang on just a minute - this isn't very user-friendly. Run the
+following:
+
+----
+$ ./bin-wrappers/git help psuh
+----
+
+Your new command is undocumented! Let's fix that.
+
+Take a look at `Documentation/git-*.txt`. These are the manpages for the
+subcommands that Git knows about. You can open these up and take a look to get
+acquainted with the format, but then go ahead and make a new file
+`Documentation/git-psuh.txt`. Like with most of the documentation in the Git
+project, help pages are written with AsciiDoc (see CodingGuidelines, "Writing
+Documentation" section). Use the following template to fill out your own
+manpage:
+
+// Surprisingly difficult to embed AsciiDoc source within AsciiDoc.
+[listing]
+....
+git-psuh(1)
+===========
+
+NAME
+----
+git-psuh - Delight users' typo with a shy horse
+
+
+SYNOPSIS
+--------
+[verse]
+'git-psuh [<arg>...]'
+
+DESCRIPTION
+-----------
+...
+
+OPTIONS[[OPTIONS]]
+------------------
+...
+
+OUTPUT
+------
+...
+
+GIT
+---
+Part of the linkgit:git[1] suite
+....
+
+The most important pieces of this to note are the file header, underlined by =,
+the NAME section, and the SYNOPSIS, which would normally contain the grammar if
+your command took arguments. Try to use well-established manpage headers so your
+documentation is consistent with other Git and UNIX manpages; this makes life
+easier for your user, who can skip to the section they know contains the
+information they need.
+
+Now that you've written your manpage, you'll need to build it explicitly. The
+AsciiDoc is converted to troff, which `man` can display, like so:
+
+----
+$ make all doc
+$ man Documentation/git-psuh.1
+----
+
+or
+
+----
+$ make -C Documentation/ git-psuh.1
+$ man Documentation/git-psuh.1
+----
+
+NOTE: You may need to install the package `asciidoc` to get this to work.
+
+While this isn't as satisfying as running through `git help`, you can at least
+check that your help page looks right.
+
+You can also check that the documentation coverage is good (that is, the project
+sees that your command has been implemented as well as documented) by running
+`make check-docs` from the top-level.
+
+Go ahead and commit your new documentation change.
+
+[[add-usage]]
+=== Adding Usage Text
+
+Try and run `./bin-wrappers/git psuh -h`. Your command should crash at the end.
+That's because `-h` is a special case which your command should handle by
+printing usage.
+
+Take a look at `Documentation/technical/api-parse-options.txt`. This is a handy
+tool for pulling out options you need to be able to handle, and it takes a
+usage string.
+
+In order to use it, we'll need to prepare a NULL-terminated array of usage
+strings and an array of `struct option` entries.
+
+Add a line to `#include "parse-options.h"`.
+
+At global scope, add your array of usage strings:
+
+----
+static const char * const psuh_usage[] = {
+ N_("git psuh [<arg>...]"),
+ NULL,
+};
+----
+
+Then, within your `cmd_psuh()` implementation, we can declare and populate our
+`option` struct. Ours is pretty boring but you can add more to it if you want to
+explore `parse_options()` in more detail:
+
+----
+ struct option options[] = {
+ OPT_END()
+ };
+----
+
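+For instance, if you wanted to experiment, a hypothetical flag (not part of the
+tutorial's implementation) could be wired up like this:
+
+----
+	int quiet = 0;
+	struct option options[] = {
+		OPT_BOOL('q', "quiet", &quiet, N_("keep the pony quiet")),
+		OPT_END()
+	};
+----
+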
+Finally, before you print your args and prefix, add the call to
+`parse_options()`:
+
+----
+ argc = parse_options(argc, argv, prefix, options, psuh_usage, 0);
+----
+
+This call will modify your `argv` parameter. It will strip the options you
+specified in `options` from `argv` and the locations pointed to from `options`
+entries will be updated. Be sure to replace your `argc` with the result from
+`parse_options()`, or you will be confused if you try to parse `argv` later.
+
+It's worth noting the special argument `--`. As you may be aware, many Unix
+commands use `--` to indicate "end of named parameters" - all parameters after
+the `--` are interpreted merely as positional arguments. (This can be handy if
+you want to pass as a parameter something which would usually be interpreted as
+a flag.) `parse_options()` will terminate parsing when it reaches `--` and give
+you the rest of the arguments afterwards, untouched.
+
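+For example, once everything is wired up, a command line like the following
+would hand `-h` to your command as a plain argument instead of triggering the
+usage text:
+
+----
+$ ./bin-wrappers/git psuh -- -h
+----
+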
+Build again. Now, when you run with `-h`, you should see your usage printed and
+your command terminated before anything else interesting happens. Great!
+
+Go ahead and commit this one, too.
+
+[[testing]]
+== Testing
+
+It's important to test your code - even for a little toy command like this one.
+Moreover, your patch won't be accepted into the Git tree without tests. Your
+tests should:
+
+* Illustrate the current behavior of the feature
+* Prove the current behavior matches the expected behavior
+* Ensure the externally-visible behavior isn't broken in later changes
+
+So let's write some tests.
+
+Related reading: `t/README`
+
+[[overview-test-structure]]
+=== Overview of Testing Structure
+
+The tests in Git live in `t/` and are named with a 4-digit decimal number using
+the schema shown in the Naming Tests section of `t/README`.
+
+[[write-new-test]]
+=== Writing Your Test
+
+Since this is a toy command, let's go ahead and name the test with t9999. However,
+as many of the family/subcmd combinations are full, best practice seems to be
+to find a command close enough to the one you've added and share its naming
+space.
+
+Create a new file `t/t9999-psuh-tutorial.sh`. Begin with the header as so (see
+"Writing Tests" and "Source 'test-lib.sh'" in `t/README`):
+
+----
+#!/bin/sh
+
+test_description='git-psuh test
+
+This test runs git-psuh and makes sure it does not crash.'
+
+. ./test-lib.sh
+----
+
+Tests are framed inside of a `test_expect_success` in order to output TAP
+formatted results. Let's make sure that `git psuh` doesn't exit poorly and does
+mention the right animal somewhere:
+
+----
+test_expect_success 'runs correctly with no args and good output' '
+ git psuh >actual &&
+ test_i18ngrep Pony actual
+'
+----
+
+Indicate that you've run everything you wanted by adding the following at the
+bottom of your script:
+
+----
+test_done
+----
+
+Make sure you mark your test script executable:
+
+----
+$ chmod +x t/t9999-psuh-tutorial.sh
+----
+
+You can get an idea of whether you created your new test script successfully
+by running `make -C t test-lint`, which will check for things like test number
+uniqueness, executable bit, and so on.
+
+[[local-test]]
+=== Running Locally
+
+Let's try and run locally:
+
+----
+$ make
+$ cd t/ && prove t9999-psuh-tutorial.sh
+----
+
+You can run the full test suite and ensure `git-psuh` didn't break anything:
+
+----
+$ cd t/
+$ prove -j$(nproc) --shuffle t[0-9]*.sh
+----
+
+NOTE: You can also do this with `make test` or use any testing harness which can
+speak TAP. `prove` can run concurrently. `shuffle` randomizes the order the
+tests are run in, which makes them resilient against unwanted inter-test
+dependencies. `prove` also makes the output nicer.
+
+Go ahead and commit this change, as well.
+
+[[ready-to-share]]
+== Getting Ready to Share
+
+You may have noticed already that the Git project performs its code reviews via
+emailed patches, which are then applied by the maintainer when they are ready
+and approved by the community. The Git project does not accept patches from
+pull requests, and the patches emailed for review need to be formatted a
+specific way. At this point the tutorial diverges, in order to demonstrate two
+different methods of formatting your patchset and getting it reviewed.
+
+The first method to be covered is GitGitGadget, which is useful for those
+already familiar with GitHub's common pull request workflow. This method
+requires a GitHub account.
+
+The second method to be covered is `git send-email`, which can give slightly
+more fine-grained control over the emails to be sent. This method requires some
+setup which can change depending on your system and will not be covered in this
+tutorial.
+
+Regardless of which method you choose, your engagement with reviewers will be
+the same; the review process will be covered after the sections on GitGitGadget
+and `git send-email`.
+
+[[howto-ggg]]
+== Sending Patches via GitGitGadget
+
+One option for sending patches is to follow a typical pull request workflow and
+send your patches out via GitGitGadget. GitGitGadget is a tool created by
+Johannes Schindelin to make life as a Git contributor easier for those used to
+the GitHub PR workflow. It allows contributors to open pull requests against its
+mirror of the Git project, and does some magic to turn the PR into a set of
+emails and send them out for you. It also runs the Git continuous integration
+suite for you. It's documented at http://gitgitgadget.github.io.
+
+[[create-fork]]
+=== Forking `git/git` on GitHub
+
+Before you can send your patch off to be reviewed using GitGitGadget, you will
+need to fork the Git project and upload your changes. First thing - make sure
+you have a GitHub account.
+
+Head to the https://github.com/git/git[GitHub mirror] and look for the Fork
+button. Place your fork wherever you deem appropriate and create it.
+
+[[upload-to-fork]]
+=== Uploading to Your Own Fork
+
+To upload your branch to your own fork, you'll need to add the new fork as a
+remote. You can use `git remote -v` to show the remotes you have added already.
+From your new fork's page on GitHub, you can press "Clone or download" to get
+the URL; then you need to run the following to add, replacing your own URL and
+remote name for the examples provided:
+
+----
+$ git remote add remotename git@github.com:remotename/git.git
+----
+
+or to use the HTTPS URL:
+
+----
+$ git remote add remotename https://github.com/remotename/git.git
+----
+
+Run `git remote -v` again and you should see the new remote showing up.
+Then run `git fetch remotename` (with the real name of your remote) to get
+ready to push.
+
+Next, double-check that you've been doing all your development in a new branch
+by running `git branch`. If you didn't, now is a good time to move your new
+commits to their own branch.
+
+As mentioned briefly at the beginning of this document, we are basing our work
+on `master`, so go ahead and update as shown below, or using your preferred
+workflow.
+
+----
+$ git checkout master
+$ git pull -r
+$ git rebase master psuh
+----
+
+Finally, you're ready to push your new topic branch! (Due to our branch and
+command name choices, be careful when you type the command below.)
+
+----
+$ git push remotename psuh
+----
+
+Now you should be able to go and check out your newly created branch on GitHub.
+
+[[send-pr-ggg]]
+=== Sending a PR to GitGitGadget
+
+In order to have your code tested and formatted for review, you need to start by
+opening a Pull Request against `gitgitgadget/git`. Head to
+https://github.com/gitgitgadget/git and open a PR either with the "New pull
+request" button or the convenient "Compare & pull request" button that may
+appear with the name of your newly pushed branch.
+
+Review the PR's title and description, as it's used by GitGitGadget as the cover
+letter for your change. When you're happy, submit your pull request.
+
+[[run-ci-ggg]]
+=== Running CI and Getting Ready to Send
+
+If it's your first time using GitGitGadget (which is likely, as you're using
+this tutorial) then someone will need to give you permission to use the tool.
+As mentioned in the GitGitGadget documentation, you just need someone who
+already uses it to comment on your PR with `/allow <username>`. GitGitGadget
+will automatically run your PRs through the CI even without the permission given
+but you will not be able to `/submit` your changes until someone allows you to
+use the tool.
+
+If the CI fails, you can update your changes with `git rebase -i` and push your
+branch again:
+
+----
+$ git push -f remotename psuh
+----
+
+In fact, you should continue to make changes this way up until the point when
+your patch is accepted into `next`.
+
+////
+TODO https://github.com/gitgitgadget/gitgitgadget/issues/83
+It'd be nice to be able to verify that the patch looks good before sending it
+to everyone on Git mailing list.
+[[check-work-ggg]]
+=== Check Your Work
+////
+
+[[send-mail-ggg]]
+=== Sending Your Patches
+
+Now that your CI is passing and someone has granted you permission to use
+GitGitGadget with the `/allow` command, sending out for review is as simple as
+commenting on your PR with `/submit`.
+
+[[responding-ggg]]
+=== Updating With Comments
+
+Skip ahead to <<reviewing,Responding to Reviews>> for information on how to
+reply to review comments you will receive on the mailing list.
+
+Once you have your branch again in the shape you want following all review
+comments, you can submit again:
+
+----
+$ git push -f remotename psuh
+----
+
+Next, go look at your pull request against GitGitGadget; you should see the CI
+has been kicked off again. Now while the CI is running is a good time for you
+to modify your description at the top of the pull request thread; it will be
+used again as the cover letter. You should use this space to describe what
+has changed since your previous version, so that your reviewers have some idea
+of what they're looking at. When the CI is done running, you can comment once
+more with `/submit` - GitGitGadget will automatically add a v2 mark to your
+changes.
+
+[[howto-git-send-email]]
+== Sending Patches with `git send-email`
+
+If you don't want to use GitGitGadget, you can also use Git itself to mail your
+patches. Some benefits of using Git this way include finer grained control of
+subject line (for example, being able to use the tag [RFC PATCH] in the subject)
+and being able to send a ``dry run'' mail to yourself to ensure it all looks
+good before going out to the list.
+
+[[setup-git-send-email]]
+=== Prerequisite: Setting Up `git send-email`
+
+Configuration for `send-email` can vary based on your operating system and email
+provider, and so will not be covered in this tutorial, beyond stating that in
+many distributions of Linux, `git-send-email` is not packaged alongside the
+typical `git` install. You may need to install this additional package; there
+are a number of resources online to help you do so. You will also need to
+determine the right way to configure it to use your SMTP server; again, as this
+configuration can change significantly based on your system and email setup, it
+is out of scope for the context of this tutorial.
+
+[[format-patch]]
+=== Preparing Initial Patchset
+
+Sending emails with Git is a two-part process; before you can prepare the emails
+themselves, you'll need to prepare the patches. Luckily, this is pretty simple:
+
+----
+$ git format-patch --cover-letter -o psuh/ master..psuh
+----
+
+The `--cover-letter` parameter tells `format-patch` to create a cover letter
+template for you. You will need to fill in the template before you're ready
+to send - but for now, the template will be next to your other patches.
+
+The `-o psuh/` parameter tells `format-patch` to place the patch files into a
+directory. This is useful because `git send-email` can take a directory and
+send out all the patches from there.
+
+`master..psuh` tells `format-patch` to generate patches for the difference
+between `master` and `psuh`. It will make one patch file per commit. After you
+run, you can go have a look at each of the patches with your favorite text
+editor and make sure everything looks alright; however, it's not recommended to
+make code fixups via the patch file. It's a better idea to make the change the
+normal way using `git rebase -i` or by adding a new commit than by modifying a
+patch.
+
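+The generated files are named after each commit's subject line, so the
+directory might contain something like this (filenames shown here are
+illustrative):
+
+----
+$ ls psuh/
+0000-cover-letter.patch
+0001-psuh-add-a-built-in-by-popular-demand.patch
+0002-psuh-show-parameters-config-opts.patch
+...
+----
+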
+NOTE: Optionally, you can also use the `--rfc` flag to prefix your patch subject
+with ``[RFC PATCH]'' instead of ``[PATCH]''. RFC stands for ``request for
+comments'' and indicates that while your code isn't quite ready for submission,
+you'd like to begin the code review process. This can also be used when your
+patch is a proposal, but you aren't sure whether the community wants to solve
+the problem with that approach or not - to conduct a sort of design review. You
+may also see on the list patches marked ``WIP'' - this means they are incomplete
+but want reviewers to look at what they have so far. You can add this flag with
+`--subject-prefix=WIP`.
+
+Check and make sure that your patches and cover letter template exist in the
+directory you specified - you're nearly ready to send out your review!
+
+[[cover-letter]]
+=== Preparing Email
+
+In addition to an email per patch, the Git community also expects your patches
+to come with a cover letter, typically with a subject line [PATCH 0/x] (where
+x is the number of patches you're sending). Since you invoked `format-patch`
+with `--cover-letter`, you've already got a template ready. Open it up in your
+favorite editor.
+
+You should see a number of headers present already. Check that your `From:`
+header is correct. Then modify your `Subject:` to something which succinctly
+covers the purpose of your entire topic branch, for example:
+
+----
+Subject: [PATCH 0/7] adding the 'psuh' command
+----
+
+Make sure you retain the ``[PATCH 0/X]'' part; that's what indicates to the Git
+community that this email is the beginning of a review, and many reviewers
+filter their email for this type of flag.
+
+You'll need to add some extra parameters when you invoke `git send-email` to add
+the cover letter.
+
+Next you'll have to fill out the body of your cover letter. This is an important
+component of change submission as it explains to the community from a high level
+what you're trying to do, and why, in a way that's more apparent than just
+looking at your diff. Be sure to explain anything your diff doesn't make clear
+on its own.
+
+Here's an example body for `psuh`:
+
+----
+Our internal metrics indicate widespread interest in the command
+git-psuh - that is, many users are trying to use it, but finding it is
+unavailable, using some unknown workaround instead.
+
+The following handful of patches add the psuh command and implement some
+handy features on top of it.
+
+This patchset is part of the MyFirstContribution tutorial and should not
+be merged.
+----
+
+The template created by `git format-patch --cover-letter` includes a diffstat.
+This gives reviewers a summary of what they're in for when reviewing your topic.
+The one generated for `psuh` from the sample implementation looks like this:
+
+----
+ Documentation/git-psuh.txt | 40 +++++++++++++++++++++
+ Makefile | 1 +
+ builtin.h | 1 +
+ builtin/psuh.c | 73 ++++++++++++++++++++++++++++++++++++++
+ git.c | 1 +
+ t/t9999-psuh-tutorial.sh | 12 +++++++
+ 6 files changed, 128 insertions(+)
+ create mode 100644 Documentation/git-psuh.txt
+ create mode 100644 builtin/psuh.c
+ create mode 100755 t/t9999-psuh-tutorial.sh
+----
+
+Finally, the letter will include the version of Git used to generate the
+patches. You can leave that string alone.
+
+[[sending-git-send-email]]
+=== Sending Email
+
+At this point you should have a directory `psuh/` which is filled with your
+patches and a cover letter. Time to mail it out! You can send it like this:
+
+----
+$ git send-email --to=target@example.com psuh/*.patch
+----
+
+NOTE: Check `git help send-email` for some other options which you may find
+valuable, such as changing the Reply-to address or adding more CC and BCC lines.
+
+NOTE: When you are sending a real patch, it will go to git@vger.kernel.org - but
+please don't send your patchset from the tutorial to the real mailing list! For
+now, you can send it to yourself, to make sure you understand how it will look.
+
+After you run the command above, you will be presented with an interactive
+prompt for each patch that's about to go out. This gives you one last chance to
+edit or quit sending something (but again, don't edit code this way). Once you
+press `y` or `a` at these prompts your emails will be sent! Congratulations!
+
+Awesome, now the community will drop everything and review your changes. (Just
+kidding - be patient!)
+
+[[v2-git-send-email]]
+=== Sending v2
+
+Skip ahead to <<reviewing,Responding to Reviews>> for information on how to
+handle comments from reviewers. Continue this section when your topic branch is
+shaped the way you want it to look for your patchset v2.
+
+When you're ready with the next iteration of your patch, the process is fairly
+similar.
+
+First, generate your v2 patches again:
+
+----
+$ git format-patch -v2 --cover-letter -o psuh/ master..psuh
+----
+
+This will add your v2 patches, all named like `v2-000n-my-commit-subject.patch`,
+to the `psuh/` directory. You may notice that they are sitting alongside the v1
+patches; that's fine, but be careful when you are ready to send them.
+
+Edit your cover letter again. Now is a good time to mention what's different
+between your last version and now, if it's something significant. You do not
+need the exact same body in your second cover letter; focus on explaining to
+reviewers the changes you've made that may not be as visible.
+
+You will also need to go and find the Message-Id of your previous cover letter.
+You can either note it when you send the first series, from the output of `git
+send-email`, or you can look it up on the
+https://public-inbox.org/git[mailing list]. Find your cover letter in the
+archives, click on it, then click "permalink" or "raw" to reveal the Message-Id
+header. It should match:
+
+----
+Message-Id: <foo.12345.author@example.com>
+----
+
+Your Message-Id is `<foo.12345.author@example.com>`. This example will be used
+below as well; make sure to replace it with the correct Message-Id for your
+**previous cover letter** - that is, if you're sending v2, use the Message-Id
+from v1; if you're sending v3, use the Message-Id from v2.
+
+While you're looking at the email, you should also note who is CC'd, as it's
+common practice in the mailing list to keep all CCs on a thread. You can add
+these CC lines directly to your cover letter with a line like so in the header
+(before the Subject line):
+
+----
+CC: author@example.com, Othe R <other@example.com>
+----
+
+Now send the emails again, paying close attention to which messages you pass in
+to the command:
+
+----
+$ git send-email --to=target@example.com \
+    --in-reply-to="<foo.12345.author@example.com>" \
+    psuh/v2*
+----
+
+[[single-patch]]
+=== Bonus Chapter: One-Patch Changes
+
+In some cases, your very small change may consist of only one patch. When that
+happens, you only need to send one email. Your commit message should already be
+meaningful and explain at a high level the purpose (what is happening and why)
+of your patch, but if you need to supply even more context, you can do so below
+the `---` in your patch. Take the example below, which was generated with `git
+format-patch` on a single commit, and then edited to add the content between
+the `---` and the diffstat.
+
+----
+From 1345bbb3f7ac74abde040c12e737204689a72723 Mon Sep 17 00:00:00 2001
+From: A U Thor <author@example.com>
+Date: Thu, 18 Apr 2019 15:11:02 -0700
+Subject: [PATCH] README: change the grammar
+
+I think it looks better this way. This part of the commit message will
+end up in the commit-log.
+
+Signed-off-by: A U Thor <author@example.com>
+---
+Let's have a wild discussion about grammar on the mailing list. This
+part of my email will never end up in the commit log. Here is where I
+can add additional context to the mailing list about my intent, outside
+of the context of the commit log. This section was added after `git
+format-patch` was run, by editing the patch file in a text editor.
+
+ README.md | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/README.md b/README.md
+index 88f126184c..38da593a60 100644
+--- a/README.md
++++ b/README.md
+@@ -3,7 +3,7 @@
+ Git - fast, scalable, distributed revision control system
+ =========================================================
+
+-Git is a fast, scalable, distributed revision control system with an
++Git is a fast, scalable, and distributed revision control system with an
+ unusually rich command set that provides both high-level operations
+ and full access to internals.
+
+--
+2.21.0.392.gf8f6787159e-goog
+----
+
+[[now-what]]
+== My Patch Got Emailed - Now What?
+
+[[reviewing]]
+=== Responding to Reviews
+
+After a few days, you will hopefully receive a reply to your patchset with some
+comments. Woohoo! Now you can get back to work.
+
+It's good manners to reply to each comment, notifying the reviewer that you have
+made the change requested, feel the original is better, or that the comment
+inspired you to do something a new way which is superior to both the original
+and the suggested change. This way reviewers don't need to inspect your v2 to
+figure out whether you implemented their comment or not.
+
+If you are going to push back on a comment, be polite and explain why you feel
+your original is better; be prepared that the reviewer may still disagree with
+you, and the rest of the community may weigh in on one side or the other. As
+with all code reviews, it's important to keep an open mind to doing something a
+different way than you originally planned; other reviewers have a different
+perspective on the project than you do, and may be thinking of a valid side
+effect which had not occurred to you. It is always okay to ask for clarification
+if you aren't sure why a change was suggested, or what the reviewer is asking
+you to do.
+
+Make sure your email client has a plaintext email mode and it is turned on; the
+Git list rejects HTML email. Please also follow the mailing list etiquette
+outlined in the
+https://kernel.googlesource.com/pub/scm/git/git/+/todo/MaintNotes[Maintainer's
+Note], which are similar to etiquette rules in most open source communities
+surrounding bottom-posting and inline replies.
+
+When you're making changes to your code, it is cleanest - that is, the resulting
+commits are easiest to look at - if you use `git rebase -i` (interactive
+rebase). Take a look at this
+https://www.oreilly.com/library/view/git-pocket-guide/9781449327507/ch10.html[overview]
+from O'Reilly. The general idea is to modify each commit which requires changes;
+this way, instead of having a patch A with a mistake, a patch B which was fine
+and required no upstream reviews in v1, and a patch C which fixes patch A for
+v2, you can just ship a v2 with a correct patch A and correct patch B. This is
+changing history, but since it's local history which you haven't shared with
+anyone, that is okay for now! (Later, it may not make sense to do this; take a
+look at the section below this one for some context.)
+
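+As a concrete sketch, assuming your topic is still based on `master`, the
+rework typically starts like this; in the todo list that opens (shown
+abbreviated below with made-up object names), change `pick` to `edit` for each
+commit that needs changes:
+
+----
+$ git rebase -i master
+
+edit 1234567 psuh: add a built-in by popular demand
+pick 89abcde psuh: show parameters & config opts
+...
+----
+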
+[[after-approval]]
+=== After Review Approval
+
+The Git project has four integration branches: `pu`, `next`, `master`, and
+`maint`. Your change will be placed into `pu` fairly early on by the maintainer
+while it is still in the review process; from there, when it is ready for wider
+testing, it will be merged into `next`. Plenty of early testers use `next` and
+may report issues. Eventually, changes in `next` will make it to `master`,
+which is typically considered stable. Finally, when a new release is cut,
+`maint` is used to base bugfixes onto. As mentioned at the beginning of this
+document, you can read `Documentation/SubmittingPatches` for some more info about
+the use of the various integration branches.
+
+Back to now: your code has been lauded by the upstream reviewers. It is perfect.
+It is ready to be accepted. You don't need to do anything else; the maintainer
+will merge your topic branch to `next` and life is good.
+
+However, if you discover it isn't so perfect after this point, you may need to
+take some special steps depending on where you are in the process.
+
+If the maintainer has announced in the "What's cooking in git.git" email that
+your topic is marked for `next` - that is, that they plan to merge it to `next`
+but have not yet done so - you should send an email asking the maintainer to
+wait a little longer: "I've sent v4 of my series and you marked it for `next`,
+but I need to change this and that - please wait for v5 before you merge it."
+
+If the topic has already been merged to `next`, rather than modifying your
+patches with `git rebase -i`, you should make further changes incrementally -
+that is, with another commit, based on top of the maintainer's topic branch as
+detailed in https://github.com/gitster/git. Your work is still in the same topic
+but is now incremental, rather than a wholesale rewrite of the topic branch.
+
+The topic branches in the maintainer's GitHub are mirrored in GitGitGadget, so
+if you're sending your reviews out that way, you should be sure to open your PR
+against the appropriate GitGitGadget/Git branch.
+
+If you're using `git send-email`, you can use it the same way as before, but you
+should generate your diffs from `<topic>..<mybranch>` and base your work on
+`<topic>` instead of `master`.
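+
+For example, if the maintainer picked up your work as a topic branch named
+`xx/psuh` (a made-up name, purely for illustration), you could generate the
+incremental patches like so:
+
+----
+$ git format-patch -o psuh/ xx/psuh..psuh
+----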
--- /dev/null
+Git 2.23 Release Notes
+======================
+
+Updates since v2.22
+-------------------
+
+Backward compatibility note
+
+ * The "--base" option of "format-patch" computed the patch-ids for
+ prerequisite patches in an unstable way, which has been updated to
+ compute in a way that is compatible with "git patch-id --stable".
+
+ * The "git log" command by default behaves as if the --mailmap option
+ was given.
+
+
+UI, Workflows & Features
+
+ * The "git fast-export/import" pair has been taught to handle commits
+ with log messages in encoding other than UTF-8 better.
+
+ * In recent versions of Git, per-worktree refs are exposed in
+ refs/worktrees/<wtname>/ hierarchy, which means that worktree names
+ must be a valid refname component. The code now sanitizes the names
+ given to worktrees, to make sure these refs are well-formed.
+
+ * "git merge" learned "--quit" option that cleans up the in-progress
+ merge while leaving the working tree and the index still in a mess.
+
+ * "git format-patch" learns a configuration to set the default for
+ its --notes=<ref> option.
+
+ * The code to show args with potential typo that cannot be
+ interpreted as a commit-ish has been improved.
+
+ * "git clone --recurse-submodules" learned to set up the submodules
+ to ignore commit object names recorded in the superproject gitlink
+ and instead use the commits that happen to be at the tip of the
+ remote-tracking branches from the get-go, by passing the new
+ "--remote-submodules" option.
+
+ * The pattern "git diff/grep" use to extract funcname and words
+ boundary for Matlab has been extend to cover Octave, which is more
+ or less equivalent.
+
+ * "git help git" was hard to discover (well, at least for some
+ people).
+
+ * The pattern "git diff/grep" use to extract funcname and words
+ boundary for Rust has been added.
+
+ * "git status" can be told a non-standard default value for the
+ "--[no-]ahead-behind" option with a new configuration variable
+ status.aheadBehind.
+
+ * "git fetch" and "git pull" reports when a fetch results in
+ non-fast-forward updates to let the user notice unusual situation.
+ The commands learned "--no-show-forced-updates" option to disable
+ this safety feature.
+
+ * Two new commands "git switch" and "git restore" are introduced to
+ split "checking out a branch to work on advancing its history" and
+ "checking out paths out of the index and/or a tree-ish to work on
+ advancing the current history" out of the single "git checkout"
+ command.
+
+ * "git branch --list" learned to always output the detached HEAD as
+ the first item (when the HEAD is detached, of course), regardless
+ of the locale.
+
+ * The conditional inclusion mechanism learned to base the choice on
+ the branch the HEAD currently is on.
+
+ * "git rev-list --objects" learned the "--no-object-names" option to
+ squelch the path to the object that is used as a grouping hint for
+ pack-objects.
+
+ * A new tag.gpgSign configuration variable turns "git tag -a" into
+ "git tag -s".
+
+ * "git multi-pack-index" learned expire and repack subcommands.
+
+ * "git blame" learned to "ignore" commits in the history, whose
+ effects (as well as their presence) get ignored.
+
+ * "git cherry-pick/revert" learned a new "--skip" action.
+
+ * The tips of refs from the alternate object store can be used as
+ starting point for reachability computation now.
+
+ * Extra blank lines in "git status" output have been reduced.
+
+ * The commits in a repository can be described by multiple
+ commit-graph files now, which allows the commit-graph files to be
+ updated incrementally.
+
+ * "git range-diff" output has been tweaked for easier identification
+ of which part of what file the patch shown is about.
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ * Update supporting parts of "git rebase" to remove code that should
+ no longer be used.
+
+ * Developer support to emulate unsatisfied prerequisites in tests to
+ ensure that the remainder of the tests still succeeds when tests
+ with prerequisites are skipped.
+
+ * "git update-server-info" learned not to rewrite the file with the
+ same contents.
+
+ * The way of specifying the path to find dynamic libraries at runtime
+ has been simplified. The old default to pass -R/path/to/dir has been
+ replaced with the new default to pass -Wl,-rpath,/path/to/dir,
+   which is what more recent GCC uses. Those who need to build with an
+   old GCC can still use "CC_LD_DYNPATH=-R".
+
+ * Prepare use of reachability index in topological walker that works
+ on a range (A..B).
+
+ * A new tutorial targeting specifically aspiring git-core
+ developers has been added.
+
+ * Auto-detect how to tell HP-UX aCC where to use dynamically linked
+ libraries from at runtime.
+
+ * "git mergetool" and its tests now spawn fewer subprocesses.
+
+ * Dev support update to help tracing out tests.
+
+ * Support to build with MSVC has been updated.
+
+ * "git fetch" that grabs from a group of remotes learned to run the
+ auto-gc only once at the very end.
+
+ * A handful of Windows build patches have been upstreamed.
+
+ * The code to read state files used by the sequencer machinery for
+ "git status" has been made more robust against a corrupt or stale
+ state files.
+
+ * "git for-each-ref" with multiple patterns have been optimized.
+
+ * The tree-walk API learned to pass an in-core repository
+ instance throughout more codepaths.
+
+ * When one step in a multi-step cherry-pick or revert is reset or
+   committed, the command line prompt script failed to notice the
+   current status, which has been improved.
+
+ * Many GIT_TEST_* environment variables control various aspects of
+   how our tests are run, but a few followed "non-empty is true, empty
+   or unset is false" while others followed the usual "there are a few
+   ways to spell true, like yes, on, etc., and also ways to spell
+   false, like no, off, etc." convention; this inconsistency has been
+   ironed out.
+
+ * Adjust the dir-iterator API and apply it to the local clone
+ optimization codepath.
+
+ * We have been trying out a few language features outside c89; the
+ coding guidelines document did not talk about them and instead had
+ a blanket ban against them.
+
+ * A test helper has been introduced to optimize preparation of test
+ repositories with many simple commits, and a handful of test
+ scripts have been updated to use it.
+
+
+Fixes since v2.22
+-----------------
+
+ * A relative pathname given to "git init --template=<path> <repo>"
+ ought to be relative to the directory "git init" gets invoked in,
+ but it instead was made relative to the repository, which has been
+ corrected.
+
+ * "git worktree add" used to fail when another worktree connected to
+ the same repository was corrupt, which has been corrected.
+
+ * The ownership rule for the file descriptor to fast-import remote
+ backend was mixed up, leading to an unrelated file descriptor getting
+ closed, which has been fixed.
+
+ * A "merge -c" instruction during "git rebase --rebase-merges" should
+ give the user a chance to edit the log message, even when there is
+ otherwise no need to create a new merge and replace the existing
+   one (i.e. fast-forward instead), but did not; this has been
+   corrected.
+
+ * Code cleanup and futureproof.
+
+ * More parameter validation.
+
+ * "git update-server-info" used to leave stale packfiles in its
+ output, which has been corrected.
+
+ * The server side support for "git fetch" used to show incorrect
+ value for the HEAD symbolic ref when the namespace feature is in
+ use, which has been corrected.
+
+ * "git am -i --resolved" segfaulted after trying to see a commit as
+ if it were a tree, which has been corrected.
+
+ * "git bundle verify" needs to see if prerequisite objects exist in
+ the receiving repository, but the command did not check if we are
+ in a repository upfront, which has been corrected.
+
+ * "git merge --squash" is designed to update the working tree and the
+ index without creating the commit, and this cannot be countermanded
+ by adding the "--commit" option; the command now refuses to work
+ when both options are given.
+
+ * The data collected by fsmonitor was not properly written back to
+ the on-disk index file, breaking t7519 tests occasionally, which
+ has been corrected.
+
+ * Update to Unicode 12.1 width table.
+
+ * The command line to invoke a "git cat-file" command from inside
+ "git p4" was not properly quoted to protect a caret and running a
+ broken command on Windows, which has been corrected.
+
+ * "git request-pull" learned to warn when the ref we ask them to pull
+ from in the local repository and in the published repository are
+ different.
+
+ * When creating a partial clone, the object filtering criteria is
+ recorded for the origin of the clone, but this incorrectly used a
+ hardcoded name "origin" to name that remote; it has been corrected
+ to honor the "--origin <name>" option.
+
+ * "git fetch" into a lazy clone forgot to fetch base objects that are
+ necessary to complete delta in a thin packfile, which has been
+ corrected.
+
+ * The filter_data used in the list-objects-filter (which manages a
+ lazily sparse clone repository) did not use the dynamic array API
+ correctly---'nr' is supposed to point at one past the last element
+ of the array in use. This has been corrected.
+
+ * The description about slashes in gitignore patterns (used to
+ indicate things like "anchored to this level only" and "only
+ matches directories") has been revamped.
+
+ * The URL decoding code has been updated to avoid going past the end
+ of the string while parsing %-<hex>-<hex> sequence.
+
+ * The list of for-each like macros used by clang-format has been
+ updated.
+
+ * "git branch --list" learned to show branches that are checked out
+ in other worktrees connected to the same repository prefixed with
+ '+', similar to the way the currently checked out branch is shown
+ with '*' in front.
+ (merge 6e9381469e nb/branch-show-other-worktrees-head later to maint).
+
+ * Code restructuring during 2.20 period broke fetching tags via
+ "import" based transports.
+
+ * The commit-graph file is now part of the "files that the runtime
+ may keep open file descriptors on, all of which would need to be
+ closed when done with the object store", and the file descriptor to
+ an existing commit-graph file now is closed before "gc" finalizes a
+ new instance to replace it.
+
+ * "git checkout -p" needs to selectively apply a patch in reverse,
+ which did not work well.
+
+ * Code clean-up to avoid signed integer wraparounds during binary search.
+
+ * "git interpret-trailers" always treated '#' as the comment
+ character, regardless of core.commentChar setting, which has been
+ corrected.
+
+ * "git stash show 23" used to work, but no more after getting
+ rewritten in C; this regression has been corrected.
+
+ * "git rebase --abort" used to leave refs/rewritten/ when concluding
+ "git rebase -r", which has been corrected.
+
+ * An incorrect list of options was cached after command line
+ completion failed (e.g. trying to complete a command that requires
+ a repository outside one), which has been corrected.
+
+ * The code to parse scaled numbers out of configuration files has
+ been made more robust and also easier to follow.
+
+ * The codepath to compute delta islands used to spew progress output
+ without giving the callers any way to squelch it, which has been
+ fixed.
+
+ * Protocol capabilities that go over wire should never be translated,
+ but it was incorrectly marked for translation, which has been
+ corrected. The output of protocol capabilities for debugging has
+ been tweaked a bit.
+
+ * Use "Erase in Line" CSI sequence that is already used in the editor
+ support to clear cruft in the progress output.
+
+ * "git submodule foreach" did not protect command line options passed
+ to the command to be run in each submodule correctly, when the
+ "--recursive" option was in use.
+
+ * The configuration variable rebase.rescheduleFailedExec should be
+ effective only while running an interactive rebase and should not
+ affect anything when running a non-interactive one, which was not
+ the case. This has been corrected.
+
+ * The "git clone" documentation refers to command line options in its
+ description in the short form; they have been replaced with long
+ forms to make them more recognisable.
+
+ * Generation of pack bitmaps is now disabled when .keep files exist,
+ as these are mutually exclusive features.
+ (merge 7328482253 ew/repack-with-bitmaps-by-default later to maint).
+
+ * "git rm" to resolve a conflicted path leaked an internal message
+ "needs merge" before actually removing the path, which was
+ confusing. This has been corrected.
+
+ * "git stash --keep-index" did not work correctly on paths that have
+ been removed, which has been fixed.
+ (merge b932f6a5e8 tg/stash-keep-index-with-removed-paths later to maint).
+
+ * Windows 7 update ;-)
+
+ * A codepath that reads from GPG for signed object verification read
+ past the end of allocated buffer, which has been fixed.
+
+ * "git clean" silently skipped a path when it cannot lstat() it; now
+ it gives a warning.
+
+ * "git push --atomic" that goes over the transport-helper (namely,
+ the smart http transport) failed to prevent refs to be pushed when
+ it can locally tell that one of the ref update will fail without
+ having to consult the other end, which has been corrected.
+
+ * The internal diff machinery can be made to read out of bounds while
+   looking for the --function-context line in a corner case, which has
+   been corrected.
+ (merge b777f3fd61 jk/xdiff-clamp-funcname-context-index later to maint).
+
+ * Other code cleanup, docfix, build fix, etc.
+ (merge fbec05c210 cc/test-oidmap later to maint).
+ (merge 7a06fb038c jk/no-system-includes-in-dot-c later to maint).
+ (merge 81ed2b405c cb/xdiff-no-system-includes-in-dot-c later to maint).
+ (merge d61e6ce1dd sg/fsck-config-in-doc later to maint).
`-C` options given, the <num> argument of the last `-C` will
take effect.
+--ignore-rev <rev>::
+ Ignore changes made by the revision when assigning blame, as if the
+ change never happened. Lines that were changed or added by an ignored
+ commit will be blamed on the previous commit that changed that line or
+ nearby lines. This option may be specified multiple times to ignore
+ more than one revision. If the `blame.markIgnoredLines` config option
+ is set, then lines that were changed by an ignored commit and attributed to
+ another commit will be marked with a `?` in the blame output. If the
+ `blame.markUnblamableLines` config option is set, then those lines touched
+ by an ignored commit that we could not attribute to another revision are
+	marked with a `*`.
+
+--ignore-revs-file <file>::
+ Ignore revisions listed in `file`, which must be in the same format as an
+ `fsck.skipList`. This option may be repeated, and these files will be
+ processed after any files specified with the `blame.ignoreRevsFile` config
+ option. An empty file name, `""`, will clear the list of revs from
+ previously processed files.
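++
+For example (a sketch; the file name `.git-blame-ignore-revs` and the
+path being blamed are illustrative):
++
+----
+$ git blame --ignore-revs-file .git-blame-ignore-revs Makefile
+----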
+
-h::
Show help message.
This is the same as `gitdir` except that matching is done
	case-insensitively (e.g. on case-insensitive file systems)
+`onbranch`::
+ The data that follows the keyword `onbranch:` is taken to be a
+ pattern with standard globbing wildcards and two additional
+ ones, `**/` and `/**`, that can match multiple path components.
+ If we are in a worktree where the name of the branch that is
+ currently checked out matches the pattern, the include condition
+ is met.
++
+If the pattern ends with `/`, `**` will be automatically added. For
+example, the pattern `foo/` becomes `foo/**`. In other words, it matches
+all branches that begin with `foo/`. This is useful if your branches are
+organized hierarchically and you would like to apply a configuration to
+all the branches in that hierarchy.
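++
+For instance, the following include applies `topic.inc` whenever a branch
+under the hypothetical `topic/` hierarchy is checked out (a sketch):
++
+----
+[includeIf "onbranch:topic/"]
+	path = topic.inc
+----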
+
A few more notes on matching via `gitdir` and `gitdir/i`:
* Symlinks in `$GIT_DIR` are not resolved before matching.
[includeIf "gitdir:/path/to/group/"]
path = foo.inc
+ ; include only if we are in a worktree where foo-branch is
+ ; currently checked out
+ [includeIf "onbranch:foo-branch"]
+ path = foo.inc
+
Values
~~~~~~
can tell Git that you do not need help by setting these to 'false':
+
--
+ fetchShowForcedUpdates::
+ Advice shown when linkgit:git-fetch[1] takes a long time
+ to calculate forced updates after ref updates, or to warn
+ that the check is disabled.
pushUpdateRejected::
Set this variable to 'false' if you want to disable
'pushNonFFCurrent',
we can still suggest that the user push to either
refs/heads/* or refs/tags/* based on the type of the
source object.
+ statusAheadBehind::
+ Shown when linkgit:git-status[1] computes the ahead/behind
+ counts for a local ref compared to its remote tracking ref,
+ and that calculation takes longer than expected. Will not
+ appear if `status.aheadBehind` is false or the option
+ `--no-ahead-behind` is given.
statusHints::
Show directions on how to proceed from the current
state in the output of linkgit:git-status[1], in
the template shown when writing commit messages in
linkgit:git-commit[1], and in the help message shown
- by linkgit:git-checkout[1] when switching branch.
+ by linkgit:git-switch[1] or
+ linkgit:git-checkout[1] when switching branch.
statusUoption::
Advise to consider using the `-u` option to linkgit:git-status[1]
when the command takes more than 2 seconds to enumerate untracked
resolveConflict::
Advice shown by various commands when conflicts
prevent the operation from being performed.
+ sequencerInUse::
+ Advice shown when a sequencer command is already in progress.
implicitIdentity::
Advice on how to set your identity configuration when
your information is guessed from the system username and
domain name.
detachedHead::
- Advice shown when you used linkgit:git-checkout[1] to
- move to the detach HEAD state, to instruct how to create
- a local branch after the fact.
+ Advice shown when you used
+ linkgit:git-switch[1] or linkgit:git-checkout[1]
+ to move to the detach HEAD state, to instruct how to
+ create a local branch after the fact.
checkoutAmbiguousRemoteBranchName::
Advice shown when the argument to
- linkgit:git-checkout[1] ambiguously resolves to a
+ linkgit:git-checkout[1] and linkgit:git-switch[1]
+ ambiguously resolves to a
remote tracking branch on more than one remote in
situations where an unambiguous argument would have
otherwise caused a remote-tracking branch to be
blame.showRoot::
Do not treat root commits as boundaries in linkgit:git-blame[1].
This option defaults to false.
+
+blame.ignoreRevsFile::
+ Ignore revisions listed in the file, one unabbreviated object name per
+ line, in linkgit:git-blame[1]. Whitespace and comments beginning with
+ `#` are ignored. This option may be repeated multiple times. Empty
+ file names will reset the list of ignored revisions. This option will
+ be handled before the command line option `--ignore-revs-file`.
+
+blame.markUnblamableLines::
+	Mark lines that were changed by an ignored revision that we could not
+	attribute to another commit with a `*` in the output of
+	linkgit:git-blame[1].
+
+blame.markIgnoredLines::
+	Mark lines that were changed by an ignored revision that we attributed to
+	another commit with a `?` in the output of linkgit:git-blame[1].
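++
+A minimal configuration sketch (the file name is illustrative):
++
+----
+[blame]
+	ignoreRevsFile = .git-blame-ignore-revs
+	markIgnoredLines = true
+----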
branch.autoSetupMerge::
- Tells 'git branch' and 'git checkout' to set up new branches
+ Tells 'git branch', 'git switch' and 'git checkout' to set up new branches
so that linkgit:git-pull[1] will appropriately merge from the
starting point branch. Note that even if this option is not set,
this behavior can be chosen per-branch using the `--track`
branch. This option defaults to true.
branch.autoSetupRebase::
- When a new branch is created with 'git branch' or 'git checkout'
+ When a new branch is created with 'git branch', 'git switch' or 'git checkout'
that tracks another branch, this variable tells Git to set
up pull to rebase instead of merge (see "branch.<name>.rebase").
When `never`, rebase is never automatically set to true.
checkout.defaultRemote::
- When you run 'git checkout <something>' and only have one
+ When you run 'git checkout <something>'
+ or 'git switch <something>' and only have one
remote, it may implicitly fall back on checking out and
tracking e.g. 'origin/<something>'. This stops working as soon
as you have more than one remote with a '<something>'
disambiguation. The typical use-case is to set this to
`origin`.
+
-Currently this is used by linkgit:git-checkout[1] when 'git checkout
-<something>' will checkout the '<something>' branch on another remote,
+Currently this is used by linkgit:git-switch[1] and
+linkgit:git-checkout[1] when 'git checkout <something>'
+or 'git switch <something>'
+will checkout the '<something>' branch on another remote,
and by linkgit:git-worktree[1] when 'git worktree add' refers to a
remote branch. This setting might be used for other checkout-like
commands or functionality in the future.
-
-checkout.optimizeNewBranch::
- Optimizes the performance of "git checkout -b <new_branch>" when
- using sparse-checkout. When set to true, git will not update the
- repo based on the current sparse-checkout settings. This means it
- will not update the skip-worktree bit in the index nor add/remove
- files in the working directory to reflect the current sparse checkout
- settings nor will it show the local changes.
diff.ignoreSubmodules::
Sets the default value of --ignore-submodules. Note that this
affects only 'git diff' Porcelain, and not lower level 'diff'
- commands such as 'git diff-files'. 'git checkout' also honors
+ commands such as 'git diff-files'. 'git checkout'
+ and 'git switch' also honor
this setting when reporting uncommitted changes. Setting it to
'all' disables the submodule summary normally shown by 'git commit'
and 'git status' when `status.submoduleSummary` is set unless it is
Unknown values will cause 'git fetch' to error out.
+
See also the `--negotiation-tip` option for linkgit:git-fetch[1].
+
+fetch.showForcedUpdates::
+ Set to false to enable `--no-show-forced-updates` in
+ linkgit:git-fetch[1] and linkgit:git-pull[1] commands.
+ Defaults to true.
format.useAutoBase::
A boolean value which lets you enable the `--base=auto` option of
format-patch by default.
+
+format.notes::
+ Provides the default value for the `--notes` option to
+ format-patch. Accepts a boolean value, or a ref which specifies
+ where to get notes. If false, format-patch defaults to
+ `--no-notes`. If true, format-patch defaults to `--notes`. If
+ set to a non-boolean value, format-patch defaults to
+ `--notes=<ref>`, where `ref` is the non-boolean value. Defaults
+ to false.
++
+If one wishes to use the ref `refs/notes/true`, please use that literal
+instead.
++
+This configuration can be specified multiple times in order to allow
+multiple notes refs to be included.
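++
+For example, to take notes from a dedicated notes ref by default (the
+ref name `refs/notes/review` is purely illustrative):
++
+----
+[format]
+	notes = refs/notes/review
+----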
In interactive commands, allow the user to provide one-letter
input with a single key (i.e., without hitting enter).
Currently this is used by the `--patch` mode of
- linkgit:git-add[1], linkgit:git-checkout[1], linkgit:git-commit[1],
+ linkgit:git-add[1], linkgit:git-checkout[1],
+ linkgit:git-restore[1], linkgit:git-commit[1],
linkgit:git-reset[1], and linkgit:git-stash[1]. Note that this
setting is silently ignored if portable keystroke input
is not available; requires the Perl module Term::ReadKey.
log.mailmap::
If true, makes linkgit:git-log[1], linkgit:git-show[1], and
- linkgit:git-whatchanged[1] assume `--use-mailmap`.
+ linkgit:git-whatchanged[1] assume `--use-mailmap`, otherwise
+ assume `--no-use-mailmap`. True by default.
Set to true to enable --branch by default in linkgit:git-status[1].
The option --no-branch takes precedence over this variable.
+status.aheadBehind::
+ Set to true to enable `--ahead-behind` and false to enable
+ `--no-ahead-behind` by default in linkgit:git-status[1] for
+ non-porcelain status formats. Defaults to true.
+
status.displayCommentPrefix::
If set to true, linkgit:git-status[1] will insert a comment
prefix before each output line (starting with
linkgit:git-tag[1]. Without the "--sort=<value>" option provided, the
value of this variable will be used as the default.
+tag.gpgSign::
+ A boolean to specify whether all tags should be GPG signed.
+ Use of this option when running in an automated script can
+ result in a large number of tags being signed. It is therefore
+ convenient to use an agent to avoid typing your gpg passphrase
+	several times. Note that this option doesn't affect tag signing
+ behavior enabled by "-u <keyid>" or "--local-user=<keyid>" options.
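++
+For example (a sketch):
++
+----
+$ git config tag.gpgSign true
+----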
+
tar.umask::
This variable can be used to restrict the permission bits of
tar archive entries. The default is 0002, which turns off the
instead be left unreferenced in the repository.
+
Due to the non-quarantine nature of the `fetch.fsckObjects`
-implementation it can not be relied upon to leave the object store
+implementation it cannot be relied upon to leave the object store
clean like `receive.fsckObjects` can.
+
As objects are unpacked they're written to the object store, so there
Allow several <repository> and <group> arguments to be
specified. No <refspec>s may be specified.
+--[no-]auto-gc::
+ Run `git gc --auto` at the end to perform garbage collection
+ if needed. This is enabled by default.
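++
+For example, to skip the automatic garbage collection for a single fetch
+(a sketch; the remote name is illustrative):
++
+------------
+$ git fetch --no-auto-gc origin
+------------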
+
-p::
--prune::
Before fetching, remove any remote-tracking references that no
When multiple `--server-option=<option>` are given, they are all
sent to the other side in the order listed on the command line.
+--show-forced-updates::
+ By default, git checks if a branch is force-updated during
+ fetch. This can be disabled through fetch.showForcedUpdates, but
+ the --show-forced-updates option guarantees this check occurs.
+ See linkgit:git-config[1].
+
+--no-show-forced-updates::
+ By default, git checks if a branch is force-updated during
+ fetch. Pass --no-show-forced-updates or set fetch.showForcedUpdates
+ to false to skip this check for performance reasons. If used during
+ 'git-pull' the --ff-only option will still check for forced updates
+ before attempting a fast-forward update. See linkgit:git-config[1].
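++
+For example, to skip the forced-update check when fetching a large number
+of refs (a sketch):
++
+------------
+$ git fetch --no-show-forced-updates origin
+------------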
+
-4::
--ipv4::
Use IPv4 addresses only, ignoring IPv6 addresses.
[verse]
'git blame' [-c] [-b] [-l] [--root] [-t] [-f] [-n] [-s] [-e] [-p] [-w] [--incremental]
[-L <range>] [-S <revs-file>] [-M] [-C] [-C] [-C] [--since=<date>]
+ [--ignore-rev <rev>] [--ignore-revs-file <file>]
[--progress] [--abbrev=<n>] [<rev> | --contents <file> | --reverse <rev>..<rev>]
[--] <file>
SYNOPSIS
--------
[verse]
-'git branch' [--color[=<when>] | --no-color]
+'git branch' [--color[=<when>] | --no-color] [--show-current]
[-v [--abbrev=<length> | --no-abbrev]]
- [--show-current]
[--column[=<options>] | --no-column] [--sort=<key>]
[(--merged | --no-merged) [<commit>]]
	[--contains [<commit>]] [--no-contains [<commit>]]
-----------
If `--list` is given, or if there are no non-option arguments, existing
-branches are listed; the current branch will be highlighted with an
-asterisk. Option `-r` causes the remote-tracking branches to be listed,
+branches are listed; the current branch will be highlighted in green and
+marked with an asterisk. Any branches checked out in linked worktrees will
+be highlighted in cyan and marked with a plus sign. Option `-r` causes the
+remote-tracking branches to be listed,
and option `-a` shows both local and remote branches.
If a `<pattern>`
`HEAD`.
Note that this will create the new branch, but it will not switch the
-working tree to it; use "git checkout <newbranch>" to switch to the
+working tree to it; use "git switch <newbranch>" to switch to the
new branch.
When a local branch is started off a remote-tracking branch, Git sets up the
When in list mode,
show sha1 and commit subject line for each head, along with
relationship to upstream branch (if any). If given twice, print
- the name of the upstream branch, as well (see also `git remote
- show <remote>`).
+ the path of the linked worktree (if any) and the name of the upstream
+ branch, as well (see also `git remote show <remote>`). Note that the
+ current worktree's HEAD will not have its path printed (it will always
+ be your current directory).
-q::
--quiet::
+
This behavior is the default when the start point is a remote-tracking branch.
Set the branch.autoSetupMerge configuration variable to `false` if you
-want `git checkout` and `git branch` to always behave as if `--no-track`
+want `git switch`, `git checkout` and `git branch` to always behave as if `--no-track`
were given. Set it to `always` if you want this behavior when the
start-point is either a local or remote-tracking branch.
$ git clone git://git.kernel.org/pub/scm/.../linux-2.6 my2.6
$ cd my2.6
$ git branch my2.6.14 v2.6.14 <1>
-$ git checkout my2.6.14
+$ git switch my2.6.14
------------
+
<1> This step and the next one could be combined into a single step with
NOTES
-----
-If you are creating a branch that you want to checkout immediately, it is
-easier to use the git checkout command with its `-b` option to create
-a branch and check it out with a single command.
+If you are creating a branch that you want to switch to immediately,
+it is easier to use the "git switch" command with its `-c` option to
+do the same thing with a single command.
The options `--contains`, `--no-contains`, `--merged` and `--no-merged`
serve four related but different purposes:
When run with `--branch` option in a repository, the input is first
expanded for the ``previous checkout syntax''
`@{-n}`. For example, `@{-1}` is a way to refer the last thing that
-was checked out using "git checkout" operation. This option should be
+was checked out using "git switch" or "git checkout" operation.
+This option should be
used by porcelains to accept this syntax anywhere a branch name is
expected, so they can act as if you typed the branch name. As an
exception, note that the ``previous checkout operation'' might result
also update `HEAD` to set the specified branch as the current
branch.
-'git checkout' <branch>::
- To prepare for working on <branch>, switch to it by updating
+'git checkout' [<branch>]::
+ To prepare for working on `<branch>`, switch to it by updating
the index and the files in the working tree, and by pointing
- HEAD at the branch. Local modifications to the files in the
+ `HEAD` at the branch. Local modifications to the files in the
working tree are kept, so that they can be committed to the
- <branch>.
+ `<branch>`.
+
-If <branch> is not found but there does exist a tracking branch in
-exactly one remote (call it <remote>) with a matching name, treat as
-equivalent to
+If `<branch>` is not found but there does exist a tracking branch in
+exactly one remote (call it `<remote>`) with a matching name and
+`--no-guess` is not specified, treat as equivalent to
+
------------
$ git checkout -b <branch> --track <remote>/<branch>
------------
+
-If the branch exists in multiple remotes and one of them is named by
-the `checkout.defaultRemote` configuration variable, we'll use that
-one for the purposes of disambiguation, even if the `<branch>` isn't
-unique across all remotes. Set it to
-e.g. `checkout.defaultRemote=origin` to always checkout remote
-branches from there if `<branch>` is ambiguous but exists on the
-'origin' remote. See also `checkout.defaultRemote` in
-linkgit:git-config[1].
-+
-You could omit <branch>, in which case the command degenerates to
+You could omit `<branch>`, in which case the command degenerates to
"check out the current branch", which is a glorified no-op with
rather expensive side-effects to show only the tracking information,
if it exists, for the current branch.
`--track` without `-b` implies branch creation; see the
description of `--track` below.
+
-If `-B` is given, <new_branch> is created if it doesn't exist; otherwise, it
+If `-B` is given, `<new_branch>` is created if it doesn't exist; otherwise, it
is reset. This is the transactional equivalent of
+
------------
'git checkout' --detach [<branch>]::
'git checkout' [--detach] <commit>::
- Prepare to work on top of <commit>, by detaching HEAD at it
+ Prepare to work on top of `<commit>`, by detaching `HEAD` at it
(see "DETACHED HEAD" section), and updating the index and the
files in the working tree. Local modifications to the files
in the working tree are kept, so that the resulting working
tree will be the state recorded in the commit plus the local
modifications.
+
-When the <commit> argument is a branch name, the `--detach` option can
-be used to detach HEAD at the tip of the branch (`git checkout
-<branch>` would check out that branch without detaching HEAD).
+When the `<commit>` argument is a branch name, the `--detach` option can
+be used to detach `HEAD` at the tip of the branch (`git checkout
+<branch>` would check out that branch without detaching `HEAD`).
+
-Omitting <branch> detaches HEAD at the tip of the current branch.
+Omitting `<branch>` detaches `HEAD` at the tip of the current branch.
'git checkout' [<tree-ish>] [--] <pathspec>...::
Overwrite paths in the working tree by replacing with the
- contents in the index or in the <tree-ish> (most often a
- commit). When a <tree-ish> is given, the paths that
- match the <pathspec> are updated both in the index and in
+ contents in the index or in the `<tree-ish>` (most often a
+ commit). When a `<tree-ish>` is given, the paths that
+ match the `<pathspec>` are updated both in the index and in
the working tree.
+
The index may contain unmerged entries because of a previous failed merge.
--quiet::
Quiet, suppress feedback messages.
---[no-]progress::
+--progress::
+--no-progress::
Progress status is reported on the standard error stream
by default when it is attached to a terminal, unless `--quiet`
is specified. This flag enables progress reporting even if not
-f::
--force::
When switching branches, proceed even if the index or the
- working tree differs from HEAD. This is used to throw away
+ working tree differs from `HEAD`. This is used to throw away
local changes.
+
When checking out paths from the index, do not fail upon unmerged
of it").
-b <new_branch>::
- Create a new branch named <new_branch> and start it at
- <start_point>; see linkgit:git-branch[1] for details.
+ Create a new branch named `<new_branch>` and start it at
+ `<start_point>`; see linkgit:git-branch[1] for details.
-B <new_branch>::
- Creates the branch <new_branch> and start it at <start_point>;
- if it already exists, then reset it to <start_point>. This is
+ Creates the branch `<new_branch>` and start it at `<start_point>`;
+ if it already exists, then reset it to `<start_point>`. This is
equivalent to running "git branch" with "-f"; see
linkgit:git-branch[1] for details.
derived from the remote-tracking branch, by looking at the local part of
the refspec configured for the corresponding remote, and then stripping
the initial part up to the "*".
-This would tell us to use "hack" as the local branch when branching
-off of "origin/hack" (or "remotes/origin/hack", or even
-"refs/remotes/origin/hack"). If the given name has no slash, or the above
+This would tell us to use `hack` as the local branch when branching
+off of `origin/hack` (or `remotes/origin/hack`, or even
+`refs/remotes/origin/hack`). If the given name has no slash, or the above
guessing results in an empty name, the guessing is aborted. You can
explicitly give a name with `-b` in such a case.
--no-track::
Do not set up "upstream" configuration, even if the
- branch.autoSetupMerge configuration variable is true.
+ `branch.autoSetupMerge` configuration variable is true.
+
+--guess::
+--no-guess::
+ If `<branch>` is not found but there does exist a tracking
+ branch in exactly one remote (call it `<remote>`) with a
+ matching name, treat as equivalent to
++
+------------
+$ git checkout -b <branch> --track <remote>/<branch>
+------------
++
+If the branch exists in multiple remotes and one of them is named by
+the `checkout.defaultRemote` configuration variable, we'll use that
+one for the purposes of disambiguation, even if the `<branch>` isn't
+unique across all remotes. Set it to
+e.g. `checkout.defaultRemote=origin` to always checkout remote
+branches from there if `<branch>` is ambiguous but exists on the
+'origin' remote. See also `checkout.defaultRemote` in
+linkgit:git-config[1].
++
+Use `--no-guess` to disable this.
-l::
Create the new branch's reflog; see linkgit:git-branch[1] for
--detach::
Rather than checking out a branch to work on it, check out a
commit for inspection and discardable experiments.
- This is the default behavior of "git checkout <commit>" when
- <commit> is not a branch name. See the "DETACHED HEAD" section
+ This is the default behavior of `git checkout <commit>` when
+ `<commit>` is not a branch name. See the "DETACHED HEAD" section
below for details.
--orphan <new_branch>::
- Create a new 'orphan' branch, named <new_branch>, started from
- <start_point> and switch to it. The first commit made on this
+ Create a new 'orphan' branch, named `<new_branch>`, started from
+ `<start_point>` and switch to it. The first commit made on this
new branch will have no parents and it will be the root of a new
history totally disconnected from all the other branches and
commits.
+
The index and the working tree are adjusted as if you had previously run
-"git checkout <start_point>". This allows you to start a new history
-that records a set of paths similar to <start_point> by easily running
-"git commit -a" to make the root commit.
+`git checkout <start_point>`. This allows you to start a new history
+that records a set of paths similar to `<start_point>` by easily running
+`git commit -a` to make the root commit.
+
This can be useful when you want to publish the tree from a commit
without exposing its full history. You might want to do this to publish
code.
+
If you want to start a disconnected history that records a set of paths
-that is totally different from the one of <start_point>, then you should
+that is totally different from the one of `<start_point>`, then you should
clear the index and the working tree right after creating the orphan
-branch by running "git rm -rf ." from the top level of the working tree.
+branch by running `git rm -rf .` from the top level of the working tree.
Afterwards you will be ready to prepare your new files, repopulating the
working tree, by copying them from elsewhere, extracting a tarball, etc.
--ignore-skip-worktree-bits::
In sparse checkout mode, `git checkout -- <paths>` would
- update only entries matched by <paths> and sparse patterns
- in $GIT_DIR/info/sparse-checkout. This option ignores
- the sparse patterns and adds back any files in <paths>.
+ update only entries matched by `<paths>` and sparse patterns
+ in `$GIT_DIR/info/sparse-checkout`. This option ignores
+ the sparse patterns and adds back any files in `<paths>`.
-m::
--merge::
When switching branches with `--merge`, staged changes may be lost.
--conflict=<style>::
- The same as --merge option above, but changes the way the
+ The same as `--merge` option above, but changes the way the
conflicting hunks are presented, overriding the
- merge.conflictStyle configuration variable. Possible values are
+ `merge.conflictStyle` configuration variable. Possible values are
"merge" (default) and "diff3" (in addition to what is shown by
"merge" style, shows the original contents).
-p::
--patch::
Interactively select hunks in the difference between the
- <tree-ish> (or the index, if unspecified) and the working
+ `<tree-ish>` (or the index, if unspecified) and the working
tree. The chosen hunks are then applied in reverse to the
- working tree (and if a <tree-ish> was specified, the index).
+ working tree (and if a `<tree-ish>` was specified, the index).
+
This means that you can use `git checkout -p` to selectively discard
edits from your current working tree. See the ``Interactive Mode''
section of linkgit:git-add[1] to learn how to operate the `--patch` mode.
+
Note that this option uses the no overlay mode by default (see also
-`--[no-]overlay`), and currently doesn't support overlay mode.
+`--overlay`), and currently doesn't support overlay mode.
--ignore-other-worktrees::
`git checkout` refuses when the wanted ref is already checked
out anyway. In other words, the ref can be held by more than one
worktree.
---[no-]recurse-submodules::
- Using --recurse-submodules will update the content of all initialized
+--overwrite-ignore::
+--no-overwrite-ignore::
+ Silently overwrite ignored files when switching branches. This
+ is the default behavior. Use `--no-overwrite-ignore` to abort
+ the operation when the new branch contains ignored files.
+
+--recurse-submodules::
+--no-recurse-submodules::
+ Using `--recurse-submodules` will update the content of all initialized
submodules according to the commit recorded in the superproject. If
local modifications in a submodule would be overwritten the checkout
- will fail unless `-f` is used. If nothing (or --no-recurse-submodules)
+ will fail unless `-f` is used. If nothing (or `--no-recurse-submodules`)
is used, the work trees of submodules will not be updated.
- Just like linkgit:git-submodule[1], this will detach the
- submodules HEAD.
-
---no-guess::
- Do not attempt to create a branch if a remote tracking branch
- of the same name exists.
+ Just like linkgit:git-submodule[1], this will detach `HEAD` of the
+ submodule.
---[no-]overlay::
+--overlay::
+--no-overlay::
In the default overlay mode, `git checkout` never
removes files from the index or the working tree. When
specifying `--no-overlay`, files that appear in the index and
- working tree, but not in <tree-ish> are removed, to make them
- match <tree-ish> exactly.
+ working tree, but not in `<tree-ish>` are removed, to make them
+ match `<tree-ish>` exactly.
<branch>::
Branch to checkout; if it refers to a branch (i.e., a name that,
when prepended with "refs/heads/", is a valid ref), then that
branch is checked out. Otherwise, if it refers to a valid
- commit, your HEAD becomes "detached" and you are no longer on
+ commit, your `HEAD` becomes "detached" and you are no longer on
any branch (see below for details).
+
-You can use the `"@{-N}"` syntax to refer to the N-th last
+You can use the `@{-N}` syntax to refer to the N-th last
branch/commit checked out using "git checkout" operation. You may
-also specify `-` which is synonymous to `"@{-1}"`.
+also specify `-` which is synonymous to `@{-1}`.
+
-As a special case, you may use `"A...B"` as a shortcut for the
+As a special case, you may use `A...B` as a shortcut for the
merge base of `A` and `B` if there is exactly one merge base. You can
leave out at most one of `A` and `B`, in which case it defaults to `HEAD`.
<start_point>::
The name of a commit at which to start the new branch; see
- linkgit:git-branch[1] for details. Defaults to HEAD.
+ linkgit:git-branch[1] for details. Defaults to `HEAD`.
+
As a special case, you may use `"A...B"` as a shortcut for the
merge base of `A` and `B` if there is exactly one merge base. You can
DETACHED HEAD
-------------
-HEAD normally refers to a named branch (e.g. 'master'). Meanwhile, each
+`HEAD` normally refers to a named branch (e.g. `master`). Meanwhile, each
branch refers to a specific commit. Let's look at a repo with three
-commits, one of them tagged, and with branch 'master' checked out:
+commits, one of them tagged, and with branch `master` checked out:
------------
HEAD (refers to branch 'master')
------------
When a commit is created in this state, the branch is updated to refer to
-the new commit. Specifically, 'git commit' creates a new commit 'd', whose
-parent is commit 'c', and then updates branch 'master' to refer to new
-commit 'd'. HEAD still refers to branch 'master' and so indirectly now refers
-to commit 'd':
+the new commit. Specifically, 'git commit' creates a new commit `d`, whose
+parent is commit `c`, and then updates branch `master` to refer to new
+commit `d`. `HEAD` still refers to branch `master` and so indirectly now refers
+to commit `d`:
------------
$ edit; git add; git commit
It is sometimes useful to be able to checkout a commit that is not at
the tip of any named branch, or even to create a new commit that is not
referenced by a named branch. Let's look at what happens when we
-checkout commit 'b' (here we show two ways this may be done):
+checkout commit `b` (here we show two ways this may be done):
------------
$ git checkout v2.0 # or
tag 'v2.0' (refers to commit 'b')
------------
-Notice that regardless of which checkout command we use, HEAD now refers
-directly to commit 'b'. This is known as being in detached HEAD state.
-It means simply that HEAD refers to a specific commit, as opposed to
+Notice that regardless of which checkout command we use, `HEAD` now refers
+directly to commit `b`. This is known as being in detached `HEAD` state.
+It means simply that `HEAD` refers to a specific commit, as opposed to
referring to a named branch. Let's see what happens when we create a commit:
------------
tag 'v2.0' (refers to commit 'b')
------------
-There is now a new commit 'e', but it is referenced only by HEAD. We can
+There is now a new commit `e`, but it is referenced only by `HEAD`. We can
of course add yet another commit in this state:
------------
------------
In fact, we can perform all the normal Git operations. But, let's look
-at what happens when we then checkout master:
+at what happens when we then checkout `master`:
------------
$ git checkout master
------------
It is important to realize that at this point nothing refers to commit
-'f'. Eventually commit 'f' (and by extension commit 'e') will be deleted
+`f`. Eventually commit `f` (and by extension commit `e`) will be deleted
by the routine Git garbage collection process, unless we create a reference
-before that happens. If we have not yet moved away from commit 'f',
+before that happens. If we have not yet moved away from commit `f`,
any of these will create a reference to it:
------------
$ git tag foo <3>
------------
-<1> creates a new branch 'foo', which refers to commit 'f', and then
- updates HEAD to refer to branch 'foo'. In other words, we'll no longer
- be in detached HEAD state after this command.
+<1> creates a new branch `foo`, which refers to commit `f`, and then
+ updates `HEAD` to refer to branch `foo`. In other words, we'll no longer
+ be in detached `HEAD` state after this command.
-<2> similarly creates a new branch 'foo', which refers to commit 'f',
- but leaves HEAD detached.
+<2> similarly creates a new branch `foo`, which refers to commit `f`,
+ but leaves `HEAD` detached.
-<3> creates a new tag 'foo', which refers to commit 'f',
- leaving HEAD detached.
+<3> creates a new tag `foo`, which refers to commit `f`,
+ leaving `HEAD` detached.
-If we have moved away from commit 'f', then we must first recover its object
+If we have moved away from commit `f`, then we must first recover its object
name (typically by using git reflog), and then we can create a reference to
-it. For example, to see the last two commits to which HEAD referred, we
+it. For example, to see the last two commits to which `HEAD` referred, we
can use either of these commands:
------------
ARGUMENT DISAMBIGUATION
-----------------------
-When there is only one argument given and it is not `--` (e.g. "git
-checkout abc"), and when the argument is both a valid `<tree-ish>`
-(e.g. a branch "abc" exists) and a valid `<pathspec>` (e.g. a file
+When there is only one argument given and it is not `--` (e.g. `git
+checkout abc`), and when the argument is both a valid `<tree-ish>`
+(e.g. a branch `abc` exists) and a valid `<pathspec>` (e.g. a file
or a directory whose name is "abc" exists), Git would usually ask
you to disambiguate. Because checking out a branch is so common an
-operation, however, "git checkout abc" takes "abc" as a `<tree-ish>`
+operation, however, `git checkout abc` takes "abc" as a `<tree-ish>`
in such a situation. Use `git checkout -- <pathspec>` if you want
to checkout these paths out of the index.
--------
. The following sequence checks out the `master` branch, reverts
- the `Makefile` to two revisions back, deletes hello.c by
+ the `Makefile` to two revisions back, deletes `hello.c` by
mistake, and gets it back from the index.
+
------------
+
<1> switch branch
<2> take a file out of another commit
-<3> restore hello.c from the index
+<3> restore `hello.c` from the index
+
If you want to check out _all_ C source files out of the index,
you can say
$ git checkout mytopic
------------
+
-However, your "wrong" branch and correct "mytopic" branch may
+However, your "wrong" branch and correct `mytopic` branch may
differ in files that you have modified locally, in which case
the above checkout would fail like this:
+
$ git add frotz
------------
+SEE ALSO
+--------
+linkgit:git-switch[1],
+linkgit:git-restore[1]
+
GIT
---
Part of the linkgit:git[1] suite
[verse]
'git cherry-pick' [--edit] [-n] [-m parent-number] [-s] [-x] [--ff]
[-S[<keyid>]] <commit>...
-'git cherry-pick' --continue
-'git cherry-pick' --quit
-'git cherry-pick' --abort
+'git cherry-pick' (--continue | --skip | --abort | --quit)
DESCRIPTION
-----------
still use the ignore rules given with `-e` options from the command
line. This allows removing all untracked
files, including build products. This can be used (possibly in
- conjunction with 'git reset') to create a pristine
+ conjunction with 'git restore' or 'git reset') to create a pristine
working directory to test a clean build.
-X::
[--dissociate] [--separate-git-dir <git dir>]
[--depth <depth>] [--[no-]single-branch] [--no-tags]
[--recurse-submodules[=<pathspec>]] [--[no-]shallow-submodules]
- [--jobs <n>] [--] <repository> [<directory>]
+ [--[no-]remote-submodules] [--jobs <n>] [--] <repository>
+ [<directory>]
DESCRIPTION
-----------
--[no-]shallow-submodules::
All submodules which are cloned will be shallow with a depth of 1.
+--[no-]remote-submodules::
+	All submodules which are cloned will use the status of the submodule's
+	remote-tracking branch to update the submodule, rather than the
+	superproject's recorded SHA-1. Equivalent to passing `--remote` to
+ `git submodule update`.
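++
+For example (a sketch; the URL is illustrative):
++
+------------
+$ git clone --recurse-submodules --remote-submodules https://example.com/project.git
+------------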
+
--separate-git-dir=<git dir>::
Instead of placing the cloned repository where it is supposed
to be, place the cloned repository at the specified directory,
--------
[verse]
'git commit-graph read' [--object-dir <dir>]
-'git commit-graph verify' [--object-dir <dir>]
+'git commit-graph verify' [--object-dir <dir>] [--shallow]
'git commit-graph write' <options> [--object-dir <dir>]
Use given directory for the location of packfiles and commit-graph
file. This parameter exists to specify the location of an alternate
that only has the objects directory, not a full `.git` directory. The
- commit-graph file is expected to be at `<dir>/info/commit-graph` and
+ commit-graph file is expected to be in the `<dir>/info` directory and
the packfiles are expected to be in `<dir>/pack`.
+
With the `--append` option, include all commits that are present in the
existing commit-graph file.
++
+With the `--split` option, write the commit-graph as a chain of multiple
+commit-graph files stored in `<dir>/info/commit-graphs`. The new commits
+not already in the commit-graph are added in a new "tip" file. This file
+is merged with the existing file if the following merge conditions are
+met:
++
+* If `--size-multiple=<X>` is not specified, let `X` equal 2. If the new
+tip file would have `N` commits and the previous tip has `M` commits and
+`X` times `N` is greater than `M`, instead merge the two files into a
+single file.
++
+* If `--max-commits=<M>` is specified with `M` a positive integer, and the
+new tip file would have more than `M` commits, then instead merge the new
+tip with the previous tip.
++
+Finally, if `--expire-time=<datetime>` is not specified, let `datetime`
+be the current time. After writing the split commit-graph, delete all
+unused commit-graph files whose modification times are older than
+`datetime`.
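++
+A usage sketch (the `--reachable` option, used here to pick the commits to
+write, is pre-existing and not part of this change):
++
+------------
+$ git commit-graph write --reachable --split
+------------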
'read'::
Read the commit-graph file and verify its contents against the object
database. Used to check for corrupted data.
++
+With the `--shallow` option, only check the tip commit-graph file in
+a chain of split commit-graphs.
EXAMPLES
your working tree are temporarily stored to a staging area
called the "index" with 'git add'. A file can be
reverted back, only in the index but not in the working tree,
-to that of the last commit with `git reset HEAD -- <file>`,
+to that of the last commit with `git restore --staged <file>`,
which effectively reverts 'git add' and prevents the changes to
this file from participating in the next commit. After building
the state to be committed incrementally with these commands,
'git-cvsserver' write access to the database file without granting
them write access to the directory, too.
-The database can not be reliably regenerated in a
+The database cannot be reliably regenerated in a
consistent form after the branch it is tracking has changed.
Example: For merged branches, 'git-cvsserver' only tracks
one branch of development, and after a 'git merge' an
for intermediary filters (e.g. for rewriting commit messages
which refer to older commits, or for stripping blobs by id).
+--reencode=(yes|no|abort)::
+ Specify how to handle `encoding` header in commit objects. When
+ asking to 'abort' (which is the default), this program will die
+ when encountering such a commit object. With 'yes', the commit
+ message will be reencoded into UTF-8. With 'no', the original
+ encoding will be preserved.
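++
+For example, to normalize all commit messages to UTF-8 while exporting
+(a sketch):
++
+------------
+$ git fast-export --reencode=yes --all >export.dat
+------------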
+
--refspec::
Apply the specified refspec to each ref exported. Multiple of them can
be specified.
original-oid?
('author' (SP <name>)? SP LT <email> GT SP <when> LF)?
'committer' (SP <name>)? SP LT <email> GT SP <when> LF
+ ('encoding' SP <encoding>)?
data
('from' SP <commit-ish> LF)?
('merge' SP <commit-ish> LF)?
See ``Date Formats'' above for the set of supported formats, and
their syntax.
+`encoding`
+^^^^^^^^^^
+The optional `encoding` command indicates the encoding of the commit
+message. Most commits are UTF-8 and the encoding is omitted, but this
+allows importing commit messages into git without first reencoding them.
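+
+For example, a commit whose message is encoded in ISO-8859-1 might be
+imported with a stream fragment like this (a sketch; the ref, identity,
+timestamp and message are illustrative):
+
+----
+commit refs/heads/master
+committer C O Mitter <committer@example.com> 1234567890 +0000
+encoding ISO-8859-1
+data <<END
+...message bytes in ISO-8859-1...
+END
+----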
+
`from`
^^^^^^
The `from` command is used to specify the commit to initialize
the local repository by fetching from the branches (respectively)
`pu` and `maint` from the remote repository.
+
-The `pu` branch will be updated even if it is does not fast-forward,
+The `pu` branch will be updated even if it does not fast-forward,
because it is prefixed with a plus sign; `tmp` will not be.
* Peek at a remote's branch, without configuring the remote in your local
----
Using --recurse-submodules can only fetch new commits in already checked
out submodules right now. When e.g. upstream added a new submodule in the
-just fetched commits of the superproject the submodule itself can not be
+just fetched commits of the superproject the submodule itself cannot be
fetched, making it impossible to check out that submodule later without
having to do a fetch again. This is expected to be fixed in a future Git
version.
`:lstrip` and `:rstrip` options in the same way as `refname`
above.
+worktreepath::
+ The absolute path to the worktree in which the ref is checked
+ out, if it is checked out in any linked worktree. Empty string
+ otherwise.
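++
+For example, to list local branches together with the worktree (if any)
+in which each is checked out (a sketch):
++
+------------
+$ git for-each-ref --format='%(refname:short) %(worktreepath)' refs/heads
+------------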
+
In addition to the above, for commit and tag objects, the header
field names (`tree`, `parent`, `object`, `type`, and `tag`) can
be used to specify the value in the header field.
[--rfc] [--subject-prefix=Subject-Prefix]
[(--reroll-count|-v) <n>]
[--to=<email>] [--cc=<email>]
- [--[no-]cover-letter] [--quiet] [--notes[=<ref>]]
+ [--[no-]cover-letter] [--quiet]
+ [--no-notes | --notes[=<ref>]]
[--interdiff=<previous>]
[--range-diff=<previous> [--creation-factor=<percent>]]
[--progress]
for details.
--notes[=<ref>]::
+--no-notes::
Append the notes (see linkgit:git-notes[1]) for the commit
after the three-dash line.
+
keeping them as Git notes allows them to be maintained between versions
of the patch series (but see the discussion of the `notes.rewrite`
configuration options in linkgit:git-notes[1] to use this workflow).
++
+The default is `--no-notes`, unless the `format.notes` configuration is
+set.
--[no-]signature=<signature>::
Add a signature to each message produced. Per RFC 3676 the signature
* Apply it:
$ git fetch <project> master:test-apply
- $ git checkout test-apply
- $ git reset --hard
+ $ git switch test-apply
+ $ git restore --source=HEAD --staged --worktree :/
$ git am a.patch
If it does not apply correctly, there can be various reasons.
progress status even if the standard error stream is not
directed to a terminal.
+CONFIGURATION
+-------------
+
+include::config/fsck.txt[]
+
DISCUSSION
----------
Print out the ref name given on the command line by which each
commit was reached.
---use-mailmap::
+--[no-]use-mailmap::
Use mailmap file to map author and committer names and email
addresses to canonical real names and email addresses. See
linkgit:git-shortlog[1].
Discussion on fork-point mode
-----------------------------
-After working on the `topic` branch created with `git checkout -b
+After working on the `topic` branch created with `git switch -c
topic origin/master`, the history of remote-tracking branch
`origin/master` may have been rewound and rebuilt, leading to a
history of this shape:
[-s <strategy>] [-X <strategy-option>] [-S[<keyid>]]
[--[no-]allow-unrelated-histories]
[--[no-]rerere-autoupdate] [-m <msg>] [-F <file>] [<commit>...]
-'git merge' --abort
-'git merge' --continue
+'git merge' (--continue | --abort | --quit)
DESCRIPTION
-----------
Allow the rerere mechanism to update the index with the
result of auto-conflict resolution if possible.
+--overwrite-ignore::
+--no-overwrite-ignore::
+ Silently overwrite ignored files from the merge result. This
+ is the default behavior. Use `--no-overwrite-ignore` to abort.
+
--abort::
Abort the current conflict resolution process, and
try to reconstruct the pre-merge state.
'git merge --abort' is equivalent to 'git reset --merge' when
`MERGE_HEAD` is present.
+--quit::
+ Forget about the current merge in progress. Leave the index
+ and the working tree as-is.
+
--continue::
After a 'git merge' stops due to conflicts you can conclude the
merge by running 'git merge --continue' (see "HOW TO RESOLVE
SYNOPSIS
--------
[verse]
-'git multi-pack-index' [--object-dir=<dir>] <verb>
+'git multi-pack-index' [--object-dir=<dir>] <subcommand>
DESCRIPTION
-----------
`<dir>/packs/multi-pack-index` for the current MIDX file, and
`<dir>/packs` for the pack-files to index.
+The following subcommands are available:
+
write::
- When given as the verb, write a new MIDX file to
- `<dir>/packs/multi-pack-index`.
+ Write a new MIDX file.
verify::
- When given as the verb, verify the contents of the MIDX file
- at `<dir>/packs/multi-pack-index`.
+ Verify the contents of the MIDX file.
+
+expire::
+ Delete the pack-files that are tracked by the MIDX file, but
+ have no objects referenced by the MIDX. Rewrite the MIDX file
+ afterward to remove all references to these pack-files.
+
+repack::
+ Create a new pack-file containing objects in small pack-files
+ referenced by the multi-pack-index. If the size given by the
+ `--batch-size=<size>` argument is zero, then create a pack
+ containing all objects referenced by the multi-pack-index. For
+	a non-zero batch size, select the pack-files by examining packs
+	from oldest to newest, computing each pack's "expected size" as
+	the number of objects in the pack that are referenced by the
+	multi-pack-index, divided by the total number of objects in the
+	pack, multiplied by the pack size. Packs whose expected size is
+	below the batch size are selected until the selected packs' total
+	expected size reaches at least the batch size; if the total never
+	reaches the batch size, nothing is done. If a new pack-file is
+	created, rewrite the multi-pack-index to reference the new
+	pack-file. A later run of 'git multi-pack-index expire' will
+	delete the pack-files that were part of this batch.
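++
+As a hypothetical illustration of the expected-size computation: a 100MB
+pack holding 50,000 objects, of which 10,000 are referenced by the
+multi-pack-index, has an expected size of 10,000 / 50,000 * 100MB = 20MB;
+with a batch size of 50MB that pack would be selected, and further packs
+would be added until the selected packs' expected sizes sum to at least
+50MB.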
EXAMPLES
----
Using --recurse-submodules can only fetch new commits in already checked
out submodules right now. When e.g. upstream added a new submodule in the
-just fetched commits of the superproject the submodule itself can not be
+just fetched commits of the superproject the submodule itself cannot be
fetched, making it impossible to check out that submodule later without
having to do a fetch again. This is expected to be fixed in a future Git
version.
[<upstream> [<branch>]]
'git rebase' [-i | --interactive] [<options>] [--exec <cmd>] [--onto <newbase>]
--root [<branch>]
-'git rebase' --continue | --skip | --abort | --quit | --edit-todo | --show-current-patch
+'git rebase' (--continue | --skip | --abort | --quit | --edit-todo | --show-current-patch)
DESCRIPTION
-----------
If <branch> is specified, 'git rebase' will perform an automatic
-`git checkout <branch>` before doing anything else. Otherwise
+`git switch <branch>` before doing anything else. Otherwise
it remains on the current branch.
If <upstream> is not specified, the upstream configured in
staging/master
staging/staging-linus
staging/staging-next
-$ git checkout -b staging staging/master
+$ git switch -c staging staging/master
...
------------
One way to do it is to pull master into the topic branch:
------------
- $ git checkout topic
+ $ git switch topic
$ git merge master
o---*---o---+ topic
in which case the final commit graph would look like this:
------------
- $ git checkout topic
+ $ git switch topic
$ git merge master
$ ... work on both topic and master branches
- $ git checkout master
+ $ git switch master
$ git merge topic
o---*---o---+---o---o topic
top of the tip before the test merge:
------------
- $ git checkout topic
+ $ git switch topic
$ git merge master
$ git reset --hard HEAD^ ;# rewind the test merge
$ ... work on both topic and master branches
- $ git checkout master
+ $ git switch master
$ git merge topic
o---*---o-------o---o topic
the current branch.)
+
This means that `git reset <paths>` is the opposite of `git add
-<paths>`.
+<paths>`. This command is equivalent to
+`git restore [--source=<tree-ish>] --staged <paths>...`.
+
After running `git reset <paths>` to update the index entry, you can
-use linkgit:git-checkout[1] to check the contents out of the index to
-the working tree.
-Alternatively, using linkgit:git-checkout[1] and specifying a commit, you
+use linkgit:git-restore[1] to check the contents out of the index to
+the working tree. Alternatively, using linkgit:git-restore[1]
+and specifying a commit with `--source`, you
can copy the contents of a path out of a commit to the index and to the
working tree in one go.
changes, reset is aborted.
--
-If you want to undo a commit other than the latest on a branch,
-linkgit:git-revert[1] is your friend.
+See "Reset, restore and revert" in linkgit:git[1] for the differences
+between the three commands.
OPTIONS
Undo a commit, making it a topic branch::
+
------------
-$ git branch topic/wip <1>
-$ git reset --hard HEAD~3 <2>
-$ git checkout topic/wip <3>
+$ git branch topic/wip <1>
+$ git reset --hard HEAD~3 <2>
+$ git switch topic/wip <3>
------------
+
<1> You have made some commits, but realize they were premature
need to get to the other branch for a quick bugfix.
+
------------
-$ git checkout feature ;# you were working in "feature" branch and
-$ work work work ;# got interrupted
+$ git switch feature ;# you were working in "feature" branch and
+$ work work work ;# got interrupted
$ git commit -a -m "snapshot WIP" <1>
-$ git checkout master
+$ git switch master
$ fix fix fix
$ git commit ;# commit with real log
-$ git checkout feature
+$ git switch feature
$ git reset --soft HEAD^ ;# go back to WIP state <2>
$ git reset <3>
------------
+
------------
$ git tag start
-$ git checkout -b branch1
+$ git switch -c branch1
$ edit
$ git commit ... <1>
$ edit
-$ git checkout -b branch2 <2>
+$ git switch -c branch2 <2>
$ git reset --keep start <3>
------------
+
<1> This commits your first edits in `branch1`.
<2> In the ideal world, you could have realized that the earlier
commit did not belong to the new topic when you created and switched
- to `branch2` (i.e. `git checkout -b branch2 start`), but nobody is
+ to `branch2` (i.e. `git switch -c branch2 start`), but nobody is
perfect.
<3> But you can use `reset --keep` to remove the unwanted commit after
you switched to `branch2`.
--- /dev/null
+git-restore(1)
+==============
+
+NAME
+----
+git-restore - Restore working tree files
+
+SYNOPSIS
+--------
+[verse]
+'git restore' [<options>] [--source=<tree>] [--staged] [--worktree] <pathspec>...
+'git restore' (-p|--patch) [<options>] [--source=<tree>] [--staged] [--worktree] [<pathspec>...]
+
+DESCRIPTION
+-----------
+Restore specified paths in the working tree with some contents from a
+restore source. If a path is tracked but does not exist in the restore
+source, it will be removed to match the source.
+
+The command can also be used to restore the content in the index with
+`--staged`, or restore both the working tree and the index with
+`--staged --worktree`.
+
+By default, the restore sources for working tree and the index are the
+index and `HEAD` respectively. `--source` could be used to specify a
+commit as the restore source.
+
+See "Reset, restore and revert" in linkgit:git[1] for the differences
+between the three commands.
+
+THIS COMMAND IS EXPERIMENTAL. THE BEHAVIOR MAY CHANGE.
+
+OPTIONS
+-------
+-s <tree>::
+--source=<tree>::
+ Restore the working tree files with the content from the given
+ tree. It is common to specify the source tree by naming a
+ commit, branch or tag associated with it.
++
+If not specified, the default restore source for the working tree is
+the index, and the default restore source for the index is
+`HEAD`. When both `--staged` and `--worktree` are specified,
+`--source` must also be specified.
+
+-p::
+--patch::
+ Interactively select hunks in the difference between the
+ restore source and the restore location. See the ``Interactive
+ Mode'' section of linkgit:git-add[1] to learn how to operate
+ the `--patch` mode.
++
+Note that `--patch` can accept no pathspec and will prompt to restore
+all modified paths.
+
+-W::
+--worktree::
+-S::
+--staged::
+ Specify the restore location. If neither option is specified,
+ by default the working tree is restored. Specifying `--staged`
+ will only restore the index. Specifying both restores both.
+
+-q::
+--quiet::
+ Quiet, suppress feedback messages. Implies `--no-progress`.
+
+--progress::
+--no-progress::
+ Progress status is reported on the standard error stream
+ by default when it is attached to a terminal, unless `--quiet`
+ is specified. This flag enables progress reporting even if not
+ attached to a terminal, regardless of `--quiet`.
+
+--ours::
+--theirs::
+ When restoring files in the working tree from the index, use
+ stage #2 ('ours') or #3 ('theirs') for unmerged paths.
++
+Note that during `git rebase` and `git pull --rebase`, 'ours' and
+'theirs' may appear swapped. See the explanation of the same options
+in linkgit:git-checkout[1] for details.
+
+-m::
+--merge::
+ When restoring files on the working tree from the index,
+ recreate the conflicted merge in the unmerged paths.
+
+--conflict=<style>::
+ The same as `--merge` option above, but changes the way the
+ conflicting hunks are presented, overriding the
+ `merge.conflictStyle` configuration variable. Possible values
+ are "merge" (default) and "diff3" (in addition to what is
+ shown by "merge" style, shows the original contents).
+
+--ignore-unmerged::
+ When restoring files on the working tree from the index, do
+ not abort the operation if there are unmerged entries and
+	neither `--ours`, `--theirs`, `--merge` nor `--conflict` is
+ specified. Unmerged paths on the working tree are left alone.
+
+--ignore-skip-worktree-bits::
+	In sparse checkout mode, the default is to only update entries
+	matched by `<pathspec>` and sparse patterns in
+	`$GIT_DIR/info/sparse-checkout`. This option ignores the sparse
+ patterns and unconditionally restores any files in
+ `<pathspec>`.
+
+--overlay::
+--no-overlay::
+ In overlay mode, the command never removes files when
+ restoring. In no-overlay mode, tracked files that do not
+ appear in the `--source` tree are removed, to make them match
+ `<tree>` exactly. The default is no-overlay mode.
+
+EXAMPLES
+--------
+
+The following sequence switches to the `master` branch, reverts the
+`Makefile` to two revisions back, deletes `hello.c` by mistake, and gets
+it back from the index.
+
+------------
+$ git switch master
+$ git restore --source master~2 Makefile <1>
+$ rm -f hello.c
+$ git restore hello.c <2>
+------------
+
+<1> take a file out of another commit
+<2> restore `hello.c` from the index
+
+If you want to restore _all_ C source files to match the version in
+the index, you can say
+
+------------
+$ git restore '*.c'
+------------
+
+Note the quotes around `*.c`. The file `hello.c` will also be
+restored, even though it is no longer in the working tree, because the
+file globbing is used to match entries in the index (not expanded by
+the shell against the working tree).
+
+To restore all files in the current directory
+
+------------
+$ git restore .
+------------
+
+or to restore all working tree files with 'top' pathspec magic (see
+linkgit:gitglossary[7])
+
+------------
+$ git restore :/
+------------
+
+To restore a file in the index to match the version in `HEAD` (this is
+the same as using linkgit:git-reset[1])
+
+------------
+$ git restore --staged hello.c
+------------
+
+or you can restore both the index and the working tree (this is the
+same as using linkgit:git-checkout[1])
+
+------------
+$ git restore --source=HEAD --staged --worktree hello.c
+------------
+
+or the short form which is more practical but less readable:
+
+------------
+$ git restore -s@ -SW hello.c
+------------
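+
+For instance, to interactively pick individual hunks to restore rather
+than whole files, you can combine a pathspec with the `--patch` mode
+described above (`Makefile` is just an example path):
+
+------------
+$ git restore -p Makefile
+------------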
+
+SEE ALSO
+--------
+linkgit:git-checkout[1],
+linkgit:git-reset[1]
+
+GIT
+---
+Part of the linkgit:git[1] suite
[ --date=<format>]
[ [ --objects | --objects-edge | --objects-edge-aggressive ]
[ --unpacked ]
+ [ --object-names | --no-object-names ]
[ --filter=<filter-spec> [ --filter-print-omitted ] ] ]
[ --missing=<missing-action> ]
[ --pretty | --header ]
--------
[verse]
'git revert' [--[no-]edit] [-n] [-m parent-number] [-s] [-S[<keyid>]] <commit>...
-'git revert' --continue
-'git revert' --quit
-'git revert' --abort
+'git revert' (--continue | --skip | --abort | --quit)
DESCRIPTION
-----------
throw away all uncommitted changes in your working directory, you
should see linkgit:git-reset[1], particularly the `--hard` option. If
you want to extract specific files as they were in another commit, you
-should see linkgit:git-checkout[1], specifically the `git checkout
-<commit> -- <filename>` syntax. Take care with these alternatives as
+should see linkgit:git-restore[1], specifically the `--source`
+option. Take care with these alternatives as
both will discard uncommitted changes in your working directory.
+See "Reset, restore and revert" in linkgit:git[1] for the differences
+between the three commands.
+
OPTIONS
-------
<commit>...::
Automating
~~~~~~~~~~
+--no-[to|cc|bcc]::
+ Clears any list of "To:", "Cc:", "Bcc:" addresses previously
+ set via config.
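++
+For instance, to ignore a "Cc:" list configured via `sendemail.cc` for
+a single invocation (the patch file name is only illustrative):
++
+------------
+$ git send-email --no-cc --to=maintainer@example.org 0001-my-fix.patch
+------------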
+
+--no-identity::
+ Clears the previously read value of `sendemail.identity` set
+ via config, if any.
+
--to-cmd=<command>::
Specify a command to execute once per patch file which
should generate patch file specific "To:" entries.
+
----------------------------------------------------------------
# ... hack hack hack ...
-$ git checkout -b my_wip
+$ git switch -c my_wip
$ git commit -a -m "WIP"
-$ git checkout master
+$ git switch master
$ edit emergency fix
$ git commit -a -m "Fix in a hurry"
-$ git checkout my_wip
+$ git switch my_wip
$ git reset --soft HEAD^
# ... continue hacking ...
----------------------------------------------------------------
linkgit:git-checkout[1],
linkgit:git-commit[1],
linkgit:git-reflog[1],
-linkgit:git-reset[1]
+linkgit:git-reset[1],
+linkgit:git-switch[1]
GIT
---
--- /dev/null
+git-switch(1)
+=============
+
+NAME
+----
+git-switch - Switch branches
+
+SYNOPSIS
+--------
+[verse]
+'git switch' [<options>] [--no-guess] <branch>
+'git switch' [<options>] --detach [<start-point>]
+'git switch' [<options>] (-c|-C) <new-branch> [<start-point>]
+'git switch' [<options>] --orphan <new-branch>
+
+DESCRIPTION
+-----------
+Switch to a specified branch. The working tree and the index are
+updated to match the branch. All new commits will be added to the tip
+of this branch.
+
+Optionally a new branch can be created with either `-c` or `-C`, or
+created automatically from a remote branch of the same name (see
+`--guess`); alternatively, the working tree can be detached from any
+branch with `--detach`. All of this happens as part of the switch.
+
+Switching branches does not require a clean index and working tree
+(i.e. no differences compared to `HEAD`). The operation is aborted,
+however, if it would lead to loss of local changes, unless told
+otherwise with `--discard-changes` or `--merge`.
+
+THIS COMMAND IS EXPERIMENTAL. THE BEHAVIOR MAY CHANGE.
+
+OPTIONS
+-------
+<branch>::
+ Branch to switch to.
+
+<new-branch>::
+ Name for the new branch.
+
+<start-point>::
+ The starting point for the new branch. Specifying a
+ `<start-point>` allows you to create a branch based on some
+ other point in history than where HEAD currently points. (Or,
+ in the case of `--detach`, allows you to inspect and detach
+ from some other point.)
++
+You can use the `@{-N}` syntax to refer to the N-th last
+branch/commit switched to using "git switch" or "git checkout"
+operation. You may also specify `-`, which is synonymous with `@{-1}`.
+This is often used to switch quickly between two branches, or to undo
+a branch switch made by mistake.
++
+As a special case, you may use `A...B` as a shortcut for the merge
+base of `A` and `B` if there is exactly one merge base. You can leave
+out at most one of `A` and `B`, in which case it defaults to `HEAD`.
+
+-c <new-branch>::
+--create <new-branch>::
+ Create a new branch named `<new-branch>` starting at
+ `<start-point>` before switching to the branch. This is a
+ convenient shortcut for:
++
+------------
+$ git branch <new-branch>
+$ git switch <new-branch>
+------------
+
+-C <new-branch>::
+--force-create <new-branch>::
+ Similar to `--create` except that if `<new-branch>` already
+ exists, it will be reset to `<start-point>`. This is a
+ convenient shortcut for:
++
+------------
+$ git branch -f <new-branch>
+$ git switch <new-branch>
+------------
+
+-d::
+--detach::
+ Switch to a commit for inspection and discardable
+ experiments. See the "DETACHED HEAD" section in
+ linkgit:git-checkout[1] for details.
+
+--guess::
+--no-guess::
+ If `<branch>` is not found but there does exist a tracking
+ branch in exactly one remote (call it `<remote>`) with a
+ matching name, treat as equivalent to
++
+------------
+$ git switch -c <branch> --track <remote>/<branch>
+------------
++
+If the branch exists in multiple remotes and one of them is named by
+the `checkout.defaultRemote` configuration variable, we'll use that
+one for the purposes of disambiguation, even if the `<branch>` isn't
+unique across all remotes. Set it to e.g. `checkout.defaultRemote=origin`
+to always check out remote branches from there if `<branch>` is
+ambiguous but exists on the 'origin' remote. See also
+`checkout.defaultRemote` in linkgit:git-config[1].
++
+`--guess` is the default behavior. Use `--no-guess` to disable it.
+
+-f::
+--force::
+ An alias for `--discard-changes`.
+
+--discard-changes::
+ Proceed even if the index or the working tree differs from
+ `HEAD`. Both the index and working tree are restored to match
+ the switching target. If `--recurse-submodules` is specified,
+ submodule content is also restored to match the switching
+ target. This is used to throw away local changes.
+
+-m::
+--merge::
+ If you have local modifications to one or more files that are
+ different between the current branch and the branch to which
+ you are switching, the command refuses to switch branches in
+ order to preserve your modifications in context. However,
+ with this option, a three-way merge between the current
+ branch, your working tree contents, and the new branch is
+ done, and you will be on the new branch.
++
+When a merge conflict happens, the index entries for conflicting
+paths are left unmerged, and you need to resolve the conflicts
+and mark the resolved paths with `git add` (or `git rm` if the merge
+should result in deletion of the path).
+
+--conflict=<style>::
+ The same as `--merge` option above, but changes the way the
+ conflicting hunks are presented, overriding the
+ `merge.conflictStyle` configuration variable. Possible values are
+ "merge" (default) and "diff3" (in addition to what is shown by
+ "merge" style, shows the original contents).
+
+-q::
+--quiet::
+ Quiet, suppress feedback messages.
+
+--progress::
+--no-progress::
+ Progress status is reported on the standard error stream
+ by default when it is attached to a terminal, unless `--quiet`
+ is specified. This flag enables progress reporting even if not
+ attached to a terminal, regardless of `--quiet`.
+
+-t::
+--track::
+ When creating a new branch, set up "upstream" configuration.
+ `-c` is implied. See `--track` in linkgit:git-branch[1] for
+ details.
++
+If no `-c` option is given, the name of the new branch will be derived
+from the remote-tracking branch, by looking at the local part of the
+refspec configured for the corresponding remote, and then stripping
+the initial part up to the "*". This would tell us to use `hack` as
+the local branch when branching off of `origin/hack` (or
+`remotes/origin/hack`, or even `refs/remotes/origin/hack`). If the
+given name has no slash, or the above guessing results in an empty
+name, the guessing is aborted. You can explicitly give a name with
+`-c` in such a case.
+
+--no-track::
+ Do not set up "upstream" configuration, even if the
+ `branch.autoSetupMerge` configuration variable is true.
+
+--orphan <new-branch>::
+ Create a new 'orphan' branch, named `<new-branch>`. All
+ tracked files are removed.
+
+--ignore-other-worktrees::
+ `git switch` refuses when the wanted ref is already
+ checked out by another worktree. This option makes it check
+ the ref out anyway. In other words, the ref can be held by
+ more than one worktree.
+
+--recurse-submodules::
+--no-recurse-submodules::
+ Using `--recurse-submodules` will update the content of all
+ initialized submodules according to the commit recorded in the
+ superproject. If nothing (or `--no-recurse-submodules`) is
+ used, the work trees of submodules will not be updated. Just
+ like linkgit:git-submodule[1], this will detach `HEAD` of the
+ submodules.
+
+EXAMPLES
+--------
+
+The following command switches to the "master" branch:
+
+------------
+$ git switch master
+------------
+
+After working in the wrong branch, switching to the correct branch
+would be done using:
+
+------------
+$ git switch mytopic
+------------
+
+However, your "wrong" branch and correct "mytopic" branch may differ
+in files that you have modified locally, in which case the above
+switch would fail like this:
+
+------------
+$ git switch mytopic
+error: You have local changes to 'frotz'; not switching branches.
+------------
+
+You can give the `-m` flag to the command, which would try a three-way
+merge:
+
+------------
+$ git switch -m mytopic
+Auto-merging frotz
+------------
+
+After this three-way merge, the local modifications are _not_
+registered in your index file, so `git diff` would show you what
+changes you made since the tip of the new branch.
+
+To switch back to the previous branch before we switched to mytopic
+(i.e. "master" branch):
+
+------------
+$ git switch -
+------------
+
+You can grow a new branch from any commit. For example, switch to
+"HEAD~3" and create branch "fixup":
+
+------------
+$ git switch -c fixup HEAD~3
+Switched to a new branch 'fixup'
+------------
+
+If you want to start a new branch from a remote branch of the same
+name:
+
+------------
+$ git switch new-topic
+Branch 'new-topic' set up to track remote branch 'new-topic' from 'origin'
+Switched to a new branch 'new-topic'
+------------
+
+To check out commit `HEAD~3` for temporary inspection or experiment
+without creating a new branch:
+
+------------
+$ git switch --detach HEAD~3
+HEAD is now at 9fc9555312 Merge branch 'cc/shared-index-permbits'
+------------
+
+If it turns out whatever you have done is worth keeping, you can
+always create a new name for it (without switching away):
+
+------------
+$ git switch -c good-surprises
+------------
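+
+As one more illustration, the `--orphan` option described above starts
+a new branch with no history and removes all tracked files (the branch
+name here is only an example):
+
+------------
+$ git switch --orphan new-history
+------------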
+
+SEE ALSO
+--------
+linkgit:git-checkout[1],
+linkgit:git-branch[1]
+
+GIT
+---
+Part of the linkgit:git[1] suite
-s::
--sign::
Make a GPG-signed tag, using the default e-mail address's key.
+ The default behavior of tag GPG-signing is controlled by `tag.gpgSign`
+ configuration variable if it exists, or disabled otherwise.
+ See linkgit:git-config[1].
+
+--no-sign::
+ Override `tag.gpgSign` configuration variable that is
+ set to force each and every tag to be signed.
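++
+For instance, if `tag.gpgSign` is set to `true`, an unsigned annotated
+tag can still be created (the tag name and message are only examples):
++
+------------
+$ git tag --no-sign -m "Test build" v1.0-test
+------------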
-u <keyid>::
--local-user=<keyid>::
SYNOPSIS
--------
[verse]
-'git update-server-info' [--force]
+'git update-server-info'
DESCRIPTION
-----------
what references and packs the server has. This command
generates such auxiliary files.
-
-OPTIONS
--------
-
--f::
---force::
- Update the info files from scratch.
-
-
OUTPUT
------
manual page gives you an overview of the command-line command syntax.
A formatted and hyperlinked copy of the latest Git documentation
-can be viewed at `https://git.github.io/htmldocs/git.html`.
+can be viewed at https://git.github.io/htmldocs/git.html
+or https://git-scm.com/docs.
OPTIONS
include::cmds-foreignscminterface.txt[]
+Reset, restore and revert
+~~~~~~~~~~~~~~~~~~~~~~~~~
+There are three commands with similar names: `git reset`,
+`git restore` and `git revert`.
+
+* linkgit:git-revert[1] is about making a new commit that reverts the
+ changes made by other commits.
+
+* linkgit:git-restore[1] is about restoring files in the working tree
+ from either the index or another commit. This command does not
+ update your branch. The command can also be used to restore files in
+ the index from another commit.
+
+* linkgit:git-reset[1] is about updating your branch, moving the tip
+ in order to add or remove commits from the branch. This operation
+ changes the commit history.
++
+`git reset` can also be used to restore the index, overlapping with
+`git restore`.
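+
+For illustration, a minimal sequence showing the different scopes (the
+path `foo.c` is only a placeholder):
+
+------------
+$ git revert HEAD              # new commit undoing the last commit
+$ git restore --staged foo.c   # copy foo.c from HEAD into the index only
+$ git reset --soft HEAD~1      # move the branch tip back one commit
+------------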
+
Low-level commands (plumbing)
-----------------------------
These attributes affect how the contents stored in the
repository are copied to the working tree files when commands
-such as 'git checkout' and 'git merge' run. They also affect how
+such as 'git switch', 'git checkout' and 'git merge' run.
+They also affect how
Git stores the contents you prepare in the working tree in the
repository upon 'git add' and 'git commit'.
- `java` suitable for source code in the Java language.
-- `matlab` suitable for source code in the MATLAB language.
+- `matlab` suitable for source code in the MATLAB and Octave languages.
- `objc` suitable for source code in the Objective-C language.
- `ruby` suitable for source code in the Ruby language.
+- `rust` suitable for source code in the Rust language.
+
- `tex` suitable for source code for LaTeX documents.
things:
+
--------------------------------
-$ git checkout -- *.c
-$ git checkout -- \*.c
+$ git restore *.c
+$ git restore \*.c
--------------------------------
+
The former lets your shell expand the fileglob, and you are asking
http://marc.info/?l=git&m=119150393620273 for further
information.
+Some other commands that also work on files in the working tree and/or
+in the index can take `--staged` and/or `--worktree`.
+
+* `--staged` is exactly like `--cached`, which is used to ask a
+ command to only work on the index, not the working tree.
+
+* `--worktree` is the opposite, to ask a command to work on the
+ working tree only, not the index.
+
+* The two options can be specified together to ask a command to work
+ on both the index and the working tree.
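+
+For example, `git restore` accepts both, so the two scopes can be
+updated in one invocation (`Makefile` stands in for any path):
+
+------------
+$ git restore --staged --worktree --source=HEAD~1 Makefile
+------------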
+
GIT
---
Part of the linkgit:git[1] suite
saying that you want to check out a new branch:
------------
-$ git checkout -b mybranch
+$ git switch -c mybranch
------------
will create a new branch based at the current `HEAD` position, and switch
In other words, if you have an earlier tag or branch, you'd just do
------------
-$ git checkout -b mybranch earlier-commit
+$ git switch -c mybranch earlier-commit
------------
and it would create the new branch `mybranch` at the earlier commit,
You can always just jump back to your original `master` branch by doing
------------
-$ git checkout master
+$ git switch master
------------
(or any other branch-name, for that matter) and if you forget which
which will simply _create_ the branch, but will not do anything further.
You can then later -- once you decide that you want to actually develop
-on that branch -- switch to that branch with a regular 'git checkout'
+on that branch -- switch to that branch with a regular 'git switch'
with the branchname as the argument.
that branch, and do some work there.
------------------------------------------------
-$ git checkout mybranch
+$ git switch mybranch
$ echo "Work, work, work" >>hello
$ git commit -m "Some work." -i hello
------------------------------------------------
to the master branch, and editing the same file differently there:
------------
-$ git checkout master
+$ git switch master
------------
Here, take a moment to look at the contents of `hello`, and notice how they
'git merge' to get the "upstream changes" back to your branch.
------------
-$ git checkout mybranch
+$ git switch mybranch
$ git merge -m "Merge upstream changes." master
------------
work." commit.
------------
-$ git checkout mybranch
-$ git reset --hard master^2
-$ git checkout master
+$ git switch -C mybranch master^2
+$ git switch master
$ git reset --hard master^
------------
* linkgit:git-log[1] to see what happened.
- * linkgit:git-checkout[1] and linkgit:git-branch[1] to switch
+ * linkgit:git-switch[1] and linkgit:git-branch[1] to switch
branches.
* linkgit:git-add[1] to manage the index file.
* linkgit:git-commit[1] to advance the current branch.
- * linkgit:git-reset[1] and linkgit:git-checkout[1] (with
- pathname parameters) to undo changes.
+ * linkgit:git-restore[1] to undo changes.
* linkgit:git-merge[1] to merge between local branches.
Create a topic branch and develop.::
+
------------
-$ git checkout -b alsa-audio <1>
+$ git switch -c alsa-audio <1>
$ edit/compile/test
-$ git checkout -- curses/ux_audio_oss.c <2>
+$ git restore curses/ux_audio_oss.c <2>
$ git add curses/ux_audio_alsa.c <3>
$ edit/compile/test
$ git diff HEAD <4>
$ edit/compile/test
$ git diff HEAD^ <6>
$ git commit -a --amend <7>
-$ git checkout master <8>
+$ git switch master <8>
$ git merge alsa-audio <9>
$ git log --since='3 days ago' <10>
$ git log v2.43.. curses/ <11>
------------
$ git clone git://git.kernel.org/pub/scm/.../torvalds/linux-2.6 my2.6
$ cd my2.6
-$ git checkout -b mine master <1>
+$ git switch -c mine master <1>
$ edit/compile/test; git commit -a -s <2>
$ git format-patch master <3>
$ git send-email --to="person <email@example.com>" 00*.patch <4>
-$ git checkout master <5>
+$ git switch master <5>
$ git pull <6>
$ git log -p ORIG_HEAD.. arch/i386 include/asm-i386 <7>
$ git ls-remote --heads http://git.kernel.org/.../jgarzik/libata-dev.git <8>
satellite$ git push origin <4>
mothership$ cd frotz
-mothership$ git checkout master
+mothership$ git switch master
mothership$ git merge satellite/master <5>
------------
+
Branch off of a specific tag.::
+
------------
-$ git checkout -b private2.6.14 v2.6.14 <1>
+$ git switch -c private2.6.14 v2.6.14 <1>
$ edit/compile/test; git commit -a
$ git checkout master
$ git cherry-pick v2.6.14..private2.6.14 <2>
& s 2 3 4 5 ./+to-apply
& s 7 8 ./+hold-linus
& q
-$ git checkout -b topic/one master
+$ git switch -c topic/one master
$ git am -3 -i -s ./+to-apply <4>
$ compile/test
-$ git checkout -b hold/linus && git am -3 -i -s ./+hold-linus <5>
-$ git checkout topic/one && git rebase master <6>
-$ git checkout pu && git reset --hard next <7>
+$ git switch -c hold/linus && git am -3 -i -s ./+hold-linus <5>
+$ git switch topic/one && git rebase master <6>
+$ git switch -C pu next <7>
$ git merge topic/one topic/two && git merge hold/linus <8>
-$ git checkout maint
+$ git switch maint
$ git cherry-pick master~4 <9>
$ compile/test
$ git tag -s -m "GIT 0.99.9x" v0.99.9x <10>
post-checkout
~~~~~~~~~~~~~
-This hook is invoked when a linkgit:git-checkout[1] is run after having updated the
+This hook is invoked when a linkgit:git-checkout[1] or
+linkgit:git-switch[1] is run after having updated the
worktree. The hook is given three parameters: the ref of the previous HEAD,
the ref of the new HEAD (which may or may not have changed), and a flag
indicating whether the checkout was a branch checkout (changing branches,
flag=1) or a file checkout (retrieving a file from the index, flag=0).
-This hook cannot affect the outcome of `git checkout`.
+This hook cannot affect the outcome of `git switch` or `git checkout`.
It is also run after linkgit:git-clone[1], unless the `--no-checkout` (`-n`) option is
used. The first parameter given to the hook is the null-ref, the second the
For example, the hook can simply run `git read-tree -u -m HEAD "$1"`
in order to emulate `git fetch` that is run in the reverse direction
with `git push`, as the two-tree form of `git read-tree -u -m` is
-essentially the same as `git checkout` that switches branches while
+essentially the same as `git switch` or `git checkout`
+that switches branches while
keeping the local changes in the working tree that do not interfere
with the difference between the branches.
$ git status
On branch master
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
+ (use "git restore --staged <file>..." to unstage)
new file: closing.txt
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
+ (use "git restore <file>..." to discard changes in working directory)
modified: file.txt
On branch master
Changes to be committed:
Your branch is up to date with 'origin/master'.
- (use "git reset HEAD <file>..." to unstage)
+ (use "git restore --staged <file>..." to unstage)
modified: file1
modified: file2
type
------------------------------------------------
-$ git checkout experimental
+$ git switch experimental
------------------------------------------------
to switch to the experimental branch. Now edit a file, commit the
------------------------------------------------
(edit file)
$ git commit -a
-$ git checkout master
+$ git switch master
------------------------------------------------
Check that the change you made is no longer visible, since it was
.Rewind and rebuild next
[caption="Recipe: "]
=====================================
-* `git checkout next`
-* `git reset --hard master`
+* `git switch -C next master`
* `git merge ai/topic_in_next1`
* `git merge ai/topic_in_next2`
* ...
[[def_object]]object::
The unit of storage in Git. It is uniquely identified by the
<<def_SHA1,SHA-1>> of its contents. Consequently, an
- object can not be changed.
+ object cannot be changed.
[[def_object_database]]object database::
Stores a set of "objects", and an individual <<def_object,object>> is
Pretend as if all objects mentioned by reflogs are listed on the
command line as `<commit>`.
+--alternate-refs::
+ Pretend as if all objects mentioned as ref tips of alternate
+ repositories were listed on the command line. An alternate
+ repository is any repository whose object directory is specified
+ in `objects/info/alternates`. The set of included objects may
+ be modified by `core.alternateRefsCommand`, etc. See
+ linkgit:git-config[1].
+
--single-worktree::
By default, all working trees will be examined by the
following options when there are more than one (see
Only useful with `--objects`; print the object IDs that are not
in packs.
+--object-names::
+ Only useful with `--objects`; print the names of the object IDs
+ that are found. This is the default behavior.
+
+--no-object-names::
+ Only useful with `--objects`; does not print the names of the object
+ IDs that are found. This inverts `--object-names`. This flag allows
+ the output to be more easily parsed by commands such as
+ linkgit:git-cat-file[1].
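++
+For example, the resulting list of bare object IDs can be piped
+directly into linkgit:git-cat-file[1]:
++
+------------
+$ git rev-list --objects --no-object-names HEAD | git cat-file --batch-check
+------------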
+
--filter=<filter-spec>::
Only useful with one of the `--objects*`; omits objects (usually
blobs) from the list of printed objects. The '<filter-spec>'
------------------------------
$ git config push.default current
$ git config remote.pushdefault myfork
-$ git checkout -b mybranch origin/master
+$ git switch -c mybranch origin/master
$ git rev-parse --symbolic-full-name @{upstream}
refs/remotes/origin/master
`.git/sequencer`. Can be used to continue after resolving
conflicts in a failed cherry-pick or revert.
+--skip::
+ Skip the current commit and continue with the rest of the
+ sequence.
+
--quit::
Forget about the current operation in progress. Can be used
to clear the sequencer state after a failed cherry-pick or
do not do this you will get an error for each ref that it does not point
to a valid object.
-Note: As a side-effect of this you can not safely assume that all
+Note: As a side-effect of this you cannot safely assume that all
objects you look up are available in the superproject. All submodule objects
will be available the same way as the superproject's objects.
=== The Normal Format Target
The normal format target is a traditional printf format and similar
-to GIT_TRACE format. This format is enabled with the `GIT_TR`
+to GIT_TRACE format. This format is enabled with the `GIT_TRACE2`
environment variable or the `trace2.normalTarget` system or global
config setting.
1-byte number (C) of "chunks"
- 1-byte (reserved for later use)
- Current clients should ignore this value.
+ 1-byte number (B) of base commit-graphs
+ We infer the length (H*B) of the Base Graphs chunk
+ from this value.
CHUNK LOOKUP:
positions for the parents until reaching a value with the most-significant
bit on. The other bits correspond to the position of the last parent.
+ Base Graphs List (ID: {'B', 'A', 'S', 'E'}) [Optional]
+ This list of H-byte hashes describes a set of B commit-graph files that
+ form a commit-graph chain. The graph position for the ith commit in this
+ file's OID Lookup chunk is equal to i plus the number of commits in all
+ base graphs. If B is non-zero, this chunk must exist.
+
TRAILER:
H-byte HASH-checksum of all of the above.
helpful for these clones, anyway. The commit-graph will not be read or
written when shallow commits are present.
+Commit Graphs Chains
+--------------------
+
+Typically, repos grow with near-constant velocity (commits per day). Over time,
+the number of commits added by a fetch operation is much smaller than the
+number of commits in the full history. By creating a "chain" of commit-graphs,
+we enable fast writes of new commit data without rewriting the entire commit
+history -- at least, most of the time.
+
+## File Layout
+
+A commit-graph chain uses multiple files, and we use a fixed naming convention
+to organize these files. Each commit-graph file has a name
+`$OBJDIR/info/commit-graphs/graph-{hash}.graph` where `{hash}` is the hex-
+valued hash stored in the footer of that file (which is a hash of the file's
+contents before that hash). For a chain of commit-graph files, a plain-text
+file at `$OBJDIR/info/commit-graphs/commit-graph-chain` contains the
+hashes for the files in order from "lowest" to "highest".
+
+For example, if the `commit-graph-chain` file contains the lines
+
+```
+ {hash0}
+ {hash1}
+ {hash2}
+```
+
+then the commit-graph chain looks like the following diagram:
+
+ +-----------------------+
+ | graph-{hash2}.graph |
+ +-----------------------+
+ |
+ +-----------------------+
+ | |
+ | graph-{hash1}.graph |
+ | |
+ +-----------------------+
+ |
+ +-----------------------+
+ | |
+ | |
+ | |
+ | graph-{hash0}.graph |
+ | |
+ | |
+ | |
+ +-----------------------+
+
+Let X0 be the number of commits in `graph-{hash0}.graph`, X1 be the number of
+commits in `graph-{hash1}.graph`, and X2 be the number of commits in
+`graph-{hash2}.graph`. If a commit appears in position i in `graph-{hash2}.graph`,
+then we interpret this as being the commit in position (X0 + X1 + i), and that
+will be used as its "graph position". The commits in `graph-{hash2}.graph` use these
+positions to refer to their parents, which may be in `graph-{hash1}.graph` or
+`graph-{hash0}.graph`. We can navigate to an arbitrary commit in position j by checking
+its containment in the intervals [0, X0), [X0, X0 + X1), [X0 + X1, X0 + X1 +
+X2).
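+
+For example, with hypothetical sizes X0 = 1000, X1 = 100 and X2 = 10, a
+graph position j = 1050 falls in the interval [X0, X0 + X1) = [1000, 1100),
+so the commit is the one stored at offset 1050 - 1000 = 50 in the OID
+Lookup chunk of `graph-{hash1}.graph`.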
+
+Each commit-graph file (except the base, `graph-{hash0}.graph`) contains data
+specifying the hashes of all files in the lower layers. In the above example,
+`graph-{hash1}.graph` contains `{hash0}` while `graph-{hash2}.graph` contains
+`{hash0}` and `{hash1}`.
+
+## Merging commit-graph files
+
+If we only added a new commit-graph file on every write, we would run into a
+linear search problem through many commit-graph files. Instead, we use a merge
+strategy to decide when the stack should collapse some number of levels.
+
+The diagram below shows such a collapse. As a set of new commits are added, it
+is determined by the merge strategy that the files should collapse to
+`graph-{hash1}`. Thus, the new commits, the commits in `graph-{hash2}` and
+the commits in `graph-{hash1}` should be combined into a new `graph-{hash3}`
+file.
+
+ +---------------------+
+ | |
+ | (new commits) |
+ | |
+ +---------------------+
+ | |
+ +-----------------------+ +---------------------+
+ | graph-{hash2} |->| |
+ +-----------------------+ +---------------------+
+ | | |
+ +-----------------------+ +---------------------+
+ | | | |
+ | graph-{hash1} |->| |
+ | | | |
+ +-----------------------+ +---------------------+
+ | tmp_graphXXX
+ +-----------------------+
+ | |
+ | |
+ | |
+ | graph-{hash0} |
+ | |
+ | |
+ | |
+ +-----------------------+
+
+During this process, the commits to write are combined and sorted, and the
+contents are written to a temporary file, all while holding a `commit-graph-chain.lock`
+lock-file. When the file is flushed, we rename it to `graph-{hash3}`
+according to the computed `{hash3}`. Finally, we write the new chain data to
+`commit-graph-chain.lock`:
+
+```
+ {hash3}
+ {hash0}
+```
+
+We then close the lock-file.
+
+## Merge Strategy
+
+When writing a set of commits that do not exist in the commit-graph stack of
+height N, we default to creating a new file at level N + 1. We then decide to
+merge with the Nth level if one of two conditions holds:
+
+ 1. `--size-multiple=<X>` is specified or X = 2, and the number of commits in
+ level N is less than X times the number of commits in level N + 1.
+
+ 2. `--max-commits=<C>` is specified with non-zero C and the number of commits
+ in level N + 1 is more than C commits.
+
+This decision cascades down the levels: when we merge a level we create a new
+set of commits that then compares to the next level.
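+
+As a hypothetical example with the default X = 2: if the incoming commits
+would form a new level N + 1 of 120 commits and level N holds 200 commits,
+then 200 < 2 * 120, so the two levels are merged into a single 320-commit
+file, and that combined set is compared against level N - 1 in the same way.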
+
+The first condition bounds the number of levels to be logarithmic in the total
+number of commits. The second condition bounds the total number of commits in
+a `graph-{hashN}` file and not in the `commit-graph` file, preventing
+significant performance issues when the stack merges and another process only
+partially reads the previous stack.
+
+The merge strategy values (2 for the size multiple, 64,000 for the maximum
+number of commits) could be extracted into config settings for full
+flexibility.
+
+## Deleting graph-{hash} files
+
+After a new tip file is written, some `graph-{hash}` files may no longer
+be part of a chain. It is important to remove these files from disk, eventually.
+The main reason to delay removal is that another process could read the
+`commit-graph-chain` file before it is rewritten, but then look for the
+`graph-{hash}` files after they are deleted.
+
+To allow holding old split commit-graphs for a while after they are unreferenced,
+we update the modified times of the files when they become unreferenced. Then,
+we scan the `$OBJDIR/info/commit-graphs/` directory for `graph-{hash}`
+files whose modified times are older than a given expiry window. This window
+defaults to zero, but can be changed using command-line arguments or a config
+setting.
+
+## Chains across multiple object directories
+
+In a repo with alternates, we look for the `commit-graph-chain` file starting
+in the local object directory and then in each alternate. The first file that
+exists defines our chain. As we look for the `graph-{hash}` files for
+each `{hash}` in the chain file, we follow the same pattern for the host
+directories.
+
+This allows commit-graphs to be split across multiple forks in a fork network.
+The typical case is a large "base" repo with many smaller forks.
+
+As the base repo advances, it will likely update and merge its commit-graph
+chain more frequently than the forks. If a fork updates its commit-graph after
+the base repo, then it should "reparent" the commit-graph chain onto the new
+chain in the base repo. When reading each `graph-{hash}` file, we track
+the object directory containing it. During a write of a new commit-graph file,
+we check for any changes in the source object directory and read the
+`commit-graph-chain` file for that source and create a new file based on those
+files. During this "reparent" operation, we necessarily need to collapse all
+levels in the fork, as all of the files are invalid against the new base file.
+
+It is crucial to be careful when cleaning up "unreferenced" `graph-{hash}.graph`
+files in this scenario. It falls to the user to define the proper settings for
+their custom environment:
+
+ 1. When merging levels in the base repo, the unreferenced files may still be
+ referenced by chains from fork repos.
+
+ 2. The expiry time should be set to a length of time such that every fork has
+ time to recompute their commit-graph chain to "reparent" onto the new base
+ file(s).
+
+ 3. If the commit-graph chain is updated in the base, the fork will not have
+ access to the new chain until its chain is updated to reference those files.
+ (This may change in the future [5].)
+
Related Links
-------------
[0] https://bugs.chromium.org/p/git/issues/detail?id=8
[4] https://public-inbox.org/git/20180108154822.54829-1-git@jeffhostetler.com/T/#u
A patch to remove the ahead-behind calculation from 'status'.
+
+[5] https://public-inbox.org/git/f27db281-abad-5043-6d71-cbb083b1c877@gmail.com/
+ A discussion of a "two-dimensional graph position" that can allow reading
+ multiple commit-graph chains at the same time.
while heads are expected to advance as development progresses.
Create a new branch head pointing to one of these versions and check it
-out using linkgit:git-checkout[1]:
+out using linkgit:git-switch[1]:
------------------------------------------------
-$ git checkout -b new v2.6.13
+$ git switch -c new v2.6.13
------------------------------------------------
The working directory then reflects the contents that the project had
this command will fail with a warning.
`git branch -D <branch>`::
delete the branch `<branch>` irrespective of its merged status.
-`git checkout <branch>`::
+`git switch <branch>`::
make the current branch `<branch>`, updating the working
directory to reflect the version referenced by `<branch>`.
-`git checkout -b <new> <start-point>`::
+`git switch -c <new> <start-point>`::
create a new branch `<new>` referencing `<start-point>`, and
check it out.
Examining an old version without creating a new branch
------------------------------------------------------
-The `git checkout` command normally expects a branch head, but will also
-accept an arbitrary commit; for example, you can check out the commit
-referenced by a tag:
+The `git switch` command normally expects a branch head, but will also
+accept an arbitrary commit when invoked with --detach; for example,
+you can check out the commit referenced by a tag:
------------------------------------------------
-$ git checkout v2.6.17
+$ git switch --detach v2.6.17
Note: checking out 'v2.6.17'.
You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
-state without impacting any branches by performing another checkout.
+state without impacting any branches by performing another switch.
If you want to create a new branch to retain commits you create, you may
-do so (now or later) by using -b with the checkout command again. Example:
+do so (now or later) by using -c with the switch command again. Example:
- git checkout -b new_branch_name
+ git switch -c new_branch_name
HEAD is now at 427abfa Linux v2.6.17
------------------------------------------------
on a branch of your own, just as you would for a tag:
------------------------------------------------
-$ git checkout -b my-todo-copy origin/todo
+$ git switch -c my-todo-copy origin/todo
------------------------------------------------
You can also check out `origin/todo` directly to examine it or
away, you can always return to the pre-merge state with
-------------------------------------------------
-$ git reset --hard HEAD
+$ git merge --abort
-------------------------------------------------
Or, if you've already committed the merge that you want to throw away,
state with
-------------------------------------------------
-$ git reset --hard HEAD
+$ git restore --staged --worktree :/
-------------------------------------------------
If you make a commit that you later wish you hadn't, there are two
In the process of undoing a previous bad change, you may find it
useful to check out an older version of a particular file using
-linkgit:git-checkout[1]. We've used `git checkout` before to switch
-branches, but it has quite different behavior if it is given a path
-name: the command
+linkgit:git-restore[1]. The command
-------------------------------------------------
-$ git checkout HEAD^ path/to/file
+$ git restore --source=HEAD^ path/to/file
-------------------------------------------------
replaces path/to/file by the contents it had in the commit HEAD^, and
These can be easily kept up to date using linkgit:git-pull[1].
-------------------------------------------------
-$ git checkout test && git pull
-$ git checkout release && git pull
+$ git switch test && git pull
+$ git switch release && git pull
-------------------------------------------------
Important note! If you have any local changes in these branches, then
2) help future bug hunters that use `git bisect` to find problems
-------------------------------------------------
-$ git checkout -b speed-up-spinlocks v2.6.35
+$ git switch -c speed-up-spinlocks v2.6.35
-------------------------------------------------
Now you apply the patch(es), run some tests, and commit the change(s). If
"test" branch in preparation to make it public:
-------------------------------------------------
-$ git checkout test && git merge speed-up-spinlocks
+$ git switch test && git merge speed-up-spinlocks
-------------------------------------------------
It is unlikely that you would have any conflicts here ... but you might if you
means that the patches can be moved into the `release` tree in any order.
-------------------------------------------------
-$ git checkout release && git merge speed-up-spinlocks
+$ git switch release && git merge speed-up-spinlocks
-------------------------------------------------
After a while, you will have a number of branches, and despite the
`origin`, and create some commits on top of it:
-------------------------------------------------
-$ git checkout -b mywork origin
+$ git switch -c mywork origin
$ vi file.txt
$ git commit
$ vi otherfile.txt
linkgit:git-rebase[1]:
-------------------------------------------------
-$ git checkout mywork
+$ git switch mywork
$ git rebase origin
-------------------------------------------------
new commit:
-------------------------------------------------
-$ git checkout master
+$ git switch master
-------------------------------------------------
or
-------------------------------------------------
-$ git checkout -b fix-up
+$ git switch -c fix-up
-------------------------------------------------
then
The Workflow
------------
-High-level operations such as linkgit:git-commit[1],
-linkgit:git-checkout[1] and linkgit:git-reset[1] work by moving data
+High-level operations such as linkgit:git-commit[1] and
+linkgit:git-restore[1] work by moving data
between the working tree, the index, and the object database. Git
provides low-level operations which perform each of these steps
individually.
A good place to start is with the contents of the initial commit, with:
----------------------------------------------------
-$ git checkout e83c5163
+$ git switch --detach e83c5163
----------------------------------------------------
The initial revision lays the foundation for almost everything Git has
-----------------
-----------------------------------------------
-$ git branch # list all local branches in this repo
-$ git checkout test # switch working directory to branch "test"
-$ git branch new # create branch "new" starting at current HEAD
-$ git branch -d new # delete branch "new"
+$ git branch # list all local branches in this repo
+$ git switch test # switch working directory to branch "test"
+$ git branch new # create branch "new" starting at current HEAD
+$ git branch -d new # delete branch "new"
-----------------------------------------------
Instead of basing a new branch on current HEAD (the default), use:
Create and switch to a new branch at the same time:
-----------------------------------------------
-$ git checkout -b new v2.6.15
+$ git switch -c new v2.6.15
-----------------------------------------------
Update and examine branches from the repository you cloned from:
origin/master
origin/next
...
-$ git checkout -b masterwork origin/master
+$ git switch -c masterwork origin/master
-----------------------------------------------
Fetch a branch from a different repository, and give it a new
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v2.22.0
+DEF_VER=v2.23.0-rc1
LF='
'
#
# Define NO_DEFLATE_BOUND if your zlib does not have deflateBound.
#
-# Define NO_R_TO_GCC_LINKER if your gcc does not like "-R/path/lib"
-# that tells runtime paths to dynamic libraries;
-# "-Wl,-rpath=/path/lib" is used instead.
-#
# Define NO_NORETURN if using buggy versions of gcc 4.6+ and profile feedback,
# as the compiler can crash (http://gcc.gnu.org/bugzilla/show_bug.cgi?id=49299)
#
SCRIPT_LIB += git-mergetool--lib
SCRIPT_LIB += git-parse-remote
-SCRIPT_LIB += git-rebase--am
-SCRIPT_LIB += git-rebase--common
SCRIPT_LIB += git-rebase--preserve-merges
SCRIPT_LIB += git-sh-setup
SCRIPT_LIB += git-sh-i18n
TEST_BUILTINS_OBJS += test-ctype.o
TEST_BUILTINS_OBJS += test-date.o
TEST_BUILTINS_OBJS += test-delta.o
+TEST_BUILTINS_OBJS += test-dir-iterator.o
TEST_BUILTINS_OBJS += test-drop-caches.o
TEST_BUILTINS_OBJS += test-dump-cache-tree.o
TEST_BUILTINS_OBJS += test-dump-fsmonitor.o
TEST_BUILTINS_OBJS += test-match-trees.o
TEST_BUILTINS_OBJS += test-mergesort.o
TEST_BUILTINS_OBJS += test-mktemp.o
+TEST_BUILTINS_OBJS += test-oidmap.o
TEST_BUILTINS_OBJS += test-online-cpus.o
TEST_BUILTINS_OBJS += test-parse-options.o
TEST_BUILTINS_OBJS += test-path-utils.o
BUILT_INS += git-fsck-objects$X
BUILT_INS += git-init$X
BUILT_INS += git-merge-subtree$X
+BUILT_INS += git-restore$X
BUILT_INS += git-show$X
BUILT_INS += git-stage$X
BUILT_INS += git-status$X
+BUILT_INS += git-switch$X
BUILT_INS += git-whatchanged$X
# what 'all' will build and 'install' will install in gitexecdir,
BUILTIN_OBJS += builtin/diff-tree.o
BUILTIN_OBJS += builtin/diff.o
BUILTIN_OBJS += builtin/difftool.o
+BUILTIN_OBJS += builtin/env--helper.o
BUILTIN_OBJS += builtin/fast-export.o
BUILTIN_OBJS += builtin/fetch-pack.o
BUILTIN_OBJS += builtin/fetch.o
# which'll override these defaults.
CFLAGS = -g -O2 -Wall
LDFLAGS =
+CC_LD_DYNPATH = -Wl,-rpath,
BASIC_CFLAGS = -I.
BASIC_LDFLAGS =
ifdef SANE_TOOL_PATH
SANE_TOOL_PATH_SQ = $(subst ','\'',$(SANE_TOOL_PATH))
-BROKEN_PATH_FIX = 's|^\# @@BROKEN_PATH_FIX@@$$|git_broken_path_fix $(SANE_TOOL_PATH_SQ)|'
+BROKEN_PATH_FIX = 's|^\# @@BROKEN_PATH_FIX@@$$|git_broken_path_fix "$(SANE_TOOL_PATH_SQ)"|'
PATH := $(SANE_TOOL_PATH):${PATH}
else
BROKEN_PATH_FIX = '/^\# @@BROKEN_PATH_FIX@@$$/d'
PTHREAD_LIBS =
endif
-ifndef CC_LD_DYNPATH
- ifdef NO_R_TO_GCC_LINKER
- # Some gcc does not accept and pass -R to the linker to specify
- # the runtime dynamic library path.
- CC_LD_DYNPATH = -Wl,-rpath,
- else
- CC_LD_DYNPATH = -R
- endif
-endif
-
ifdef NO_LIBGEN_H
COMPAT_CFLAGS += -DNO_LIBGEN_H
COMPAT_OBJS += compat/basename.o
@mkdir -p bin-wrappers
$(QUIET_GEN)sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \
-e 's|@@BUILD_DIR@@|$(shell pwd)|' \
- -e 's|@@PROG@@|$(patsubst test-%,t/helper/test-%,$(@F))|' < $< > $@ && \
+ -e 's|@@PROG@@|$(patsubst test-%,t/helper/test-%$(X),$(@F))$(patsubst git%,$(X),$(filter $(@F),$(BINDIR_PROGRAMS_NEED_X)))|' < $< > $@ && \
chmod +x $@
# GNU make supports exporting all variables by "export" without parameters.
$(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
$(INSTALL) -m 644 $(SCRIPT_LIB) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
$(INSTALL) $(install_bindir_programs) '$(DESTDIR_SQ)$(bindir_SQ)'
+ifdef MSVC
+ # We DO NOT install the individual foo.o.pdb files because they
+ # have already been rolled up into the exe's pdb file.
+ # We DO NOT have pdb files for the builtin commands (like git-status.exe)
+ # because it is just a copy/hardlink of git.exe, rather than a unique binary.
+ $(INSTALL) git.pdb '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) git-shell.pdb '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) git-upload-pack.pdb '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) git-credential-store.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-daemon.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-fast-import.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-http-backend.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-http-fetch.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-http-push.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-imap-send.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-remote-http.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-remote-testsvn.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-sh-i18n--envsubst.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ $(INSTALL) git-show-index.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)'
+ifndef DEBUG
+ $(INSTALL) $(vcpkg_rel_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) $(vcpkg_rel_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)'
+else
+ $(INSTALL) $(vcpkg_dbg_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)'
+ $(INSTALL) $(vcpkg_dbg_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)'
+endif
+endif
$(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install
$(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(mergetools_instdir_SQ)'
$(INSTALL) -m 644 mergetools/* '$(DESTDIR_SQ)$(mergetools_instdir_SQ)'
$(RM) GIT-VERSION-FILE GIT-CFLAGS GIT-LDFLAGS GIT-BUILD-OPTIONS
$(RM) GIT-USER-AGENT GIT-PREFIX
$(RM) GIT-SCRIPT-DEFINES GIT-PERL-DEFINES GIT-PERL-HEADER GIT-PYTHON-VARS
+ifdef MSVC
+ $(RM) $(patsubst %.o,%.o.pdb,$(OBJECTS))
+ $(RM) $(patsubst %.exe,%.pdb,$(OTHER_PROGRAMS))
+ $(RM) $(patsubst %.exe,%.iobj,$(OTHER_PROGRAMS))
+ $(RM) $(patsubst %.exe,%.ipdb,$(OTHER_PROGRAMS))
+ $(RM) $(patsubst %.exe,%.pdb,$(PROGRAMS))
+ $(RM) $(patsubst %.exe,%.iobj,$(PROGRAMS))
+ $(RM) $(patsubst %.exe,%.ipdb,$(PROGRAMS))
+ $(RM) $(patsubst %.exe,%.pdb,$(TEST_PROGRAMS))
+ $(RM) $(patsubst %.exe,%.iobj,$(TEST_PROGRAMS))
+ $(RM) $(patsubst %.exe,%.ipdb,$(TEST_PROGRAMS))
+ $(RM) compat/vcbuild/MSVC-DEFS-GEN
+endif
.PHONY: all install profile-clean cocciclean clean strip
.PHONY: shell_compatibility_test please_set_SHELL_PATH_to_a_more_modern_shell
-Documentation/RelNotes/2.22.1.txt
\ No newline at end of file
+Documentation/RelNotes/2.23.0.txt
\ No newline at end of file
#include "color.h"
#include "help.h"
+int advice_fetch_show_forced_updates = 1;
int advice_push_update_rejected = 1;
int advice_push_non_ff_current = 1;
int advice_push_non_ff_matching = 1;
int advice_push_unqualified_ref_name = 1;
int advice_status_hints = 1;
int advice_status_u_option = 1;
+int advice_status_ahead_behind_warning = 1;
int advice_commit_before_merge = 1;
int advice_reset_quiet_warning = 1;
int advice_resolve_conflict = 1;
+int advice_sequencer_in_use = 1;
int advice_implicit_identity = 1;
int advice_detached_head = 1;
int advice_set_upstream_failure = 1;
const char *name;
int *preference;
} advice_config[] = {
+ { "fetchShowForcedUpdates", &advice_fetch_show_forced_updates },
{ "pushUpdateRejected", &advice_push_update_rejected },
{ "pushNonFFCurrent", &advice_push_non_ff_current },
{ "pushNonFFMatching", &advice_push_non_ff_matching },
{ "pushUnqualifiedRefName", &advice_push_unqualified_ref_name },
{ "statusHints", &advice_status_hints },
{ "statusUoption", &advice_status_u_option },
+ { "statusAheadBehindWarning", &advice_status_ahead_behind_warning },
{ "commitBeforeMerge", &advice_commit_before_merge },
{ "resetQuiet", &advice_reset_quiet_warning },
{ "resolveConflict", &advice_resolve_conflict },
+ { "sequencerInUse", &advice_sequencer_in_use },
{ "implicitIdentity", &advice_implicit_identity },
{ "detachedHead", &advice_detached_head },
{ "setupStreamFailure", &advice_set_upstream_failure },
void detach_advice(const char *new_name)
{
const char *fmt =
- _("Note: checking out '%s'.\n\n"
+ _("Note: switching to '%s'.\n"
+ "\n"
"You are in 'detached HEAD' state. You can look around, make experimental\n"
"changes and commit them, and you can discard any commits you make in this\n"
- "state without impacting any branches by performing another checkout.\n\n"
+ "state without impacting any branches by switching back to a branch.\n"
+ "\n"
"If you want to create a new branch to retain commits you create, you may\n"
- "do so (now or later) by using -b with the checkout command again. Example:\n\n"
- " git checkout -b <new-branch-name>\n\n");
+ "do so (now or later) by using -c with the switch command. Example:\n"
+ "\n"
+ " git switch -c <new-branch-name>\n"
+ "\n"
+ "Or undo this operation with:\n"
+ "\n"
+ " git switch -\n"
+ "\n"
+ "Turn off this advice by setting config variable advice.detachedHead to false\n\n");
fprintf(stderr, fmt, new_name);
}
#include "git-compat-util.h"
+extern int advice_fetch_show_forced_updates;
extern int advice_push_update_rejected;
extern int advice_push_non_ff_current;
extern int advice_push_non_ff_matching;
extern int advice_push_unqualified_ref_name;
extern int advice_status_hints;
extern int advice_status_u_option;
+extern int advice_status_ahead_behind_warning;
extern int advice_commit_before_merge;
extern int advice_reset_quiet_warning;
extern int advice_resolve_conflict;
+extern int advice_sequencer_in_use;
extern int advice_implicit_identity;
extern int advice_detached_head;
extern int advice_set_upstream_failure;
#include "rerere.h"
#include "apply.h"
+struct gitdiff_data {
+ struct strbuf *root;
+ int linenr;
+ int p_value;
+};
+
static void git_apply_config(void)
{
git_config_get_string_const("apply.whitespace", &apply_default_whitespace);
#define BINARY_DELTA_DEFLATED 1
#define BINARY_LITERAL_DEFLATED 2
-/*
- * This represents a "patch" to a file, both metainfo changes
- * such as creation/deletion, filemode and content changes represented
- * as a series of fragments.
- */
-struct patch {
- char *new_name, *old_name, *def_name;
- unsigned int old_mode, new_mode;
- int is_new, is_delete; /* -1 = unknown, 0 = false, 1 = true */
- int rejected;
- unsigned ws_rule;
- int lines_added, lines_deleted;
- int score;
- int extension_linenr; /* first line specifying delete/new/rename/copy */
- unsigned int is_toplevel_relative:1;
- unsigned int inaccurate_eof:1;
- unsigned int is_binary:1;
- unsigned int is_copy:1;
- unsigned int is_rename:1;
- unsigned int recount:1;
- unsigned int conflicted_threeway:1;
- unsigned int direct_to_threeway:1;
- unsigned int crlf_in_old:1;
- struct fragment *fragments;
- char *result;
- size_t resultsize;
- char old_oid_prefix[GIT_MAX_HEXSZ + 1];
- char new_oid_prefix[GIT_MAX_HEXSZ + 1];
- struct patch *next;
-
- /* three-way fallback result */
- struct object_id threeway_stage[3];
-};
-
static void free_fragment_list(struct fragment *list)
{
while (list) {
return name;
}
-static char *find_name_gnu(struct apply_state *state,
+static char *find_name_gnu(struct strbuf *root,
const char *line,
int p_value)
{
/*
* Proposed "new-style" GNU patch/diff format; see
- * http://marc.info/?l=git&m=112927316408690&w=2
+ * https://public-inbox.org/git/7vll0wvb2a.fsf@assigned-by-dhcp.cox.net/
*/
if (unquote_c_style(&name, line, NULL)) {
strbuf_release(&name);
}
strbuf_remove(&name, 0, cp - name.buf);
- if (state->root.len)
- strbuf_insert(&name, 0, state->root.buf, state->root.len);
+ if (root->len)
+ strbuf_insert(&name, 0, root->buf, root->len);
return squash_slash(strbuf_detach(&name, NULL));
}
return line + len - end;
}
-static char *find_name_common(struct apply_state *state,
+static char *find_name_common(struct strbuf *root,
const char *line,
const char *def,
int p_value,
return squash_slash(xstrdup(def));
}
- if (state->root.len) {
- char *ret = xstrfmt("%s%.*s", state->root.buf, len, start);
+ if (root->len) {
+ char *ret = xstrfmt("%s%.*s", root->buf, len, start);
return squash_slash(ret);
}
return squash_slash(xmemdupz(start, len));
}
-static char *find_name(struct apply_state *state,
+static char *find_name(struct strbuf *root,
const char *line,
char *def,
int p_value,
int terminate)
{
if (*line == '"') {
- char *name = find_name_gnu(state, line, p_value);
+ char *name = find_name_gnu(root, line, p_value);
if (name)
return name;
}
- return find_name_common(state, line, def, p_value, NULL, terminate);
+ return find_name_common(root, line, def, p_value, NULL, terminate);
}
-static char *find_name_traditional(struct apply_state *state,
+static char *find_name_traditional(struct strbuf *root,
const char *line,
char *def,
int p_value)
size_t date_len;
if (*line == '"') {
- char *name = find_name_gnu(state, line, p_value);
+ char *name = find_name_gnu(root, line, p_value);
if (name)
return name;
}
len = strchrnul(line, '\n') - line;
date_len = diff_timestamp_len(line, len);
if (!date_len)
- return find_name_common(state, line, def, p_value, NULL, TERM_TAB);
+ return find_name_common(root, line, def, p_value, NULL, TERM_TAB);
len -= date_len;
- return find_name_common(state, line, def, p_value, line + len, 0);
+ return find_name_common(root, line, def, p_value, line + len, 0);
}
/*
if (is_dev_null(nameline))
return -1;
- name = find_name_traditional(state, nameline, NULL, 0);
+ name = find_name_traditional(&state->root, nameline, NULL, 0);
if (!name)
return -1;
cp = strchr(name, '/');
if (is_dev_null(first)) {
patch->is_new = 1;
patch->is_delete = 0;
- name = find_name_traditional(state, second, NULL, state->p_value);
+ name = find_name_traditional(&state->root, second, NULL, state->p_value);
patch->new_name = name;
} else if (is_dev_null(second)) {
patch->is_new = 0;
patch->is_delete = 1;
- name = find_name_traditional(state, first, NULL, state->p_value);
+ name = find_name_traditional(&state->root, first, NULL, state->p_value);
patch->old_name = name;
} else {
char *first_name;
- first_name = find_name_traditional(state, first, NULL, state->p_value);
- name = find_name_traditional(state, second, first_name, state->p_value);
+ first_name = find_name_traditional(&state->root, first, NULL, state->p_value);
+ name = find_name_traditional(&state->root, second, first_name, state->p_value);
free(first_name);
if (has_epoch_timestamp(first)) {
patch->is_new = 1;
return 0;
}
-static int gitdiff_hdrend(struct apply_state *state,
+static int gitdiff_hdrend(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
#define DIFF_OLD_NAME 0
#define DIFF_NEW_NAME 1
-static int gitdiff_verify_name(struct apply_state *state,
+static int gitdiff_verify_name(struct gitdiff_data *state,
const char *line,
int isnull,
char **name,
int side)
{
if (!*name && !isnull) {
- *name = find_name(state, line, NULL, state->p_value, TERM_TAB);
+ *name = find_name(state->root, line, NULL, state->p_value, TERM_TAB);
return 0;
}
if (isnull)
return error(_("git apply: bad git-diff - expected /dev/null, got %s on line %d"),
*name, state->linenr);
- another = find_name(state, line, NULL, state->p_value, TERM_TAB);
+ another = find_name(state->root, line, NULL, state->p_value, TERM_TAB);
if (!another || strcmp(another, *name)) {
free(another);
return error((side == DIFF_NEW_NAME) ?
return 0;
}
-static int gitdiff_oldname(struct apply_state *state,
+static int gitdiff_oldname(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
DIFF_OLD_NAME);
}
-static int gitdiff_newname(struct apply_state *state,
+static int gitdiff_newname(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return 0;
}
-static int gitdiff_oldmode(struct apply_state *state,
+static int gitdiff_oldmode(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return parse_mode_line(line, state->linenr, &patch->old_mode);
}
-static int gitdiff_newmode(struct apply_state *state,
+static int gitdiff_newmode(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return parse_mode_line(line, state->linenr, &patch->new_mode);
}
-static int gitdiff_delete(struct apply_state *state,
+static int gitdiff_delete(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return gitdiff_oldmode(state, line, patch);
}
-static int gitdiff_newfile(struct apply_state *state,
+static int gitdiff_newfile(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return gitdiff_newmode(state, line, patch);
}
-static int gitdiff_copysrc(struct apply_state *state,
+static int gitdiff_copysrc(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
patch->is_copy = 1;
free(patch->old_name);
- patch->old_name = find_name(state, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
+ patch->old_name = find_name(state->root, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
return 0;
}
-static int gitdiff_copydst(struct apply_state *state,
+static int gitdiff_copydst(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
patch->is_copy = 1;
free(patch->new_name);
- patch->new_name = find_name(state, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
+ patch->new_name = find_name(state->root, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
return 0;
}
-static int gitdiff_renamesrc(struct apply_state *state,
+static int gitdiff_renamesrc(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
patch->is_rename = 1;
free(patch->old_name);
- patch->old_name = find_name(state, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
+ patch->old_name = find_name(state->root, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
return 0;
}
-static int gitdiff_renamedst(struct apply_state *state,
+static int gitdiff_renamedst(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
patch->is_rename = 1;
free(patch->new_name);
- patch->new_name = find_name(state, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
+ patch->new_name = find_name(state->root, line, NULL, state->p_value ? state->p_value - 1 : 0, 0);
return 0;
}
-static int gitdiff_similarity(struct apply_state *state,
+static int gitdiff_similarity(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return 0;
}
-static int gitdiff_dissimilarity(struct apply_state *state,
+static int gitdiff_dissimilarity(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
return 0;
}
-static int gitdiff_index(struct apply_state *state,
+static int gitdiff_index(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
* This is normal for a diff that doesn't change anything: we'll fall through
* into the next diff. Tell the parser to break out.
*/
-static int gitdiff_unrecognized(struct apply_state *state,
+static int gitdiff_unrecognized(struct gitdiff_data *state,
const char *line,
struct patch *patch)
{
* Skip p_value leading components from "line"; as we do not accept
* absolute paths, return NULL in that case.
*/
-static const char *skip_tree_prefix(struct apply_state *state,
+static const char *skip_tree_prefix(int p_value,
const char *line,
int llen)
{
int nslash;
int i;
- if (!state->p_value)
+ if (!p_value)
return (llen && line[0] == '/') ? NULL : line;
- nslash = state->p_value;
+ nslash = p_value;
for (i = 0; i < llen; i++) {
int ch = line[i];
if (ch == '/' && --nslash <= 0)
* creation or deletion of an empty file. In any of these cases,
* both sides are the same name under a/ and b/ respectively.
*/
-static char *git_header_name(struct apply_state *state,
+static char *git_header_name(int p_value,
const char *line,
int llen)
{
goto free_and_fail1;
/* strip the a/b prefix including trailing slash */
- cp = skip_tree_prefix(state, first.buf, first.len);
+ cp = skip_tree_prefix(p_value, first.buf, first.len);
if (!cp)
goto free_and_fail1;
strbuf_remove(&first, 0, cp - first.buf);
if (*second == '"') {
if (unquote_c_style(&sp, second, NULL))
goto free_and_fail1;
- cp = skip_tree_prefix(state, sp.buf, sp.len);
+ cp = skip_tree_prefix(p_value, sp.buf, sp.len);
if (!cp)
goto free_and_fail1;
/* They must match, otherwise ignore */
}
/* unquoted second */
- cp = skip_tree_prefix(state, second, line + llen - second);
+ cp = skip_tree_prefix(p_value, second, line + llen - second);
if (!cp)
goto free_and_fail1;
if (line + llen - cp != first.len ||
}
/* unquoted first name */
- name = skip_tree_prefix(state, line, llen);
+ name = skip_tree_prefix(p_value, line, llen);
if (!name)
return NULL;
if (unquote_c_style(&sp, second, NULL))
goto free_and_fail2;
- np = skip_tree_prefix(state, sp.buf, sp.len);
+ np = skip_tree_prefix(p_value, sp.buf, sp.len);
if (!np)
goto free_and_fail2;
*/
if (!name[len + 1])
return NULL; /* no postimage name */
- second = skip_tree_prefix(state, name + len + 1,
+ second = skip_tree_prefix(p_value, name + len + 1,
line_len - (len + 1));
if (!second)
return NULL;
}
}
-static int check_header_line(struct apply_state *state, struct patch *patch)
+static int check_header_line(int linenr, struct patch *patch)
{
int extensions = (patch->is_delete == 1) + (patch->is_new == 1) +
(patch->is_rename == 1) + (patch->is_copy == 1);
if (extensions > 1)
return error(_("inconsistent header lines %d and %d"),
- patch->extension_linenr, state->linenr);
+ patch->extension_linenr, linenr);
if (extensions && !patch->extension_linenr)
- patch->extension_linenr = state->linenr;
+ patch->extension_linenr = linenr;
return 0;
}
-/* Verify that we recognize the lines following a git header */
-static int parse_git_header(struct apply_state *state,
- const char *line,
- int len,
- unsigned int size,
- struct patch *patch)
+int parse_git_diff_header(struct strbuf *root,
+ int *linenr,
+ int p_value,
+ const char *line,
+ int len,
+ unsigned int size,
+ struct patch *patch)
{
unsigned long offset;
+ struct gitdiff_data parse_hdr_state;
/* A git diff has explicit new/delete information, so we don't guess */
patch->is_new = 0;
* or removing or adding empty files), so we get
* the default name from the header.
*/
- patch->def_name = git_header_name(state, line, len);
- if (patch->def_name && state->root.len) {
- char *s = xstrfmt("%s%s", state->root.buf, patch->def_name);
+ patch->def_name = git_header_name(p_value, line, len);
+ if (patch->def_name && root->len) {
+ char *s = xstrfmt("%s%s", root->buf, patch->def_name);
free(patch->def_name);
patch->def_name = s;
}
line += len;
size -= len;
- state->linenr++;
- for (offset = len ; size > 0 ; offset += len, size -= len, line += len, state->linenr++) {
+ (*linenr)++;
+ parse_hdr_state.root = root;
+ parse_hdr_state.linenr = *linenr;
+ parse_hdr_state.p_value = p_value;
+
+ for (offset = len ; size > 0 ; offset += len, size -= len, line += len, (*linenr)++) {
static const struct opentry {
const char *str;
- int (*fn)(struct apply_state *, const char *, struct patch *);
+ int (*fn)(struct gitdiff_data *, const char *, struct patch *);
} optable[] = {
{ "@@ -", gitdiff_hdrend },
{ "--- ", gitdiff_oldname },
int res;
if (len < oplen || memcmp(p->str, line, oplen))
continue;
- res = p->fn(state, line + oplen, patch);
+ res = p->fn(&parse_hdr_state, line + oplen, patch);
if (res < 0)
return -1;
- if (check_header_line(state, patch))
+ if (check_header_line(*linenr, patch))
return -1;
if (res > 0)
return offset;
* or mode change, so we handle that specially
*/
if (!memcmp("diff --git ", line, 11)) {
- int git_hdr_len = parse_git_header(state, line, len, size, patch);
+ int git_hdr_len = parse_git_diff_header(&state->root, &state->linenr,
+ state->p_value, line, len,
+ size, patch);
if (git_hdr_len < 0)
return -128;
if (git_hdr_len <= len)
int applied_after_fixing_ws;
};
+/*
+ * This represents a "patch" to a file, both metainfo changes
+ * such as creation/deletion, filemode and content changes represented
+ * as a series of fragments.
+ */
+struct patch {
+ char *new_name, *old_name, *def_name;
+ unsigned int old_mode, new_mode;
+ int is_new, is_delete; /* -1 = unknown, 0 = false, 1 = true */
+ int rejected;
+ unsigned ws_rule;
+ int lines_added, lines_deleted;
+ int score;
+ int extension_linenr; /* first line specifying delete/new/rename/copy */
+ unsigned int is_toplevel_relative:1;
+ unsigned int inaccurate_eof:1;
+ unsigned int is_binary:1;
+ unsigned int is_copy:1;
+ unsigned int is_rename:1;
+ unsigned int recount:1;
+ unsigned int conflicted_threeway:1;
+ unsigned int direct_to_threeway:1;
+ unsigned int crlf_in_old:1;
+ struct fragment *fragments;
+ char *result;
+ size_t resultsize;
+ char old_oid_prefix[GIT_MAX_HEXSZ + 1];
+ char new_oid_prefix[GIT_MAX_HEXSZ + 1];
+ struct patch *next;
+
+ /* three-way fallback result */
+ struct object_id threeway_stage[3];
+};
+
int apply_parse_options(int argc, const char **argv,
struct apply_state *state,
int *force_apply, int *options,
void clear_apply_state(struct apply_state *state);
int check_apply_state(struct apply_state *state, int force_apply);
+/*
+ * Parse a git diff header, starting at line. Fills the relevant
+ * metadata information in 'struct patch'.
+ *
+ * Returns -1 on failure, the length of the parsed header otherwise.
+ */
+int parse_git_diff_header(struct strbuf *root,
+ int *linenr,
+ int p_value,
+ const char *line,
+ int len,
+ unsigned int size,
+ struct patch *patch);
+
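Since parse_git_diff_header() is now exported from apply.h, callers outside apply.c can parse just the header portion of a git diff. Below is a minimal sketch of the calling convention under the assumption that it is compiled against apply.h; the wrapper name, its error string, and the buffer handling are illustrative and not part of the patch.

----
/*
 * Illustrative caller of parse_git_diff_header(). "root" is typically
 * an empty strbuf unless paths should be prefixed, and p_value is the
 * usual -p<n> strip count (1 by default).
 */
static int parse_one_header(struct strbuf *root, int p_value,
			    const char *line, int len, unsigned int size,
			    struct patch *patch)
{
	int linenr = 1;
	int hdr_len;

	if (len < 11 || memcmp("diff --git ", line, 11))
		return error("not a git diff header");

	hdr_len = parse_git_diff_header(root, &linenr, p_value,
					line, len, size, patch);
	if (hdr_len < 0)
		return -1; /* corrupt or unrecognized header */

	/*
	 * On success hdr_len is the number of bytes consumed; the name,
	 * mode and new/delete/rename/copy fields of *patch are filled
	 * in, and linenr has been advanced past the header lines.
	 */
	return hdr_len;
}
----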
/*
* Some aspects of the apply behavior are controlled by the following
* bits in the "options" parameter passed to apply_all_patches().
unsigned short mode;
int err;
- err = get_tree_entry(&tree->object.oid, prefix, &tree_oid,
+ err = get_tree_entry(ar_args->repo,
+ &tree->object.oid,
+ prefix, &tree_oid,
&mode);
if (err || !S_ISDIR(mode))
die(_("current working directory is untracked"));
struct object_id blob_oid;
unsigned short mode;
- if (!get_tree_entry(commit_oid, path, &blob_oid, &mode) &&
+ if (!get_tree_entry(r, commit_oid, path, &blob_oid, &mode) &&
oid_object_info(r, &blob_oid, NULL) == OBJ_BLOB)
return;
}
return xdi_diff(file_a, file_b, &xpp, &xecfg, &ecb);
}
+static const char *get_next_line(const char *start, const char *end)
+{
+ const char *nl = memchr(start, '\n', end - start);
+
+ return nl ? nl + 1 : end;
+}
+
+static int find_line_starts(int **line_starts, const char *buf,
+ unsigned long len)
+{
+ const char *end = buf + len;
+ const char *p;
+ int *lineno;
+ int num = 0;
+
+ for (p = buf; p < end; p = get_next_line(p, end))
+ num++;
+
+ ALLOC_ARRAY(*line_starts, num + 1);
+ lineno = *line_starts;
+
+ for (p = buf; p < end; p = get_next_line(p, end))
+ *lineno++ = p - buf;
+
+ *lineno = len;
+
+ return num;
+}
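The helper's contract is easiest to see on a tiny input: line_starts receives one offset per line plus a trailing sentinel equal to the buffer length, so line n spans buf[line_starts[n]..line_starts[n+1]). A minimal check, assuming it lives next to the static helper above (the function name and buffer contents are made up):

----
static void find_line_starts_example(void)
{
	const char buf[] = "one\ntwo\nthree"; /* no trailing newline */
	int *line_starts;
	int num = find_line_starts(&line_starts, buf, strlen(buf));

	assert(num == 3);
	assert(line_starts[0] == 0);  /* "one\n"  starts here */
	assert(line_starts[1] == 4);  /* "two\n"  starts here */
	assert(line_starts[2] == 8);  /* "three"  starts here */
	assert(line_starts[3] == 13); /* sentinel: buffer length */
	free(line_starts);
}
----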
+
+struct fingerprint_entry;
+
+/* A fingerprint is intended to loosely represent a string, such that two
+ * fingerprints can be quickly compared to give an indication of the similarity
+ * of the strings that they represent.
+ *
+ * A fingerprint is represented as a multiset of the lower-cased byte pairs in
+ * the string that it represents. Whitespace is added at each end of the
+ * string. Whitespace pairs are ignored. Whitespace is converted to '\0'.
+ * For example, the string "Darth Radar" will be converted to the following
+ * fingerprint:
+ * {"\0d", "da", "da", "ar", "ar", "rt", "th", "h\0", "\0r", "ra", "ad", "r\0"}
+ *
+ * The similarity between two fingerprints is the size of the intersection of
+ * their multisets, including repeated elements. See fingerprint_similarity for
+ * examples.
+ *
+ * For ease of implementation, the fingerprint is implemented as a map
+ * of byte pairs to the count of that byte pair in the string, instead of
+ * allowing repeated elements in a set.
+ */
+struct fingerprint {
+ struct hashmap map;
+ /* As we know the maximum number of entries in advance, it's
+ * convenient to store the entries in a single array instead of having
+ * the hashmap manage the memory.
+ */
+ struct fingerprint_entry *entries;
+};
+
+/* A byte pair in a fingerprint. Stores the number of times the byte pair
+ * occurs in the string that the fingerprint represents.
+ */
+struct fingerprint_entry {
+ /* The hashmap entry - the hash represents the byte pair in its
+ * entirety so we don't need to store the byte pair separately.
+ */
+ struct hashmap_entry entry;
+ /* The number of times the byte pair occurs in the string that the
+ * fingerprint represents.
+ */
+ int count;
+};
+
+/* See `struct fingerprint` for an explanation of what a fingerprint is.
+ * \param result the fingerprint of the string is stored here. This must be
+ * freed later using free_fingerprint.
+ * \param line_begin the start of the string
+ * \param line_end the end of the string
+ */
+static void get_fingerprint(struct fingerprint *result,
+ const char *line_begin,
+ const char *line_end)
+{
+ unsigned int hash, c0 = 0, c1;
+ const char *p;
+ int max_map_entry_count = 1 + line_end - line_begin;
+ struct fingerprint_entry *entry = xcalloc(max_map_entry_count,
+ sizeof(struct fingerprint_entry));
+ struct fingerprint_entry *found_entry;
+
+ hashmap_init(&result->map, NULL, NULL, max_map_entry_count);
+ result->entries = entry;
+ for (p = line_begin; p <= line_end; ++p, c0 = c1) {
+ /* Always terminate the string with whitespace.
+ * Normalise whitespace to 0, and normalise letters to
+ * lower case. This won't work for multibyte characters but at
+ * worst will match some unrelated characters.
+ */
+ if ((p == line_end) || isspace(*p))
+ c1 = 0;
+ else
+ c1 = tolower(*p);
+ hash = c0 | (c1 << 8);
+ /* Ignore whitespace pairs */
+ if (hash == 0)
+ continue;
+ hashmap_entry_init(entry, hash);
+
+ found_entry = hashmap_get(&result->map, entry, NULL);
+ if (found_entry) {
+ found_entry->count += 1;
+ } else {
+ entry->count = 1;
+ hashmap_add(&result->map, entry);
+ ++entry;
+ }
+ }
+}
+
+static void free_fingerprint(struct fingerprint *f)
+{
+ hashmap_free(&f->map, 0);
+ free(f->entries);
+}
+
+/* Calculates the similarity between two fingerprints as the size of the
+ * intersection of their multisets, including repeated elements. See
+ * `struct fingerprint` for an explanation of the fingerprint representation.
+ * The similarity between "cat mat" and "father rather" is 2 because "at" is
+ * present twice in both strings while the similarity between "tim" and "mit"
+ * is 0.
+ */
+static int fingerprint_similarity(struct fingerprint *a, struct fingerprint *b)
+{
+ int intersection = 0;
+ struct hashmap_iter iter;
+ const struct fingerprint_entry *entry_a, *entry_b;
+
+ hashmap_iter_init(&b->map, &iter);
+
+ while ((entry_b = hashmap_iter_next(&iter))) {
+ if ((entry_a = hashmap_get(&a->map, entry_b, NULL))) {
+ intersection += entry_a->count < entry_b->count ?
+ entry_a->count : entry_b->count;
+ }
+ }
+ return intersection;
+}
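To make the byte-pair model concrete, the standalone sketch below reproduces the two documented similarity values ("cat mat" vs. "father rather" is 2, "tim" vs. "mit" is 0) using a deliberately simplified stand-in: a flat 256x256 count table instead of the hashmap used above. It illustrates the idea only and is not the actual implementation; error handling is omitted.

----
#include <assert.h>
#include <ctype.h>
#include <stdlib.h>
#include <string.h>

/*
 * Simplified fingerprint: a table of ordered (previous byte, current
 * byte) pair counts.  Whitespace is normalised to 0, an implicit 0 is
 * added at both ends of the string, letters are lower-cased, and
 * whitespace-only pairs are skipped.
 */
static int *pair_counts(const char *s)
{
	int *counts = calloc(256 * 256, sizeof(int));
	unsigned int c0 = 0, c1;
	size_t i, len = strlen(s);

	for (i = 0; i <= len; i++, c0 = c1) {
		c1 = (i == len || isspace((unsigned char)s[i]))
			? 0 : tolower((unsigned char)s[i]);
		if (c0 | c1)
			counts[c0 | (c1 << 8)]++;
	}
	return counts;
}

/* Multiset intersection size, i.e. the similarity of two strings. */
static int similarity(const char *a, const char *b)
{
	int *ca = pair_counts(a), *cb = pair_counts(b);
	int i, sum = 0;

	for (i = 0; i < 256 * 256; i++)
		sum += ca[i] < cb[i] ? ca[i] : cb[i];
	free(ca);
	free(cb);
	return sum;
}

int main(void)
{
	assert(similarity("cat mat", "father rather") == 2); /* "at" twice */
	assert(similarity("tim", "mit") == 0); /* pairs are ordered */
	return 0;
}
----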
+
+/* Subtracts byte-pair elements in B from A, modifying A in place.
+ */
+static void fingerprint_subtract(struct fingerprint *a, struct fingerprint *b)
+{
+ struct hashmap_iter iter;
+ struct fingerprint_entry *entry_a;
+ const struct fingerprint_entry *entry_b;
+
+ hashmap_iter_init(&b->map, &iter);
+
+ while ((entry_b = hashmap_iter_next(&iter))) {
+ if ((entry_a = hashmap_get(&a->map, entry_b, NULL))) {
+ if (entry_a->count <= entry_b->count)
+ hashmap_remove(&a->map, entry_b, NULL);
+ else
+ entry_a->count -= entry_b->count;
+ }
+ }
+}
+
+/* Calculate fingerprints for a series of lines.
+ * Puts the fingerprints in the fingerprints array, which must have been
+ * preallocated to allow storing line_count elements.
+ */
+static void get_line_fingerprints(struct fingerprint *fingerprints,
+ const char *content, const int *line_starts,
+ long first_line, long line_count)
+{
+ int i;
+ const char *linestart, *lineend;
+
+ line_starts += first_line;
+ for (i = 0; i < line_count; ++i) {
+ linestart = content + line_starts[i];
+ lineend = content + line_starts[i + 1];
+ get_fingerprint(fingerprints + i, linestart, lineend);
+ }
+}
+
+static void free_line_fingerprints(struct fingerprint *fingerprints,
+ int nr_fingerprints)
+{
+ int i;
+
+ for (i = 0; i < nr_fingerprints; i++)
+ free_fingerprint(&fingerprints[i]);
+}
+
+/* This contains the data necessary to linearly map a line number in one half
+ * of a diff chunk to the line in the other half of the diff chunk that is
+ * closest in terms of its position as a fraction of the length of the chunk.
+ */
+struct line_number_mapping {
+ int destination_start, destination_length,
+ source_start, source_length;
+};
+
+/* Given a line number in one range, offset and scale it to map it onto the
+ * other range.
+ * Essentially this mapping is a simple linear equation but the calculation is
+ * more complicated to allow performing it with integer operations.
+ * Another complication is that if a line could map onto many lines in the
+ * destination range then we want to choose the line at the center of those
+ * possibilities.
+ * Example: if the chunk is 2 lines long in A and 10 lines long in B then the
+ * first 5 lines in B will map onto the first line in the A chunk, while the
+ * last 5 lines will all map onto the second line in the A chunk.
+ * Example: if the chunk is 10 lines long in A and 2 lines long in B then line
+ * 0 in B will map onto line 2 in A, and line 1 in B will map onto line 7 in A.
+ */
+static int map_line_number(int line_number,
+ const struct line_number_mapping *mapping)
+{
+ return ((line_number - mapping->source_start) * 2 + 1) *
+ mapping->destination_length /
+ (mapping->source_length * 2) +
+ mapping->destination_start;
+}
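The two examples in the comment can be checked directly against the formula. A minimal sketch, assuming the struct and function above are visible in the same compilation unit (the helper name is invented):

----
static void map_line_number_examples(void)
{
	/* A is 10 lines, B is 2 lines; we map B onto A. */
	struct line_number_mapping b_to_big_a = {
		.destination_start = 0, .destination_length = 10,
		.source_start = 0, .source_length = 2,
	};
	/* A is 2 lines, B is 10 lines; we map B onto A. */
	struct line_number_mapping b_to_small_a = {
		.destination_start = 0, .destination_length = 2,
		.source_start = 0, .source_length = 10,
	};

	/* ((0 * 2 + 1) * 10) / (2 * 2) = 10 / 4 = 2 */
	assert(map_line_number(0, &b_to_big_a) == 2);
	/* ((1 * 2 + 1) * 10) / (2 * 2) = 30 / 4 = 7 */
	assert(map_line_number(1, &b_to_big_a) == 7);

	/* Lines 0..4 of B map onto line 0 of A ... */
	assert(map_line_number(4, &b_to_small_a) == 0);
	/* ... and lines 5..9 of B map onto line 1 of A. */
	assert(map_line_number(5, &b_to_small_a) == 1);
}
----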
+
+/* Get a pointer to the element storing the similarity between a line in A
+ * and a line in B.
+ *
+ * The similarities are stored in a 2-dimensional array. Each "row" in the
+ * array contains the similarities for a line in B. The similarities stored in
+ * a row are the similarities between the line in B and the nearby lines in A.
+ * To keep the length of each row the same, it is padded out with values of -1
+ * where the search range extends beyond the lines in A.
+ * For example, if max_search_distance_a is 2 and the two sides of a diff chunk
+ * look like this:
+ * a | m
+ * b | n
+ * c | o
+ * d | p
+ * e | q
+ * Then the similarity array will contain:
+ * [-1, -1, am, bm, cm,
+ * -1, an, bn, cn, dn,
+ * ao, bo, co, do, eo,
+ * bp, cp, dp, ep, -1,
+ * cq, dq, eq, -1, -1]
+ * Where similarities are denoted either by -1 for invalid, or the
+ * concatenation of the two lines in the diff being compared.
+ *
+ * \param similarities array of similarities between lines in A and B
+ * \param line_a the index of the line in A, in the same frame of reference as
+ * closest_line_a.
+ * \param local_line_b the index of the line in B, relative to the first line
+ * in B that similarities represents.
+ * \param closest_line_a the index of the line in A that is deemed to be
+ * closest to local_line_b. This must be in the same
+ * frame of reference as line_a. This value defines
+ * where similarities is centered for the line in B.
+ * \param max_search_distance_a maximum distance in lines from the closest line
+ * in A for other lines in A for which
+ * similarities may be calculated.
+ */
+static int *get_similarity(int *similarities,
+ int line_a, int local_line_b,
+ int closest_line_a, int max_search_distance_a)
+{
+ assert(abs(line_a - closest_line_a) <=
+ max_search_distance_a);
+ return similarities + line_a - closest_line_a +
+ max_search_distance_a +
+ local_line_b * (max_search_distance_a * 2 + 1);
+}
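Two slots from the worked array above can be located with the same index arithmetic. The sketch assumes get_similarity() is visible in the same unit; the wrapper name is invented and `similarities` may be any array flattened as in the comment (max_search_distance_a is 2, so each row holds 2 * 2 + 1 = 5 slots).

----
static void get_similarity_example(int *similarities)
{
	const int max_search_distance_a = 2;

	/*
	 * "dn": line d is index 3 in A, line n is local index 1 in B,
	 * and its closest line in A is b (index 1):
	 * 3 - 1 + 2 + 1 * 5 == slot 9 of the flattened array.
	 */
	assert(get_similarity(similarities, 3, 1, 1, max_search_distance_a)
	       == similarities + 9);

	/*
	 * "ao": line a is index 0 in A, line o is local index 2 in B,
	 * and its closest line in A is c (index 2):
	 * 0 - 2 + 2 + 2 * 5 == slot 10.
	 */
	assert(get_similarity(similarities, 0, 2, 2, max_search_distance_a)
	       == similarities + 10);
}
----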
+
+#define CERTAIN_NOTHING_MATCHES -2
+#define CERTAINTY_NOT_CALCULATED -1
+
+/* Given a line in B, first calculate its similarities with nearby lines in A
+ * if not already calculated, then identify the most similar and second most
+ * similar lines. The "certainty" is calculated based on those two
+ * similarities.
+ *
+ * \param start_a the index of the first line of the chunk in A
+ * \param length_a the length in lines of the chunk in A
+ * \param local_line_b the index of the line in B, relative to the first line
+ * in the chunk.
+ * \param fingerprints_a array of fingerprints for the chunk in A
+ * \param fingerprints_b array of fingerprints for the chunk in B
+ * \param similarities 2-dimensional array of similarities between lines in A
+ * and B. See get_similarity() for more details.
+ * \param certainties array of values indicating how strongly a line in B is
+ * matched with some line in A.
+ * \param second_best_result array of absolute indices in A for the second
+ * closest match of a line in B.
+ * \param result array of absolute indices in A for the closest match of a line
+ * in B.
+ * \param max_search_distance_a maximum distance in lines from the closest line
+ * in A for other lines in A for which
+ * similarities may be calculated.
+ * \param map_line_number_in_b_to_a parameter to map_line_number().
+ */
+static void find_best_line_matches(
+ int start_a,
+ int length_a,
+ int start_b,
+ int local_line_b,
+ struct fingerprint *fingerprints_a,
+ struct fingerprint *fingerprints_b,
+ int *similarities,
+ int *certainties,
+ int *second_best_result,
+ int *result,
+ const int max_search_distance_a,
+ const struct line_number_mapping *map_line_number_in_b_to_a)
+{
+
+ int i, search_start, search_end, closest_local_line_a, *similarity,
+ best_similarity = 0, second_best_similarity = 0,
+ best_similarity_index = 0, second_best_similarity_index = 0;
+
+ /* certainty has already been calculated so no need to redo the work */
+ if (certainties[local_line_b] != CERTAINTY_NOT_CALCULATED)
+ return;
+
+ closest_local_line_a = map_line_number(
+ local_line_b + start_b, map_line_number_in_b_to_a) - start_a;
+
+ search_start = closest_local_line_a - max_search_distance_a;
+ if (search_start < 0)
+ search_start = 0;
+
+ search_end = closest_local_line_a + max_search_distance_a + 1;
+ if (search_end > length_a)
+ search_end = length_a;
+
+ for (i = search_start; i < search_end; ++i) {
+ similarity = get_similarity(similarities,
+ i, local_line_b,
+ closest_local_line_a,
+ max_search_distance_a);
+ if (*similarity == -1) {
+ /* This value will never exceed 10 but assert just in
+ * case
+ */
+ assert(abs(i - closest_local_line_a) < 1000);
+ /* scale the similarity by (1000 - distance from
+ * closest line) to act as a tie break between lines
+ * that otherwise are equally similar.
+ */
+ *similarity = fingerprint_similarity(
+ fingerprints_b + local_line_b,
+ fingerprints_a + i) *
+ (1000 - abs(i - closest_local_line_a));
+ }
+ if (*similarity > best_similarity) {
+ second_best_similarity = best_similarity;
+ second_best_similarity_index = best_similarity_index;
+ best_similarity = *similarity;
+ best_similarity_index = i;
+ } else if (*similarity > second_best_similarity) {
+ second_best_similarity = *similarity;
+ second_best_similarity_index = i;
+ }
+ }
+
+ if (best_similarity == 0) {
+ /* this line definitely doesn't match with anything. Mark it
+ * with this special value so it doesn't get invalidated and
+ * won't be recalculated.
+ */
+ certainties[local_line_b] = CERTAIN_NOTHING_MATCHES;
+ result[local_line_b] = -1;
+ } else {
+ /* Calculate the certainty with which this line matches.
+ * If the line matches well with two lines then that reduces
+ * the certainty. However we still want to prioritise matching
+ * a line that matches very well with two lines over matching a
+ * line that matches poorly with one line, hence doubling
+ * best_similarity.
+ * This means that if we have
+ * line X that matches only one line with a score of 3,
+ * line Y that matches two lines equally with a score of 5,
+ * and line Z that matches only one line with a score of 2,
+ * then the lines in order of certainty are X, Y, Z.
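+ * Working the arithmetic: X's certainty is 3 * 2 - 0 = 6, Y's is
+ * 5 * 2 - 5 = 5, and Z's is 2 * 2 - 0 = 4, hence the X, Y, Z order.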
+ */
+ certainties[local_line_b] = best_similarity * 2 -
+ second_best_similarity;
+
+ /* We keep both the best and second best results to allow us to
+ * check at a later stage of the matching process whether the
+ * result needs to be invalidated.
+ */
+ result[local_line_b] = start_a + best_similarity_index;
+ second_best_result[local_line_b] =
+ start_a + second_best_similarity_index;
+ }
+}
+
+/*
+ * This finds the line that we can match with the most confidence, and
+ * uses it as a partition. It then calls itself on the lines on either side of
+ * that partition. In this way we avoid lines appearing out of order, and
+ * retain a sensible line ordering.
+ * \param start_a index of the first line in A with which lines in B may be
+ * compared.
+ * \param start_b index of the first line in B for which matching should be
+ * done.
+ * \param length_a number of lines in A with which lines in B may be compared.
+ * \param length_b number of lines in B for which matching should be done.
+ * \param fingerprints_a mutable array of fingerprints in A. The first element
+ * corresponds to the line at start_a.
+ * \param fingerprints_b array of fingerprints in B. The first element
+ * corresponds to the line at start_b.
+ * \param similarities 2-dimensional array of similarities between lines in A
+ * and B. See get_similarity() for more details.
+ * \param certainties array of values indicating how strongly a line in B is
+ * matched with some line in A.
+ * \param second_best_result array of absolute indices in A for the second
+ * closest match of a line in B.
+ * \param result array of absolute indices in A for the closest match of a line
+ * in B.
+ * \param max_search_distance_a maximum distance in lines from the closest line
+ * in A for other lines in A for which
+ * similarities may be calculated.
+ * \param max_search_distance_b an upper bound on the greatest possible
+ * distance between lines in B such that they will
+ * both be compared with the same line in A
+ * according to max_search_distance_a.
+ * \param map_line_number_in_b_to_a parameter to map_line_number().
+ */
+static void fuzzy_find_matching_lines_recurse(
+ int start_a, int start_b,
+ int length_a, int length_b,
+ struct fingerprint *fingerprints_a,
+ struct fingerprint *fingerprints_b,
+ int *similarities,
+ int *certainties,
+ int *second_best_result,
+ int *result,
+ int max_search_distance_a,
+ int max_search_distance_b,
+ const struct line_number_mapping *map_line_number_in_b_to_a)
+{
+ int i, invalidate_min, invalidate_max, offset_b,
+ second_half_start_a, second_half_start_b,
+ second_half_length_a, second_half_length_b,
+ most_certain_line_a, most_certain_local_line_b = -1,
+ most_certain_line_certainty = -1,
+ closest_local_line_a;
+
+ for (i = 0; i < length_b; ++i) {
+ find_best_line_matches(start_a,
+ length_a,
+ start_b,
+ i,
+ fingerprints_a,
+ fingerprints_b,
+ similarities,
+ certainties,
+ second_best_result,
+ result,
+ max_search_distance_a,
+ map_line_number_in_b_to_a);
+
+ if (certainties[i] > most_certain_line_certainty) {
+ most_certain_line_certainty = certainties[i];
+ most_certain_local_line_b = i;
+ }
+ }
+
+ /* No matches. */
+ if (most_certain_local_line_b == -1)
+ return;
+
+ most_certain_line_a = result[most_certain_local_line_b];
+
+ /*
+ * Subtract the most certain line's fingerprint in B from the matched
+ * fingerprint in A. This means that other lines in B can't also match
+ * the same parts of the line in A.
+ */
+ fingerprint_subtract(fingerprints_a + most_certain_line_a - start_a,
+ fingerprints_b + most_certain_local_line_b);
+
+ /* Invalidate results that may be affected by the choice of most
+ * certain line.
+ */
+ invalidate_min = most_certain_local_line_b - max_search_distance_b;
+ invalidate_max = most_certain_local_line_b + max_search_distance_b + 1;
+ if (invalidate_min < 0)
+ invalidate_min = 0;
+ if (invalidate_max > length_b)
+ invalidate_max = length_b;
+
+ /* As the fingerprint in A has changed, discard previously calculated
+ * similarity values with that fingerprint.
+ */
+ for (i = invalidate_min; i < invalidate_max; ++i) {
+ closest_local_line_a = map_line_number(
+ i + start_b, map_line_number_in_b_to_a) - start_a;
+
+ /* Check that the lines in A and B are close enough that there
+ * is a similarity value for them.
+ */
+ if (abs(most_certain_line_a - start_a - closest_local_line_a) >
+ max_search_distance_a) {
+ continue;
+ }
+
+ *get_similarity(similarities, most_certain_line_a - start_a,
+ i, closest_local_line_a,
+ max_search_distance_a) = -1;
+ }
+
+ /* More invalidating of results that may be affected by the choice of
+ * most certain line.
+ * Discard the matches for lines in B that are currently matched with a
+ * line in A such that their ordering contradicts the ordering imposed
+ * by the choice of most certain line.
+ */
+ for (i = most_certain_local_line_b - 1; i >= invalidate_min; --i) {
+ /* In this loop we discard results for lines in B that are
+ * before most-certain-line-B but are matched with a line in A
+ * that is after most-certain-line-A.
+ */
+ if (certainties[i] >= 0 &&
+ (result[i] >= most_certain_line_a ||
+ second_best_result[i] >= most_certain_line_a)) {
+ certainties[i] = CERTAINTY_NOT_CALCULATED;
+ }
+ }
+ for (i = most_certain_local_line_b + 1; i < invalidate_max; ++i) {
+ /* In this loop we discard results for lines in B that are
+ * after most-certain-line-B but are matched with a line in A
+ * that is before most-certain-line-A.
+ */
+ if (certainties[i] >= 0 &&
+ (result[i] <= most_certain_line_a ||
+ second_best_result[i] <= most_certain_line_a)) {
+ certainties[i] = CERTAINTY_NOT_CALCULATED;
+ }
+ }
+
+ /* Repeat the matching process for lines before the most certain line.
+ */
+ if (most_certain_local_line_b > 0) {
+ fuzzy_find_matching_lines_recurse(
+ start_a, start_b,
+ most_certain_line_a + 1 - start_a,
+ most_certain_local_line_b,
+ fingerprints_a, fingerprints_b, similarities,
+ certainties, second_best_result, result,
+ max_search_distance_a,
+ max_search_distance_b,
+ map_line_number_in_b_to_a);
+ }
+ /* Repeat the matching process for lines after the most certain line.
+ */
+ if (most_certain_local_line_b + 1 < length_b) {
+ second_half_start_a = most_certain_line_a;
+ offset_b = most_certain_local_line_b + 1;
+ second_half_start_b = start_b + offset_b;
+ second_half_length_a =
+ length_a + start_a - second_half_start_a;
+ second_half_length_b =
+ length_b + start_b - second_half_start_b;
+ fuzzy_find_matching_lines_recurse(
+ second_half_start_a, second_half_start_b,
+ second_half_length_a, second_half_length_b,
+ fingerprints_a + second_half_start_a - start_a,
+ fingerprints_b + offset_b,
+ similarities +
+ offset_b * (max_search_distance_a * 2 + 1),
+ certainties + offset_b,
+ second_best_result + offset_b, result + offset_b,
+ max_search_distance_a,
+ max_search_distance_b,
+ map_line_number_in_b_to_a);
+ }
+}
+
+/* Find the lines in the parent line range that most closely match the lines in
+ * the target line range. This is accomplished by matching fingerprints in each
+ * blame_origin, and choosing the best matches that preserve the line ordering.
+ * See struct fingerprint for details of fingerprint matching, and
+ * fuzzy_find_matching_lines_recurse for details of preserving line ordering.
+ *
+ * The performance is believed to be O(n log n) in the typical case and O(n^2)
+ * in a pathological case, where n is the number of lines in the target range.
+ */
+static int *fuzzy_find_matching_lines(struct blame_origin *parent,
+ struct blame_origin *target,
+ int tlno, int parent_slno, int same,
+ int parent_len)
+{
+ /* We use the terminology "A" for the left hand side of the diff AKA
+ * parent, and "B" for the right hand side of the diff AKA target. */
+ int start_a = parent_slno;
+ int length_a = parent_len;
+ int start_b = tlno;
+ int length_b = same - tlno;
+
+ struct line_number_mapping map_line_number_in_b_to_a = {
+ start_a, length_a, start_b, length_b
+ };
+
+ struct fingerprint *fingerprints_a = parent->fingerprints;
+ struct fingerprint *fingerprints_b = target->fingerprints;
+
+ int i, *result, *second_best_result,
+ *certainties, *similarities, similarity_count;
+
+ /*
+ * max_search_distance_a means that given a line in B, compare it to
+ * the line in A that is closest to its position, and the lines in A
+ * that are no greater than max_search_distance_a lines away from the
+ * closest line in A.
+ *
+ * max_search_distance_b is an upper bound on the greatest possible
+ * distance between lines in B such that they will both be compared
+ * with the same line in A according to max_search_distance_a.
+ */
+ int max_search_distance_a = 10, max_search_distance_b;
+
+ if (length_a <= 0)
+ return NULL;
+
+ if (max_search_distance_a >= length_a)
+ max_search_distance_a = length_a ? length_a - 1 : 0;
+
+ max_search_distance_b = ((2 * max_search_distance_a + 1) * length_b
+ - 1) / length_a;
+
+ result = xcalloc(sizeof(int), length_b);
+ second_best_result = xcalloc(sizeof(int), length_b);
+ certainties = xcalloc(sizeof(int), length_b);
+
+ /* See get_similarity() for details of similarities. */
+ similarity_count = length_b * (max_search_distance_a * 2 + 1);
+ similarities = xcalloc(sizeof(int), similarity_count);
+
+ for (i = 0; i < length_b; ++i) {
+ result[i] = -1;
+ second_best_result[i] = -1;
+ certainties[i] = CERTAINTY_NOT_CALCULATED;
+ }
+
+ for (i = 0; i < similarity_count; ++i)
+ similarities[i] = -1;
+
+ fuzzy_find_matching_lines_recurse(start_a, start_b,
+ length_a, length_b,
+ fingerprints_a + start_a,
+ fingerprints_b + start_b,
+ similarities,
+ certainties,
+ second_best_result,
+ result,
+ max_search_distance_a,
+ max_search_distance_b,
+ &map_line_number_in_b_to_a);
+
+ free(similarities);
+ free(certainties);
+ free(second_best_result);
+
+ return result;
+}
+
+static void fill_origin_fingerprints(struct blame_origin *o)
+{
+ int *line_starts;
+
+ if (o->fingerprints)
+ return;
+ o->num_lines = find_line_starts(&line_starts, o->file.ptr,
+ o->file.size);
+ o->fingerprints = xcalloc(sizeof(struct fingerprint), o->num_lines);
+ get_line_fingerprints(o->fingerprints, o->file.ptr, line_starts,
+ 0, o->num_lines);
+ free(line_starts);
+}
+
+static void drop_origin_fingerprints(struct blame_origin *o)
+{
+ if (o->fingerprints) {
+ free_line_fingerprints(o->fingerprints, o->num_lines);
+ o->num_lines = 0;
+ FREE_AND_NULL(o->fingerprints);
+ }
+}
+
/*
* Given an origin, prepare mmfile_t structure to be used by the
* diff machinery
*/
static void fill_origin_blob(struct diff_options *opt,
- struct blame_origin *o, mmfile_t *file, int *num_read_blob)
+ struct blame_origin *o, mmfile_t *file,
+ int *num_read_blob, int fill_fingerprints)
{
if (!o->file.ptr) {
enum object_type type;
}
else
*file = o->file;
+ if (fill_fingerprints)
+ fill_origin_fingerprints(o);
}
static void drop_origin_blob(struct blame_origin *o)
{
FREE_AND_NULL(o->file.ptr);
+ drop_origin_fingerprints(o);
}
/*
for (ent = sb->ent; ent && (next = ent->next); ent = next) {
if (ent->suspect == next->suspect &&
- ent->s_lno + ent->num_lines == next->s_lno) {
+ ent->s_lno + ent->num_lines == next->s_lno &&
+ ent->ignored == next->ignored &&
+ ent->unblamable == next->unblamable) {
ent->num_lines += next->num_lines;
ent->next = next->next;
blame_origin_decref(next->suspect);
{
if (!is_null_oid(&origin->blob_oid))
return 0;
- if (get_tree_entry(&origin->commit->object.oid, origin->path, &origin->blob_oid, &origin->mode))
+ if (get_tree_entry(r, &origin->commit->object.oid, origin->path, &origin->blob_oid, &origin->mode))
goto error_out;
if (oid_object_info(r, &origin->blob_oid, NULL) != OBJ_BLOB)
goto error_out;
struct blame_origin *parent)
{
int chunk_end_lno;
+ int i;
memset(split, 0, sizeof(struct blame_entry [3]));
+ for (i = 0; i < 3; i++) {
+ split[i].ignored = e->ignored;
+ split[i].unblamable = e->unblamable;
+ }
+
if (e->s_lno < tlno) {
/* there is a pre-chunk part not blamed on parent */
split[0].suspect = blame_origin_incref(e->suspect);
return tail;
}
+/*
+ * Splits a blame entry into two entries at 'len' lines. The original 'e'
+ * consists of len lines, i.e. [e->lno, e->lno + len), and the second part,
+ * which is returned, consists of the remainder: [e->lno + len, e->lno +
+ * e->num_lines). The caller needs to sort out the reference counting for the
+ * new entry's suspect.
+ */
+static struct blame_entry *split_blame_at(struct blame_entry *e, int len,
+ struct blame_origin *new_suspect)
+{
+ struct blame_entry *n = xcalloc(1, sizeof(struct blame_entry));
+
+ n->suspect = new_suspect;
+ n->ignored = e->ignored;
+ n->unblamable = e->unblamable;
+ n->lno = e->lno + len;
+ n->s_lno = e->s_lno + len;
+ n->num_lines = e->num_lines - len;
+ e->num_lines = len;
+ e->score = 0;
+ return n;
+}
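A concrete split may help: take an entry covering target lines 10..16 blamed on suspect lines 40..46 and carve off its first three lines. The sketch assumes the struct and helper above; the reference counting follows the call sites further below, and the function name and starting values are illustrative.

----
static void split_blame_at_example(struct blame_entry *e)
{
	struct blame_entry *n;

	/* Assume e->lno == 10, e->s_lno == 40, e->num_lines == 7. */
	n = split_blame_at(e, 3, blame_origin_incref(e->suspect));

	/* e keeps target 10..12, blamed on suspect 40..42. */
	assert(e->num_lines == 3);
	/* n takes the remainder: target 13..16, suspect 43..46. */
	assert(n->lno == 13 && n->s_lno == 43 && n->num_lines == 4);
}
----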
+
+struct blame_line_tracker {
+ int is_parent;
+ int s_lno;
+};
+
+static int are_lines_adjacent(struct blame_line_tracker *first,
+ struct blame_line_tracker *second)
+{
+ return first->is_parent == second->is_parent &&
+ first->s_lno + 1 == second->s_lno;
+}
+
+static int scan_parent_range(struct fingerprint *p_fps,
+ struct fingerprint *t_fps, int t_idx,
+ int from, int nr_lines)
+{
+ int sim, p_idx;
+ #define FINGERPRINT_FILE_THRESHOLD 10
+ int best_sim_val = FINGERPRINT_FILE_THRESHOLD;
+ int best_sim_idx = -1;
+
+ for (p_idx = from; p_idx < from + nr_lines; p_idx++) {
+ sim = fingerprint_similarity(&t_fps[t_idx], &p_fps[p_idx]);
+ if (sim < best_sim_val)
+ continue;
+ /* Break ties with the closest-to-target line number */
+ if (sim == best_sim_val && best_sim_idx != -1 &&
+ abs(best_sim_idx - t_idx) < abs(p_idx - t_idx))
+ continue;
+ best_sim_val = sim;
+ best_sim_idx = p_idx;
+ }
+ return best_sim_idx;
+}
+
+/*
+ * The first pass checks the blame entry (from the target) against the parent's
+ * diff chunk. If that fails for a line, the second pass tries to match that
+ * line to any part of parent file. That catches cases where a change was
+ * broken into two chunks by 'context.'
+ */
+static void guess_line_blames(struct blame_origin *parent,
+ struct blame_origin *target,
+ int tlno, int offset, int same, int parent_len,
+ struct blame_line_tracker *line_blames)
+{
+ int i, best_idx, target_idx;
+ int parent_slno = tlno + offset;
+ int *fuzzy_matches;
+
+ fuzzy_matches = fuzzy_find_matching_lines(parent, target,
+ tlno, parent_slno, same,
+ parent_len);
+ for (i = 0; i < same - tlno; i++) {
+ target_idx = tlno + i;
+ if (fuzzy_matches && fuzzy_matches[i] >= 0) {
+ best_idx = fuzzy_matches[i];
+ } else {
+ best_idx = scan_parent_range(parent->fingerprints,
+ target->fingerprints,
+ target_idx, 0,
+ parent->num_lines);
+ }
+ if (best_idx >= 0) {
+ line_blames[i].is_parent = 1;
+ line_blames[i].s_lno = best_idx;
+ } else {
+ line_blames[i].is_parent = 0;
+ line_blames[i].s_lno = target_idx;
+ }
+ }
+ free(fuzzy_matches);
+}
+
+/*
+ * This decides which parts of a blame entry go to the parent (added to the
+ * ignoredp list) and which stay with the target (added to the diffp list). The
+ * actual decision was made in a separate heuristic function, and those answers
+ * for the lines in 'e' are in line_blames. This consumes e, essentially
+ * putting it on a list.
+ *
+ * Note that the blame entries on the ignoredp list are not necessarily sorted
+ * with respect to the parent's line numbers yet.
+ */
+static void ignore_blame_entry(struct blame_entry *e,
+ struct blame_origin *parent,
+ struct blame_entry **diffp,
+ struct blame_entry **ignoredp,
+ struct blame_line_tracker *line_blames)
+{
+ int entry_len, nr_lines, i;
+
+ /*
+ * We carve new entries off the front of e. Each entry comes from a
+ * contiguous chunk of lines: adjacent lines from the same origin
+ * (either the parent or the target).
+ */
+ entry_len = 1;
+ nr_lines = e->num_lines; /* e changes in the loop */
+ for (i = 0; i < nr_lines; i++) {
+ struct blame_entry *next = NULL;
+
+ /*
+ * We are often adjacent to the next line - only split the blame
+ * entry when we have to.
+ */
+ if (i + 1 < nr_lines) {
+ if (are_lines_adjacent(&line_blames[i],
+ &line_blames[i + 1])) {
+ entry_len++;
+ continue;
+ }
+ next = split_blame_at(e, entry_len,
+ blame_origin_incref(e->suspect));
+ }
+ if (line_blames[i].is_parent) {
+ e->ignored = 1;
+ blame_origin_decref(e->suspect);
+ e->suspect = blame_origin_incref(parent);
+ e->s_lno = line_blames[i - entry_len + 1].s_lno;
+ e->next = *ignoredp;
+ *ignoredp = e;
+ } else {
+ e->unblamable = 1;
+ /* e->s_lno is already in the target's address space. */
+ e->next = *diffp;
+ *diffp = e;
+ }
+ assert(e->num_lines == entry_len);
+ e = next;
+ entry_len = 1;
+ }
+ assert(!e);
+}
+
/*
* Process one hunk from the patch between the current suspect for
* blame_entry e and its parent. This first blames any unfinished
* -C options may lead to overlapping/duplicate source line number
* ranges, all we can rely on from sorting/merging is the order of the
* first suspect line number.
+ *
+ * tlno: line number in the target where this chunk begins
+ * same: line number in the target where this chunk ends
+ * offset: add to tlno to get the chunk starting point in the parent
+ * parent_len: number of lines in the parent chunk
*/
static void blame_chunk(struct blame_entry ***dstq, struct blame_entry ***srcq,
- int tlno, int offset, int same,
- struct blame_origin *parent)
+ int tlno, int offset, int same, int parent_len,
+ struct blame_origin *parent,
+ struct blame_origin *target, int ignore_diffs)
{
struct blame_entry *e = **srcq;
- struct blame_entry *samep = NULL, *diffp = NULL;
+ struct blame_entry *samep = NULL, *diffp = NULL, *ignoredp = NULL;
+ struct blame_line_tracker *line_blames = NULL;
while (e && e->s_lno < tlno) {
struct blame_entry *next = e->next;
*/
if (e->s_lno + e->num_lines > tlno) {
/* Move second half to a new record */
- int len = tlno - e->s_lno;
- struct blame_entry *n = xcalloc(1, sizeof (struct blame_entry));
- n->suspect = e->suspect;
- n->lno = e->lno + len;
- n->s_lno = e->s_lno + len;
- n->num_lines = e->num_lines - len;
- e->num_lines = len;
- e->score = 0;
+ struct blame_entry *n;
+
+ n = split_blame_at(e, tlno - e->s_lno, e->suspect);
/* Push new record to diffp */
n->next = diffp;
diffp = n;
*/
samep = NULL;
diffp = NULL;
+
+ if (ignore_diffs && same - tlno > 0) {
+ line_blames = xcalloc(sizeof(struct blame_line_tracker),
+ same - tlno);
+ guess_line_blames(parent, target, tlno, offset, same,
+ parent_len, line_blames);
+ }
+
while (e && e->s_lno < same) {
struct blame_entry *next = e->next;
* Move second half to a new record to be
* processed by later chunks
*/
- int len = same - e->s_lno;
- struct blame_entry *n = xcalloc(1, sizeof (struct blame_entry));
- n->suspect = blame_origin_incref(e->suspect);
- n->lno = e->lno + len;
- n->s_lno = e->s_lno + len;
- n->num_lines = e->num_lines - len;
- e->num_lines = len;
- e->score = 0;
+ struct blame_entry *n;
+
+ n = split_blame_at(e, same - e->s_lno,
+ blame_origin_incref(e->suspect));
/* Push new record to samep */
n->next = samep;
samep = n;
}
- e->next = diffp;
- diffp = e;
+ if (ignore_diffs) {
+ ignore_blame_entry(e, parent, &diffp, &ignoredp,
+ line_blames + e->s_lno - tlno);
+ } else {
+ e->next = diffp;
+ diffp = e;
+ }
e = next;
}
+ free(line_blames);
+ if (ignoredp) {
+ /*
+ * Note ignoredp is not sorted yet, and thus neither is dstq.
+ * That list must be sorted before we queue_blames(). We defer
+ * sorting until after all diff hunks are processed, so that
+ * guess_line_blames() can pick *any* line in the parent. The
+ * slight drawback is that we end up sorting all blame entries
+ * passed to the parent, including those that are unrelated to
+ * changes made by the ignored commit.
+ */
+ **dstq = reverse_blame(ignoredp, **dstq);
+ *dstq = &ignoredp->next;
+ }
**srcq = reverse_blame(diffp, reverse_blame(samep, e));
/* Move across elements that are in the unblamable portion */
if (diffp)
struct blame_chunk_cb_data {
struct blame_origin *parent;
+ struct blame_origin *target;
long offset;
+ int ignore_diffs;
struct blame_entry **dstq;
struct blame_entry **srcq;
};
if (start_a - start_b != d->offset)
die("internal error in blame::blame_chunk_cb");
blame_chunk(&d->dstq, &d->srcq, start_b, start_a - start_b,
- start_b + count_b, d->parent);
+ start_b + count_b, count_a, d->parent, d->target,
+ d->ignore_diffs);
d->offset = start_a + count_a - (start_b + count_b);
return 0;
}
*/
static void pass_blame_to_parent(struct blame_scoreboard *sb,
struct blame_origin *target,
- struct blame_origin *parent)
+ struct blame_origin *parent, int ignore_diffs)
{
mmfile_t file_p, file_o;
struct blame_chunk_cb_data d;
return; /* nothing remains for this target */
d.parent = parent;
+ d.target = target;
d.offset = 0;
+ d.ignore_diffs = ignore_diffs;
d.dstq = &newdest; d.srcq = &target->suspects;
- fill_origin_blob(&sb->revs->diffopt, parent, &file_p, &sb->num_read_blob);
- fill_origin_blob(&sb->revs->diffopt, target, &file_o, &sb->num_read_blob);
+ fill_origin_blob(&sb->revs->diffopt, parent, &file_p,
+ &sb->num_read_blob, ignore_diffs);
+ fill_origin_blob(&sb->revs->diffopt, target, &file_o,
+ &sb->num_read_blob, ignore_diffs);
sb->num_get_patch++;
if (diff_hunks(&file_p, &file_o, blame_chunk_cb, &d, sb->xdl_opts))
oid_to_hex(&parent->commit->object.oid),
oid_to_hex(&target->commit->object.oid));
/* The rest are the same as the parent */
- blame_chunk(&d.dstq, &d.srcq, INT_MAX, d.offset, INT_MAX, parent);
+ blame_chunk(&d.dstq, &d.srcq, INT_MAX, d.offset, INT_MAX, 0,
+ parent, target, 0);
*d.dstq = NULL;
+ if (ignore_diffs)
+ newdest = llist_mergesort(newdest, get_next_blame,
+ set_next_blame,
+ compare_blame_suspect);
queue_blames(sb, parent, newdest);
return;
if (!unblamed)
return; /* nothing remains for this target */
- fill_origin_blob(&sb->revs->diffopt, parent, &file_p, &sb->num_read_blob);
+ fill_origin_blob(&sb->revs->diffopt, parent, &file_p,
+ &sb->num_read_blob, 0);
if (!file_p.ptr)
return;
norigin = get_origin(parent, p->one->path);
oidcpy(&norigin->blob_oid, &p->one->oid);
norigin->mode = p->one->mode;
- fill_origin_blob(&sb->revs->diffopt, norigin, &file_p, &sb->num_read_blob);
+ fill_origin_blob(&sb->revs->diffopt, norigin, &file_p,
+ &sb->num_read_blob, 0);
if (!file_p.ptr)
continue;
blame_origin_incref(porigin);
origin->previous = porigin;
}
- pass_blame_to_parent(sb, origin, porigin);
+ pass_blame_to_parent(sb, origin, porigin, 0);
if (!origin->suspects)
goto finish;
}
+ /*
+ * Pass remaining suspects for ignored commits to their parents.
+ */
+ if (oidset_contains(&sb->ignore_list, &commit->object.oid)) {
+ for (i = 0, sg = first_scapegoat(revs, commit, sb->reverse);
+ i < num_sg && sg;
+ sg = sg->next, i++) {
+ struct blame_origin *porigin = sg_origin[i];
+
+ if (!porigin)
+ continue;
+ pass_blame_to_parent(sb, origin, porigin, 1);
+ /*
+ * Preemptively drop porigin so we can refresh the
+ * fingerprints if we use the parent again, which can
+ * occur if you ignore back-to-back commits.
+ */
+ drop_origin_blob(porigin);
+ if (!origin->suspects)
+ goto finish;
+ }
+ }
+
/*
* Optionally find moves in parents' files.
*/
}
}
-static const char *get_next_line(const char *start, const char *end)
-{
- const char *nl = memchr(start, '\n', end - start);
- return nl ? nl + 1 : end;
-}
-
/*
* To allow quick access to the contents of nth line in the
* final image, prepare an index in the scoreboard.
*/
static int prepare_lines(struct blame_scoreboard *sb)
{
- const char *buf = sb->final_buf;
- unsigned long len = sb->final_buf_size;
- const char *end = buf + len;
- const char *p;
- int *lineno;
- int num = 0;
-
- for (p = buf; p < end; p = get_next_line(p, end))
- num++;
-
- ALLOC_ARRAY(sb->lineno, num + 1);
- lineno = sb->lineno;
-
- for (p = buf; p < end; p = get_next_line(p, end))
- *lineno++ = p - buf;
-
- *lineno = len;
-
- sb->num_lines = num;
+ sb->num_lines = find_line_starts(&sb->lineno, sb->final_buf,
+ sb->final_buf_size);
return sb->num_lines;
}
*/
struct blame_entry *suspects;
mmfile_t file;
+ int num_lines;
+ void *fingerprints;
struct object_id blob_oid;
unsigned short mode;
/* guilty gets set when shipping any suspects to the final
* scanning the lines over and over.
*/
unsigned score;
+ int ignored;
+ int unblamable;
};
/*
/* linked list of blames */
struct blame_entry *ent;
+ struct oidset ignore_list;
+
/* look-up a line in the final buffer */
int num_lines;
int *lineno;
struct blob *lookup_blob(struct repository *r, const struct object_id *oid)
{
- struct object *obj = lookup_object(r, oid->hash);
+ struct object *obj = lookup_object(r, oid);
if (!obj)
- return create_object(r, oid->hash,
- alloc_blob_node(r));
+ return create_object(r, oid, alloc_blob_node(r));
return object_as_type(r, obj, OBJ_BLOB, 0);
}
free(real_ref);
}
-void remove_branch_state(struct repository *r)
+void remove_merge_branch_state(struct repository *r)
{
- sequencer_post_commit_cleanup(r);
unlink(git_path_merge_head(r));
unlink(git_path_merge_rr(r));
unlink(git_path_merge_msg(r));
unlink(git_path_merge_mode(r));
+}
+
+void remove_branch_state(struct repository *r, int verbose)
+{
+ sequencer_post_commit_cleanup(r, verbose);
unlink(git_path_squash_msg(r));
+ remove_merge_branch_state(r);
}
void die_if_checked_out(const char *branch, int ignore_current_worktree)
*/
int validate_new_branchname(const char *name, struct strbuf *ref, int force);
+/*
+ * Remove information about the merge state on the current
+ * branch. (E.g., MERGE_HEAD)
+ */
+void remove_merge_branch_state(struct repository *r);
+
/*
* Remove information about the state of working on the current
* branch. (E.g., MERGE_HEAD)
*/
-void remove_branch_state(struct repository *r);
+void remove_branch_state(struct repository *r, int verbose);
/*
* Configure local branch "local" as downstream to branch "remote"
int cmd_diff(int argc, const char **argv, const char *prefix);
int cmd_diff_tree(int argc, const char **argv, const char *prefix);
int cmd_difftool(int argc, const char **argv, const char *prefix);
+int cmd_env__helper(int argc, const char **argv, const char *prefix);
int cmd_fast_export(int argc, const char **argv, const char *prefix);
int cmd_fetch(int argc, const char **argv, const char *prefix);
int cmd_fetch_pack(int argc, const char **argv, const char *prefix);
int cmd_repack(int argc, const char **argv, const char *prefix);
int cmd_rerere(int argc, const char **argv, const char *prefix);
int cmd_reset(int argc, const char **argv, const char *prefix);
+int cmd_restore(int argc, const char **argv, const char *prefix);
int cmd_rev_list(int argc, const char **argv, const char *prefix);
int cmd_rev_parse(int argc, const char **argv, const char *prefix);
int cmd_revert(int argc, const char **argv, const char *prefix);
int cmd_stash(int argc, const char **argv, const char *prefix);
int cmd_stripspace(int argc, const char **argv, const char *prefix);
int cmd_submodule__helper(int argc, const char **argv, const char *prefix);
+int cmd_switch(int argc, const char **argv, const char *prefix);
int cmd_symbolic_ref(int argc, const char **argv, const char *prefix);
int cmd_tag(int argc, const char **argv, const char *prefix);
int cmd_tar_tree(int argc, const char **argv, const char *prefix);
if (merge_tree(remote_tree))
return -1;
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, 0);
return 0;
}
static int show_progress;
static char repeated_meta_color[COLOR_MAXLEN];
static int coloring_mode;
+static struct string_list ignore_revs_file_list = STRING_LIST_INIT_NODUP;
+static int mark_unblamable_lines;
+static int mark_ignored_lines;
static struct date_mode blame_date_mode = { DATE_ISO8601 };
static size_t blame_date_width;
static struct string_list mailmap = STRING_LIST_INIT_NODUP;
-#ifndef DEBUG
-#define DEBUG 0
+#ifndef DEBUG_BLAME
+#define DEBUG_BLAME 0
#endif
static unsigned blame_move_score;
}
}
+ if (mark_unblamable_lines && ent->unblamable) {
+ length--;
+ putchar('*');
+ }
+ if (mark_ignored_lines && ent->ignored) {
+ length--;
+ putchar('?');
+ }
printf("%.*s", length, hex);
if (opt & OUTPUT_ANNOTATE_COMPAT) {
const char *name;
parse_date_format(value, &blame_date_mode);
return 0;
}
+ if (!strcmp(var, "blame.ignorerevsfile")) {
+ const char *str;
+ int ret;
+
+ ret = git_config_pathname(&str, var, value);
+ if (ret)
+ return ret;
+ string_list_insert(&ignore_revs_file_list, str);
+ return 0;
+ }
+ if (!strcmp(var, "blame.markunblamablelines")) {
+ mark_unblamable_lines = git_config_bool(var, value);
+ return 0;
+ }
+ if (!strcmp(var, "blame.markignoredlines")) {
+ mark_ignored_lines = git_config_bool(var, value);
+ return 0;
+ }
if (!strcmp(var, "color.blame.repeatedlines")) {
if (color_parse_mem(value, strlen(value), repeated_meta_color))
warning(_("invalid color '%s' in color.blame.repeatedLines"),
return OBJ_NONE < oid_object_info(the_repository, &oid, NULL);
}
+static void build_ignorelist(struct blame_scoreboard *sb,
+ struct string_list *ignore_revs_file_list,
+ struct string_list *ignore_rev_list)
+{
+ struct string_list_item *i;
+ struct object_id oid;
+
+ oidset_init(&sb->ignore_list, 0);
+ for_each_string_list_item(i, ignore_revs_file_list) {
+ if (!strcmp(i->string, ""))
+ oidset_clear(&sb->ignore_list);
+ else
+ oidset_parse_file(&sb->ignore_list, i->string);
+ }
+ for_each_string_list_item(i, ignore_rev_list) {
+ if (get_oid_committish(i->string, &oid))
+ die(_("cannot find revision %s to ignore"), i->string);
+ oidset_insert(&sb->ignore_list, &oid);
+ }
+}
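The list semantics mirror other multi-valued options: --ignore-rev and --ignore-revs-file may be given repeatedly (and via blame.ignoreRevsFile in the config), and an empty file entry resets whatever was accumulated so far. A sketch of that behaviour, assuming it runs next to the helper above; the file names and the revision are illustrative only.

----
static void build_ignorelist_example(struct blame_scoreboard *sb)
{
	struct string_list files = STRING_LIST_INIT_NODUP;
	struct string_list revs = STRING_LIST_INIT_NODUP;

	string_list_append(&files, "old-ignores.txt");     /* added ... */
	string_list_append(&files, "");                    /* ... then discarded */
	string_list_append(&files, "current-ignores.txt");
	string_list_append(&revs, "v2.20.0");

	build_ignorelist(sb, &files, &revs);
	/*
	 * sb->ignore_list now holds the object IDs listed in
	 * current-ignores.txt plus the commit v2.20.0 resolves to;
	 * the entries from old-ignores.txt were wiped by the empty
	 * string.
	 */
	string_list_clear(&files, 0);
	string_list_clear(&revs, 0);
}
----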
+
int cmd_blame(int argc, const char **argv, const char *prefix)
{
struct rev_info revs;
struct progress_info pi = { NULL, 0 };
struct string_list range_list = STRING_LIST_INIT_NODUP;
+ struct string_list ignore_rev_list = STRING_LIST_INIT_NODUP;
int output_option = 0, opt = 0;
int show_stats = 0;
const char *revs_file = NULL;
OPT_BIT('s', NULL, &output_option, N_("Suppress author name and timestamp (Default: off)"), OUTPUT_NO_AUTHOR),
OPT_BIT('e', "show-email", &output_option, N_("Show author email instead of name (Default: off)"), OUTPUT_SHOW_EMAIL),
OPT_BIT('w', NULL, &xdl_opts, N_("Ignore whitespace differences"), XDF_IGNORE_WHITESPACE),
+ OPT_STRING_LIST(0, "ignore-rev", &ignore_rev_list, N_("rev"), N_("Ignore <rev> when blaming")),
+ OPT_STRING_LIST(0, "ignore-revs-file", &ignore_revs_file_list, N_("file"), N_("Ignore revisions from <file>")),
OPT_BIT(0, "color-lines", &output_option, N_("color redundant metadata from previous line differently"), OUTPUT_COLOR_LINE),
OPT_BIT(0, "color-by-age", &output_option, N_("color lines by age"), OUTPUT_SHOW_AGE_WITH_COLOR),
sb.contents_from = contents_from;
sb.reverse = reverse;
sb.repo = the_repository;
+ build_ignorelist(&sb, &ignore_revs_file_list, &ignore_rev_list);
+ string_list_clear(&ignore_revs_file_list, 0);
+ string_list_clear(&ignore_rev_list, 0);
setup_scoreboard(&sb, path, &o);
lno = sb.num_lines;
if (blame_copy_score)
sb.copy_score = blame_copy_score;
- sb.debug = DEBUG;
+ sb.debug = DEBUG_BLAME;
sb.on_sanity_fail = &sanity_check_on_fail;
sb.show_root = show_root;
GIT_COLOR_NORMAL, /* LOCAL */
GIT_COLOR_GREEN, /* CURRENT */
GIT_COLOR_BLUE, /* UPSTREAM */
+ GIT_COLOR_CYAN, /* WORKTREE */
};
enum color_branch {
BRANCH_COLOR_RESET = 0,
BRANCH_COLOR_REMOTE = 2,
BRANCH_COLOR_LOCAL = 3,
BRANCH_COLOR_CURRENT = 4,
- BRANCH_COLOR_UPSTREAM = 5
+ BRANCH_COLOR_UPSTREAM = 5,
+ BRANCH_COLOR_WORKTREE = 6
};
static const char *color_branch_slots[] = {
[BRANCH_COLOR_LOCAL] = "local",
[BRANCH_COLOR_CURRENT] = "current",
[BRANCH_COLOR_UPSTREAM] = "upstream",
+ [BRANCH_COLOR_WORKTREE] = "worktree",
};
static struct string_list output = STRING_LIST_INIT_DUP;
struct strbuf local = STRBUF_INIT;
struct strbuf remote = STRBUF_INIT;
- strbuf_addf(&local, "%%(if)%%(HEAD)%%(then)* %s%%(else) %s%%(end)",
- branch_get_color(BRANCH_COLOR_CURRENT),
- branch_get_color(BRANCH_COLOR_LOCAL));
+ strbuf_addf(&local, "%%(if)%%(HEAD)%%(then)* %s%%(else)%%(if)%%(worktreepath)%%(then)+ %s%%(else) %s%%(end)%%(end)",
+ branch_get_color(BRANCH_COLOR_CURRENT),
+ branch_get_color(BRANCH_COLOR_WORKTREE),
+ branch_get_color(BRANCH_COLOR_LOCAL));
strbuf_addf(&remote, " %s",
branch_get_color(BRANCH_COLOR_REMOTE));
strbuf_addf(&local, " %s ", obname.buf);
if (filter->verbose > 1)
+ {
+ strbuf_addf(&local, "%%(if:notequals=*)%%(HEAD)%%(then)%%(if)%%(worktreepath)%%(then)(%s%%(worktreepath)%s) %%(end)%%(end)",
+ branch_get_color(BRANCH_COLOR_WORKTREE), branch_get_color(BRANCH_COLOR_RESET));
strbuf_addf(&local, "%%(if)%%(upstream)%%(then)[%s%%(upstream:short)%s%%(if)%%(upstream:track)"
"%%(then): %%(upstream:track,nobracket)%%(end)] %%(end)%%(contents:subject)",
branch_get_color(BRANCH_COLOR_UPSTREAM), branch_get_color(BRANCH_COLOR_RESET));
+ }
else
strbuf_addf(&local, "%%(if)%%(upstream:track)%%(then)%%(upstream:track) %%(end)%%(contents:subject)");
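For example, with these format strings a branch that is checked out in a linked worktree is listed with a `+` marker in the new worktree color (mirroring the `*` used for the current branch), and when the listing is requested with `--verbose --verbose` its worktree path is shown in parentheses ahead of the tracking information.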
* fall-back to the usual case.
*/
}
- buf = read_object_with_reference(&oid, exp_type, &size, NULL);
+ buf = read_object_with_reference(the_repository,
+ &oid, exp_type, &size, NULL);
break;
default:
#define USE_THE_INDEX_COMPATIBILITY_MACROS
#include "builtin.h"
-#include "config.h"
+#include "advice.h"
+#include "blob.h"
+#include "branch.h"
+#include "cache-tree.h"
#include "checkout.h"
+#include "commit.h"
+#include "config.h"
+#include "diff.h"
+#include "dir.h"
+#include "ll-merge.h"
#include "lockfile.h"
+#include "merge-recursive.h"
+#include "object-store.h"
#include "parse-options.h"
#include "refs.h"
-#include "object-store.h"
-#include "commit.h"
+#include "remote.h"
+#include "resolve-undo.h"
+#include "revision.h"
+#include "run-command.h"
+#include "submodule.h"
+#include "submodule-config.h"
#include "tree.h"
#include "tree-walk.h"
-#include "cache-tree.h"
#include "unpack-trees.h"
-#include "dir.h"
-#include "run-command.h"
-#include "merge-recursive.h"
-#include "branch.h"
-#include "diff.h"
-#include "revision.h"
-#include "remote.h"
-#include "blob.h"
+#include "wt-status.h"
#include "xdiff-interface.h"
-#include "ll-merge.h"
-#include "resolve-undo.h"
-#include "submodule-config.h"
-#include "submodule.h"
-#include "advice.h"
-
-static int checkout_optimize_new_branch;
static const char * const checkout_usage[] = {
N_("git checkout [<options>] <branch>"),
NULL,
};
+static const char * const switch_branch_usage[] = {
+ N_("git switch [<options>] [<branch>]"),
+ NULL,
+};
+
+static const char * const restore_usage[] = {
+ N_("git restore [<options>] [--source=<branch>] <file>..."),
+ NULL,
+};
+
struct checkout_opts {
int patch_mode;
int quiet;
int merge;
int force;
int force_detach;
+ int implicit_detach;
int writeout_stage;
int overwrite_ignore;
int ignore_skipworktree;
int show_progress;
int count_checkout_paths;
int overlay_mode;
- /*
- * If new checkout options are added, skip_merge_working_tree
- * should be updated accordingly.
- */
+ int dwim_new_local_branch;
+ int discard_changes;
+ int accept_ref;
+ int accept_pathspec;
+ int switch_branch_doing_nothing_is_ok;
+ int only_merge_on_switching_branches;
+ int can_switch_when_in_progress;
+ int orphan_from_empty_tree;
+ int empty_pathspec_ok;
+ int checkout_index;
+ int checkout_worktree;
+ const char *ignore_unmerged_opt;
+ int ignore_unmerged;
const char *new_branch;
const char *new_branch_force;
int new_branch_log;
enum branch_track track;
struct diff_options diff_options;
+ char *conflict_style;
int branch_exists;
const char *prefix;
struct pathspec pathspec;
+ const char *from_treeish;
struct tree *source_tree;
};
}
}
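+/*
+ * Check out every index entry marked CE_MATCHED to the working tree,
+ * using --ours/--theirs or -m for unmerged entries, and report how
+ * many paths were updated when requested.
+ */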
+static int checkout_worktree(const struct checkout_opts *opts)
+{
+ struct checkout state = CHECKOUT_INIT;
+ int nr_checkouts = 0, nr_unmerged = 0;
+ int errs = 0;
+ int pos;
+
+ state.force = 1;
+ state.refresh_cache = 1;
+ state.istate = &the_index;
+
+ enable_delayed_checkout(&state);
+ for (pos = 0; pos < active_nr; pos++) {
+ struct cache_entry *ce = active_cache[pos];
+ if (ce->ce_flags & CE_MATCHED) {
+ if (!ce_stage(ce)) {
+ errs |= checkout_entry(ce, &state,
+ NULL, &nr_checkouts);
+ continue;
+ }
+ if (opts->writeout_stage)
+ errs |= checkout_stage(opts->writeout_stage,
+ ce, pos,
+ &state,
+ &nr_checkouts, opts->overlay_mode);
+ else if (opts->merge)
+ errs |= checkout_merged(pos, &state,
+ &nr_unmerged);
+ pos = skip_same_name(ce, pos) - 1;
+ }
+ }
+ remove_marked_cache_entries(&the_index, 1);
+ remove_scheduled_dirs();
+ errs |= finish_delayed_checkout(&state, &nr_checkouts);
+
+ if (opts->count_checkout_paths) {
+ if (nr_unmerged)
+ fprintf_ln(stderr, Q_("Recreated %d merge conflict",
+ "Recreated %d merge conflicts",
+ nr_unmerged),
+ nr_unmerged);
+ if (opts->source_tree)
+ fprintf_ln(stderr, Q_("Updated %d path from %s",
+ "Updated %d paths from %s",
+ nr_checkouts),
+ nr_checkouts,
+ find_unique_abbrev(&opts->source_tree->object.oid,
+ DEFAULT_ABBREV));
+ else if (!nr_unmerged || nr_checkouts)
+ fprintf_ln(stderr, Q_("Updated %d path from the index",
+ "Updated %d paths from the index",
+ nr_checkouts),
+ nr_checkouts);
+ }
+
+ return errs;
+}
+
static int checkout_paths(const struct checkout_opts *opts,
const char *revision)
{
int pos;
- struct checkout state = CHECKOUT_INIT;
static char *ps_matched;
struct object_id rev;
struct commit *head;
int errs = 0;
struct lock_file lock_file = LOCK_INIT;
- int nr_checkouts = 0, nr_unmerged = 0;
+ int checkout_index;
trace2_cmd_mode(opts->patch_mode ? "patch" : "path");
if (opts->new_branch_log)
die(_("'%s' cannot be used with updating paths"), "-l");
- if (opts->force && opts->patch_mode)
- die(_("'%s' cannot be used with updating paths"), "-f");
+ if (opts->ignore_unmerged && opts->patch_mode)
+ die(_("'%s' cannot be used with updating paths"),
+ opts->ignore_unmerged_opt);
if (opts->force_detach)
die(_("'%s' cannot be used with updating paths"), "--detach");
if (opts->merge && opts->patch_mode)
die(_("'%s' cannot be used with %s"), "--merge", "--patch");
- if (opts->force && opts->merge)
- die(_("'%s' cannot be used with %s"), "-f", "-m");
+ if (opts->ignore_unmerged && opts->merge)
+ die(_("'%s' cannot be used with %s"),
+ opts->ignore_unmerged_opt, "-m");
if (opts->new_branch)
die(_("Cannot update paths and switch to branch '%s' at the same time."),
opts->new_branch);
- if (opts->patch_mode)
- return run_add_interactive(revision, "--patch=checkout",
- &opts->pathspec);
+ if (!opts->checkout_worktree && !opts->checkout_index)
+ die(_("neither '%s' or '%s' is specified"),
+ "--staged", "--worktree");
+
+ if (!opts->checkout_worktree && !opts->from_treeish)
+ die(_("'%s' must be used when '%s' is not specified"),
+ "--worktree", "--source");
+
+ if (opts->checkout_index && !opts->checkout_worktree &&
+ opts->writeout_stage)
+ die(_("'%s' or '%s' cannot be used with %s"),
+ "--ours", "--theirs", "--staged");
+
+ if (opts->checkout_index && !opts->checkout_worktree &&
+ opts->merge)
+ die(_("'%s' or '%s' cannot be used with %s"),
+ "--merge", "--conflict", "--staged");
+
+ if (opts->patch_mode) {
+ const char *patch_mode;
+
+ if (opts->checkout_index && opts->checkout_worktree)
+ patch_mode = "--patch=checkout";
+ else if (opts->checkout_index && !opts->checkout_worktree)
+ patch_mode = "--patch=reset";
+ else if (!opts->checkout_index && opts->checkout_worktree)
+ patch_mode = "--patch=worktree";
+ else
+ BUG("either flag must have been set, worktree=%d, index=%d",
+ opts->checkout_worktree, opts->checkout_index);
+ return run_add_interactive(revision, patch_mode, &opts->pathspec);
+ }
repo_hold_locked_index(the_repository, &lock_file, LOCK_DIE_ON_ERROR);
if (read_cache_preload(&opts->pathspec) < 0)
if (ce->ce_flags & CE_MATCHED) {
if (!ce_stage(ce))
continue;
- if (opts->force) {
- warning(_("path '%s' is unmerged"), ce->name);
+ if (opts->ignore_unmerged) {
+ if (!opts->quiet)
+ warning(_("path '%s' is unmerged"), ce->name);
} else if (opts->writeout_stage) {
errs |= check_stage(opts->writeout_stage, ce, pos, opts->overlay_mode);
} else if (opts->merge) {
return 1;
/* Now we are committed to check them out */
- state.force = 1;
- state.refresh_cache = 1;
- state.istate = &the_index;
+ if (opts->checkout_worktree)
+ errs |= checkout_worktree(opts);
- enable_delayed_checkout(&state);
- for (pos = 0; pos < active_nr; pos++) {
- struct cache_entry *ce = active_cache[pos];
- if (ce->ce_flags & CE_MATCHED) {
- if (!ce_stage(ce)) {
- errs |= checkout_entry(ce, &state,
- NULL, &nr_checkouts);
- continue;
- }
- if (opts->writeout_stage)
- errs |= checkout_stage(opts->writeout_stage,
- ce, pos,
- &state,
- &nr_checkouts, opts->overlay_mode);
- else if (opts->merge)
- errs |= checkout_merged(pos, &state,
- &nr_unmerged);
- pos = skip_same_name(ce, pos) - 1;
- }
- }
- remove_marked_cache_entries(&the_index, 1);
- remove_scheduled_dirs();
- errs |= finish_delayed_checkout(&state, &nr_checkouts);
+ /*
+ * Allow updating the index when checking out from the index.
+ * This is to save new stat info.
+ */
+ if (opts->checkout_worktree && !opts->checkout_index && !opts->source_tree)
+ checkout_index = 1;
+ else
+ checkout_index = opts->checkout_index;
- if (opts->count_checkout_paths) {
- if (nr_unmerged)
- fprintf_ln(stderr, Q_("Recreated %d merge conflict",
- "Recreated %d merge conflicts",
- nr_unmerged),
- nr_unmerged);
- if (opts->source_tree)
- fprintf_ln(stderr, Q_("Updated %d path from %s",
- "Updated %d paths from %s",
- nr_checkouts),
- nr_checkouts,
- find_unique_abbrev(&opts->source_tree->object.oid,
- DEFAULT_ABBREV));
- else if (!nr_unmerged || nr_checkouts)
- fprintf_ln(stderr, Q_("Updated %d path from the index",
- "Updated %d paths from the index",
- nr_checkouts),
- nr_checkouts);
+ if (checkout_index) {
+ if (write_locked_index(&the_index, &lock_file, COMMIT_LOCK))
+ die(_("unable to write new index file"));
+ } else {
+ /*
+ * NEEDSWORK: if --worktree is not specified, we
+ * should save stat info of checked out files in the
+ * index to avoid the next (potentially costly)
+ * refresh. But it's a bit trickier to do...
+ */
+ rollback_lock_file(&lock_file);
}
- if (write_locked_index(&the_index, &lock_file, COMMIT_LOCK))
- die(_("unable to write new index file"));
-
read_ref_full("HEAD", 0, &rev, NULL);
head = lookup_commit_reference_gently(the_repository, &rev, 1);
branch->path = strbuf_detach(&buf, NULL);
}
-/*
- * Skip merging the trees, updating the index and working directory if and
- * only if we are creating a new branch via "git checkout -b <new_branch>."
- */
-static int skip_merge_working_tree(const struct checkout_opts *opts,
- const struct branch_info *old_branch_info,
- const struct branch_info *new_branch_info)
-{
- /*
- * Do the merge if sparse checkout is on and the user has not opted in
- * to the optimized behavior
- */
- if (core_apply_sparse_checkout && !checkout_optimize_new_branch)
- return 0;
-
- /*
- * We must do the merge if we are actually moving to a new commit.
- */
- if (!old_branch_info->commit || !new_branch_info->commit ||
- !oideq(&old_branch_info->commit->object.oid,
- &new_branch_info->commit->object.oid))
- return 0;
-
- /*
- * opts->patch_mode cannot be used with switching branches so is
- * not tested here
- */
-
- /*
- * opts->quiet only impacts output so doesn't require a merge
- */
-
- /*
- * Honor the explicit request for a three-way merge or to throw away
- * local changes
- */
- if (opts->merge || opts->force)
- return 0;
-
- /*
- * --detach is documented as "updating the index and the files in the
- * working tree" but this optimization skips those steps so fall through
- * to the regular code path.
- */
- if (opts->force_detach)
- return 0;
-
- /*
- * opts->writeout_stage cannot be used with switching branches so is
- * not tested here
- */
-
- /*
- * Honor the explicit ignore requests
- */
- if (!opts->overwrite_ignore || opts->ignore_skipworktree ||
- opts->ignore_other_worktrees)
- return 0;
-
- /*
- * opts->show_progress only impacts output so doesn't require a merge
- */
-
- /*
- * opts->overlay_mode cannot be used with switching branches so is
- * not tested here
- */
-
- /*
- * If we aren't creating a new branch any changes or updates will
- * happen in the existing branch. Since that could only be updating
- * the index and working directory, we don't want to skip those steps
- * or we've defeated any purpose in running the command.
- */
- if (!opts->new_branch)
- return 0;
-
- /*
- * new_branch_force is defined to "create/reset and checkout a branch"
- * so needs to go through the merge to do the reset
- */
- if (opts->new_branch_force)
- return 0;
-
- /*
- * A new orphaned branch requires the index and the working tree to be
- * adjusted to <start_point>
- */
- if (opts->new_orphan_branch)
- return 0;
-
- /*
- * Remaining variables are not checkout options but used to track state
- */
-
- /*
- * Do the merge if this is the initial checkout. We cannot use
- * is_cache_unborn() here because the index hasn't been loaded yet
- * so cache_nr and timestamp.sec are always zero.
- */
- if (!file_exists(get_index_file()))
- return 0;
-
- return 1;
-}
-
static int merge_working_tree(const struct checkout_opts *opts,
struct branch_info *old_branch_info,
struct branch_info *new_branch_info,
{
int ret;
struct lock_file lock_file = LOCK_INIT;
+ struct tree *new_tree;
hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
if (read_cache_preload(NULL) < 0)
return error(_("index file corrupt"));
resolve_undo_clear();
- if (opts->force) {
- ret = reset_tree(get_commit_tree(new_branch_info->commit),
- opts, 1, writeout_error);
+ if (opts->new_orphan_branch && opts->orphan_from_empty_tree) {
+ if (new_branch_info->commit)
+ BUG("'switch --orphan' should never accept a commit as starting point");
+ new_tree = parse_tree_indirect(the_hash_algo->empty_tree);
+ } else
+ new_tree = get_commit_tree(new_branch_info->commit);
+ if (opts->discard_changes) {
+ ret = reset_tree(new_tree, opts, 1, writeout_error);
if (ret)
return ret;
} else {
&old_branch_info->commit->object.oid :
the_hash_algo->empty_tree);
init_tree_desc(&trees[0], tree->buffer, tree->size);
- tree = parse_tree_indirect(&new_branch_info->commit->object.oid);
+ parse_tree(new_tree);
+ tree = new_tree;
init_tree_desc(&trees[1], tree->buffer, tree->size);
ret = unpack_trees(2, trees, &topts);
o.verbosity = 0;
work = write_tree_from_memory(&o);
- ret = reset_tree(get_commit_tree(new_branch_info->commit),
+ ret = reset_tree(new_tree,
opts, 1,
writeout_error);
if (ret)
o.branch1 = new_branch_info->name;
o.branch2 = "local";
ret = merge_trees(&o,
- get_commit_tree(new_branch_info->commit),
+ new_tree,
work,
old_tree,
&result);
if (ret < 0)
exit(128);
- ret = reset_tree(get_commit_tree(new_branch_info->commit),
+ ret = reset_tree(new_tree,
opts, 0,
writeout_error);
strbuf_release(&o.obuf);
if (write_locked_index(&the_index, &lock_file, COMMIT_LOCK))
die(_("unable to write new index file"));
- if (!opts->force && !opts->quiet)
+ if (!opts->discard_changes && !opts->quiet && new_branch_info->commit)
show_local_changes(&new_branch_info->commit->object, &opts->diff_options);
return 0;
delete_reflog(old_branch_info->path);
}
}
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, !opts->quiet);
strbuf_release(&msg);
if (!opts->quiet &&
(new_branch_info->path || (!opts->force_detach && !strcmp(new_branch_info->name, "HEAD"))))
add_pending_object(&revs, object, oid_to_hex(&object->oid));
for_each_ref(add_pending_uninteresting_ref, &revs);
- add_pending_oid(&revs, "HEAD", &new_commit->object.oid, UNINTERESTING);
+ if (new_commit)
+ add_pending_oid(&revs, "HEAD",
+ &new_commit->object.oid,
+ UNINTERESTING);
if (prepare_revision_walk(&revs))
die(_("internal error in revision walk"));
void *path_to_free;
struct object_id rev;
int flag, writeout_error = 0;
+ int do_merge = 1;
trace2_cmd_mode("branch");
if (old_branch_info.path)
skip_prefix(old_branch_info.path, "refs/heads/", &old_branch_info.name);
+ if (opts->new_orphan_branch && opts->orphan_from_empty_tree) {
+ if (new_branch_info->name)
+ BUG("'switch --orphan' should never accept a commit as starting point");
+ new_branch_info->commit = NULL;
+ new_branch_info->name = "(empty)";
+ do_merge = 1;
+ }
+
if (!new_branch_info->name) {
new_branch_info->name = "HEAD";
new_branch_info->commit = old_branch_info.commit;
if (!new_branch_info->commit)
die(_("You are on a branch yet to be born"));
parse_commit_or_die(new_branch_info->commit);
+
+ if (opts->only_merge_on_switching_branches)
+ do_merge = 0;
}
- /* optimize the "checkout -b <new_branch> path */
- if (skip_merge_working_tree(opts, &old_branch_info, new_branch_info)) {
- if (!checkout_optimize_new_branch && !opts->quiet) {
- if (read_cache_preload(NULL) < 0)
- return error(_("index file corrupt"));
- show_local_changes(&new_branch_info->commit->object, &opts->diff_options);
- }
- } else {
+ if (do_merge) {
ret = merge_working_tree(opts, &old_branch_info, new_branch_info, &writeout_error);
if (ret) {
free(path_to_free);
static int git_checkout_config(const char *var, const char *value, void *cb)
{
- if (!strcmp(var, "checkout.optimizenewbranch")) {
- checkout_optimize_new_branch = git_config_bool(var, value);
- return 0;
- }
-
if (!strcmp(var, "diff.ignoresubmodules")) {
struct checkout_opts *opts = cb;
handle_ignore_submodules_arg(&opts->diff_options, value);
return git_xmerge_config(var, value, NULL);
}
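+/*
+ * Resolve <arg>/<rev> into new_branch_info and opts->source_tree:
+ * record the branch path if it names an existing branch, and point
+ * source_tree at either the commit's tree or the bare tree-ish.
+ */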
+static void setup_new_branch_info_and_source_tree(
+ struct branch_info *new_branch_info,
+ struct checkout_opts *opts,
+ struct object_id *rev,
+ const char *arg)
+{
+ struct tree **source_tree = &opts->source_tree;
+ struct object_id branch_rev;
+
+ new_branch_info->name = arg;
+ setup_branch_path(new_branch_info);
+
+ if (!check_refname_format(new_branch_info->path, 0) &&
+ !read_ref(new_branch_info->path, &branch_rev))
+ oidcpy(rev, &branch_rev);
+ else
+ new_branch_info->path = NULL; /* not an existing branch */
+
+ new_branch_info->commit = lookup_commit_reference_gently(the_repository, rev, 1);
+ if (!new_branch_info->commit) {
+ /* not a commit */
+ *source_tree = parse_tree_indirect(rev);
+ } else {
+ parse_commit_or_die(new_branch_info->commit);
+ *source_tree = get_commit_tree(new_branch_info->commit);
+ }
+}
+
static int parse_branchname_arg(int argc, const char **argv,
int dwim_new_local_branch_ok,
struct branch_info *new_branch_info,
struct object_id *rev,
int *dwim_remotes_matched)
{
- struct tree **source_tree = &opts->source_tree;
const char **new_branch = &opts->new_branch;
int argcount = 0;
- struct object_id branch_rev;
const char *arg;
int dash_dash_pos;
int has_dash_dash = 0;
if (!argc)
return 0;
+ if (!opts->accept_pathspec) {
+ if (argc > 1)
+ die(_("only one reference expected"));
+ has_dash_dash = 1; /* helps disambiguate */
+ }
+
arg = argv[0];
dash_dash_pos = -1;
for (i = 0; i < argc; i++) {
- if (!strcmp(argv[i], "--")) {
+ if (opts->accept_pathspec && !strcmp(argv[i], "--")) {
dash_dash_pos = i;
break;
}
recover_with_dwim = 0;
/*
- * Accept "git checkout foo" and "git checkout foo --"
- * as candidates for dwim.
+ * Accept "git checkout foo", "git checkout foo --"
+ * and "git switch foo" as candidates for dwim.
*/
if (!(argc == 1 && !has_dash_dash) &&
- !(argc == 2 && has_dash_dash))
+ !(argc == 2 && has_dash_dash) &&
+ opts->accept_pathspec)
recover_with_dwim = 0;
if (recover_with_dwim) {
argv++;
argc--;
- new_branch_info->name = arg;
- setup_branch_path(new_branch_info);
-
- if (!check_refname_format(new_branch_info->path, 0) &&
- !read_ref(new_branch_info->path, &branch_rev))
- oidcpy(rev, &branch_rev);
- else
- new_branch_info->path = NULL; /* not an existing branch */
-
- new_branch_info->commit = lookup_commit_reference_gently(the_repository, rev, 1);
- if (!new_branch_info->commit) {
- /* not a commit */
- *source_tree = parse_tree_indirect(rev);
- } else {
- parse_commit_or_die(new_branch_info->commit);
- *source_tree = get_commit_tree(new_branch_info->commit);
- }
+ setup_new_branch_info_and_source_tree(new_branch_info, opts, rev, arg);
- if (!*source_tree) /* case (1): want a tree */
+ if (!opts->source_tree) /* case (1): want a tree */
die(_("reference is not a tree: %s"), arg);
+
if (!has_dash_dash) { /* case (3).(d) -> (1) */
/*
* Do not complain the most common case
*/
if (argc)
verify_non_filename(opts->prefix, arg);
- } else {
+ } else if (opts->accept_pathspec) {
argcount++;
argv++;
argc--;
return status;
}
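+/*
+ * Explain why the given name cannot be switched to as a branch
+ * (it is a tag, a remote-tracking branch, or a raw commit) and die.
+ */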
+static void die_expecting_a_branch(const struct branch_info *branch_info)
+{
+ struct object_id oid;
+ char *to_free;
+
+ if (dwim_ref(branch_info->name, strlen(branch_info->name), &oid, &to_free) == 1) {
+ const char *ref = to_free;
+
+ if (skip_prefix(ref, "refs/tags/", &ref))
+ die(_("a branch is expected, got tag '%s'"), ref);
+ if (skip_prefix(ref, "refs/remotes/", &ref))
+ die(_("a branch is expected, got remote branch '%s'"), ref);
+ die(_("a branch is expected, got '%s'"), ref);
+ }
+ if (branch_info->commit)
+ die(_("a branch is expected, got commit '%s'"), branch_info->name);
+ /*
+ * This case should never happen because we already die() on
+ * non-commit, but just in case.
+ */
+ die(_("a branch is expected, got '%s'"), branch_info->name);
+}
+
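+/*
+ * Refuse to switch branches while a merge, am, rebase, cherry-pick
+ * or revert is in progress; only warn when a bisect is in progress.
+ */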
+static void die_if_some_operation_in_progress(void)
+{
+ struct wt_status_state state;
+
+ memset(&state, 0, sizeof(state));
+ wt_status_get_state(the_repository, &state, 0);
+
+ if (state.merge_in_progress)
+ die(_("cannot switch branch while merging\n"
+ "Consider \"git merge --quit\" "
+ "or \"git worktree add\"."));
+ if (state.am_in_progress)
+ die(_("cannot switch branch in the middle of an am session\n"
+ "Consider \"git am --quit\" "
+ "or \"git worktree add\"."));
+ if (state.rebase_interactive_in_progress || state.rebase_in_progress)
+ die(_("cannot switch branch while rebasing\n"
+ "Consider \"git rebase --quit\" "
+ "or \"git worktree add\"."));
+ if (state.cherry_pick_in_progress)
+ die(_("cannot switch branch while cherry-picking\n"
+ "Consider \"git cherry-pick --quit\" "
+ "or \"git worktree add\"."));
+ if (state.revert_in_progress)
+ die(_("cannot switch branch while reverting\n"
+ "Consider \"git revert --quit\" "
+ "or \"git worktree add\"."));
+ if (state.bisect_in_progress)
+ warning(_("you are switching branch while bisecting"));
+}
+
static int checkout_branch(struct checkout_opts *opts,
struct branch_info *new_branch_info)
{
die(_("'%s' cannot be used with switching branches"),
"--patch");
- if (!opts->overlay_mode)
+ if (opts->overlay_mode != -1)
die(_("'%s' cannot be used with switching branches"),
- "--no-overlay");
+ "--[no]-overlay");
if (opts->writeout_stage)
die(_("'%s' cannot be used with switching branches"),
if (opts->force && opts->merge)
die(_("'%s' cannot be used with '%s'"), "-f", "-m");
+ if (opts->discard_changes && opts->merge)
+ die(_("'%s' cannot be used with '%s'"), "--discard-changes", "--merge");
+
if (opts->force_detach && opts->new_branch)
die(_("'%s' cannot be used with '%s'"),
"--detach", "-b/-B/--orphan");
if (opts->new_orphan_branch) {
if (opts->track != BRANCH_TRACK_UNSPECIFIED)
die(_("'%s' cannot be used with '%s'"), "--orphan", "-t");
+ if (opts->orphan_from_empty_tree && new_branch_info->name)
+ die(_("'%s' cannot take <start-point>"), "--orphan");
} else if (opts->force_detach) {
if (opts->track != BRANCH_TRACK_UNSPECIFIED)
die(_("'%s' cannot be used with '%s'"), "--detach", "-t");
die(_("Cannot switch branch to a non-commit '%s'"),
new_branch_info->name);
+ if (!opts->switch_branch_doing_nothing_is_ok &&
+ !new_branch_info->name &&
+ !opts->new_branch &&
+ !opts->force_detach)
+ die(_("missing branch or commit argument"));
+
+ if (!opts->implicit_detach &&
+ !opts->force_detach &&
+ !opts->new_branch &&
+ !opts->new_branch_force &&
+ new_branch_info->name &&
+ !new_branch_info->path)
+ die_expecting_a_branch(new_branch_info);
+
+ if (!opts->can_switch_when_in_progress)
+ die_if_some_operation_in_progress();
+
if (new_branch_info->path && !opts->force_detach && !opts->new_branch &&
!opts->ignore_other_worktrees) {
int flag;
return switch_branches(opts, new_branch_info);
}
-int cmd_checkout(int argc, const char **argv, const char *prefix)
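+/*
+ * Options shared by checkout, switch and restore. Returns a newly
+ * allocated array concatenating prevopts with these entries and
+ * frees prevopts.
+ */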
+static struct option *add_common_options(struct checkout_opts *opts,
+ struct option *prevopts)
{
- struct checkout_opts opts;
- struct branch_info new_branch_info;
- char *conflict_style = NULL;
- int dwim_new_local_branch, no_dwim_new_local_branch = 0;
- int dwim_remotes_matched = 0;
struct option options[] = {
- OPT__QUIET(&opts.quiet, N_("suppress progress reporting")),
- OPT_STRING('b', NULL, &opts.new_branch, N_("branch"),
- N_("create and checkout a new branch")),
- OPT_STRING('B', NULL, &opts.new_branch_force, N_("branch"),
- N_("create/reset and checkout a branch")),
- OPT_BOOL('l', NULL, &opts.new_branch_log, N_("create reflog for new branch")),
- OPT_BOOL(0, "detach", &opts.force_detach, N_("detach HEAD at named commit")),
- OPT_SET_INT('t', "track", &opts.track, N_("set upstream info for new branch"),
+ OPT__QUIET(&opts->quiet, N_("suppress progress reporting")),
+ { OPTION_CALLBACK, 0, "recurse-submodules", NULL,
+ "checkout", "control recursive updating of submodules",
+ PARSE_OPT_OPTARG, option_parse_recurse_submodules_worktree_updater },
+ OPT_BOOL(0, "progress", &opts->show_progress, N_("force progress reporting")),
+ OPT_BOOL('m', "merge", &opts->merge, N_("perform a 3-way merge with the new branch")),
+ OPT_STRING(0, "conflict", &opts->conflict_style, N_("style"),
+ N_("conflict style (merge or diff3)")),
+ OPT_END()
+ };
+ struct option *newopts = parse_options_concat(prevopts, options);
+ free(prevopts);
+ return newopts;
+}
+
+static struct option *add_common_switch_branch_options(
+ struct checkout_opts *opts, struct option *prevopts)
+{
+ struct option options[] = {
+ OPT_BOOL('d', "detach", &opts->force_detach, N_("detach HEAD at named commit")),
+ OPT_SET_INT('t', "track", &opts->track, N_("set upstream info for new branch"),
BRANCH_TRACK_EXPLICIT),
- OPT_STRING(0, "orphan", &opts.new_orphan_branch, N_("new-branch"), N_("new unparented branch")),
- OPT_SET_INT_F('2', "ours", &opts.writeout_stage,
+ OPT__FORCE(&opts->force, N_("force checkout (throw away local modifications)"),
+ PARSE_OPT_NOCOMPLETE),
+ OPT_STRING(0, "orphan", &opts->new_orphan_branch, N_("new-branch"), N_("new unparented branch")),
+ OPT_BOOL_F(0, "overwrite-ignore", &opts->overwrite_ignore,
+ N_("update ignored files (default)"),
+ PARSE_OPT_NOCOMPLETE),
+ OPT_BOOL(0, "ignore-other-worktrees", &opts->ignore_other_worktrees,
+ N_("do not check if another worktree is holding the given ref")),
+ OPT_END()
+ };
+ struct option *newopts = parse_options_concat(prevopts, options);
+ free(prevopts);
+ return newopts;
+}
+
+static struct option *add_checkout_path_options(struct checkout_opts *opts,
+ struct option *prevopts)
+{
+ struct option options[] = {
+ OPT_SET_INT_F('2', "ours", &opts->writeout_stage,
N_("checkout our version for unmerged files"),
2, PARSE_OPT_NONEG),
- OPT_SET_INT_F('3', "theirs", &opts.writeout_stage,
+ OPT_SET_INT_F('3', "theirs", &opts->writeout_stage,
N_("checkout their version for unmerged files"),
3, PARSE_OPT_NONEG),
- OPT__FORCE(&opts.force, N_("force checkout (throw away local modifications)"),
- PARSE_OPT_NOCOMPLETE),
- OPT_BOOL('m', "merge", &opts.merge, N_("perform a 3-way merge with the new branch")),
- OPT_BOOL_F(0, "overwrite-ignore", &opts.overwrite_ignore,
- N_("update ignored files (default)"),
- PARSE_OPT_NOCOMPLETE),
- OPT_STRING(0, "conflict", &conflict_style, N_("style"),
- N_("conflict style (merge or diff3)")),
- OPT_BOOL('p', "patch", &opts.patch_mode, N_("select hunks interactively")),
- OPT_BOOL(0, "ignore-skip-worktree-bits", &opts.ignore_skipworktree,
+ OPT_BOOL('p', "patch", &opts->patch_mode, N_("select hunks interactively")),
+ OPT_BOOL(0, "ignore-skip-worktree-bits", &opts->ignore_skipworktree,
N_("do not limit pathspecs to sparse entries only")),
- OPT_BOOL(0, "no-guess", &no_dwim_new_local_branch,
- N_("do not second guess 'git checkout <no-such-branch>'")),
- OPT_BOOL(0, "ignore-other-worktrees", &opts.ignore_other_worktrees,
- N_("do not check if another worktree is holding the given ref")),
- { OPTION_CALLBACK, 0, "recurse-submodules", NULL,
- "checkout", "control recursive updating of submodules",
- PARSE_OPT_OPTARG, option_parse_recurse_submodules_worktree_updater },
- OPT_BOOL(0, "progress", &opts.show_progress, N_("force progress reporting")),
- OPT_BOOL(0, "overlay", &opts.overlay_mode, N_("use overlay mode (default)")),
- OPT_END(),
+ OPT_END()
};
+ struct option *newopts = parse_options_concat(prevopts, options);
+ free(prevopts);
+ return newopts;
+}
+
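+/*
+ * Common driver behind cmd_checkout(), cmd_switch() and cmd_restore();
+ * the behavioral differences between the commands are carried in *opts.
+ */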
+static int checkout_main(int argc, const char **argv, const char *prefix,
+ struct checkout_opts *opts, struct option *options,
+ const char * const usagestr[])
+{
+ struct branch_info new_branch_info;
+ int dwim_remotes_matched = 0;
+ int parseopt_flags = 0;
- memset(&opts, 0, sizeof(opts));
memset(&new_branch_info, 0, sizeof(new_branch_info));
- opts.overwrite_ignore = 1;
- opts.prefix = prefix;
- opts.show_progress = -1;
- opts.overlay_mode = -1;
+ opts->overwrite_ignore = 1;
+ opts->prefix = prefix;
+ opts->show_progress = -1;
+
+ git_config(git_checkout_config, opts);
- git_config(git_checkout_config, &opts);
+ opts->track = BRANCH_TRACK_UNSPECIFIED;
- opts.track = BRANCH_TRACK_UNSPECIFIED;
+ if (!opts->accept_pathspec && !opts->accept_ref)
+ BUG("make up your mind, you need to take _something_");
+ if (opts->accept_pathspec && opts->accept_ref)
+ parseopt_flags = PARSE_OPT_KEEP_DASHDASH;
- argc = parse_options(argc, argv, prefix, options, checkout_usage,
- PARSE_OPT_KEEP_DASHDASH);
+ argc = parse_options(argc, argv, prefix, options,
+ usagestr, parseopt_flags);
- dwim_new_local_branch = !no_dwim_new_local_branch;
- if (opts.show_progress < 0) {
- if (opts.quiet)
- opts.show_progress = 0;
+ if (opts->show_progress < 0) {
+ if (opts->quiet)
+ opts->show_progress = 0;
else
- opts.show_progress = isatty(2);
+ opts->show_progress = isatty(2);
}
- if (conflict_style) {
- opts.merge = 1; /* implied */
- git_xmerge_config("merge.conflictstyle", conflict_style, NULL);
+ if (opts->conflict_style) {
+ opts->merge = 1; /* implied */
+ git_xmerge_config("merge.conflictstyle", opts->conflict_style, NULL);
+ }
+ if (opts->force) {
+ opts->discard_changes = 1;
+ opts->ignore_unmerged_opt = "--force";
+ opts->ignore_unmerged = 1;
}
- if ((!!opts.new_branch + !!opts.new_branch_force + !!opts.new_orphan_branch) > 1)
+ if ((!!opts->new_branch + !!opts->new_branch_force + !!opts->new_orphan_branch) > 1)
die(_("-b, -B and --orphan are mutually exclusive"));
- if (opts.overlay_mode == 1 && opts.patch_mode)
+ if (opts->overlay_mode == 1 && opts->patch_mode)
die(_("-p and --overlay are mutually exclusive"));
+ if (opts->checkout_index >= 0 || opts->checkout_worktree >= 0) {
+ if (opts->checkout_index < 0)
+ opts->checkout_index = 0;
+ if (opts->checkout_worktree < 0)
+ opts->checkout_worktree = 0;
+ } else {
+ if (opts->checkout_index < 0)
+ opts->checkout_index = -opts->checkout_index - 1;
+ if (opts->checkout_worktree < 0)
+ opts->checkout_worktree = -opts->checkout_worktree - 1;
+ }
+ if (opts->checkout_index < 0 || opts->checkout_worktree < 0)
+ BUG("these flags should be non-negative by now");
+ /*
+ * convenient shortcut: "git restore --staged" equals
+ * "git restore --staged --source HEAD"
+ */
+ if (!opts->from_treeish && opts->checkout_index && !opts->checkout_worktree)
+ opts->from_treeish = "HEAD";
+
/*
* From here on, new_branch will contain the branch to be checked out,
* and new_branch_force and new_orphan_branch will tell us which one of
* -b/-B/--orphan is being used.
*/
- if (opts.new_branch_force)
- opts.new_branch = opts.new_branch_force;
+ if (opts->new_branch_force)
+ opts->new_branch = opts->new_branch_force;
- if (opts.new_orphan_branch)
- opts.new_branch = opts.new_orphan_branch;
+ if (opts->new_orphan_branch)
+ opts->new_branch = opts->new_orphan_branch;
/* --track without -b/-B/--orphan should DWIM */
- if (opts.track != BRANCH_TRACK_UNSPECIFIED && !opts.new_branch) {
+ if (opts->track != BRANCH_TRACK_UNSPECIFIED && !opts->new_branch) {
const char *argv0 = argv[0];
if (!argc || !strcmp(argv0, "--"))
die(_("--track needs a branch name"));
argv0 = strchr(argv0, '/');
if (!argv0 || !argv0[1])
die(_("missing branch name; try -b"));
- opts.new_branch = argv0 + 1;
+ opts->new_branch = argv0 + 1;
}
/*
* including "last branch" syntax and DWIM-ery for names of
* remote branches, erroring out for invalid or ambiguous cases.
*/
- if (argc) {
+ if (argc && opts->accept_ref) {
struct object_id rev;
int dwim_ok =
- !opts.patch_mode &&
- dwim_new_local_branch &&
- opts.track == BRANCH_TRACK_UNSPECIFIED &&
- !opts.new_branch;
+ !opts->patch_mode &&
+ opts->dwim_new_local_branch &&
+ opts->track == BRANCH_TRACK_UNSPECIFIED &&
+ !opts->new_branch;
int n = parse_branchname_arg(argc, argv, dwim_ok,
- &new_branch_info, &opts, &rev,
+ &new_branch_info, opts, &rev,
&dwim_remotes_matched);
argv += n;
argc -= n;
+ } else if (!opts->accept_ref && opts->from_treeish) {
+ struct object_id rev;
+
+ if (get_oid_mb(opts->from_treeish, &rev))
+ die(_("could not resolve %s"), opts->from_treeish);
+
+ setup_new_branch_info_and_source_tree(&new_branch_info,
+ opts, &rev,
+ opts->from_treeish);
+
+ if (!opts->source_tree)
+ die(_("reference is not a tree: %s"), opts->from_treeish);
}
+ if (opts->accept_pathspec && !opts->empty_pathspec_ok && !argc &&
+ !opts->patch_mode) /* patch mode is special */
+ die(_("you must specify path(s) to restore"));
+
if (argc) {
- parse_pathspec(&opts.pathspec, 0,
- opts.patch_mode ? PATHSPEC_PREFIX_ORIGIN : 0,
+ parse_pathspec(&opts->pathspec, 0,
+ opts->patch_mode ? PATHSPEC_PREFIX_ORIGIN : 0,
prefix, argv);
- if (!opts.pathspec.nr)
+ if (!opts->pathspec.nr)
die(_("invalid path specification"));
/*
* Try to give more helpful suggestion.
* new_branch && argc > 1 will be caught later.
*/
- if (opts.new_branch && argc == 1)
+ if (opts->new_branch && argc == 1)
die(_("'%s' is not a commit and a branch '%s' cannot be created from it"),
- argv[0], opts.new_branch);
+ argv[0], opts->new_branch);
- if (opts.force_detach)
+ if (opts->force_detach)
die(_("git checkout: --detach does not take a path argument '%s'"),
argv[0]);
- if (1 < !!opts.writeout_stage + !!opts.force + !!opts.merge)
+ if (1 < !!opts->writeout_stage + !!opts->force + !!opts->merge)
die(_("git checkout: --ours/--theirs, --force and --merge are incompatible when\n"
"checking out of the index."));
}
- if (opts.new_branch) {
+ if (opts->new_branch) {
struct strbuf buf = STRBUF_INIT;
- if (opts.new_branch_force)
- opts.branch_exists = validate_branchname(opts.new_branch, &buf);
+ if (opts->new_branch_force)
+ opts->branch_exists = validate_branchname(opts->new_branch, &buf);
else
- opts.branch_exists =
- validate_new_branchname(opts.new_branch, &buf, 0);
+ opts->branch_exists =
+ validate_new_branchname(opts->new_branch, &buf, 0);
strbuf_release(&buf);
}
UNLEAK(opts);
- if (opts.patch_mode || opts.pathspec.nr) {
- int ret = checkout_paths(&opts, new_branch_info.name);
+ if (opts->patch_mode || opts->pathspec.nr) {
+ int ret = checkout_paths(opts, new_branch_info.name);
if (ret && dwim_remotes_matched > 1 &&
advice_checkout_ambiguous_remote_branch_name)
advise(_("'%s' matched more than one remote tracking branch.\n"
dwim_remotes_matched);
return ret;
} else {
- return checkout_branch(&opts, &new_branch_info);
+ return checkout_branch(opts, &new_branch_info);
}
}
+
+int cmd_checkout(int argc, const char **argv, const char *prefix)
+{
+ struct checkout_opts opts;
+ struct option *options;
+ struct option checkout_options[] = {
+ OPT_STRING('b', NULL, &opts.new_branch, N_("branch"),
+ N_("create and checkout a new branch")),
+ OPT_STRING('B', NULL, &opts.new_branch_force, N_("branch"),
+ N_("create/reset and checkout a branch")),
+ OPT_BOOL('l', NULL, &opts.new_branch_log, N_("create reflog for new branch")),
+ OPT_BOOL(0, "guess", &opts.dwim_new_local_branch,
+ N_("second guess 'git checkout <no-such-branch>' (default)")),
+ OPT_BOOL(0, "overlay", &opts.overlay_mode, N_("use overlay mode (default)")),
+ OPT_END()
+ };
+ int ret;
+
+ memset(&opts, 0, sizeof(opts));
+ opts.dwim_new_local_branch = 1;
+ opts.switch_branch_doing_nothing_is_ok = 1;
+ opts.only_merge_on_switching_branches = 0;
+ opts.accept_ref = 1;
+ opts.accept_pathspec = 1;
+ opts.implicit_detach = 1;
+ opts.can_switch_when_in_progress = 1;
+ opts.orphan_from_empty_tree = 0;
+ opts.empty_pathspec_ok = 1;
+ opts.overlay_mode = -1;
+ opts.checkout_index = -2; /* default on */
+ opts.checkout_worktree = -2; /* default on */
+
+ options = parse_options_dup(checkout_options);
+ options = add_common_options(&opts, options);
+ options = add_common_switch_branch_options(&opts, options);
+ options = add_checkout_path_options(&opts, options);
+
+ ret = checkout_main(argc, argv, prefix, &opts,
+ options, checkout_usage);
+ FREE_AND_NULL(options);
+ return ret;
+}
+
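+/*
+ * "git switch" front-end: accepts a ref but no pathspec, never
+ * detaches HEAD implicitly, and refuses to switch while another
+ * operation is in progress.
+ */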
+int cmd_switch(int argc, const char **argv, const char *prefix)
+{
+ struct checkout_opts opts;
+ struct option *options = NULL;
+ struct option switch_options[] = {
+ OPT_STRING('c', "create", &opts.new_branch, N_("branch"),
+ N_("create and switch to a new branch")),
+ OPT_STRING('C', "force-create", &opts.new_branch_force, N_("branch"),
+ N_("create/reset and switch to a branch")),
+ OPT_BOOL(0, "guess", &opts.dwim_new_local_branch,
+ N_("second guess 'git switch <no-such-branch>'")),
+ OPT_BOOL(0, "discard-changes", &opts.discard_changes,
+ N_("throw away local modifications")),
+ OPT_END()
+ };
+ int ret;
+
+ memset(&opts, 0, sizeof(opts));
+ opts.dwim_new_local_branch = 1;
+ opts.accept_ref = 1;
+ opts.accept_pathspec = 0;
+ opts.switch_branch_doing_nothing_is_ok = 0;
+ opts.only_merge_on_switching_branches = 1;
+ opts.implicit_detach = 0;
+ opts.can_switch_when_in_progress = 0;
+ opts.orphan_from_empty_tree = 1;
+ opts.overlay_mode = -1;
+
+ options = parse_options_dup(switch_options);
+ options = add_common_options(&opts, options);
+ options = add_common_switch_branch_options(&opts, options);
+
+ ret = checkout_main(argc, argv, prefix, &opts,
+ options, switch_branch_usage);
+ FREE_AND_NULL(options);
+ return ret;
+}
+
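+/*
+ * "git restore" front-end: takes pathspecs (required unless using
+ * --patch) and no ref argument; by default it restores the working
+ * tree from the index, or from the tree-ish given with --source.
+ */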
+int cmd_restore(int argc, const char **argv, const char *prefix)
+{
+ struct checkout_opts opts;
+ struct option *options;
+ struct option restore_options[] = {
+ OPT_STRING('s', "source", &opts.from_treeish, "<tree-ish>",
+ N_("where the checkout from")),
+ OPT_BOOL('S', "staged", &opts.checkout_index,
+ N_("restore the index")),
+ OPT_BOOL('W', "worktree", &opts.checkout_worktree,
+ N_("restore the working tree (default)")),
+ OPT_BOOL(0, "ignore-unmerged", &opts.ignore_unmerged,
+ N_("ignore unmerged entries")),
+ OPT_BOOL(0, "overlay", &opts.overlay_mode, N_("use overlay mode")),
+ OPT_END()
+ };
+ int ret;
+
+ memset(&opts, 0, sizeof(opts));
+ opts.accept_ref = 0;
+ opts.accept_pathspec = 1;
+ opts.empty_pathspec_ok = 0;
+ opts.overlay_mode = 0;
+ opts.checkout_index = -1; /* default off */
+ opts.checkout_worktree = -2; /* default on */
+ opts.ignore_unmerged_opt = "--ignore-unmerged";
+
+ options = parse_options_dup(restore_options);
+ options = add_common_options(&opts, options);
+ options = add_checkout_path_options(&opts, options);
+
+ ret = checkout_main(argc, argv, prefix, &opts,
+ options, restore_usage);
+ FREE_AND_NULL(options);
+ return ret;
+}
#include "transport.h"
#include "strbuf.h"
#include "dir.h"
+#include "dir-iterator.h"
+#include "iterator.h"
#include "sigchain.h"
#include "branch.h"
#include "remote.h"
static struct string_list option_recurse_submodules = STRING_LIST_INIT_NODUP;
static struct list_objects_filter_options filter_options;
static struct string_list server_options = STRING_LIST_INIT_NODUP;
+static int option_remote_submodules;
static int recurse_submodules_cb(const struct option *opt,
const char *arg, int unset)
OPT_SET_INT('6', "ipv6", &family, N_("use IPv6 addresses only"),
TRANSPORT_FAMILY_IPV6),
OPT_PARSE_LIST_OBJECTS_FILTER(&filter_options),
+ OPT_BOOL(0, "remote-submodules", &option_remote_submodules,
+ N_("any cloned submodules will use their remote-tracking branch")),
OPT_END()
};
add_one_reference, &required);
}
-static void copy_alternates(struct strbuf *src, struct strbuf *dst,
- const char *src_repo)
+static void copy_alternates(struct strbuf *src, const char *src_repo)
{
/*
* Read from the source objects/info/alternates file
fclose(in);
}
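+/*
+ * Create the directory unless it already exists; die if the path
+ * exists but is not a directory.
+ */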
+static void mkdir_if_missing(const char *pathname, mode_t mode)
+{
+ struct stat st;
+
+ if (!mkdir(pathname, mode))
+ return;
+
+ if (errno != EEXIST)
+ die_errno(_("failed to create directory '%s'"), pathname);
+ else if (stat(pathname, &st))
+ die_errno(_("failed to stat '%s'"), pathname);
+ else if (!S_ISDIR(st.st_mode))
+ die(_("%s exists and is not a directory"), pathname);
+}
+
static void copy_or_link_directory(struct strbuf *src, struct strbuf *dest,
- const char *src_repo, int src_baselen)
+ const char *src_repo)
{
- struct dirent *de;
- struct stat buf;
int src_len, dest_len;
- DIR *dir;
-
- dir = opendir(src->buf);
- if (!dir)
- die_errno(_("failed to open '%s'"), src->buf);
-
- if (mkdir(dest->buf, 0777)) {
- if (errno != EEXIST)
- die_errno(_("failed to create directory '%s'"), dest->buf);
- else if (stat(dest->buf, &buf))
- die_errno(_("failed to stat '%s'"), dest->buf);
- else if (!S_ISDIR(buf.st_mode))
- die(_("%s exists and is not a directory"), dest->buf);
- }
+ struct dir_iterator *iter;
+ int iter_status;
+ unsigned int flags;
+
+ mkdir_if_missing(dest->buf, 0777);
+
+ flags = DIR_ITERATOR_PEDANTIC | DIR_ITERATOR_FOLLOW_SYMLINKS;
+ iter = dir_iterator_begin(src->buf, flags);
+
+ if (!iter)
+ die_errno(_("failed to start iterator over '%s'"), src->buf);
strbuf_addch(src, '/');
src_len = src->len;
strbuf_addch(dest, '/');
dest_len = dest->len;
- while ((de = readdir(dir)) != NULL) {
+ while ((iter_status = dir_iterator_advance(iter)) == ITER_OK) {
strbuf_setlen(src, src_len);
- strbuf_addstr(src, de->d_name);
+ strbuf_addstr(src, iter->relative_path);
strbuf_setlen(dest, dest_len);
- strbuf_addstr(dest, de->d_name);
- if (stat(src->buf, &buf)) {
- warning (_("failed to stat %s\n"), src->buf);
- continue;
- }
- if (S_ISDIR(buf.st_mode)) {
- if (de->d_name[0] != '.')
- copy_or_link_directory(src, dest,
- src_repo, src_baselen);
+ strbuf_addstr(dest, iter->relative_path);
+
+ if (S_ISDIR(iter->st.st_mode)) {
+ mkdir_if_missing(dest->buf, 0777);
continue;
}
/* Files that cannot be copied bit-for-bit... */
- if (!strcmp(src->buf + src_baselen, "/info/alternates")) {
- copy_alternates(src, dest, src_repo);
+ if (!fspathcmp(iter->relative_path, "info/alternates")) {
+ copy_alternates(src, src_repo);
continue;
}
if (unlink(dest->buf) && errno != ENOENT)
die_errno(_("failed to unlink '%s'"), dest->buf);
if (!option_no_hardlinks) {
- if (!link(src->buf, dest->buf))
+ if (!link(real_path(src->buf), dest->buf))
continue;
if (option_local > 0)
die_errno(_("failed to create link '%s'"), dest->buf);
if (copy_file_with_time(dest->buf, src->buf, 0666))
die_errno(_("failed to copy file to '%s'"), dest->buf);
}
- closedir(dir);
+
+ if (iter_status != ITER_DONE) {
+ strbuf_setlen(src, src_len);
+ die(_("failed to iterate over '%s'"), src->buf);
+ }
}
static void clone_local(const char *src_repo, const char *dest_repo)
get_common_dir(&dest, dest_repo);
strbuf_addstr(&src, "/objects");
strbuf_addstr(&dest, "/objects");
- copy_or_link_directory(&src, &dest, src_repo, src.len);
+ copy_or_link_directory(&src, &dest, src_repo);
strbuf_release(&src);
strbuf_release(&dest);
}
static const char junk_leave_repo_msg[] =
N_("Clone succeeded, but checkout failed.\n"
"You can inspect what was checked out with 'git status'\n"
- "and retry the checkout with 'git checkout -f HEAD'\n");
+ "and retry with 'git restore --source=HEAD :/'\n");
static void remove_junk(void)
{
if (option_verbosity < 0)
argv_array_push(&args, "--quiet");
+ if (option_remote_submodules) {
+ argv_array_push(&args, "--remote");
+ argv_array_push(&args, "--no-fetch");
+ }
+
err = run_command_v_opt(args.argv, RUN_GIT_CMD);
argv_array_clear(&args);
}
memset(&copts, 0, sizeof(copts));
copts.padding = 1;
- argc = parse_options(argc, argv, "", options, builtin_column_usage, 0);
+ argc = parse_options(argc, argv, prefix, options, builtin_column_usage, 0);
if (argc)
usage_with_options(builtin_column_usage, options);
if (real_command || command) {
#include "parse-options.h"
#include "repository.h"
#include "commit-graph.h"
+#include "object-store.h"
static char const * const builtin_commit_graph_usage[] = {
N_("git commit-graph [--object-dir <objdir>]"),
N_("git commit-graph read [--object-dir <objdir>]"),
- N_("git commit-graph verify [--object-dir <objdir>]"),
- N_("git commit-graph write [--object-dir <objdir>] [--append] [--reachable|--stdin-packs|--stdin-commits]"),
+ N_("git commit-graph verify [--object-dir <objdir>] [--shallow]"),
+ N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] <split options>"),
NULL
};
static const char * const builtin_commit_graph_verify_usage[] = {
- N_("git commit-graph verify [--object-dir <objdir>]"),
+ N_("git commit-graph verify [--object-dir <objdir>] [--shallow]"),
NULL
};
};
static const char * const builtin_commit_graph_write_usage[] = {
- N_("git commit-graph write [--object-dir <objdir>] [--append] [--reachable|--stdin-packs|--stdin-commits]"),
+ N_("git commit-graph write [--object-dir <objdir>] [--append|--split] [--reachable|--stdin-packs|--stdin-commits] <split options>"),
NULL
};
int stdin_packs;
int stdin_commits;
int append;
+ int split;
+ int shallow;
} opts;
-
static int graph_verify(int argc, const char **argv)
{
struct commit_graph *graph = NULL;
int open_ok;
int fd;
struct stat st;
+ int flags = 0;
static struct option builtin_commit_graph_verify_options[] = {
OPT_STRING(0, "object-dir", &opts.obj_dir,
N_("dir"),
N_("The object directory to store the graph")),
+ OPT_BOOL(0, "shallow", &opts.shallow,
+ N_("if the commit-graph is split, only verify the tip file")),
OPT_END(),
};
if (!opts.obj_dir)
opts.obj_dir = get_object_directory();
+ if (opts.shallow)
+ flags |= COMMIT_GRAPH_VERIFY_SHALLOW;
graph_name = get_commit_graph_filename(opts.obj_dir);
open_ok = open_commit_graph(graph_name, &fd, &st);
- if (!open_ok && errno == ENOENT)
- return 0;
- if (!open_ok)
+ if (!open_ok && errno != ENOENT)
die_errno(_("Could not open commit-graph '%s'"), graph_name);
- graph = load_commit_graph_one_fd_st(fd, &st);
+
FREE_AND_NULL(graph_name);
+ if (open_ok)
+ graph = load_commit_graph_one_fd_st(fd, &st);
+ else
+ graph = read_commit_graph_one(the_repository, opts.obj_dir);
+
+ /* Return failure if open_ok predicted success */
if (!graph)
- return 1;
+ return !!open_ok;
UNLEAK(graph);
- return verify_commit_graph(the_repository, graph);
+ return verify_commit_graph(the_repository, graph, flags);
}
static int graph_read(int argc, const char **argv)
}
extern int read_replace_refs;
+static struct split_commit_graph_opts split_opts;
static int graph_write(int argc, const char **argv)
{
N_("start walk at commits listed by stdin")),
OPT_BOOL(0, "append", &opts.append,
N_("include all commits already in the commit-graph file")),
+ OPT_BOOL(0, "split", &opts.split,
+ N_("allow writing an incremental commit-graph file")),
+ OPT_INTEGER(0, "max-commits", &split_opts.max_commits,
+ N_("maximum number of commits in a non-base split commit-graph")),
+ OPT_INTEGER(0, "size-multiple", &split_opts.size_multiple,
+ N_("maximum ratio between two levels of a split commit-graph")),
+ OPT_EXPIRY_DATE(0, "expire-time", &split_opts.expire_time,
+ N_("maximum number of commits in a non-base split commit-graph")),
OPT_END(),
};
+ split_opts.size_multiple = 2;
+ split_opts.max_commits = 0;
+ split_opts.expire_time = 0;
+
argc = parse_options(argc, argv, NULL,
builtin_commit_graph_write_options,
builtin_commit_graph_write_usage, 0);
opts.obj_dir = get_object_directory();
if (opts.append)
flags |= COMMIT_GRAPH_APPEND;
+ if (opts.split)
+ flags |= COMMIT_GRAPH_SPLIT;
read_replace_refs = 0;
- if (opts.reachable)
- return write_commit_graph_reachable(opts.obj_dir, flags);
+ if (opts.reachable) {
+ if (write_commit_graph_reachable(opts.obj_dir, flags, &split_opts))
+ return 1;
+ return 0;
+ }
string_list_init(&lines, 0);
if (opts.stdin_packs || opts.stdin_commits) {
if (write_commit_graph(opts.obj_dir,
pack_indexes,
commit_hex,
- flags))
+ flags,
+ &split_opts))
result = 1;
UNLEAK(lines);
"\n");
static const char empty_cherry_pick_advice_single[] =
-N_("Otherwise, please use 'git reset'\n");
+N_("Otherwise, please use 'git cherry-pick --skip'\n");
static const char empty_cherry_pick_advice_multi[] =
-N_("If you wish to skip this commit, use:\n"
+N_("and then use:\n"
"\n"
-" git reset\n"
+" git cherry-pick --continue\n"
"\n"
-"Then \"git cherry-pick --continue\" will resume cherry-picking\n"
-"the remaining commits.\n");
+"to resume cherry-picking the remaining commits.\n"
+"If you wish to skip this commit, use:\n"
+"\n"
+" git cherry-pick --skip\n"
+"\n");
static const char *color_status_slots[] = {
[WT_STATUS_HEADER] = "header",
static struct status_deferred_config {
enum wt_status_format status_format;
int show_branch;
+ enum ahead_behind_flags ahead_behind;
} status_deferred_config = {
STATUS_FORMAT_UNSPECIFIED,
- -1 /* unspecified */
+ -1, /* unspecified */
+ AHEAD_BEHIND_UNSPECIFIED,
};
static void finalize_deferred_config(struct wt_status *s)
if (s->show_branch < 0)
s->show_branch = 0;
+ /*
+ * If the user did not give a "--[no]-ahead-behind" command
+ * line argument *AND* we will print in a human-readable format
+ * (short, long etc.) then we inherit from the status.aheadbehind
+ * config setting. In all other cases (and porcelain V[12] formats
+ * in particular), we inherit _FULL for backwards compatibility.
+ */
+ if (use_deferred_config &&
+ s->ahead_behind_flags == AHEAD_BEHIND_UNSPECIFIED)
+ s->ahead_behind_flags = status_deferred_config.ahead_behind;
+
if (s->ahead_behind_flags == AHEAD_BEHIND_UNSPECIFIED)
s->ahead_behind_flags = AHEAD_BEHIND_FULL;
}
status_deferred_config.show_branch = git_config_bool(k, v);
return 0;
}
+ if (!strcmp(k, "status.aheadbehind")) {
+ status_deferred_config.ahead_behind = git_config_bool(k, v);
+ return 0;
+ }
if (!strcmp(k, "status.showstash")) {
s->show_stash = git_config_bool(k, v);
return 0;
die("%s", err.buf);
}
- sequencer_post_commit_cleanup(the_repository);
+ sequencer_post_commit_cleanup(the_repository, 0);
unlink(git_path_merge_head(the_repository));
unlink(git_path_merge_msg(the_repository));
unlink(git_path_merge_mode(the_repository));
if (commit_index_files())
die(_("repository has been updated, but unable to write\n"
"new_index file. Check that disk is not full and quota is\n"
- "not exceeded, and then \"git reset HEAD\" to recover."));
+ "not exceeded, and then \"git restore --staged :/\" to recover."));
if (git_env_bool(GIT_TEST_COMMIT_GRAPH, 0) &&
- write_commit_graph_reachable(get_object_directory(), 0))
+ write_commit_graph_reachable(get_object_directory(), 0, NULL))
return 1;
repo_rerere(the_repository, 0);
static inline struct commit_name *find_commit_name(const struct object_id *peeled)
{
- return hashmap_get_from_hash(&names, sha1hash(peeled->hash), peeled->hash);
+ return hashmap_get_from_hash(&names, oidhash(peeled), peeled);
}
static int replace_name(struct commit_name *e,
if (!e) {
e = xmalloc(sizeof(struct commit_name));
oidcpy(&e->peeled, peeled);
- hashmap_entry_init(e, sha1hash(peeled->hash));
+ hashmap_entry_init(e, oidhash(peeled));
hashmap_add(&names, e);
e->path = NULL;
}
--- /dev/null
+#include "builtin.h"
+#include "config.h"
+#include "parse-options.h"
+
+static char const * const env__helper_usage[] = {
+ N_("git env--helper --type=[bool|ulong] <options> <env-var>"),
+ NULL
+};
+
+static enum {
+ ENV_HELPER_TYPE_BOOL = 1,
+ ENV_HELPER_TYPE_ULONG
+} cmdmode = 0;
+
+static int option_parse_type(const struct option *opt, const char *arg,
+ int unset)
+{
+ if (!strcmp(arg, "bool"))
+ cmdmode = ENV_HELPER_TYPE_BOOL;
+ else if (!strcmp(arg, "ulong"))
+ cmdmode = ENV_HELPER_TYPE_ULONG;
+ else
+ die(_("unrecognized --type argument, %s"), arg);
+
+ return 0;
+}
+
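+/*
+ * Interpret an environment variable as the requested --type (falling
+ * back to --default), print it unless --exit-code is given, and exit
+ * with 0 when the value is true/non-zero.
+ */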
+int cmd_env__helper(int argc, const char **argv, const char *prefix)
+{
+ int exit_code = 0;
+ const char *env_variable = NULL;
+ const char *env_default = NULL;
+ int ret;
+ int ret_int, default_int;
+ unsigned long ret_ulong, default_ulong;
+ struct option opts[] = {
+ OPT_CALLBACK_F(0, "type", &cmdmode, N_("type"),
+ N_("value is given this type"), PARSE_OPT_NONEG,
+ option_parse_type),
+ OPT_STRING(0, "default", &env_default, N_("value"),
+ N_("default for git_env_*(...) to fall back on")),
+ OPT_BOOL(0, "exit-code", &exit_code,
+ N_("be quiet only use git_env_*() value as exit code")),
+ OPT_END(),
+ };
+
+ argc = parse_options(argc, argv, prefix, opts, env__helper_usage,
+ PARSE_OPT_KEEP_UNKNOWN);
+ if (env_default && !*env_default)
+ usage_with_options(env__helper_usage, opts);
+ if (!cmdmode)
+ usage_with_options(env__helper_usage, opts);
+ if (argc != 1)
+ usage_with_options(env__helper_usage, opts);
+ env_variable = argv[0];
+
+ switch (cmdmode) {
+ case ENV_HELPER_TYPE_BOOL:
+ if (env_default) {
+ default_int = git_parse_maybe_bool(env_default);
+ if (default_int == -1) {
+ error(_("option `--default' expects a boolean value with `--type=bool`, not `%s`"),
+ env_default);
+ usage_with_options(env__helper_usage, opts);
+ }
+ } else {
+ default_int = 0;
+ }
+ ret_int = git_env_bool(env_variable, default_int);
+ if (!exit_code)
+ puts(ret_int ? "true" : "false");
+ ret = ret_int;
+ break;
+ case ENV_HELPER_TYPE_ULONG:
+ if (env_default) {
+ if (!git_parse_ulong(env_default, &default_ulong)) {
+ error(_("option `--default' expects an unsigned long value with `--type=ulong`, not `%s`"),
+ env_default);
+ usage_with_options(env__helper_usage, opts);
+ }
+ } else {
+ default_ulong = 0;
+ }
+ ret_ulong = git_env_ulong(env_variable, default_ulong);
+ if (!exit_code)
+ printf("%lu\n", ret_ulong);
+ ret = ret_ulong;
+ break;
+ default:
+ BUG("unknown <type> value");
+ break;
+ }
+
+ return !ret;
+}
static int progress;
static enum { SIGNED_TAG_ABORT, VERBATIM, WARN, WARN_STRIP, STRIP } signed_tag_mode = SIGNED_TAG_ABORT;
static enum { TAG_FILTERING_ABORT, DROP, REWRITE } tag_of_filtered_mode = TAG_FILTERING_ABORT;
+static enum { REENCODE_ABORT, REENCODE_YES, REENCODE_NO } reencode_mode = REENCODE_ABORT;
static int fake_missing_tagger;
static int use_done_feature;
static int no_data;
return 0;
}
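+/*
+ * Parse --reencode=<mode>: the usual boolean spellings select yes/no,
+ * "abort" selects the default behavior of dying on a commit-specific
+ * encoding, and --no-reencode resets to "abort".
+ */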
+static int parse_opt_reencode_mode(const struct option *opt,
+ const char *arg, int unset)
+{
+ if (unset) {
+ reencode_mode = REENCODE_ABORT;
+ return 0;
+ }
+
+ switch (git_parse_maybe_bool(arg)) {
+ case 0:
+ reencode_mode = REENCODE_NO;
+ break;
+ case 1:
+ reencode_mode = REENCODE_YES;
+ break;
+ default:
+ if (!strcasecmp(arg, "abort"))
+ reencode_mode = REENCODE_ABORT;
+ else
+ return error("Unknown reencoding mode: %s", arg);
+ }
+
+ return 0;
+}
+
static struct decoration idnums;
static uint32_t last_idnum;
if (is_null_oid(oid))
return;
- object = lookup_object(the_repository, oid->hash);
+ object = lookup_object(the_repository, oid);
if (object && object->flags & SHOWN)
return;
&spec->oid));
else {
struct object *object = lookup_object(the_repository,
- spec->oid.hash);
+ &spec->oid);
printf("M %06o :%d ", spec->mode,
get_object_mark(object));
}
bol = memmem(begin, end ? end - begin : strlen(begin),
needle, strlen(needle));
if (!bol)
- return git_commit_encoding;
+ return NULL;
bol += strlen(needle);
eol = strchrnul(bol, '\n');
*eol = '\0';
}
mark_next_object(&commit->object);
- if (anonymize)
+ if (anonymize) {
reencoded = anonymize_commit_message(message);
- else if (!is_encoding_utf8(encoding))
- reencoded = reencode_string(message, "UTF-8", encoding);
+ } else if (encoding) {
+ switch(reencode_mode) {
+ case REENCODE_YES:
+ reencoded = reencode_string(message, "UTF-8", encoding);
+ break;
+ case REENCODE_NO:
+ break;
+ case REENCODE_ABORT:
+ die("Encountered commit-specific encoding %s in commit "
+ "%s; use --reencode=[yes|no] to handle it",
+ encoding, oid_to_hex(&commit->object.oid));
+ }
+ }
if (!commit->parents)
printf("reset %s\n", refname);
printf("commit %s\nmark :%"PRIu32"\n", refname, last_idnum);
if (show_original_ids)
printf("original-oid %s\n", oid_to_hex(&commit->object.oid));
- printf("%.*s\n%.*s\ndata %u\n%s",
+ printf("%.*s\n%.*s\n",
(int)(author_end - author), author,
- (int)(committer_end - committer), committer,
+ (int)(committer_end - committer), committer);
+ if (!reencoded && encoding)
+ printf("encoding %s\n", encoding);
+ printf("data %u\n%s",
(unsigned)(reencoded
? strlen(reencoded) : message
? strlen(message) : 0),
OPT_CALLBACK(0, "tag-of-filtered-object", &tag_of_filtered_mode, N_("mode"),
N_("select handling of tags that tag filtered objects"),
parse_opt_tag_of_filtered_mode),
+ OPT_CALLBACK(0, "reencode", &reencode_mode, N_("mode"),
+ N_("select handling of commit messages in an alternate encoding"),
+ parse_opt_reencode_mode),
OPT_STRING(0, "export-marks", &export_filename, N_("file"),
N_("Dump marks to this file")),
OPT_STRING(0, "import-marks", &import_filename, N_("file"),
#include "list-objects-filter-options.h"
#include "commit-reach.h"
+#define FORCED_UPDATES_DELAY_WARNING_IN_MS (10 * 1000)
+
static const char * const builtin_fetch_usage[] = {
N_("git fetch [<options>] [<repository> [<refspec>...]]"),
N_("git fetch [<options>] <group>"),
};
static int fetch_prune_config = -1; /* unspecified */
+static int fetch_show_forced_updates = 1;
+static uint64_t forced_updates_ms = 0;
static int prune = -1; /* unspecified */
#define PRUNE_BY_DEFAULT 0 /* do we prune by default? */
static int all, append, dry_run, force, keep, multiple, update_head_ok, verbosity, deepen_relative;
static int progress = -1;
+static int enable_auto_gc = 1;
static int tags = TAGS_DEFAULT, unshallow, update_shallow, deepen;
static int max_children = 1;
static enum transport_family family;
return 0;
}
+ if (!strcmp(k, "fetch.showforcedupdates")) {
+ fetch_show_forced_updates = git_config_bool(k, v);
+ return 0;
+ }
+
if (!strcmp(k, "submodule.recurse")) {
int r = git_config_bool(k, v) ?
RECURSE_SUBMODULES_ON : RECURSE_SUBMODULES_OFF;
OPT_STRING_LIST(0, "negotiation-tip", &negotiation_tip, N_("revision"),
N_("report that we have only objects reachable from this object")),
OPT_PARSE_LIST_OBJECTS_FILTER(&filter_options),
+ OPT_BOOL(0, "auto-gc", &enable_auto_gc,
+ N_("run 'gc --auto' after fetching")),
+ OPT_BOOL(0, "show-forced-updates", &fetch_show_forced_updates,
+ N_("check for forced-updates on all updated branches")),
OPT_END()
};
enum object_type type;
struct branch *current_branch = branch_get(NULL);
const char *pretty_ref = prettify_refname(ref->name);
+ int fast_forward = 0;
type = oid_object_info(the_repository, &ref->new_oid, NULL);
if (type < 0)
return r;
}
- if (in_merge_bases(current, updated)) {
+ if (fetch_show_forced_updates) {
+ uint64_t t_before = getnanotime();
+ fast_forward = in_merge_bases(current, updated);
+ forced_updates_ms += (getnanotime() - t_before) / 1000000;
+ } else {
+ fast_forward = 1;
+ }
+
+ if (fast_forward) {
struct strbuf quickref = STRBUF_INIT;
int r;
+
strbuf_add_unique_abbrev(&quickref, &current->object.oid, DEFAULT_ABBREV);
strbuf_addstr(&quickref, "..");
strbuf_add_unique_abbrev(&quickref, &ref->new_oid, DEFAULT_ABBREV);
" 'git remote prune %s' to remove any old, conflicting "
"branches"), remote_name);
+ if (advice_fetch_show_forced_updates) {
+ if (!fetch_show_forced_updates) {
+ warning(_("Fetch normally indicates which branches had a forced update, but that check has been disabled."));
+ warning(_("To re-enable, use '--show-forced-updates' flag or run 'git config fetch.showForcedUpdates true'."));
+ } else if (forced_updates_ms > FORCED_UPDATES_DELAY_WARNING_IN_MS) {
+ warning(_("It took %.2f seconds to check forced updates. You can use '--no-show-forced-updates'\n"),
+ forced_updates_ms / 1000.0);
+ warning(_("or run 'git config fetch.showForcedUpdates false' to avoid this check.\n"));
+ }
+ }
+
abort:
strbuf_release(&note);
free(url);
return errcode;
}
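The warning text above spells out both escape hatches for the new forced-update check; a minimal sketch using exactly the flag and config key named there:

----
# Skip the (potentially slow) forced-update detection for one fetch.
$ git fetch --no-show-forced-updates origin

# Or disable the check permanently, as the warning suggests.
$ git config fetch.showForcedUpdates false
----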
- argv_array_pushl(&argv, "fetch", "--append", NULL);
+ argv_array_pushl(&argv, "fetch", "--append", "--no-auto-gc", NULL);
add_options_to_argv(&argv);
for (i = 0; i < list->nr; i++) {
close_object_store(the_repository->objects);
- argv_array_pushl(&argv_gc_auto, "gc", "--auto", NULL);
- if (verbosity < 0)
- argv_array_push(&argv_gc_auto, "--quiet");
- run_command_v_opt(argv_gc_auto.argv, RUN_GIT_CMD);
- argv_array_clear(&argv_gc_auto);
+ if (enable_auto_gc) {
+ argv_array_pushl(&argv_gc_auto, "gc", "--auto", NULL);
+ if (verbosity < 0)
+ argv_array_push(&argv_gc_auto, "--quiet");
+ run_command_v_opt(argv_gc_auto.argv, RUN_GIT_CMD);
+ argv_array_clear(&argv_gc_auto);
+ }
return result;
}
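The `--[no-]auto-gc` option above controls whether `gc --auto` runs after fetching, and the multi-remote path now passes `--no-auto-gc` to its sub-fetches so gc runs at most once. A hedged sketch:

----
# Fetch without triggering the post-fetch 'gc --auto' pass (sketch).
$ git fetch --no-auto-gc origin
----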
static void mark_unreachable_referents(const struct object_id *oid)
{
struct fsck_options options = FSCK_OPTIONS_DEFAULT;
- struct object *obj = lookup_object(the_repository, oid->hash);
+ struct object *obj = lookup_object(the_repository, oid);
if (!obj || !(obj->flags & HAS_OBJ))
return; /* not part of our original set */
struct object *obj;
if (!is_null_oid(oid)) {
- obj = lookup_object(the_repository, oid->hash);
+ obj = lookup_object(the_repository, oid);
if (obj && (obj->flags & HAS_OBJ)) {
if (timestamp && name_objects)
add_decoration(fsck_walk_options.object_names,
static void mark_object_for_connectivity(const struct object_id *oid)
{
- struct object *obj = lookup_unknown_object(oid->hash);
+ struct object *obj = lookup_unknown_object(oid);
obj->flags |= HAS_OBJ;
}
struct object_id oid;
if (!get_oid(arg, &oid)) {
struct object *obj = lookup_object(the_repository,
- oid.hash);
+ &oid);
if (!obj || !(obj->flags & HAS_OBJ)) {
if (is_promisor_object(&oid))
if (gc_write_commit_graph &&
write_commit_graph_reachable(get_object_directory(),
- !quiet && !daemonized ? COMMIT_GRAPH_PROGRESS : 0))
+ !quiet && !daemonized ? COMMIT_GRAPH_PROGRESS : 0,
+ NULL))
return 1;
if (auto_gc && too_many_loose_objects())
object = parse_object_or_die(oid, oid_to_hex(oid));
grep_read_lock();
- data = read_object_with_reference(&object->oid, tree_type,
+ data = read_object_with_reference(&subrepo,
+ &object->oid, tree_type,
&size, NULL);
grep_read_unlock();
int hit, len;
grep_read_lock();
- data = read_object_with_reference(&obj->oid, tree_type,
+ data = read_object_with_reference(opt->repo,
+ &obj->oid, tree_type,
&size, NULL);
grep_read_unlock();
int i;
const char *errstr = NULL;
- argc = parse_options(argc, argv, NULL, hash_object_options,
+ argc = parse_options(argc, argv, prefix, hash_object_options,
hash_object_usage, 0);
if (flags & HASH_WRITE_OBJECT)
static int default_show_signature;
static int decoration_style;
static int decoration_given;
-static int use_mailmap_config;
+static int use_mailmap_config = 1;
static const char *fmt_patch_subject_prefix = "PATCH";
static const char *fmt_pretty;
struct string_list args;
};
+static int session_is_interactive(void)
+{
+ return isatty(1) || pager_in_use();
+}
+
static int auto_decoration_style(void)
{
- return (isatty(1) || pager_in_use()) ? DECORATE_SHORT_REFS : 0;
+ return session_is_interactive() ? DECORATE_SHORT_REFS : 0;
}
static int parse_decoration_style(const char *value)
struct rev_info *rev, struct setup_revision_opt *opt)
{
struct userformat_want w;
- int quiet = 0, source = 0, mailmap = 0;
+ int quiet = 0, source = 0, mailmap;
static struct line_opt_callback_data line_cb = {NULL, NULL, STRING_LIST_INIT_DUP};
static struct string_list decorate_refs_exclude = STRING_LIST_INIT_NODUP;
static struct string_list decorate_refs_include = STRING_LIST_INIT_NODUP;
static int git_format_config(const char *var, const char *value, void *cb)
{
+ struct rev_info *rev = cb;
+
if (!strcmp(var, "format.headers")) {
if (!value)
die(_("format.headers without value"));
from = NULL;
return 0;
}
+ if (!strcmp(var, "format.notes")) {
+ struct strbuf buf = STRBUF_INIT;
+ int b = git_parse_maybe_bool(value);
+ if (!b)
+ return 0;
+ rev->show_notes = 1;
+ if (b < 0) {
+ strbuf_addstr(&buf, value);
+ expand_notes_ref(&buf);
+ string_list_append(&rev->notes_opt.extra_notes_refs,
+ strbuf_detach(&buf, NULL));
+ } else {
+ rev->notes_opt.use_default_notes = 1;
+ }
+ return 0;
+ }
return git_log_config(var, value, cb);
}
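The `format.notes` handling above accepts either a boolean or a notes ref: a non-boolean value is run through expand_notes_ref() and appended to the extra notes refs. A minimal sketch, assuming the usual `refs/notes/<name>` expansion:

----
# Include the default notes ref in format-patch output.
$ git config format.notes true

# Or name a specific notes ref; a non-boolean value is expanded
# (presumably "review" -> "refs/notes/review") per the hunk above.
$ git config format.notes review
$ git format-patch -1
----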
struct object_id *patch_id;
if (*commit_base_at(&commit_base, commit))
continue;
- if (commit_patch_id(commit, &diffopt, &oid, 0))
+ if (commit_patch_id(commit, &diffopt, &oid, 0, 1))
die(_("cannot get patch id"));
ALLOC_GROW(bases->patch_id, bases->nr_patch_id + 1, bases->alloc_patch_id);
patch_id = bases->patch_id + bases->nr_patch_id;
extra_to.strdup_strings = 1;
extra_cc.strdup_strings = 1;
init_log_defaults();
- git_config(git_format_config, NULL);
repo_init_revisions(the_repository, &rev, prefix);
+ git_config(git_format_config, &rev);
rev.commit_format = CMIT_FMT_EMAIL;
rev.expand_tabs_in_log_default = 0;
rev.verbose_header = 1;
static void unresolved_directory(const struct traverse_info *info,
struct name_entry n[3])
{
+ struct repository *r = the_repository;
char *newbase;
struct name_entry *p;
struct tree_desc t[3];
newbase = traverse_path(info, p);
#define ENTRY_OID(e) (((e)->mode && S_ISDIR((e)->mode)) ? &(e)->oid : NULL)
- buf0 = fill_tree_descriptor(t + 0, ENTRY_OID(n + 0));
- buf1 = fill_tree_descriptor(t + 1, ENTRY_OID(n + 1));
- buf2 = fill_tree_descriptor(t + 2, ENTRY_OID(n + 2));
+ buf0 = fill_tree_descriptor(r, t + 0, ENTRY_OID(n + 0));
+ buf1 = fill_tree_descriptor(r, t + 1, ENTRY_OID(n + 1));
+ buf2 = fill_tree_descriptor(r, t + 2, ENTRY_OID(n + 2));
#undef ENTRY_OID
merge_trees(t, newbase);
traverse_trees(&the_index, 3, t, &info);
}
-static void *get_tree_descriptor(struct tree_desc *desc, const char *rev)
+static void *get_tree_descriptor(struct repository *r,
+ struct tree_desc *desc,
+ const char *rev)
{
struct object_id oid;
void *buf;
- if (get_oid(rev, &oid))
+ if (repo_get_oid(r, rev, &oid))
die("unknown rev %s", rev);
- buf = fill_tree_descriptor(desc, &oid);
+ buf = fill_tree_descriptor(r, desc, &oid);
if (!buf)
die("%s is not a tree", rev);
return buf;
int cmd_merge_tree(int argc, const char **argv, const char *prefix)
{
+ struct repository *r = the_repository;
struct tree_desc t[3];
void *buf1, *buf2, *buf3;
if (argc != 4)
usage(merge_tree_usage);
- buf1 = get_tree_descriptor(t+0, argv[1]);
- buf2 = get_tree_descriptor(t+1, argv[2]);
- buf3 = get_tree_descriptor(t+2, argv[3]);
+ buf1 = get_tree_descriptor(r, t+0, argv[1]);
+ buf2 = get_tree_descriptor(r, t+1, argv[2]);
+ buf3 = get_tree_descriptor(r, t+2, argv[3]);
merge_trees(t, "");
free(buf1);
free(buf2);
#include "packfile.h"
#include "tag.h"
#include "alias.h"
+#include "branch.h"
#include "commit-reach.h"
#include "wt-status.h"
static int verbosity;
static int allow_rerere_auto;
static int abort_current_merge;
+static int quit_current_merge;
static int continue_current_merge;
static int allow_unrelated_histories;
static int show_progress = -1;
OPT__VERBOSITY(&verbosity),
OPT_BOOL(0, "abort", &abort_current_merge,
N_("abort the current in-progress merge")),
+ OPT_BOOL(0, "quit", &quit_current_merge,
+ N_("--abort but leave index and working tree alone")),
OPT_BOOL(0, "continue", &continue_current_merge,
N_("continue the current in-progress merge")),
OPT_BOOL(0, "allow-unrelated-histories", &allow_unrelated_histories,
OPT_END()
};
-/* Cleans up metadata that is uninteresting after a succeeded merge. */
-static void drop_save(void)
-{
- unlink(git_path_merge_head(the_repository));
- unlink(git_path_merge_msg(the_repository));
- unlink(git_path_merge_mode(the_repository));
-}
-
static int save_state(struct object_id *stash)
{
int len;
{
if (verbosity >= 0)
printf("%s%s\n", squash ? _(" (nothing to squash)") : "", msg);
- drop_save();
+ remove_merge_branch_state(the_repository);
}
static void squash_message(struct commit *commit, struct commit_list *remoteheads)
&result_commit, NULL, sign_commit))
die(_("failed to write commit object"));
finish(head, remoteheads, &result_commit, "In-index merge");
- drop_save();
+ remove_merge_branch_state(the_repository);
return 0;
}
struct strbuf buf = STRBUF_INIT;
struct object_id result_commit;
+ write_tree_trivial(result_tree);
free_commit_list(common);
parents = remoteheads;
if (!head_subsumed || fast_forward == FF_NO)
strbuf_addf(&buf, "Merge made by the '%s' strategy.", wt_strategy);
finish(head, remoteheads, &result_commit, buf.buf);
strbuf_release(&buf);
- drop_save();
+ remove_merge_branch_state(the_repository);
return 0;
}
goto done;
}
+ if (quit_current_merge) {
+ if (orig_argc != 2)
+ usage_msg_opt(_("--quit expects no arguments"),
+ builtin_merge_usage,
+ builtin_merge_options);
+
+ remove_merge_branch_state(the_repository);
+ goto done;
+ }
+
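Per the option help above, `--quit` drops the in-progress merge state while leaving the index and working tree alone (contrast with `--abort`). A minimal sketch:

----
# Forget MERGE_HEAD and friends but keep whatever is already staged
# or sitting in the working tree.
$ git merge --quit
----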
if (continue_current_merge) {
int nargc = 1;
const char *nargv[] = {"commit", NULL};
}
finish(head_commit, remoteheads, &commit->object.oid, msg.buf);
- drop_save();
+ remove_merge_branch_state(the_repository);
goto done;
} else if (!remoteheads->next && common->next)
;
save_state(&stash))
oidclr(&stash);
- for (i = 0; i < use_strategies_nr; i++) {
- int ret;
+ for (i = 0; !merge_was_ok && i < use_strategies_nr; i++) {
+ int ret, cnt;
if (i) {
printf(_("Rewinding the tree to pristine...\n"));
restore_state(&head_commit->object.oid, &stash);
ret = try_merge_strategy(use_strategies[i]->name,
common, remoteheads,
head_commit);
- if (!option_commit && !ret) {
- merge_was_ok = 1;
- /*
- * This is necessary here just to avoid writing
- * the tree, but later we will *not* exit with
- * status code 1 because merge_was_ok is set.
- */
- ret = 1;
- }
-
- if (ret) {
- /*
- * The backend exits with 1 when conflicts are
- * left to be resolved, with 2 when it does not
- * handle the given merge at all.
- */
- if (ret == 1) {
- int cnt = evaluate_result();
-
- if (best_cnt <= 0 || cnt <= best_cnt) {
- best_strategy = use_strategies[i]->name;
- best_cnt = cnt;
+ /*
+ * The backend exits with 1 when conflicts are
+ * left to be resolved, with 2 when it does not
+ * handle the given merge at all.
+ */
+ if (ret < 2) {
+ if (!ret) {
+ if (option_commit) {
+ /* Automerge succeeded. */
+ automerge_was_ok = 1;
+ break;
}
+ merge_was_ok = 1;
+ }
+ cnt = evaluate_result();
+ if (best_cnt <= 0 || cnt <= best_cnt) {
+ best_strategy = use_strategies[i]->name;
+ best_cnt = cnt;
}
- if (merge_was_ok)
- break;
- else
- continue;
}
-
- /* Automerge succeeded. */
- write_tree_trivial(&result_tree);
- automerge_was_ok = 1;
- break;
}
/*
NULL
};
-static void mktree_line(char *buf, size_t len, int nul_term_line, int allow_missing)
+static void mktree_line(char *buf, int nul_term_line, int allow_missing)
{
char *ptr, *ntr;
const char *p;
break;
die("input format error: (blank line only valid in batch mode)");
}
- mktree_line(sb.buf, sb.len, nul_term_line, allow_missing);
+ mktree_line(sb.buf, nul_term_line, allow_missing);
}
if (is_batch_mode && got_eof && used < 1) {
/*
#include "trace2.h"
static char const * const builtin_multi_pack_index_usage[] = {
- N_("git multi-pack-index [--object-dir=<dir>] (write|verify)"),
+ N_("git multi-pack-index [--object-dir=<dir>] (write|verify|expire|repack --batch-size=<size>)"),
NULL
};
static struct opts_multi_pack_index {
const char *object_dir;
+ unsigned long batch_size;
} opts;
int cmd_multi_pack_index(int argc, const char **argv,
static struct option builtin_multi_pack_index_options[] = {
OPT_FILENAME(0, "object-dir", &opts.object_dir,
N_("object directory containing set of packfile and pack-index pairs")),
+ OPT_MAGNITUDE(0, "batch-size", &opts.batch_size,
+ N_("during repack, collect pack-files of smaller size into a batch that is larger than this size")),
OPT_END(),
};
trace2_cmd_mode(argv[0]);
+ if (!strcmp(argv[0], "repack"))
+ return midx_repack(the_repository, opts.object_dir, (size_t)opts.batch_size);
+ if (opts.batch_size)
+ die(_("--batch-size option is only for 'repack' subcommand"));
+
if (!strcmp(argv[0], "write"))
return write_midx_file(opts.object_dir);
if (!strcmp(argv[0], "verify"))
return verify_midx_file(the_repository, opts.object_dir);
+ if (!strcmp(argv[0], "expire"))
+ return expire_midx_packs(the_repository, opts.object_dir);
- die(_("unrecognized verb: %s"), argv[0]);
+ die(_("unrecognized subcommand: %s"), argv[0]);
}
}
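The usage string above now advertises `expire` and `repack --batch-size=<size>`, and `--batch-size` is rejected outside `repack`. A hedged sketch; the `2g` suffix relies on OPT_MAGNITUDE's usual size parsing, which is not shown in this hunk:

----
# Expire pack-files no longer needed by the multi-pack-index (sketch).
$ git multi-pack-index expire

# Collect small packs into a batch of at least the given size; the option
# dies for the other subcommands, as the check above shows.
$ git multi-pack-index repack --batch-size=2g
----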
static int is_better_name(struct rev_name *name,
- const char *tip_name,
timestamp_t taggerdate,
- int generation,
int distance,
int from_tag)
{
name = xmalloc(sizeof(rev_name));
set_commit_rev_name(commit, name);
goto copy_data;
- } else if (is_better_name(name, tip_name, taggerdate,
- generation, distance, from_tag)) {
+ } else if (is_better_name(name, taggerdate, distance, from_tag)) {
copy_data:
name->tip_name = tip_name;
name->taggerdate = taggerdate;
*(p+1) = 0;
if (!get_oid(p - (hexsz - 1), &oid)) {
struct object *o =
- lookup_object(the_repository,
- oid.hash);
+ lookup_object(the_repository, &oid);
if (o)
name = get_rev_name(o, &buf);
}
static int use_bitmap_index_default = 1;
static int use_bitmap_index = -1;
-static int write_bitmap_index;
+static enum {
+ WRITE_BITMAP_FALSE = 0,
+ WRITE_BITMAP_QUIET,
+ WRITE_BITMAP_TRUE,
+} write_bitmap_index;
static uint16_t write_bitmap_options = BITMAP_OPT_HASH_CACHE;
static int exclude_promisor_objects;
void *cb_data)
{
struct object_id peeled;
- struct object_entry *entry = packlist_find(&to_pack, oid->hash, NULL);
+ struct object_entry *entry = packlist_find(&to_pack, oid, NULL);
if (entry)
entry->tagged = 1;
if (!peel_ref(path, &peeled)) {
- entry = packlist_find(&to_pack, peeled.hash, NULL);
+ entry = packlist_find(&to_pack, &peeled, NULL);
if (entry)
entry->tagged = 1;
}
nr_written, oid.hash, offset);
close(fd);
if (write_bitmap_index) {
- warning(_(no_split_warning));
+ if (write_bitmap_index != WRITE_BITMAP_QUIET)
+ warning(_(no_split_warning));
write_bitmap_index = 0;
}
}
{
struct object_entry *entry;
- entry = packlist_find(&to_pack, oid->hash, index_pos);
+ entry = packlist_find(&to_pack, oid, index_pos);
if (!entry)
return 0;
if (!want_object_in_pack(oid, exclude, &found_pack, &found_offset)) {
/* The pack is missing an object, so it will not have closure */
if (write_bitmap_index) {
- warning(_(no_closure_warning));
+ if (write_bitmap_index != WRITE_BITMAP_QUIET)
+ warning(_(no_closure_warning));
write_bitmap_index = 0;
}
return 0;
if (window <= num_preferred_base++)
return;
- data = read_object_with_reference(oid, tree_type, &size, &tree_oid);
+ data = read_object_with_reference(the_repository, oid,
+ tree_type, &size, &tree_oid);
if (!data)
return;
if (!base_sha1)
return 0;
+ oidread(&base_oid, base_sha1);
+
/*
* First see if we're already sending the base (or it's explicitly in
* our "excluded" list).
*/
- base = packlist_find(&to_pack, base_sha1, NULL);
+ base = packlist_find(&to_pack, &base_oid, NULL);
if (base) {
if (!in_same_island(&delta->idx.oid, &base->idx.oid))
return 0;
* even if it was buried too deep in history to make it into the
* packing list.
*/
- oidread(&base_oid, base_sha1);
if (thin && bitmap_has_oid_in_uninteresting(bitmap_git, &base_oid)) {
if (use_delta_islands) {
if (!in_same_island(&delta->idx.oid, &base_oid))
* it was included via bitmaps, we would not have parsed it
* previously).
*/
- if (packlist_find(&to_pack, oid->hash, NULL))
+ if (packlist_find(&to_pack, oid, NULL))
return;
tag = lookup_tag(the_repository, oid);
if (starts_with(path, "refs/tags/") && /* is a tag? */
!peel_ref(path, &peeled) && /* peelable? */
- packlist_find(&to_pack, peeled.hash, NULL)) /* object packed? */
+ packlist_find(&to_pack, &peeled, NULL)) /* object packed? */
add_tag_chain(oid);
return 0;
}
for (p = strchr(name, '/'); p; p = strchr(p + 1, '/'))
depth++;
- ent = packlist_find(&to_pack, obj->oid.hash, NULL);
+ ent = packlist_find(&to_pack, &obj->oid, NULL);
if (ent && depth > oe_tree_depth(&to_pack, ent))
oe_set_tree_depth(&to_pack, ent, depth);
}
return oidcmp(&a->object->oid, &b->object->oid);
}
-static void add_objects_in_unpacked_packs(struct rev_info *revs)
+static void add_objects_in_unpacked_packs(void)
{
struct packed_git *p;
struct in_pack in_pack;
for (i = 0; i < p->num_objects; i++) {
nth_packed_object_oid(&oid, p, i);
- o = lookup_unknown_object(oid.hash);
+ o = lookup_unknown_object(&oid);
if (!(o->flags & OBJECT_ADDED))
mark_in_pack_object(o, p, &in_pack);
o->flags |= OBJECT_ADDED;
return 1;
}
-static void loosen_unused_packed_objects(struct rev_info *revs)
+static void loosen_unused_packed_objects(void)
{
struct packed_git *p;
uint32_t i;
for (i = 0; i < p->num_objects; i++) {
nth_packed_object_oid(&oid, p, i);
- if (!packlist_find(&to_pack, oid.hash, NULL) &&
+ if (!packlist_find(&to_pack, &oid, NULL) &&
!has_sha1_pack_kept_or_nonlocal(&oid) &&
!loosened_object_can_be_discarded(&oid, p->mtime))
if (force_object_loose(&oid, p->mtime))
}
if (keep_unreachable)
- add_objects_in_unpacked_packs(&revs);
+ add_objects_in_unpacked_packs();
if (pack_loose_unreachable)
add_unreachable_loose_objects();
if (unpack_unreachable)
- loosen_unused_packed_objects(&revs);
+ loosen_unused_packed_objects();
oid_array_clear(&recent_objects);
}
N_("do not hide commits by grafts"), 0),
OPT_BOOL(0, "use-bitmap-index", &use_bitmap_index,
N_("use a bitmap index if available to speed up counting objects")),
- OPT_BOOL(0, "write-bitmap-index", &write_bitmap_index,
- N_("write a bitmap index together with the pack index")),
+ OPT_SET_INT(0, "write-bitmap-index", &write_bitmap_index,
+ N_("write a bitmap index together with the pack index"),
+ WRITE_BITMAP_TRUE),
+ OPT_SET_INT_F(0, "write-bitmap-index-quiet",
+ &write_bitmap_index,
+ N_("write a bitmap index if possible"),
+ WRITE_BITMAP_QUIET, PARSE_OPT_HIDDEN),
OPT_PARSE_LIST_OBJECTS_FILTER(&filter_options),
{ OPTION_CALLBACK, 0, "missing", NULL, N_("action"),
N_("handling for missing objects"), PARSE_OPT_NONEG,
#include "builtin.h"
#include "config.h"
+#include "diff.h"
static void flush_current_id(int patchlen, struct object_id *id, struct object_id *result)
{
return 1;
}
-static void flush_one_hunk(struct object_id *result, git_SHA_CTX *ctx)
-{
- unsigned char hash[GIT_MAX_RAWSZ];
- unsigned short carry = 0;
- int i;
-
- git_SHA1_Final(hash, ctx);
- git_SHA1_Init(ctx);
- /* 20-byte sum, with carry */
- for (i = 0; i < GIT_SHA1_RAWSZ; ++i) {
- carry += result->hash[i] + hash[i];
- result->hash[i] = carry;
- carry >>= 8;
- }
-}
-
static int get_one_patchid(struct object_id *next_oid, struct object_id *result,
struct strbuf *line_buf, int stable)
{
perform_reachability_traversal(revs);
- obj = lookup_object(the_repository, oid->hash);
+ obj = lookup_object(the_repository, oid);
return obj && (obj->flags & SEEN);
}
static char *opt_refmap;
static char *opt_ipv4;
static char *opt_ipv6;
+static int opt_show_forced_updates = -1;
static struct option pull_options[] = {
/* Shared options */
OPT_PASSTHRU('6', "ipv6", &opt_ipv6, NULL,
N_("use IPv6 addresses only"),
PARSE_OPT_NOARG),
+ OPT_BOOL(0, "show-forced-updates", &opt_show_forced_updates,
+ N_("check for forced-updates on all updated branches")),
OPT_END()
};
argv_array_push(&args, opt_ipv4);
if (opt_ipv6)
argv_array_push(&args, opt_ipv6);
+ if (opt_show_forced_updates > 0)
+ argv_array_push(&args, "--show-forced-updates");
+ else if (opt_show_forced_updates == 0)
+ argv_array_push(&args, "--no-show-forced-updates");
if (repo) {
argv_array_push(&args, repo);
repo_diff_setup(the_repository, &diffopt);
options = parse_options_concat(range_diff_options, diffopt.parseopts);
- argc = parse_options(argc, argv, NULL, options,
+ argc = parse_options(argc, argv, prefix, options,
builtin_range_diff_usage, 0);
diff_setup_done(&diffopt);
return git_default_config(var, value, cb);
}
-int cmd_read_tree(int argc, const char **argv, const char *unused_prefix)
+int cmd_read_tree(int argc, const char **argv, const char *cmd_prefix)
{
int i, stage = 0;
struct object_id oid;
git_config(git_read_tree_config, NULL);
- argc = parse_options(argc, argv, unused_prefix, read_tree_options,
+ argc = parse_options(argc, argv, cmd_prefix, read_tree_options,
read_tree_usage, 0);
hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
if (argc == 1)
usage_with_options(builtin_rebase_interactive_usage, options);
- argc = parse_options(argc, argv, NULL, options,
+ argc = parse_options(argc, argv, prefix, options,
builtin_rebase_interactive_usage, PARSE_OPT_KEEP_ARGV0);
if (!is_null_oid(&squash_onto))
goto leave_reset_head;
}
- if (!reset_hard && !fill_tree_descriptor(&desc[nr++], &head_oid)) {
+ if (!reset_hard && !fill_tree_descriptor(the_repository, &desc[nr++], &head_oid)) {
ret = error(_("failed to find tree of %s"),
oid_to_hex(&head_oid));
goto leave_reset_head;
}
- if (!fill_tree_descriptor(&desc[nr++], oid)) {
+ if (!fill_tree_descriptor(the_repository, &desc[nr++], oid)) {
ret = error(_("failed to find tree of %s"), oid_to_hex(oid));
goto leave_reset_head;
}
}
switch (opts->type) {
- case REBASE_AM:
- backend = "git-rebase--am";
- backend_func = "git_rebase__am";
- break;
case REBASE_PRESERVE_MERGES:
backend = "git-rebase--preserve-merges";
backend_func = "git_rebase__preserve_merges";
}
strbuf_addf(&script_snippet,
- ". git-sh-setup && . git-rebase--common &&"
- " . %s && %s", backend, backend_func);
+ ". git-sh-setup && . %s && %s", backend, backend_func);
argv[0] = script_snippet.buf;
status = run_command_v_opt(argv, RUN_USING_SHELL);
usage_with_options(builtin_rebase_usage,
builtin_rebase_options);
- prefix = setup_git_directory();
- trace_repo_setup(prefix);
- setup_work_tree();
-
options.allow_empty_message = 1;
git_config(rebase_config, &options);
if (reset_head(NULL, "reset", NULL, RESET_HEAD_HARD,
NULL, NULL) < 0)
die(_("could not discard worktree changes"));
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, 0);
if (read_basic_state(&options))
exit(1);
goto run_rebase;
NULL, NULL) < 0)
die(_("could not move back to %s"),
oid_to_hex(&options.orig_head));
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, 0);
ret = !!finish_rebase(&options);
goto cleanup;
}
strbuf_addf(&msg, "%s: checkout %s",
getenv(GIT_REFLOG_ACTION_ENVIRONMENT), options.onto_name);
if (reset_head(&options.onto->object.oid, "checkout", NULL,
- RESET_HEAD_DETACH | RESET_ORIG_HEAD |
+ RESET_HEAD_DETACH | RESET_ORIG_HEAD |
RESET_HEAD_RUN_POST_CHECKOUT_HOOK,
NULL, msg.buf))
die(_("Could not detach HEAD"));
#include "object.h"
#include "remote.h"
#include "connect.h"
-#include "transport.h"
#include "string-list.h"
#include "sha1-array.h"
#include "connected.h"
return ret;
}
-static void prepare_shallow_update(struct command *commands,
- struct shallow_info *si)
+static void prepare_shallow_update(struct shallow_info *si)
{
int i, j, k, bitmap_size = DIV_ROUND_UP(si->ref->nr, 32);
si->ref = ref;
if (shallow_update) {
- prepare_shallow_update(commands, si);
+ prepare_shallow_update(si);
return;
}
return retval;
}
-static int remove_all_fetch_refspecs(const char *remote, const char *key)
+static int remove_all_fetch_refspecs(const char *key)
{
return git_config_set_multivar_gently(key, NULL, NULL, 1);
}
if (!remote_is_configured(remote, 1))
die(_("No such remote '%s'"), remotename);
- if (!add_mode && remove_all_fetch_refspecs(remotename, key.buf)) {
+ if (!add_mode && remove_all_fetch_refspecs(key.buf)) {
strbuf_release(&key);
return 1;
}
static void remove_redundant_pack(const char *dir_name, const char *base_name)
{
- const char *exts[] = {".pack", ".idx", ".keep", ".bitmap", ".promisor"};
- int i;
struct strbuf buf = STRBUF_INIT;
- size_t plen;
-
- strbuf_addf(&buf, "%s/%s", dir_name, base_name);
- plen = buf.len;
-
- for (i = 0; i < ARRAY_SIZE(exts); i++) {
- strbuf_setlen(&buf, plen);
- strbuf_addstr(&buf, exts[i]);
- unlink(buf.buf);
- }
+ strbuf_addf(&buf, "%s/%s.pack", dir_name, base_name);
+ unlink_pack_path(buf.buf, 1);
strbuf_release(&buf);
}
(unpack_unreachable || (pack_everything & LOOSEN_UNREACHABLE)))
die(_("--keep-unreachable and -A are incompatible"));
- if (write_bitmaps < 0)
- write_bitmaps = (pack_everything & ALL_INTO_ONE) &&
- is_bare_repository();
+ if (write_bitmaps < 0) {
+ if (!(pack_everything & ALL_INTO_ONE) ||
+ !is_bare_repository())
+ write_bitmaps = 0;
+ }
if (pack_kept_objects < 0)
- pack_kept_objects = write_bitmaps;
+ pack_kept_objects = write_bitmaps > 0;
if (write_bitmaps && !(pack_everything & ALL_INTO_ONE))
die(_(incremental_bitmap_conflict_error));
argv_array_push(&cmd.args, "--indexed-objects");
if (repository_format_partial_clone)
argv_array_push(&cmd.args, "--exclude-promisor-objects");
- if (write_bitmaps)
+ if (write_bitmaps > 0)
argv_array_push(&cmd.args, "--write-bitmap-index");
+ else if (write_bitmaps < 0)
+ argv_array_push(&cmd.args, "--write-bitmap-index-quiet");
if (use_delta_islands)
argv_array_push(&cmd.args, "--delta-islands");
struct object_id head_oid;
if (get_oid("HEAD", &head_oid))
return error(_("You do not have a valid HEAD."));
- if (!fill_tree_descriptor(desc + nr, &head_oid))
+ if (!fill_tree_descriptor(the_repository, desc + nr, &head_oid))
return error(_("Failed to find tree of HEAD."));
nr++;
opts.fn = twoway_merge;
}
- if (!fill_tree_descriptor(desc + nr, oid)) {
+ if (!fill_tree_descriptor(the_repository, desc + nr, oid)) {
error(_("Failed to find tree of %s."), oid_to_hex(oid));
goto out;
}
print_new_head_line(lookup_commit_reference(the_repository, &oid));
}
if (!pathspec.nr)
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, 0);
return update_ref_status;
}
" --objects | --objects-edge\n"
" --unpacked\n"
" --header | --pretty\n"
+" --[no-]object-names\n"
" --abbrev=<n> | --no-abbrev\n"
" --abbrev-commit\n"
" --left-right\n"
};
static enum missing_action arg_missing_action;
+/* display only the oid of each object encountered */
+static int arg_show_object_names = 1;
+
#define DEFAULT_OIDSET_SIZE (16*1024)
-static void finish_commit(struct commit *commit, void *data);
+static void finish_commit(struct commit *commit);
static void show_commit(struct commit *commit, void *data)
{
struct rev_list_info *info = data;
display_progress(progress, ++progress_counter);
if (info->flags & REV_LIST_QUIET) {
- finish_commit(commit, data);
+ finish_commit(commit);
return;
}
revs->count_left++;
else
revs->count_right++;
- finish_commit(commit, data);
+ finish_commit(commit);
return;
}
putchar('\n');
}
maybe_flush_or_die(stdout, "stdout");
- finish_commit(commit, data);
+ finish_commit(commit);
}
-static void finish_commit(struct commit *commit, void *data)
+static void finish_commit(struct commit *commit)
{
if (commit->parents) {
free_commit_list(commit->parents);
display_progress(progress, ++progress_counter);
if (info->flags & REV_LIST_QUIET)
return;
- show_object_with_name(stdout, obj, name);
+ if (arg_show_object_names)
+ show_object_with_name(stdout, obj, name);
+ else
+ printf("%s\n", oid_to_hex(&obj->oid));
}
static void show_edge(struct commit *commit)
if (skip_prefix(arg, "--missing=", &arg))
continue; /* already handled above */
+ if (!strcmp(arg, ("--no-object-names"))) {
+ arg_show_object_names = 0;
+ continue;
+ }
+
+ if (!strcmp(arg, ("--object-names"))) {
+ arg_show_object_names = 1;
+ continue;
+ }
+
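With the `--[no-]object-names` toggle above, object listings can emit bare OIDs instead of `oid<SP>name` lines. A minimal sketch:

----
# Print only object IDs, without the trailing path/name annotation.
$ git rev-list --objects --no-object-names HEAD

# The default (or an explicit --object-names) keeps the names.
$ git rev-list --objects --object-names HEAD
----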
usage(rev_list_usage);
}
OPT_CMDMODE(0, "quit", &cmd, N_("end revert or cherry-pick sequence"), 'q'),
OPT_CMDMODE(0, "continue", &cmd, N_("resume revert or cherry-pick sequence"), 'c'),
OPT_CMDMODE(0, "abort", &cmd, N_("cancel revert or cherry-pick sequence"), 'a'),
+ OPT_CMDMODE(0, "skip", &cmd, N_("skip current commit and continue"), 's'),
OPT_CLEANUP(&cleanup_arg),
OPT_BOOL('n', "no-commit", &opts->no_commit, N_("don't automatically commit")),
OPT_BOOL('e', "edit", &opts->edit, N_("edit the commit message")),
this_operation = "--quit";
else if (cmd == 'c')
this_operation = "--continue";
+ else if (cmd == 's')
+ this_operation = "--skip";
else {
assert(cmd == 'a');
this_operation = "--abort";
if (cmd == 'q') {
int ret = sequencer_remove_state(opts);
if (!ret)
- remove_branch_state(the_repository);
+ remove_branch_state(the_repository, 0);
return ret;
}
if (cmd == 'c')
return sequencer_continue(the_repository, opts);
if (cmd == 'a')
return sequencer_rollback(the_repository, opts);
+ if (cmd == 's')
+ return sequencer_skip(the_repository, opts);
return sequencer_pick_revisions(the_repository, opts);
}
}
}
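The new `--skip` mode above is routed to sequencer_skip(); a minimal sketch for a sequence that hits a commit you want to drop:

----
# During a conflicted or unwanted step of a sequence:
$ git cherry-pick --skip     # skip current commit and continue
$ git revert --skip          # same builtin serves revert sequences
----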
-static void submodules_absorb_gitdir_if_needed(const char *prefix)
+static void submodules_absorb_gitdir_if_needed(void)
{
int i;
for (i = 0; i < list.nr; i++) {
continue;
if (!submodule_uses_gitfile(name))
- absorb_git_dir_into_superproject(prefix, name,
+ absorb_git_dir_into_superproject(name,
ABSORB_GITDIR_RECURSE_SUBMODULES);
}
}
* way as changed from the HEAD.
*/
if (no_head
- || get_tree_entry(head, name, &oid, &mode)
+ || get_tree_entry(the_repository, head, name, &oid, &mode)
|| ce->ce_mode != create_ce_mode(mode)
|| !oideq(&ce->oid, &oid))
staged_changes = 1;
}
if (!index_only)
- submodules_absorb_gitdir_if_needed(prefix);
+ submodules_absorb_gitdir_if_needed();
/*
* If not forced, the file, the index and the HEAD (if exists)
static int show_independent(struct commit **rev,
int num_rev,
- char **ref_name,
unsigned int *rev_mask)
{
int i;
return show_merge_base(seen, num_rev);
if (independent)
- return show_independent(rev, num_rev, ref_name, rev_mask);
+ return show_independent(rev, num_rev, rev_mask);
/* Show list; --more=-1 means list-only */
if (1 < num_rev || extra < 0) {
}
if (keep_index == 1 && !is_null_oid(&info.i_tree)) {
- struct child_process cp_ls = CHILD_PROCESS_INIT;
- struct child_process cp_checkout = CHILD_PROCESS_INIT;
- struct strbuf out = STRBUF_INIT;
-
- if (reset_tree(&info.i_tree, 0, 1)) {
- ret = -1;
- goto done;
- }
-
- cp_ls.git_cmd = 1;
- argv_array_pushl(&cp_ls.args, "ls-files", "-z",
- "--modified", "--", NULL);
-
- add_pathspecs(&cp_ls.args, ps);
- if (pipe_command(&cp_ls, NULL, 0, &out, 0, NULL, 0)) {
- ret = -1;
- goto done;
- }
+ struct child_process cp = CHILD_PROCESS_INIT;
- cp_checkout.git_cmd = 1;
- argv_array_pushl(&cp_checkout.args, "checkout-index",
- "-z", "--force", "--stdin", NULL);
- if (pipe_command(&cp_checkout, out.buf, out.len, NULL,
- 0, NULL, 0)) {
+ cp.git_cmd = 1;
+ argv_array_pushl(&cp.args, "checkout", "--no-overlay",
+ oid_to_hex(&info.i_tree), "--", NULL);
+ if (!ps->nr)
+ argv_array_push(&cp.args, ":/");
+ else
+ add_pathspecs(&cp.args, ps);
+ if (run_command(&cp)) {
ret = -1;
goto done;
}
return 1;
for (i = 0; i < list.nr; i++)
- absorb_git_dir_into_superproject(prefix,
- list.entries[i]->name, flags);
+ absorb_git_dir_into_superproject(list.entries[i]->name, flags);
return 0;
}
static unsigned int colopts;
static int force_sign_annotate;
+static int config_sign_tag = -1; /* unspecified */
static int list_tags(struct ref_filter *filter, struct ref_sorting *sorting,
struct ref_format *format)
int status;
struct ref_sorting **sorting_tail = (struct ref_sorting **)cb;
+ if (!strcmp(var, "tag.gpgsign")) {
+ config_sign_tag = git_config_bool(var, value);
+ return 0;
+ }
+
if (!strcmp(var, "tag.sort")) {
if (!value)
return config_error_nonbool(var);
memset(&opt, 0, sizeof(opt));
memset(&filter, 0, sizeof(filter));
filter.lines = -1;
+ opt.sign = -1;
argc = parse_options(argc, argv, prefix, options, git_tag_usage, 0);
- if (keyid) {
- opt.sign = 1;
- set_signing_key(keyid);
- }
- create_tag_object = (opt.sign || annotate || msg.given || msgfile);
-
if (!cmdmode) {
if (argc == 0)
cmdmode = 'l';
if (cmdmode == 'l')
setup_auto_pager("tag", 1);
+ if (opt.sign == -1)
+ opt.sign = cmdmode ? 0 : config_sign_tag > 0;
+
+ if (keyid) {
+ opt.sign = 1;
+ set_signing_key(keyid);
+ }
+ create_tag_object = (opt.sign || annotate || msg.given || msgfile);
+
if ((create_tag_object || force) && (cmdmode != 0))
usage_with_options(git_tag_usage, options);
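With `tag.gpgsign` parsed above, tag creation (cmdmode == 0) defaults to signing when the config is true, while `-u <keyid>` continues to force signing. A minimal sketch:

----
# Sign annotated tags by default from now on.
$ git config tag.gpgsign true

# This now produces a signed tag without needing -s.
$ git tag -m "release" v1.0
----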
{
struct object *obj;
struct obj_buffer *obj_buffer;
- obj = lookup_object(the_repository, base->hash);
+ obj = lookup_object(the_repository, base);
if (!obj)
return 0;
obj_buffer = lookup_object_buffer(obj);
struct object_id oid;
struct cache_entry *ce;
- if (get_tree_entry(ent, path, &oid, &mode)) {
+ if (get_tree_entry(the_repository, ent, path, &oid, &mode)) {
if (which)
error("%s: not in %s branch.", path, which);
return NULL;
packet_trace_identity("upload-pack");
read_replace_refs = 0;
- argc = parse_options(argc, argv, NULL, options, upload_pack_usage, 0);
+ argc = parse_options(argc, argv, prefix, options, upload_pack_usage, 0);
if (argc != 1)
usage_with_options(upload_pack_usage, options);
#include "repository.h"
#include "commit.h"
#include "run-command.h"
-#include <signal.h>
#include "parse-options.h"
#include "gpg-interface.h"
NULL
};
-static int run_gpg_verify(const struct object_id *oid, const char *buf, unsigned long size, unsigned flags)
+static int run_gpg_verify(struct commit *commit, unsigned flags)
{
struct signature_check signature_check;
int ret;
memset(&signature_check, 0, sizeof(signature_check));
- ret = check_commit_signature(lookup_commit(the_repository, oid),
- &signature_check);
+ ret = check_commit_signature(commit, &signature_check);
print_signature_buffer(&signature_check, flags);
signature_check_clear(&signature_check);
static int verify_commit(const char *name, unsigned flags)
{
- enum object_type type;
struct object_id oid;
- char *buf;
- unsigned long size;
- int ret;
+ struct object *obj;
if (get_oid(name, &oid))
return error("commit '%s' not found.", name);
- buf = read_object_file(&oid, &type, &size);
- if (!buf)
+ obj = parse_object(the_repository, &oid);
+ if (!obj)
return error("%s: unable to read file.", name);
- if (type != OBJ_COMMIT)
+ if (obj->type != OBJ_COMMIT)
return error("%s: cannot verify a non-commit object of type %s.",
- name, type_name(type));
-
- ret = run_gpg_verify(&oid, buf, size, flags);
+ name, type_name(obj->type));
- free(buf);
- return ret;
+ return run_gpg_verify((struct commit *)obj, flags);
}
static int git_verify_commit_config(const char *var, const char *value, void *cb)
#include "builtin.h"
#include "tag.h"
#include "run-command.h"
-#include <signal.h>
#include "parse-options.h"
#include "gpg-interface.h"
#include "ref-filter.h"
struct strbuf symref = STRBUF_INIT;
struct commit *commit = NULL;
int is_branch = 0;
+ struct strbuf sb_name = STRBUF_INIT;
validate_worktree_add(path, opts);
die(_("invalid reference: %s"), refname);
name = worktree_basename(path, &len);
- git_path_buf(&sb_repo, "worktrees/%.*s", (int)(path + len - name), name);
+ strbuf_add(&sb, name, path + len - name);
+ sanitize_refname_component(sb.buf, &sb_name);
+ if (!sb_name.len)
+ BUG("How come '%s' becomes empty after sanitization?", sb.buf);
+ strbuf_reset(&sb);
+ name = sb_name.buf;
+ git_path_buf(&sb_repo, "worktrees/%s", name);
len = sb_repo.len;
if (safe_create_leading_directories_const(sb_repo.buf))
die_errno(_("could not create leading directories of '%s'"),
strbuf_release(&symref);
strbuf_release(&sb_repo);
strbuf_release(&sb_git);
+ strbuf_release(&sb_name);
return ret;
}
NULL
};
-int cmd_write_tree(int argc, const char **argv, const char *unused_prefix)
+int cmd_write_tree(int argc, const char **argv, const char *cmd_prefix)
{
int flags = 0, ret;
- const char *prefix = NULL;
+ const char *tree_prefix = NULL;
struct object_id oid;
const char *me = "git-write-tree";
struct option write_tree_options[] = {
OPT_BIT(0, "missing-ok", &flags, N_("allow missing objects"),
WRITE_TREE_MISSING_OK),
- OPT_STRING(0, "prefix", &prefix, N_("<prefix>/"),
+ OPT_STRING(0, "prefix", &tree_prefix, N_("<prefix>/"),
N_("write tree object for a subdirectory <prefix>")),
{ OPTION_BIT, 0, "ignore-cache-tree", &flags, NULL,
N_("only useful for debugging"),
};
git_config(git_default_config, NULL);
- argc = parse_options(argc, argv, unused_prefix, write_tree_options,
+ argc = parse_options(argc, argv, cmd_prefix, write_tree_options,
write_tree_usage, 0);
- ret = write_cache_as_tree(&oid, flags, prefix);
+ ret = write_cache_as_tree(&oid, flags, tree_prefix);
switch (ret) {
case 0:
printf("%s\n", oid_to_hex(&oid));
die("%s: error building trees", me);
break;
case WRITE_TREE_PREFIX_ERROR:
- die("%s: prefix %s not found", me, prefix);
+ die("%s: prefix %s not found", me, tree_prefix);
break;
}
return ret;
#include "object-store.h"
#include "replace-object.h"
-#ifndef DEBUG
-#define DEBUG 0
+#ifndef DEBUG_CACHE_TREE
+#define DEBUG_CACHE_TREE 0
#endif
struct cache_tree *cache_tree(void)
int namelen;
struct cache_tree_sub *down;
-#if DEBUG
+#if DEBUG_CACHE_TREE
fprintf(stderr, "cache-tree invalidate <%s>\n", path);
#endif
strbuf_addf(&buffer, "%o %.*s%c", mode, entlen, path + baselen, '\0');
strbuf_add(&buffer, oid->hash, the_hash_algo->rawsz);
-#if DEBUG
+#if DEBUG_CACHE_TREE
fprintf(stderr, "cache-tree update-one %o %.*s\n",
mode, entlen, path + baselen);
#endif
strbuf_release(&buffer);
it->entry_count = to_invalidate ? -1 : i - *skip_count;
-#if DEBUG
+#if DEBUG_CACHE_TREE
fprintf(stderr, "cache-tree update-one (%d ent, %d subtree) %s\n",
it->entry_count, it->subtree_nr,
oid_to_hex(&it->oid));
strbuf_add(buffer, path, pathlen);
strbuf_addf(buffer, "%c%d %d\n", 0, it->entry_count, it->subtree_nr);
-#if DEBUG
+#if DEBUG_CACHE_TREE
if (0 <= it->entry_count)
fprintf(stderr, "cache-tree <%.*s> (%d ent, %d subtree) %s\n",
pathlen, path, it->entry_count, it->subtree_nr,
size -= rawsz;
}
-#if DEBUG
+#if DEBUG_CACHE_TREE
if (0 <= it->entry_count)
fprintf(stderr, "cache-tree <%s> (%d ent, %d subtree) %s\n",
*buffer, it->entry_count, subtree_nr,
int git_deflate(git_zstream *, int flush);
unsigned long git_deflate_bound(git_zstream *, unsigned long);
-/* The length in bytes and in hex digits of an object name (SHA-1 value). */
-#define GIT_SHA1_RAWSZ 20
-#define GIT_SHA1_HEXSZ (2 * GIT_SHA1_RAWSZ)
-/* The block size of SHA-1. */
-#define GIT_SHA1_BLKSZ 64
-
-/* The length in bytes and in hex digits of an object name (SHA-256 value). */
-#define GIT_SHA256_RAWSZ 32
-#define GIT_SHA256_HEXSZ (2 * GIT_SHA256_RAWSZ)
-/* The block size of SHA-256. */
-#define GIT_SHA256_BLKSZ 64
-
-/* The length in byte and in hex digits of the largest possible hash value. */
-#define GIT_MAX_RAWSZ GIT_SHA256_RAWSZ
-#define GIT_MAX_HEXSZ GIT_SHA256_HEXSZ
-/* The largest possible block size for any supported hash. */
-#define GIT_MAX_BLKSZ GIT_SHA256_BLKSZ
-
-struct object_id {
- unsigned char hash[GIT_MAX_RAWSZ];
-};
-
-#define the_hash_algo the_repository->hash_algo
-
#if defined(DT_UNKNOWN) && !defined(NO_D_TYPE_IN_DIRENT)
#define DTYPE(de) ((de)->d_type)
#else
int name_compare(const char *name1, size_t len1, const char *name2, size_t len2);
int cache_name_stage_compare(const char *name1, int len1, int stage1, const char *name2, int len2, int stage2);
-void *read_object_with_reference(const struct object_id *oid,
+void *read_object_with_reference(struct repository *r,
+ const struct object_id *oid,
const char *required_type,
unsigned long *size,
struct object_id *oid_ret);
extern int diff_auto_refresh_index;
/* match-trees.c */
-void shift_tree(const struct object_id *, const struct object_id *, struct object_id *, int);
-void shift_tree_by(const struct object_id *, const struct object_id *, struct object_id *, const char *);
+void shift_tree(struct repository *, const struct object_id *, const struct object_id *, struct object_id *, int);
+void shift_tree_by(struct repository *, const struct object_id *, const struct object_id *, struct object_id *, const char *);
/*
* whitespace rules.
export GIT_SKIP_TESTS="t9810 t9816"
;;
GIT_TEST_GETTEXT_POISON)
- export GIT_TEST_GETTEXT_POISON=YesPlease
+ export GIT_TEST_GETTEXT_POISON=true
;;
esac
esac
make
-make test
-if test "$jobname" = "linux-gcc"
-then
+case "$jobname" in
+linux-gcc)
+ make test
export GIT_TEST_SPLIT_INDEX=yes
export GIT_TEST_FULL_IN_PACK_ARRAY=true
export GIT_TEST_OE_SIZE=10
export GIT_TEST_COMMIT_GRAPH=1
export GIT_TEST_MULTI_PACK_INDEX=1
make test
-fi
+ ;;
+linux-gcc-4.8)
+ # Don't run the tests; we only care about whether Git can be
+ # built with GCC 4.8, as it errors out on some undesired (C99)
+ # constructs that newer compilers seem to quietly accept.
+ ;;
+*)
+ make test
+ ;;
+esac
check_unignored_build_artifacts
git-check-attr purehelpers
git-check-ignore purehelpers
git-check-mailmap purehelpers
-git-checkout mainporcelain history
+git-checkout mainporcelain
git-checkout-index plumbingmanipulators
git-check-ref-format purehelpers
git-cherry plumbinginterrogators complete
git-cvsserver foreignscminterface
git-daemon synchingrepositories
git-describe mainporcelain
-git-diff mainporcelain history
+git-diff mainporcelain info
git-diff-files plumbinginterrogators
git-diff-index plumbinginterrogators
git-diff-tree plumbinginterrogators
git-replace ancillarymanipulators complete
git-request-pull foreignscminterface complete
git-rerere ancillaryinterrogators
-git-reset mainporcelain worktree
+git-reset mainporcelain history
+git-restore mainporcelain worktree
git-revert mainporcelain
git-rev-list plumbinginterrogators
git-rev-parse plumbinginterrogators
git-stripspace purehelpers
git-submodule mainporcelain
git-svn foreignscminterface
+git-switch mainporcelain history
git-symbolic-ref plumbingmanipulators
git-tag mainporcelain history
git-unpack-file plumbinginterrogators
#define GRAPH_CHUNKID_OIDLOOKUP 0x4f49444c /* "OIDL" */
#define GRAPH_CHUNKID_DATA 0x43444154 /* "CDAT" */
#define GRAPH_CHUNKID_EXTRAEDGES 0x45444745 /* "EDGE" */
+#define GRAPH_CHUNKID_BASE 0x42415345 /* "BASE" */
#define GRAPH_DATA_WIDTH (the_hash_algo->rawsz + 16)
char *get_commit_graph_filename(const char *obj_dir)
{
- return xstrfmt("%s/info/commit-graph", obj_dir);
+ char *filename = xstrfmt("%s/info/commit-graph", obj_dir);
+ char *normalized = xmalloc(strlen(filename) + 1);
+ normalize_path_copy(normalized, filename);
+ free(filename);
+ return normalized;
+}
+
+static char *get_split_graph_filename(const char *obj_dir,
+ const char *oid_hex)
+{
+ char *filename = xstrfmt("%s/info/commit-graphs/graph-%s.graph",
+ obj_dir,
+ oid_hex);
+ char *normalized = xmalloc(strlen(filename) + 1);
+ normalize_path_copy(normalized, filename);
+ free(filename);
+ return normalized;
+}
+
+static char *get_chain_filename(const char *obj_dir)
+{
+ return xstrfmt("%s/info/commit-graphs/commit-graph-chain", obj_dir);
}
static uint8_t oid_version(void)
else
graph->chunk_extra_edges = data + chunk_offset;
break;
+
+ case GRAPH_CHUNKID_BASE:
+ if (graph->chunk_base_graphs)
+ chunk_repeated = 1;
+ else
+ graph->chunk_base_graphs = data + chunk_offset;
}
if (chunk_repeated) {
last_chunk_offset = chunk_offset;
}
+ hashcpy(graph->oid.hash, graph->data + graph->data_len - graph->hash_len);
+
if (verify_commit_graph_lite(graph)) {
free(graph);
return NULL;
struct stat st;
int fd;
+ struct commit_graph *g;
int open_ok = open_commit_graph(graph_file, &fd, &st);
if (!open_ok)
return NULL;
- return load_commit_graph_one_fd_st(fd, &st);
+ g = load_commit_graph_one_fd_st(fd, &st);
+
+ if (g)
+ g->filename = xstrdup(graph_file);
+
+ return g;
+}
+
+static struct commit_graph *load_commit_graph_v1(struct repository *r, const char *obj_dir)
+{
+ char *graph_name = get_commit_graph_filename(obj_dir);
+ struct commit_graph *g = load_commit_graph_one(graph_name);
+ free(graph_name);
+
+ if (g)
+ g->obj_dir = obj_dir;
+
+ return g;
+}
+
+static int add_graph_to_chain(struct commit_graph *g,
+ struct commit_graph *chain,
+ struct object_id *oids,
+ int n)
+{
+ struct commit_graph *cur_g = chain;
+
+ if (n && !g->chunk_base_graphs) {
+ warning(_("commit-graph has no base graphs chunk"));
+ return 0;
+ }
+
+ while (n) {
+ n--;
+
+ if (!cur_g ||
+ !oideq(&oids[n], &cur_g->oid) ||
+ !hasheq(oids[n].hash, g->chunk_base_graphs + g->hash_len * n)) {
+ warning(_("commit-graph chain does not match"));
+ return 0;
+ }
+
+ cur_g = cur_g->base_graph;
+ }
+
+ g->base_graph = chain;
+
+ if (chain)
+ g->num_commits_in_base = chain->num_commits + chain->num_commits_in_base;
+
+ return 1;
+}
+
+static struct commit_graph *load_commit_graph_chain(struct repository *r, const char *obj_dir)
+{
+ struct commit_graph *graph_chain = NULL;
+ struct strbuf line = STRBUF_INIT;
+ struct stat st;
+ struct object_id *oids;
+ int i = 0, valid = 1, count;
+ char *chain_name = get_chain_filename(obj_dir);
+ FILE *fp;
+ int stat_res;
+
+ fp = fopen(chain_name, "r");
+ stat_res = stat(chain_name, &st);
+ free(chain_name);
+
+ if (!fp ||
+ stat_res ||
+ st.st_size <= the_hash_algo->hexsz)
+ return NULL;
+
+ count = st.st_size / (the_hash_algo->hexsz + 1);
+ oids = xcalloc(count, sizeof(struct object_id));
+
+ prepare_alt_odb(r);
+
+ for (i = 0; i < count; i++) {
+ struct object_directory *odb;
+
+ if (strbuf_getline_lf(&line, fp) == EOF)
+ break;
+
+ if (get_oid_hex(line.buf, &oids[i])) {
+ warning(_("invalid commit-graph chain: line '%s' not a hash"),
+ line.buf);
+ valid = 0;
+ break;
+ }
+
+ valid = 0;
+ for (odb = r->objects->odb; odb; odb = odb->next) {
+ char *graph_name = get_split_graph_filename(odb->path, line.buf);
+ struct commit_graph *g = load_commit_graph_one(graph_name);
+
+ free(graph_name);
+
+ if (g) {
+ g->obj_dir = odb->path;
+
+ if (add_graph_to_chain(g, graph_chain, oids, i)) {
+ graph_chain = g;
+ valid = 1;
+ }
+
+ break;
+ }
+ }
+
+ if (!valid) {
+ warning(_("unable to find all commit-graph files"));
+ break;
+ }
+ }
+
+ free(oids);
+ fclose(fp);
+
+ return graph_chain;
+}
+
+struct commit_graph *read_commit_graph_one(struct repository *r, const char *obj_dir)
+{
+ struct commit_graph *g = load_commit_graph_v1(r, obj_dir);
+
+ if (!g)
+ g = load_commit_graph_chain(r, obj_dir);
+
+ return g;
}
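read_commit_graph_one() above first tries the single `info/commit-graph` file and then the `commit-graphs/commit-graph-chain` file, whose lines name split graph files stored as `info/commit-graphs/graph-<hash>.graph`. A hedged sketch for inspecting that layout; the `--split` writer option is exposed elsewhere in this series and is assumed here, not shown in these hunks:

----
# Inspect an incremental commit-graph chain (paths per get_chain_filename()
# and get_split_graph_filename() above).
$ cat .git/objects/info/commit-graphs/commit-graph-chain
$ ls .git/objects/info/commit-graphs/

# Assumed writer invocation, not shown in these hunks:
$ git commit-graph write --reachable --split
----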
static void prepare_commit_graph_one(struct repository *r, const char *obj_dir)
{
- char *graph_name;
if (r->objects->commit_graph)
return;
- graph_name = get_commit_graph_filename(obj_dir);
- r->objects->commit_graph =
- load_commit_graph_one(graph_name);
-
- FREE_AND_NULL(graph_name);
+ r->objects->commit_graph = read_commit_graph_one(r, obj_dir);
}
/*
return !!first_generation;
}
+static void close_commit_graph_one(struct commit_graph *g)
+{
+ if (!g)
+ return;
+
+ close_commit_graph_one(g->base_graph);
+ free_commit_graph(g);
+}
+
void close_commit_graph(struct raw_object_store *o)
{
- free_commit_graph(o->commit_graph);
+ close_commit_graph_one(o->commit_graph);
o->commit_graph = NULL;
}
g->chunk_oid_lookup, g->hash_len, pos);
}
+static void load_oid_from_graph(struct commit_graph *g,
+ uint32_t pos,
+ struct object_id *oid)
+{
+ uint32_t lex_index;
+
+ while (g && pos < g->num_commits_in_base)
+ g = g->base_graph;
+
+ if (!g)
+ BUG("NULL commit-graph");
+
+ if (pos >= g->num_commits + g->num_commits_in_base)
+ die(_("invalid commit position. commit-graph is likely corrupt"));
+
+ lex_index = pos - g->num_commits_in_base;
+
+ hashcpy(oid->hash, g->chunk_oid_lookup + g->hash_len * lex_index);
+}
+
static struct commit_list **insert_parent_or_die(struct repository *r,
struct commit_graph *g,
- uint64_t pos,
+ uint32_t pos,
struct commit_list **pptr)
{
struct commit *c;
struct object_id oid;
- if (pos >= g->num_commits)
- die("invalid parent position %"PRIu64, pos);
+ if (pos >= g->num_commits + g->num_commits_in_base)
+ die("invalid parent position %"PRIu32, pos);
- hashcpy(oid.hash, g->chunk_oid_lookup + g->hash_len * pos);
+ load_oid_from_graph(g, pos, &oid);
c = lookup_commit(r, &oid);
if (!c)
die(_("could not find commit %s"), oid_to_hex(&oid));
static void fill_commit_graph_info(struct commit *item, struct commit_graph *g, uint32_t pos)
{
- const unsigned char *commit_data = g->chunk_commit_data + GRAPH_DATA_WIDTH * pos;
+ const unsigned char *commit_data;
+ uint32_t lex_index;
+
+ while (pos < g->num_commits_in_base)
+ g = g->base_graph;
+
+ lex_index = pos - g->num_commits_in_base;
+ commit_data = g->chunk_commit_data + GRAPH_DATA_WIDTH * lex_index;
item->graph_pos = pos;
item->generation = get_be32(commit_data + g->hash_len + 8) >> 2;
}
uint32_t *parent_data_ptr;
uint64_t date_low, date_high;
struct commit_list **pptr;
- const unsigned char *commit_data = g->chunk_commit_data + (g->hash_len + 16) * pos;
+ const unsigned char *commit_data;
+ uint32_t lex_index;
- item->object.parsed = 1;
+ while (pos < g->num_commits_in_base)
+ g = g->base_graph;
+
+ if (pos >= g->num_commits + g->num_commits_in_base)
+ die(_("invalid commit position. commit-graph is likely corrupt"));
+
+ /*
+ * Store the "full" position, but then use the
+ * "local" position for the rest of the calculation.
+ */
item->graph_pos = pos;
+ lex_index = pos - g->num_commits_in_base;
+
+ commit_data = g->chunk_commit_data + (g->hash_len + 16) * lex_index;
+
+ item->object.parsed = 1;
set_commit_tree(item, NULL);
*pos = item->graph_pos;
return 1;
} else {
- return bsearch_graph(g, &(item->object.oid), pos);
+ struct commit_graph *cur_g = g;
+ uint32_t lex_index;
+
+ while (cur_g && !bsearch_graph(cur_g, &(item->object.oid), &lex_index))
+ cur_g = cur_g->base_graph;
+
+ if (cur_g) {
+ *pos = lex_index + cur_g->num_commits_in_base;
+ return 1;
+ }
+
+ return 0;
}
}
struct commit *c)
{
struct object_id oid;
- const unsigned char *commit_data = g->chunk_commit_data +
- GRAPH_DATA_WIDTH * (c->graph_pos);
+ const unsigned char *commit_data;
+
+ while (c->graph_pos < g->num_commits_in_base)
+ g = g->base_graph;
+
+ commit_data = g->chunk_commit_data +
+ GRAPH_DATA_WIDTH * (c->graph_pos - g->num_commits_in_base);
hashcpy(oid.hash, commit_data);
set_commit_tree(c, lookup_tree(r, &oid));
struct write_commit_graph_context {
struct repository *r;
- const char *obj_dir;
+ char *obj_dir;
char *graph_name;
struct packed_oid_list oids;
struct packed_commit_list commits;
struct progress *progress;
int progress_done;
uint64_t progress_cnt;
+
+ char *base_graph_name;
+ int num_commit_graphs_before;
+ int num_commit_graphs_after;
+ char **commit_graph_filenames_before;
+ char **commit_graph_filenames_after;
+ char **commit_graph_hash_after;
+ uint32_t new_num_commits_in_base;
+ struct commit_graph *new_base_graph;
+
unsigned append:1,
- report_progress:1;
+ report_progress:1,
+ split:1;
+
+ const struct split_commit_graph_opts *split_opts;
};
static void write_graph_chunk_fanout(struct hashfile *f,
ctx->commits.nr,
commit_to_sha1);
+ if (edge_value >= 0)
+ edge_value += ctx->new_num_commits_in_base;
+ else {
+ uint32_t pos;
+ if (find_commit_in_graph(parent->item,
+ ctx->new_base_graph,
+ &pos))
+ edge_value = pos;
+ }
+
if (edge_value < 0)
BUG("missing parent %s for commit %s",
oid_to_hex(&parent->item->object.oid),
ctx->commits.list,
ctx->commits.nr,
commit_to_sha1);
+
+ if (edge_value >= 0)
+ edge_value += ctx->new_num_commits_in_base;
+ else {
+ uint32_t pos;
+ if (find_commit_in_graph(parent->item,
+ ctx->new_base_graph,
+ &pos))
+ edge_value = pos;
+ }
+
if (edge_value < 0)
BUG("missing parent %s for commit %s",
oid_to_hex(&parent->item->object.oid),
ctx->commits.nr,
commit_to_sha1);
+ if (edge_value >= 0)
+ edge_value += ctx->new_num_commits_in_base;
+ else {
+ uint32_t pos;
+ if (find_commit_in_graph(parent->item,
+ ctx->new_base_graph,
+ &pos))
+ edge_value = pos;
+ }
+
if (edge_value < 0)
BUG("missing parent %s for commit %s",
oid_to_hex(&parent->item->object.oid),
}
}
-static int commit_compare(const void *_a, const void *_b)
+static int oid_compare(const void *_a, const void *_b)
{
const struct object_id *a = (const struct object_id *)_a;
const struct object_id *b = (const struct object_id *)_b;
display_progress(ctx->progress, i + 1);
commit = lookup_commit(ctx->r, &ctx->oids.list[i]);
- if (commit && !parse_commit_no_graph(commit))
+ if (!commit)
+ continue;
+ if (ctx->split) {
+ if (!parse_commit(commit) &&
+ commit->graph_pos == COMMIT_NOT_FROM_GRAPH)
+ add_missing_parents(ctx, commit);
+ } else if (!parse_commit_no_graph(commit))
add_missing_parents(ctx, commit);
}
stop_progress(&ctx->progress);
return 0;
}
-int write_commit_graph_reachable(const char *obj_dir, unsigned int flags)
+int write_commit_graph_reachable(const char *obj_dir, unsigned int flags,
+ const struct split_commit_graph_opts *split_opts)
{
struct string_list list = STRING_LIST_INIT_DUP;
int result;
for_each_ref(add_ref_to_list, &list);
result = write_commit_graph(obj_dir, NULL, &list,
- flags);
+ flags, split_opts);
string_list_clear(&list, 0);
return result;
_("Counting distinct commits in commit graph"),
ctx->oids.nr);
display_progress(ctx->progress, 0); /* TODO: Measure QSORT() progress */
- QSORT(ctx->oids.list, ctx->oids.nr, commit_compare);
+ QSORT(ctx->oids.list, ctx->oids.nr, oid_compare);
for (i = 1; i < ctx->oids.nr; i++) {
display_progress(ctx->progress, i + 1);
- if (!oideq(&ctx->oids.list[i - 1], &ctx->oids.list[i]))
+ if (!oideq(&ctx->oids.list[i - 1], &ctx->oids.list[i])) {
+ if (ctx->split) {
+ struct commit *c = lookup_commit(ctx->r, &ctx->oids.list[i]);
+
+ if (!c || c->graph_pos != COMMIT_NOT_FROM_GRAPH)
+ continue;
+ }
+
count_distinct++;
+ }
}
stop_progress(&ctx->progress);
if (i > 0 && oideq(&ctx->oids.list[i - 1], &ctx->oids.list[i]))
continue;
+ ALLOC_GROW(ctx->commits.list, ctx->commits.nr + 1, ctx->commits.alloc);
ctx->commits.list[ctx->commits.nr] = lookup_commit(ctx->r, &ctx->oids.list[i]);
+
+ if (ctx->split &&
+ ctx->commits.list[ctx->commits.nr]->graph_pos != COMMIT_NOT_FROM_GRAPH)
+ continue;
+
parse_commit_no_graph(ctx->commits.list[ctx->commits.nr]);
for (parent = ctx->commits.list[ctx->commits.nr]->parents;
stop_progress(&ctx->progress);
}
+static int write_graph_chunk_base_1(struct hashfile *f,
+ struct commit_graph *g)
+{
+ int num = 0;
+
+ if (!g)
+ return 0;
+
+ num = write_graph_chunk_base_1(f, g->base_graph);
+ hashwrite(f, g->oid.hash, the_hash_algo->rawsz);
+ return num + 1;
+}
+
+static int write_graph_chunk_base(struct hashfile *f,
+ struct write_commit_graph_context *ctx)
+{
+ int num = write_graph_chunk_base_1(f, ctx->new_base_graph);
+
+ if (num != ctx->num_commit_graphs_after - 1) {
+ error(_("failed to write correct number of base graph ids"));
+ return -1;
+ }
+
+ return 0;
+}
+
static int write_commit_graph_file(struct write_commit_graph_context *ctx)
{
uint32_t i;
+ int fd;
struct hashfile *f;
struct lock_file lk = LOCK_INIT;
- uint32_t chunk_ids[5];
- uint64_t chunk_offsets[5];
+ uint32_t chunk_ids[6];
+ uint64_t chunk_offsets[6];
const unsigned hashsz = the_hash_algo->rawsz;
struct strbuf progress_title = STRBUF_INIT;
- int num_chunks = ctx->num_extra_edges ? 4 : 3;
+ int num_chunks = 3;
+ struct object_id file_hash;
+
+ if (ctx->split) {
+ struct strbuf tmp_file = STRBUF_INIT;
+
+ strbuf_addf(&tmp_file,
+ "%s/info/commit-graphs/tmp_graph_XXXXXX",
+ ctx->obj_dir);
+ ctx->graph_name = strbuf_detach(&tmp_file, NULL);
+ } else {
+ ctx->graph_name = get_commit_graph_filename(ctx->obj_dir);
+ }
- ctx->graph_name = get_commit_graph_filename(ctx->obj_dir);
if (safe_create_leading_directories(ctx->graph_name)) {
UNLEAK(ctx->graph_name);
error(_("unable to create leading directories of %s"),
return -1;
}
- hold_lock_file_for_update(&lk, ctx->graph_name, LOCK_DIE_ON_ERROR);
- f = hashfd(lk.tempfile->fd, lk.tempfile->filename.buf);
+ if (ctx->split) {
+ char *lock_name = get_chain_filename(ctx->obj_dir);
- hashwrite_be32(f, GRAPH_SIGNATURE);
+ hold_lock_file_for_update(&lk, lock_name, LOCK_DIE_ON_ERROR);
- hashwrite_u8(f, GRAPH_VERSION);
- hashwrite_u8(f, oid_version());
- hashwrite_u8(f, num_chunks);
- hashwrite_u8(f, 0); /* unused padding byte */
+ fd = git_mkstemp_mode(ctx->graph_name, 0444);
+ if (fd < 0) {
+ error(_("unable to create '%s'"), ctx->graph_name);
+ return -1;
+ }
+
+ f = hashfd(fd, ctx->graph_name);
+ } else {
+ hold_lock_file_for_update(&lk, ctx->graph_name, LOCK_DIE_ON_ERROR);
+ fd = lk.tempfile->fd;
+ f = hashfd(lk.tempfile->fd, lk.tempfile->filename.buf);
+ }
chunk_ids[0] = GRAPH_CHUNKID_OIDFANOUT;
chunk_ids[1] = GRAPH_CHUNKID_OIDLOOKUP;
chunk_ids[2] = GRAPH_CHUNKID_DATA;
- if (ctx->num_extra_edges)
- chunk_ids[3] = GRAPH_CHUNKID_EXTRAEDGES;
- else
- chunk_ids[3] = 0;
- chunk_ids[4] = 0;
+ if (ctx->num_extra_edges) {
+ chunk_ids[num_chunks] = GRAPH_CHUNKID_EXTRAEDGES;
+ num_chunks++;
+ }
+ if (ctx->num_commit_graphs_after > 1) {
+ chunk_ids[num_chunks] = GRAPH_CHUNKID_BASE;
+ num_chunks++;
+ }
+
+ chunk_ids[num_chunks] = 0;
chunk_offsets[0] = 8 + (num_chunks + 1) * GRAPH_CHUNKLOOKUP_WIDTH;
chunk_offsets[1] = chunk_offsets[0] + GRAPH_FANOUT_SIZE;
chunk_offsets[2] = chunk_offsets[1] + hashsz * ctx->commits.nr;
chunk_offsets[3] = chunk_offsets[2] + (hashsz + 16) * ctx->commits.nr;
- chunk_offsets[4] = chunk_offsets[3] + 4 * ctx->num_extra_edges;
+
+ num_chunks = 3;
+ if (ctx->num_extra_edges) {
+ chunk_offsets[num_chunks + 1] = chunk_offsets[num_chunks] +
+ 4 * ctx->num_extra_edges;
+ num_chunks++;
+ }
+ if (ctx->num_commit_graphs_after > 1) {
+ chunk_offsets[num_chunks + 1] = chunk_offsets[num_chunks] +
+ hashsz * (ctx->num_commit_graphs_after - 1);
+ num_chunks++;
+ }
+
+ hashwrite_be32(f, GRAPH_SIGNATURE);
+
+ hashwrite_u8(f, GRAPH_VERSION);
+ hashwrite_u8(f, oid_version());
+ hashwrite_u8(f, num_chunks);
+ hashwrite_u8(f, ctx->num_commit_graphs_after - 1);
for (i = 0; i <= num_chunks; i++) {
uint32_t chunk_write[3];
write_graph_chunk_data(f, hashsz, ctx);
if (ctx->num_extra_edges)
write_graph_chunk_extra_edges(f, ctx);
+ if (ctx->num_commit_graphs_after > 1 &&
+ write_graph_chunk_base(f, ctx)) {
+ return -1;
+ }
stop_progress(&ctx->progress);
strbuf_release(&progress_title);
+ if (ctx->split && ctx->base_graph_name && ctx->num_commit_graphs_after > 1) {
+ char *new_base_hash = xstrdup(oid_to_hex(&ctx->new_base_graph->oid));
+ char *new_base_name = get_split_graph_filename(ctx->new_base_graph->obj_dir, new_base_hash);
+
+ free(ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 2]);
+ free(ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 2]);
+ ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 2] = new_base_name;
+ ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 2] = new_base_hash;
+ }
+
close_commit_graph(ctx->r->objects);
- finalize_hashfile(f, NULL, CSUM_HASH_IN_STREAM | CSUM_FSYNC);
+ finalize_hashfile(f, file_hash.hash, CSUM_HASH_IN_STREAM | CSUM_FSYNC);
+
+ if (ctx->split) {
+ FILE *chainf = fdopen_lock_file(&lk, "w");
+ char *final_graph_name;
+ int result;
+
+ close(fd);
+
+ if (!chainf) {
+ error(_("unable to open commit-graph chain file"));
+ return -1;
+ }
+
+ if (ctx->base_graph_name) {
+ const char *dest = ctx->commit_graph_filenames_after[
+ ctx->num_commit_graphs_after - 2];
+
+ if (strcmp(ctx->base_graph_name, dest)) {
+ result = rename(ctx->base_graph_name, dest);
+
+ if (result) {
+ error(_("failed to rename base commit-graph file"));
+ return -1;
+ }
+ }
+ } else {
+ char *graph_name = get_commit_graph_filename(ctx->obj_dir);
+ unlink(graph_name);
+ }
+
+ ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 1] = xstrdup(oid_to_hex(&file_hash));
+ final_graph_name = get_split_graph_filename(ctx->obj_dir,
+ ctx->commit_graph_hash_after[ctx->num_commit_graphs_after - 1]);
+ ctx->commit_graph_filenames_after[ctx->num_commit_graphs_after - 1] = final_graph_name;
+
+ result = rename(ctx->graph_name, final_graph_name);
+
+ for (i = 0; i < ctx->num_commit_graphs_after; i++)
+ fprintf(lk.tempfile->fp, "%s\n", ctx->commit_graph_hash_after[i]);
+
+ if (result) {
+ error(_("failed to rename temporary commit-graph file"));
+ return -1;
+ }
+ }
+
commit_lock_file(&lk);
return 0;
}
+static void split_graph_merge_strategy(struct write_commit_graph_context *ctx)
+{
+ struct commit_graph *g = ctx->r->objects->commit_graph;
+ uint32_t num_commits = ctx->commits.nr;
+ uint32_t i;
+
+ int max_commits = 0;
+ int size_mult = 2;
+
+ if (ctx->split_opts) {
+ max_commits = ctx->split_opts->max_commits;
+ size_mult = ctx->split_opts->size_multiple;
+ }
+
+ g = ctx->r->objects->commit_graph;
+ ctx->num_commit_graphs_after = ctx->num_commit_graphs_before + 1;
+
+ while (g && (g->num_commits <= size_mult * num_commits ||
+ (max_commits && num_commits > max_commits))) {
+ if (strcmp(g->obj_dir, ctx->obj_dir))
+ break;
+
+ num_commits += g->num_commits;
+ g = g->base_graph;
+
+ ctx->num_commit_graphs_after--;
+ }
+
+ ctx->new_base_graph = g;
+
+ if (ctx->num_commit_graphs_after == 2) {
+ char *old_graph_name = get_commit_graph_filename(g->obj_dir);
+
+ if (!strcmp(g->filename, old_graph_name) &&
+ strcmp(g->obj_dir, ctx->obj_dir)) {
+ ctx->num_commit_graphs_after = 1;
+ ctx->new_base_graph = NULL;
+ }
+
+ free(old_graph_name);
+ }
+
+ ALLOC_ARRAY(ctx->commit_graph_filenames_after, ctx->num_commit_graphs_after);
+ ALLOC_ARRAY(ctx->commit_graph_hash_after, ctx->num_commit_graphs_after);
+
+ for (i = 0; i < ctx->num_commit_graphs_after &&
+ i < ctx->num_commit_graphs_before; i++)
+ ctx->commit_graph_filenames_after[i] = xstrdup(ctx->commit_graph_filenames_before[i]);
+
+ i = ctx->num_commit_graphs_before - 1;
+ g = ctx->r->objects->commit_graph;
+
+ while (g) {
+ if (i < ctx->num_commit_graphs_after)
+ ctx->commit_graph_hash_after[i] = xstrdup(oid_to_hex(&g->oid));
+
+ i--;
+ g = g->base_graph;
+ }
+}
+
+static void merge_commit_graph(struct write_commit_graph_context *ctx,
+ struct commit_graph *g)
+{
+ uint32_t i;
+ uint32_t offset = g->num_commits_in_base;
+
+ ALLOC_GROW(ctx->commits.list, ctx->commits.nr + g->num_commits, ctx->commits.alloc);
+
+ for (i = 0; i < g->num_commits; i++) {
+ struct object_id oid;
+ struct commit *result;
+
+ display_progress(ctx->progress, i + 1);
+
+ load_oid_from_graph(g, i + offset, &oid);
+
+ /* only add commits if they still exist in the repo */
+ result = lookup_commit_reference_gently(ctx->r, &oid, 1);
+
+ if (result) {
+ ctx->commits.list[ctx->commits.nr] = result;
+ ctx->commits.nr++;
+ }
+ }
+}
+
+static int commit_compare(const void *_a, const void *_b)
+{
+ const struct commit *a = *(const struct commit **)_a;
+ const struct commit *b = *(const struct commit **)_b;
+ return oidcmp(&a->object.oid, &b->object.oid);
+}
+
+static void sort_and_scan_merged_commits(struct write_commit_graph_context *ctx)
+{
+ uint32_t i, num_parents;
+ struct commit_list *parent;
+
+ if (ctx->report_progress)
+ ctx->progress = start_delayed_progress(
+ _("Scanning merged commits"),
+ ctx->commits.nr);
+
+ QSORT(ctx->commits.list, ctx->commits.nr, commit_compare);
+
+ ctx->num_extra_edges = 0;
+ for (i = 0; i < ctx->commits.nr; i++) {
+ display_progress(ctx->progress, i);
+
+ if (i && oideq(&ctx->commits.list[i - 1]->object.oid,
+ &ctx->commits.list[i]->object.oid)) {
+ die(_("unexpected duplicate commit id %s"),
+ oid_to_hex(&ctx->commits.list[i]->object.oid));
+ } else {
+ num_parents = 0;
+ for (parent = ctx->commits.list[i]->parents; parent; parent = parent->next)
+ num_parents++;
+
+ if (num_parents > 2)
+ ctx->num_extra_edges += num_parents - 1;
+ }
+ }
+
+ stop_progress(&ctx->progress);
+}
+
+static void merge_commit_graphs(struct write_commit_graph_context *ctx)
+{
+ struct commit_graph *g = ctx->r->objects->commit_graph;
+ uint32_t current_graph_number = ctx->num_commit_graphs_before;
+ struct strbuf progress_title = STRBUF_INIT;
+
+ while (g && current_graph_number >= ctx->num_commit_graphs_after) {
+ current_graph_number--;
+
+ if (ctx->report_progress) {
+ strbuf_addstr(&progress_title, _("Merging commit-graph"));
+ ctx->progress = start_delayed_progress(progress_title.buf, 0);
+ }
+
+ merge_commit_graph(ctx, g);
+ stop_progress(&ctx->progress);
+ strbuf_release(&progress_title);
+
+ g = g->base_graph;
+ }
+
+ if (g) {
+ ctx->new_base_graph = g;
+ ctx->new_num_commits_in_base = g->num_commits + g->num_commits_in_base;
+ }
+
+ if (ctx->new_base_graph)
+ ctx->base_graph_name = xstrdup(ctx->new_base_graph->filename);
+
+ sort_and_scan_merged_commits(ctx);
+}
+
+static void mark_commit_graphs(struct write_commit_graph_context *ctx)
+{
+ uint32_t i;
+ time_t now = time(NULL);
+
+ for (i = ctx->num_commit_graphs_after - 1; i < ctx->num_commit_graphs_before; i++) {
+ struct stat st;
+ struct utimbuf updated_time;
+
+ stat(ctx->commit_graph_filenames_before[i], &st);
+
+ updated_time.actime = st.st_atime;
+ updated_time.modtime = now;
+ utime(ctx->commit_graph_filenames_before[i], &updated_time);
+ }
+}
+
+static void expire_commit_graphs(struct write_commit_graph_context *ctx)
+{
+ struct strbuf path = STRBUF_INIT;
+ DIR *dir;
+ struct dirent *de;
+ size_t dirnamelen;
+ timestamp_t expire_time = time(NULL);
+
+ if (ctx->split_opts && ctx->split_opts->expire_time)
+ expire_time -= ctx->split_opts->expire_time;
+ if (!ctx->split) {
+ char *chain_file_name = get_chain_filename(ctx->obj_dir);
+ unlink(chain_file_name);
+ free(chain_file_name);
+ ctx->num_commit_graphs_after = 0;
+ }
+
+ strbuf_addstr(&path, ctx->obj_dir);
+ strbuf_addstr(&path, "/info/commit-graphs");
+ dir = opendir(path.buf);
+
+ if (!dir) {
+ strbuf_release(&path);
+ return;
+ }
+
+ strbuf_addch(&path, '/');
+ dirnamelen = path.len;
+ while ((de = readdir(dir)) != NULL) {
+ struct stat st;
+ uint32_t i, found = 0;
+
+ strbuf_setlen(&path, dirnamelen);
+ strbuf_addstr(&path, de->d_name);
+
+ stat(path.buf, &st);
+
+ if (st.st_mtime > expire_time)
+ continue;
+ if (path.len < 6 || strcmp(path.buf + path.len - 6, ".graph"))
+ continue;
+
+ for (i = 0; i < ctx->num_commit_graphs_after; i++) {
+ if (!strcmp(ctx->commit_graph_filenames_after[i],
+ path.buf)) {
+ found = 1;
+ break;
+ }
+ }
+
+ if (!found)
+ unlink(path.buf);
+ }
+}
+
int write_commit_graph(const char *obj_dir,
struct string_list *pack_indexes,
struct string_list *commit_hex,
- unsigned int flags)
+ unsigned int flags,
+ const struct split_commit_graph_opts *split_opts)
{
struct write_commit_graph_context *ctx;
uint32_t i, count_distinct = 0;
+ size_t len;
int res = 0;
if (!commit_graph_compatible(the_repository))
ctx = xcalloc(1, sizeof(struct write_commit_graph_context));
ctx->r = the_repository;
- ctx->obj_dir = obj_dir;
+
+ /* normalize object dir with no trailing slash */
+ ctx->obj_dir = xmallocz(strlen(obj_dir) + 1);
+ normalize_path_copy(ctx->obj_dir, obj_dir);
+ len = strlen(ctx->obj_dir);
+ if (len && ctx->obj_dir[len - 1] == '/')
+ ctx->obj_dir[len - 1] = 0;
+
ctx->append = flags & COMMIT_GRAPH_APPEND ? 1 : 0;
ctx->report_progress = flags & COMMIT_GRAPH_PROGRESS ? 1 : 0;
+ ctx->split = flags & COMMIT_GRAPH_SPLIT ? 1 : 0;
+ ctx->split_opts = split_opts;
+
+ if (ctx->split) {
+ struct commit_graph *g;
+ prepare_commit_graph(ctx->r);
+
+ g = ctx->r->objects->commit_graph;
+
+ while (g) {
+ ctx->num_commit_graphs_before++;
+ g = g->base_graph;
+ }
+
+ if (ctx->num_commit_graphs_before) {
+ ALLOC_ARRAY(ctx->commit_graph_filenames_before, ctx->num_commit_graphs_before);
+ i = ctx->num_commit_graphs_before;
+ g = ctx->r->objects->commit_graph;
+
+ while (g) {
+ ctx->commit_graph_filenames_before[--i] = xstrdup(g->filename);
+ g = g->base_graph;
+ }
+ }
+ }
ctx->approx_nr_objects = approximate_object_count();
ctx->oids.alloc = ctx->approx_nr_objects / 32;
+ if (ctx->split && split_opts && ctx->oids.alloc > split_opts->max_commits)
+ ctx->oids.alloc = split_opts->max_commits;
+
if (ctx->append) {
prepare_commit_graph_one(ctx->r, ctx->obj_dir);
if (ctx->r->objects->commit_graph)
goto cleanup;
}
+ if (!ctx->commits.nr)
+ goto cleanup;
+
+ if (ctx->split) {
+ split_graph_merge_strategy(ctx);
+
+ merge_commit_graphs(ctx);
+ } else
+ ctx->num_commit_graphs_after = 1;
+
compute_generation_numbers(ctx);
res = write_commit_graph_file(ctx);
+ if (ctx->split)
+ mark_commit_graphs(ctx);
+
+ expire_commit_graphs(ctx);
+
cleanup:
free(ctx->graph_name);
free(ctx->commits.list);
free(ctx->oids.list);
+ free(ctx->obj_dir);
+
+ if (ctx->commit_graph_filenames_after) {
+ for (i = 0; i < ctx->num_commit_graphs_after; i++) {
+ free(ctx->commit_graph_filenames_after[i]);
+ free(ctx->commit_graph_hash_after[i]);
+ }
+
+ for (i = 0; i < ctx->num_commit_graphs_before; i++)
+ free(ctx->commit_graph_filenames_before[i]);
+
+ free(ctx->commit_graph_filenames_after);
+ free(ctx->commit_graph_filenames_before);
+ free(ctx->commit_graph_hash_after);
+ }
+
free(ctx);
return res;
#define GENERATION_ZERO_EXISTS 1
#define GENERATION_NUMBER_EXISTS 2
-int verify_commit_graph(struct repository *r, struct commit_graph *g)
+int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags)
{
uint32_t i, cur_fanout_pos = 0;
struct object_id prev_oid, cur_oid, checksum;
struct hashfile *f;
int devnull;
struct progress *progress = NULL;
+ int local_error = 0;
if (!g) {
graph_report("no commit-graph file loaded");
hashcpy(cur_oid.hash, g->chunk_oid_lookup + g->hash_len * i);
graph_commit = lookup_commit(r, &cur_oid);
- odb_commit = (struct commit *)create_object(r, cur_oid.hash, alloc_commit_node(r));
+ odb_commit = (struct commit *)create_object(r, &cur_oid, alloc_commit_node(r));
if (parse_commit_internal(odb_commit, 0, 0)) {
graph_report(_("failed to parse commit %s from object database for commit-graph"),
oid_to_hex(&cur_oid));
break;
}
+ /* parse parent in case it is in a base graph */
+ parse_commit_in_graph_one(r, g, graph_parents->item);
+
if (!oideq(&graph_parents->item->object.oid, &odb_parents->item->object.oid))
graph_report(_("commit-graph parent for %s is %s != %s"),
oid_to_hex(&cur_oid),
}
stop_progress(&progress);
- return verify_commit_graph_error;
+ local_error = verify_commit_graph_error;
+
+ if (!(flags & COMMIT_GRAPH_VERIFY_SHALLOW) && g->base_graph)
+ local_error |= verify_commit_graph(r, g->base_graph, flags);
+
+ return local_error;
}
void free_commit_graph(struct commit_graph *g)
g->data = NULL;
close(g->graph_fd);
}
+ free(g->filename);
free(g);
}
unsigned char num_chunks;
uint32_t num_commits;
struct object_id oid;
+ char *filename;
+ const char *obj_dir;
+
+ uint32_t num_commits_in_base;
+ struct commit_graph *base_graph;
const uint32_t *chunk_oid_fanout;
const unsigned char *chunk_oid_lookup;
const unsigned char *chunk_commit_data;
const unsigned char *chunk_extra_edges;
+ const unsigned char *chunk_base_graphs;
};
struct commit_graph *load_commit_graph_one_fd_st(int fd, struct stat *st);
-
+struct commit_graph *read_commit_graph_one(struct repository *r, const char *obj_dir);
struct commit_graph *parse_commit_graph(void *graph_map, int fd,
size_t graph_size);
#define COMMIT_GRAPH_APPEND (1 << 0)
#define COMMIT_GRAPH_PROGRESS (1 << 1)
+#define COMMIT_GRAPH_SPLIT (1 << 2)
+
+struct split_commit_graph_opts {
+ int size_multiple;
+ int max_commits;
+ timestamp_t expire_time;
+};
/*
* The write_commit_graph* methods return zero on success
* is not compatible with the commit-graph feature, then the
* methods will return 0 without writing a commit-graph.
*/
-int write_commit_graph_reachable(const char *obj_dir, unsigned int flags);
+int write_commit_graph_reachable(const char *obj_dir, unsigned int flags,
+ const struct split_commit_graph_opts *split_opts);
int write_commit_graph(const char *obj_dir,
struct string_list *pack_indexes,
struct string_list *commit_hex,
- unsigned int flags);
+ unsigned int flags,
+ const struct split_commit_graph_opts *split_opts);
+
+#define COMMIT_GRAPH_VERIFY_SHALLOW (1 << 0)
-int verify_commit_graph(struct repository *r, struct commit_graph *g);
+int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags);
void close_commit_graph(struct raw_object_store *);
void free_commit_graph(struct commit_graph *);
struct commit *lookup_commit(struct repository *r, const struct object_id *oid)
{
- struct object *obj = lookup_object(r, oid->hash);
+ struct object *obj = lookup_object(r, oid);
if (!obj)
- return create_object(r, oid->hash,
- alloc_commit_node(r));
+ return create_object(r, oid, alloc_commit_node(r));
return object_as_type(r, obj, OBJ_COMMIT, 0);
}
do_unset_environment_variables();
/* Determine whether or not we are associated to a console */
- cons = CreateFile("CONOUT$", GENERIC_WRITE,
+ cons = CreateFileW(L"CONOUT$", GENERIC_WRITE,
FILE_SHARE_WRITE, NULL, OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL, NULL);
if (cons == INVALID_HANDLE_VALUE) {
if (prog) {
int exec_id;
int argc = 0;
- const char **argv2;
+#ifndef _MSC_VER
+ const
+#endif
+ char **argv2;
while (argv[argc]) argc++;
ALLOC_ARRAY(argv2, argc + 1);
argv2[0] = (char *)cmd; /* full path to the script file */
static unsigned initialized;
static char user_name[100];
static struct passwd *p;
+ wchar_t buf[100];
DWORD len;
if (initialized)
return p;
- len = sizeof(user_name);
- if (!GetUserName(user_name, &len)) {
+ len = ARRAY_SIZE(buf);
+ if (!GetUserNameW(buf, &len)) {
+ initialized = 1;
+ return NULL;
+ }
+
+ if (xwcstoutf(user_name, buf, sizeof(user_name)) < 0) {
initialized = 1;
return NULL;
}
sigint_fn(SIGINT);
return 0;
+#if defined(_MSC_VER)
+ case SIGILL:
+ case SIGFPE:
+ case SIGSEGV:
+ case SIGTERM:
+ case SIGBREAK:
+ case SIGABRT:
+ case SIGABRT_COMPAT:
+ /*
+ * The <signal.h> header in the MS C Runtime defines 8 signals
+ * as being supported on the platform. Anything else causes an
+ * "Invalid signal or error" (which in DEBUG builds causes the
+ * Abort/Retry/Ignore dialog). We by-pass the CRT for things we
+ * already know will fail.
+ */
+ return raise(sig);
+ default:
+ errno = EINVAL;
+ return -1;
+
+#else
+
default:
return raise(sig);
+
+#endif
+
}
}
/* simulate TERM to enable auto-color (see color.c) */
if (!getenv("TERM"))
setenv("TERM", "cygwin", 1);
+
+ /* calculate HOME if not set */
+ if (!getenv("HOME")) {
+ /*
+ * try $HOMEDRIVE$HOMEPATH - the home share may be a network
+ * location, thus also check if the path exists (i.e. is not
+ * disconnected)
+ */
+ if ((tmp = getenv("HOMEDRIVE"))) {
+ struct strbuf buf = STRBUF_INIT;
+ strbuf_addstr(&buf, tmp);
+ if ((tmp = getenv("HOMEPATH"))) {
+ strbuf_addstr(&buf, tmp);
+ if (is_directory(buf.buf))
+ setenv("HOME", buf.buf, 1);
+ else
+ tmp = NULL; /* use $USERPROFILE */
+ }
+ strbuf_release(&buf);
+ }
+ /* use $USERPROFILE if the home share is not available */
+ if (!tmp && (tmp = getenv("USERPROFILE")))
+ setenv("HOME", tmp, 1);
+ }
}
+#if !defined(_MSC_VER)
/*
* Disable MSVCRT command line wildcard expansion (__getmainargs called from
* mingw startup code, see init.c in mingw runtime).
*/
int _CRT_glob = 0;
-
-typedef struct {
- int newmode;
-} _startupinfo;
-
-extern int __wgetmainargs(int *argc, wchar_t ***argv, wchar_t ***env, int glob,
- _startupinfo *si);
+#endif
static NORETURN void die_startup(void)
{
GENERIC_WRITE, FILE_FLAG_NO_BUFFERING);
}
-void mingw_startup(void)
+#ifdef _MSC_VER
+#ifdef _DEBUG
+#include <crtdbg.h>
+#endif
+#endif
+
+/*
+ * We implement wmain() and compile with -municode, which would
+ * normally ignore main(), but we call the latter from the former
+ * so that we can handle non-ASCII command-line parameters
+ * appropriately.
+ *
+ * To be more compatible with the core git code, we convert
+ * argv into UTF8 and pass them directly to main().
+ */
+int wmain(int argc, const wchar_t **wargv)
{
- int i, maxlen, argc;
- char *buffer;
- wchar_t **wenv, **wargv;
- _startupinfo si;
+ int i, maxlen, exit_status;
+ char *buffer, **save;
+ const char **argv;
trace2_initialize_clock();
- maybe_redirect_std_handles();
+#ifdef _MSC_VER
+#ifdef _DEBUG
+ _CrtSetReportMode(_CRT_ASSERT, _CRTDBG_MODE_DEBUG);
+#endif
- /* get wide char arguments and environment */
- si.newmode = 0;
- if (__wgetmainargs(&argc, &wargv, &wenv, _CRT_glob, &si) < 0)
- die_startup();
+#ifdef USE_MSVC_CRTDBG
+ _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);
+#endif
+#endif
+
+ maybe_redirect_std_handles();
/* determine size of argv and environ conversion buffer */
maxlen = wcslen(wargv[0]);
maxlen = 3 * maxlen + 1;
buffer = malloc_startup(maxlen);
- /* convert command line arguments and environment to UTF-8 */
+ /*
+ * Create a UTF-8 version of w_argv. Also create a "save" copy
+ * to remember all the string pointers because parse_options()
+ * will remove claimed items from the argv that we pass down.
+ */
+ ALLOC_ARRAY(argv, argc + 1);
+ ALLOC_ARRAY(save, argc + 1);
for (i = 0; i < argc; i++)
- __argv[i] = wcstoutfdup_startup(buffer, wargv[i], maxlen);
+ argv[i] = save[i] = wcstoutfdup_startup(buffer, wargv[i], maxlen);
+ argv[i] = save[i] = NULL;
free(buffer);
/* fix Windows specific environment settings */
/* initialize Unicode console */
winansi_init();
+
+ /* invoke the real main() using our utf8 version of argv. */
+ exit_status = main(argc, argv);
+
+ for (i = 0; i < argc; i++)
+ free(save[i]);
+ free(save);
+ free(argv);
+
+ return exit_status;
}
int uname(struct utsname *buf)
#ifndef __MINGW64_VERSION_MAJOR
#define off_t off64_t
#define lseek _lseeki64
+#ifndef _MSC_VER
struct timespec {
time_t tv_sec;
long tv_nsec;
};
#endif
+#endif
struct mingw_stat {
_dev_t st_dev;
extern CRITICAL_SECTION pinfo_cs;
/*
- * A replacement of main() that adds win32 specific initialization.
+ * Git, like most portable C applications, implements a main() function. On
+ * Windows, this main() function would receive parameters encoded in the
+ * current locale, but Git for Windows would prefer UTF-8 encoded parameters.
+ *
+ * To make that happen, we still declare main() here, and then declare and
+ * implement wmain() (which is the Unicode variant of main()) and compile with
+ * -municode. This wmain() function reencodes the parameters from UTF-16 to
+ * UTF-8 format, sets up a couple of other things as required on Windows, and
+ * then hands off to the main() function.
*/
-
-void mingw_startup(void);
-#define main(c,v) dummy_decl_mingw_main(void); \
-static int mingw_main(c,v); \
-int main(int argc, const char **argv) \
-{ \
- mingw_startup(); \
- return mingw_main(__argc, (void *)__argv); \
-} \
-static int mingw_main(c,v)
+int wmain(int argc, const wchar_t **w_argv);
+int main(int argc, const char **argv);
/*
* Used by Pthread API implementation for Windows
#include <malloc.h>
#include <io.h>
+#pragma warning(disable: 4018) /* signed/unsigned comparison */
+#pragma warning(disable: 4244) /* type conversion, possible loss of data */
+#pragma warning(disable: 4090) /* 'function' : different 'const' qualifiers (ALLOC_GROW etc.)*/
+
/* porting function */
#define inline __inline
#define __inline__ __inline
#undef ERROR
+#define ftello _ftelli64
+
+typedef int sigset_t;
+/* open for reading, writing, or both (not in fcntl.h) */
+#define O_ACCMODE (_O_RDONLY | _O_WRONLY | _O_RDWR)
+
#include "compat/mingw.h"
#endif
( (h)->temp.tempint = (char *) (obj) - (char *) (h)->chunk, \
((((h)->temp.tempint > 0 \
&& (h)->temp.tempint < (h)->chunk_limit - (char *) (h)->chunk)) \
- ? (int) ((h)->next_free = (h)->object_base \
+ ? (ptrdiff_t) ((h)->next_free = (h)->object_base \
= (h)->temp.tempint + (char *) (h)->chunk) \
: (((obstack_free) ((h), (h)->temp.tempint + (char *) (h)->chunk), 0), 0)))
if (!once_only)
{
NtQueryInformationFile = (PNtQueryInformationFile)(void (*)(void))
- GetProcAddress (GetModuleHandle ("ntdll.dll"),
+ GetProcAddress (GetModuleHandleW (L"ntdll.dll"),
"NtQueryInformationFile");
once_only = TRUE;
}
--- /dev/null
+/vcpkg/
+/MSVC-DEFS-GEN
+/VCPKG-DEFS
+The Steps to Build Git with VS2015 or VS2017 from the command line.
+
+1. Install the "vcpkg" open source package manager and build essential
+ third-party libraries. The steps for this have been captured in a
+ set of convenience scripts. These can be run from a stock Command
+ Prompt or from an SDK bash window:
+
+ $ cd <repo_root>
+ $ ./compat/vcbuild/vcpkg_install.bat
+
+ The vcpkg tools and all of the third-party sources will be installed
+ in this folder:
+ <repo_root>/compat/vcbuild/vcpkg/
+
+ A file will be created with a set of Makefile macros pointing to a
+ unified "include", "lib", and "bin" directory (release and debug) for
+ all of the required packages. This file will be included by the main
+ Makefile:
+ <repo_root>/compat/vcbuild/MSVC-DEFS-GEN
+
+2. OPTIONALLY copy the third-party *.dll and *.pdb files into the repo
+ root to make it easier to run and debug git.exe without having to
+ manipulate your PATH. This is especially helpful for debug sessions in
+ Visual Studio.
+
+ Use ONE of the following forms which should match how you want to
+ compile git.exe.
+
+ $ ./compat/vcbuild/vcpkg_copy_packages.bat debug
+ $ ./compat/vcbuild/vcpkg_copy_packages.bat release
+
+3. Build git using MSVC from an SDK bash window using one of the
+ following commands:
+
+ $ make MSVC=1
+ $ make MSVC=1 DEBUG=1
+
+================================================================
+
+Alternatively, run `make vcxproj` and then load the generated `git.sln` in
+Visual Studio. The initial build will install the vcpkg system and build the
+dependencies automatically. This will take a while.
+
+Instead of generating the `git.sln` file yourself (which requires a full Git
+for Windows SDK), you may want to consider fetching the `vs/master` branch of
+https://github.com/git-for-windows/git (which is updated automatically via CI
+running `make vcxproj`). The `vs/master` branch does not require a Git for
+Windows SDK to build, and you can run the test scripts in a regular Git Bash.
+
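
For example, fetching that branch into a local branch (the name `vs-master`
below is arbitrary) might look like this:

    $ git fetch https://github.com/git-for-windows/git vs/master
    $ git checkout -b vs-master FETCH_HEAD
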
+Note that `make vcxproj` will automatically add and commit the generated `.sln`
+and `.vcxproj` files to the repo. This is necessary to allow building a
+fully-testable Git in Visual Studio, where a regular Git Bash can be used to
+run the test scripts (as opposed to a full Git for Windows SDK): a number of
+build targets, such as Git commands implemented as Unix shell scripts (where
+`@@SHELL_PATH@@` and other placeholders are interpolated) require a full-blown
+Git for Windows SDK (which is about 10x the size of a regular Git for Windows
+installation).
+
+If your plan is to open a Pull Request with Git for Windows, it is a good idea
+to drop this commit before submitting.
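
If the auto-generated commit is the only one sitting on top of your branch, one
simple way to drop it (this also removes the generated files from the working
tree) is:

    $ git reset --hard HEAD~1
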
+
+================================================================
The Steps of Build Git with VS2008
1. You need the build environment, which contains the Git dependencies
--- /dev/null
+@ECHO OFF
+REM ================================================================
+REM You can use either GCC (the default) or MSVC to build git
+REM using the GIT-SDK command line tools.
+REM $ make
+REM $ make MSVC=1
+REM
+REM GIT-SDK BASH windows inherit environment variables with all of
+REM the bin/lib/include paths for GCC. They DO NOT inherit values
+REM for the corresponding MSVC tools.
+REM
+REM During normal (non-git) Windows development, you launch one
+REM of the provided "developer command prompts" to set environment
+REM variables for the MSVC tools.
+REM
+REM Therefore, to allow MSVC command line builds of git from BASH
+REM and MAKE, we must blend these two different worlds. This script
+REM attempts to do that.
+REM ================================================================
+REM This BAT file starts in a plain (non-developer) command prompt,
+REM searches for the "best" command prompt setup script, installs
+REM it into the current CMD process, and exports the various MSVC
+REM environment variables for use by MAKE.
+REM
+REM The output of this script should be written to a make "include
+REM file" and referenced by the top-level Makefile.
+REM
+REM See "config.mak.uname" (look for compat/vcbuild/MSVC-DEFS-GEN).
+REM ================================================================
+REM The provided command prompts are custom to each VS release and
+REM filled with lots of internal knowledge (such as Registry settings);
+REM even their names vary by release, so it is not appropriate for us
+REM to look inside them. Rather, just run them in a subordinate
+REM process and extract the settings we need.
+REM ================================================================
+REM
+REM Current (VS2017 and beyond)
+REM -------------------
+REM Visual Studio 2017 introduced a new installation layout and
+REM support for side-by-side installation of multiple versions of
+REM VS2017. Furthermore, these can all coexist with installations
+REM of previous versions of VS (which have a completely different
+REM layout on disk).
+REM
+REM VS2017 Update 2 introduced a "vswhere.exe" command:
+REM https://github.com/Microsoft/vswhere
+REM https://blogs.msdn.microsoft.com/heaths/2017/02/25/vswhere-available/
+REM https://blogs.msdn.microsoft.com/vcblog/2017/03/06/finding-the-visual-c-compiler-tools-in-visual-studio-2017/
+REM
+REM VS2015
+REM ------
+REM Visual Studio 2015 uses the traditional VcVarsAll.
+REM
+REM Earlier Versions
+REM ----------------
+REM Currently unsupported.
+REM
+REM ================================================================
+REM Note: Throughout this script we use "dir <path> && <cmd>" rather
+REM than "if exist <path>" because of script problems with pathnames
+REM containing spaces.
+REM ================================================================
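
As the header above notes, the output of this script is meant to be captured
into a Make include file. Done by hand from an SDK bash window (mirroring the
config.mak.uname rule added later in this patch), that might look like:

    $ ./compat/vcbuild/find_vs_env.bat | tr '\\' / >compat/vcbuild/MSVC-DEFS-GEN
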
+
+REM Sanitize PATH to prevent git-sdk paths from confusing "wmic.exe"
+REM (called internally in some of the system BAT files).
+SET PATH=%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem;
+
+REM ================================================================
+
+:current
+ SET vs_where=C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe
+ dir "%vs_where%" >nul 2>nul && GOTO have_vs_where
+ GOTO not_2017
+
+:have_vs_where
+ REM Try to use VsWhere to get the location of VsDevCmd.
+
+ REM Keep VsDevCmd from cd'ing away.
+ SET VSCMD_START_DIR=.
+
+ REM Get the root of the VS product installation.
+ FOR /F "usebackq tokens=*" %%i IN (`"%vs_where%" -latest -requires Microsoft.VisualStudio.Workload.NativeDesktop -property installationPath`) DO @SET vs_ip=%%i
+
+ SET vs_devcmd=%vs_ip%\Common7\Tools\VsDevCmd.bat
+ dir "%vs_devcmd%" >nul 2>nul && GOTO have_vs_devcmd
+ GOTO not_2017
+
+:have_vs_devcmd
+ REM Use VsDevCmd to setup the environment of this process.
+ REM Setup CL for building 64-bit apps using 64-bit tools.
+ @call "%vs_devcmd%" -no_logo -arch=x64 -host_arch=x64
+
+ SET tgt=%VSCMD_ARG_TGT_ARCH%
+
+ SET mn=%VCToolsInstallDir%
+ SET msvc_includes=-I"%mn%INCLUDE"
+ SET msvc_libs=-L"%mn%lib\%tgt%"
+ SET msvc_bin_dir=%mn%bin\Host%VSCMD_ARG_HOST_ARCH%\%tgt%
+
+ SET sdk_dir=%WindowsSdkDir%
+ SET sdk_ver=%WindowsSDKVersion%
+ SET si=%sdk_dir%Include\%sdk_ver%
+ SET sdk_includes=-I"%si%ucrt" -I"%si%um" -I"%si%shared"
+ SET sl=%sdk_dir%lib\%sdk_ver%
+ SET sdk_libs=-L"%sl%ucrt\%tgt%" -L"%sl%um\%tgt%"
+
+ SET vs_ver=%VisualStudioVersion%
+
+ GOTO print_vars
+
+REM ================================================================
+
+:not_2017
+ REM See if VS2015 is installed.
+
+ SET vs_2015_bat=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat
+ dir "%vs_2015_bat%" >nul 2>nul && GOTO have_vs_2015
+ GOTO not_2015
+
+:have_vs_2015
+ REM Use VcVarsAll like the "x64 Native" command prompt.
+ REM Setup CL for building 64-bit apps using 64-bit tools.
+ @call "%vs_2015_bat%" amd64
+
+ REM Note that in VS2015 they use "x64" in some contexts and "amd64" in others.
+ SET mn=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\
+ SET msvc_includes=-I"%mn%INCLUDE"
+ SET msvc_libs=-L"%mn%lib\amd64"
+ SET msvc_bin_dir=%mn%bin\amd64
+
+ SET sdk_dir=%WindowsSdkDir%
+ SET sdk_ver=%WindowsSDKVersion%
+ SET si=%sdk_dir%Include\%sdk_ver%
+ SET sdk_includes=-I"%si%ucrt" -I"%si%um" -I"%si%shared" -I"%si%winrt"
+ SET sl=%sdk_dir%lib\%sdk_ver%
+ SET sdk_libs=-L"%sl%ucrt\x64" -L"%sl%um\x64"
+
+ SET vs_ver=%VisualStudioVersion%
+
+ GOTO print_vars
+
+REM ================================================================
+
+:not_2015
+ echo "ERROR: unsupported VS version (older than VS2015)" >&2
+ EXIT /B 1
+
+REM ================================================================
+
+:print_vars
+ REM Dump the essential vars to stdout to allow the main
+ REM Makefile to include it. See config.mak.uname.
+ REM Include DOS-style and BASH-style path for bin dir.
+
+ echo msvc_bin_dir=%msvc_bin_dir%
+ SET X1=%msvc_bin_dir:C:=/C%
+ SET X2=%X1:\=/%
+ echo msvc_bin_dir_msys=%X2%
+
+ echo msvc_includes=%msvc_includes%
+ echo msvc_libs=%msvc_libs%
+
+ echo sdk_includes=%sdk_includes%
+ echo sdk_libs=%sdk_libs%
+
+ echo vs_ver=%vs_ver%
+
+ EXIT /B 0
use strict;
my @args = ();
my @cflags = ();
+my @lflags = ();
my $is_linking = 0;
+my $is_debug = 0;
while (@ARGV) {
my $arg = shift @ARGV;
- if ("$arg" =~ /^-[DIMGO]/) {
+ if ("$arg" eq "-DDEBUG") {
+ # Some vcpkg-based libraries have different names for release
+ # and debug versions. This hack assumes that -DDEBUG comes
+ # before any "-l*" flags.
+ $is_debug = 1;
+ }
+ if ("$arg" =~ /^-[DIMGOZ]/) {
push(@cflags, $arg);
} elsif ("$arg" eq "-o") {
my $file_out = shift @ARGV;
if ("$file_out" =~ /exe$/) {
$is_linking = 1;
+ # Create foo.exe and foo.pdb
push(@args, "-OUT:$file_out");
} else {
+ # Create foo.o and foo.o.pdb
push(@args, "-Fo$file_out");
+ push(@args, "-Fd$file_out.pdb");
}
} elsif ("$arg" eq "-lz") {
+ if ($is_debug) {
+ push(@args, "zlibd.lib");
+ } else {
push(@args, "zlib.lib");
+ }
} elsif ("$arg" eq "-liconv") {
- push(@args, "iconv.lib");
+ push(@args, "libiconv.lib");
} elsif ("$arg" eq "-lcrypto") {
push(@args, "libeay32.lib");
} elsif ("$arg" eq "-lssl") {
push(@args, "ssleay32.lib");
} elsif ("$arg" eq "-lcurl") {
- push(@args, "libcurl.lib");
+ my $lib = "";
+ # Newer vcpkg definitions call this libcurl_imp.lib; Do we
+ # need to use that instead?
+ foreach my $flag (@lflags) {
+ if ($flag =~ /^-LIBPATH:(.*)/) {
+ foreach my $l ("libcurl_imp.lib", "libcurl.lib") {
+ if (-f "$1/$l") {
+ $lib = $l;
+ last;
+ }
+ }
+ }
+ }
+ push(@args, $lib);
+ } elsif ("$arg" eq "-lexpat") {
+ push(@args, "expat.lib");
} elsif ("$arg" =~ /^-L/ && "$arg" ne "-LTCG") {
$arg =~ s/^-L/-LIBPATH:/;
- push(@args, $arg);
+ push(@lflags, $arg);
} elsif ("$arg" =~ /^-R/) {
# eat
} else {
}
}
if ($is_linking) {
+ push(@args, @lflags);
unshift(@args, "link.exe");
} else {
unshift(@args, "cl.exe");
push(@args, @cflags);
}
-#printf("**** @args\n");
+printf(STDERR "**** @args\n\n\n") if (!defined($ENV{'QUIET_GEN'}));
exit (system(@args) != 0);
--- /dev/null
+@ECHO OFF
+REM ================================================================
+REM This script is an optional step. It copies the *.dll and *.pdb
+REM files (created by vcpkg_install.bat) into the top-level directory
+REM of the repo so that you can type "./git.exe" and find them without
+REM having to fixup your PATH.
+REM
+REM NOTE: Because the names of some DLL files change between DEBUG and
+REM NOTE: RELEASE builds when built using "vcpkg.exe", you will need
+REM NOTE: to copy up the corresponding version.
+REM ================================================================
+
+ SETLOCAL EnableDelayedExpansion
+
+ @FOR /F "delims=" %%D IN ("%~dp0") DO @SET cwd=%%~fD
+ cd %cwd%
+
+ SET arch=x64-windows
+ SET inst=%cwd%vcpkg\installed\%arch%
+
+ IF [%1]==[release] (
+ echo Copying RELEASE mode DLLs to repo root...
+ ) ELSE IF [%1]==[debug] (
+ SET inst=%inst%\debug
+ echo Copying DEBUG mode DLLs to repo root...
+ ) ELSE (
+ echo ERROR: Invalid argument.
+ echo Usage: %~0 release
+ echo Usage: %~0 debug
+ EXIT /B 1
+ )
+
+ xcopy /e/s/v/y %inst%\bin\*.dll ..\..\
+ xcopy /e/s/v/y %inst%\bin\*.pdb ..\..\
+
+ xcopy /e/s/v/y %inst%\bin\*.dll ..\..\t\helper\
+ xcopy /e/s/v/y %inst%\bin\*.pdb ..\..\t\helper\
+
+ EXIT /B 0
--- /dev/null
+@ECHO OFF
+REM ================================================================
+REM This script installs the "vcpkg" source package manager and uses
+REM it to build the third-party libraries that git requires when it
+REM is built using MSVC.
+REM
+REM [1] Install VCPKG.
+REM [a] Create <root>/compat/vcbuild/vcpkg/
+REM [b] Download "vcpkg".
+REM [c] Compile using the currently installed version of VS.
+REM [d] Create <root>/compat/vcbuild/vcpkg/vcpkg.exe
+REM
+REM [2] Install third-party libraries.
+REM [a] Download each (which may also install CMAKE).
+REM [b] Compile in RELEASE mode and install in:
+REM vcpkg/installed/<arch>/{bin,lib}
+REM [c] Compile in DEBUG mode and install in:
+REM vcpkg/installed/<arch>/debug/{bin,lib}
+REM [d] Install headers in:
+REM vcpkg/installed/<arch>/include
+REM
+REM [3] Create a set of MAKE definitions for the top-level
+REM Makefile to allow "make MSVC=1" to find the above
+REM third-party libraries.
+REM [a] Write compat/vcbuild/VCPKG-DEFS
+REM
+REM https://blogs.msdn.microsoft.com/vcblog/2016/09/19/vcpkg-a-tool-to-acquire-and-build-c-open-source-libraries-on-windows/
+REM https://github.com/Microsoft/vcpkg
+REM https://vcpkg.readthedocs.io/en/latest/
+REM ================================================================
+
+ SETLOCAL EnableDelayedExpansion
+
+ @FOR /F "delims=" %%D IN ("%~dp0") DO @SET cwd=%%~fD
+ cd %cwd%
+
+ dir vcpkg\vcpkg.exe >nul 2>nul && GOTO :install_libraries
+
+ echo Fetching vcpkg in %cwd%vcpkg
+ git.exe clone https://github.com/Microsoft/vcpkg vcpkg
+ IF ERRORLEVEL 1 ( EXIT /B 1 )
+
+ cd vcpkg
+ echo Building vcpkg
+ powershell -exec bypass scripts\bootstrap.ps1
+ IF ERRORLEVEL 1 ( EXIT /B 1 )
+
+ echo Successfully installed %cwd%vcpkg\vcpkg.exe
+
+:install_libraries
+ SET arch=x64-windows
+
+ echo Installing third-party libraries...
+ FOR %%i IN (zlib expat libiconv openssl libssh2 curl) DO (
+ cd %cwd%vcpkg
+ IF NOT EXIST "packages\%%i_%arch%" CALL :sub__install_one %%i
+ IF ERRORLEVEL 1 ( EXIT /B 1 )
+ )
+
+:install_defines
+ cd %cwd%
+ SET inst=%cwd%vcpkg\installed\%arch%
+
+ echo vcpkg_inc=-I"%inst%\include">VCPKG-DEFS
+ echo vcpkg_rel_lib=-L"%inst%\lib">>VCPKG-DEFS
+ echo vcpkg_rel_bin="%inst%\bin">>VCPKG-DEFS
+ echo vcpkg_dbg_lib=-L"%inst%\debug\lib">>VCPKG-DEFS
+ echo vcpkg_dbg_bin="%inst%\debug\bin">>VCPKG-DEFS
+
+ EXIT /B 0
+
+
+:sub__install_one
+ echo Installing package %1...
+
+ .\vcpkg.exe install %1:%arch%
+ IF ERRORLEVEL 1 ( EXIT /B 1 )
+
+ echo Finished %1
+ goto :EOF
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity type="win32" name="Git" version="0.0.0.1" />
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v2">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false" />
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!-- Windows Vista -->
+ <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+ <!-- Windows 7 -->
+ <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+ <!-- Windows 8 -->
+ <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+ <!-- Windows 8.1 -->
+ <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+ <!-- Windows 10 -->
+ <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+ </application>
+ </compatibility>
+</assembly>
#ifdef DETECT_MSYS_TTY
#include <winternl.h>
+
+#if defined(_MSC_VER)
+
+typedef struct _OBJECT_NAME_INFORMATION
+{
+ UNICODE_STRING Name;
+ WCHAR NameBuffer[0];
+} OBJECT_NAME_INFORMATION, *POBJECT_NAME_INFORMATION;
+
+#define ObjectNameInformation 1
+
+#else
#include <ntstatus.h>
+#endif
static void detect_msys_tty(int fd)
{
void winansi_init(void)
{
int con1, con2;
- char name[32];
+ wchar_t name[32];
/* check if either stdout or stderr is a console output screen buffer */
con1 = is_console(1);
}
/* create a named pipe to communicate with the console thread */
- xsnprintf(name, sizeof(name), "\\\\.\\pipe\\winansi%lu", GetCurrentProcessId());
- hwrite = CreateNamedPipe(name, PIPE_ACCESS_OUTBOUND,
+ if (swprintf(name, ARRAY_SIZE(name) - 1, L"\\\\.\\pipe\\winansi%lu",
+ GetCurrentProcessId()) < 0)
+ die("Could not initialize winansi pipe name");
+ hwrite = CreateNamedPipeW(name, PIPE_ACCESS_OUTBOUND,
PIPE_TYPE_BYTE | PIPE_WAIT, 1, BUFFER_SIZE, 0, 0, NULL);
if (hwrite == INVALID_HANDLE_VALUE)
die_lasterr("CreateNamedPipe failed");
- hread = CreateFile(name, GENERIC_READ, 0, NULL, OPEN_EXISTING, 0, NULL);
+ hread = CreateFileW(name, GENERIC_READ, 0, NULL, OPEN_EXISTING, 0, NULL);
if (hread == INVALID_HANDLE_VALUE)
die_lasterr("CreateFile for named pipe failed");
#include "utf8.h"
#include "dir.h"
#include "color.h"
+#include "refs.h"
struct config_source {
struct config_source *prev;
return ret;
}
+static void add_trailing_starstar_for_dir(struct strbuf *pat)
+{
+ if (pat->len && is_dir_sep(pat->buf[pat->len - 1]))
+ strbuf_addstr(pat, "**");
+}
+
static int prepare_include_condition_pattern(struct strbuf *pat)
{
struct strbuf path = STRBUF_INIT;
} else if (!is_absolute_path(pat->buf))
strbuf_insert(pat, 0, "**/", 3);
- if (pat->len && is_dir_sep(pat->buf[pat->len - 1]))
- strbuf_addstr(pat, "**");
+ add_trailing_starstar_for_dir(pat);
strbuf_release(&path);
return prefix;
return ret;
}
+static int include_by_branch(const char *cond, size_t cond_len)
+{
+ int flags;
+ int ret;
+ struct strbuf pattern = STRBUF_INIT;
+ const char *refname = !the_repository || !the_repository->gitdir ?
+ NULL : resolve_ref_unsafe("HEAD", 0, NULL, &flags);
+ const char *shortname;
+
+ if (!refname || !(flags & REF_ISSYMREF) ||
+ !skip_prefix(refname, "refs/heads/", &shortname))
+ return 0;
+
+ strbuf_add(&pattern, cond, cond_len);
+ add_trailing_starstar_for_dir(&pattern);
+ ret = !wildmatch(pattern.buf, shortname, WM_PATHNAME);
+ strbuf_release(&pattern);
+ return ret;
+}
+
static int include_condition_is_true(const struct config_options *opts,
const char *cond, size_t cond_len)
{
return include_by_gitdir(opts, cond, cond_len, 0);
else if (skip_prefix_mem(cond, cond_len, "gitdir/i:", &cond, &cond_len))
return include_by_gitdir(opts, cond, cond_len, 1);
+ else if (skip_prefix_mem(cond, cond_len, "onbranch:", &cond, &cond_len))
+ return include_by_branch(cond, cond_len);
/* unknown conditionals are always false */
return 0;
NORETURN
static void die_bad_number(const char *name, const char *value)
{
- const char * error_type = (errno == ERANGE)? _("out of range"):_("invalid unit");
+ const char *error_type = (errno == ERANGE) ?
+ N_("out of range") : N_("invalid unit");
+ const char *bad_numeric = N_("bad numeric config value '%s' for '%s': %s");
if (!value)
value = "";
+ if (!strcmp(name, "GIT_TEST_GETTEXT_POISON"))
+ /*
+ * We explicitly *don't* use _() here since it would
+ * cause an infinite loop with _() needing to call
+ * use_gettext_poison(). This is why we marked up the
+ * translations with N_() above.
+ */
+ die(bad_numeric, value, name, error_type);
+
if (!(cf && cf->name))
- die(_("bad numeric config value '%s' for '%s': %s"),
- value, name, error_type);
+ die(_(bad_numeric), value, name, _(error_type));
switch (cf->origin_type) {
case CONFIG_ORIGIN_BLOB:
die(_("bad numeric config value '%s' for '%s' in blob %s: %s"),
- value, name, cf->name, error_type);
+ value, name, cf->name, _(error_type));
case CONFIG_ORIGIN_FILE:
die(_("bad numeric config value '%s' for '%s' in file %s: %s"),
- value, name, cf->name, error_type);
+ value, name, cf->name, _(error_type));
case CONFIG_ORIGIN_STDIN:
die(_("bad numeric config value '%s' for '%s' in standard input: %s"),
- value, name, error_type);
+ value, name, _(error_type));
case CONFIG_ORIGIN_SUBMODULE_BLOB:
die(_("bad numeric config value '%s' for '%s' in submodule-blob %s: %s"),
- value, name, cf->name, error_type);
+ value, name, cf->name, _(error_type));
case CONFIG_ORIGIN_CMDLINE:
die(_("bad numeric config value '%s' for '%s' in command line %s: %s"),
- value, name, cf->name, error_type);
+ value, name, cf->name, _(error_type));
default:
die(_("bad numeric config value '%s' for '%s' in %s: %s"),
- value, name, cf->name, error_type);
+ value, name, cf->name, _(error_type));
}
}
# Platform specific Makefile tweaks based on uname detection
+# Define NO_SAFESEH if you need MSVC/Visual Studio to ignore the lack of
+# Microsoft's Safe Exception Handling in libraries (such as zlib).
+# Typically required for VS2013+/32-bit compilation on Vista+ versions.
+
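
As a hypothetical example, a 32-bit MSVC build linking against such a zlib
might be invoked as follows (any non-empty value enables the knob):

    $ make MSVC=1 NO_SAFESEH=YesPlease
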
uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not')
uname_M := $(shell sh -c 'uname -m 2>/dev/null || echo not')
uname_O := $(shell sh -c 'uname -o 2>/dev/null || echo not')
# avoid the MingW and Cygwin configuration sections
uname_S := Windows
uname_O := Windows
+
+ # Generate and include makefile variables that point to the
+ # currently installed set of MSVC command line tools.
+compat/vcbuild/MSVC-DEFS-GEN: compat/vcbuild/find_vs_env.bat
+ @"$<" | tr '\\' / >"$@"
+include compat/vcbuild/MSVC-DEFS-GEN
+
+ # See if vcpkg and the vcpkg-build versions of the third-party
+ # libraries that we use are installed. We include the result
+ # to get $(vcpkg_*) variables defined for the Makefile.
+ifeq (,$(SKIP_VCPKG))
+compat/vcbuild/VCPKG-DEFS: compat/vcbuild/vcpkg_install.bat
+ @"$<"
+include compat/vcbuild/VCPKG-DEFS
+endif
endif
# We choose to avoid "if .. else if .. else .. endif endif"
ifeq ($(uname_S),Windows)
GIT_VERSION := $(GIT_VERSION).MSVC
pathsep = ;
+ # Assume that this is built in Git for Windows' SDK
+ ifeq (MINGW32,$(MSYSTEM))
+ prefix = /mingw32
+ else
+ prefix = /mingw64
+ endif
+ # Prepend MSVC 64-bit tool-chain to PATH.
+ #
+ # A regular Git Bash *does not* have cl.exe in its $PATH. As there is a
+ # link.exe next to, and required by, cl.exe, we have to prepend this
+ # onto the existing $PATH.
+ #
+ SANE_TOOL_PATH ?= $(msvc_bin_dir_msys)
HAVE_ALLOCA_H = YesPlease
NO_PREAD = YesPlease
NEEDS_CRYPTO_WITH_SSL = YesPlease
NO_STRCASESTR = YesPlease
NO_STRLCPY = YesPlease
NO_MEMMEM = YesPlease
- # NEEDS_LIBICONV = YesPlease
- NO_ICONV = YesPlease
+ NEEDS_LIBICONV = YesPlease
NO_STRTOUMAX = YesPlease
NO_MKDTEMP = YesPlease
- SNPRINTF_RETURNS_BOGUS = YesPlease
+ NO_INTTYPES_H = YesPlease
+ # VS2015 with UCRT claims that snprintf and friends are C99 compliant,
+ # so we don't need this:
+ #
+ # SNPRINTF_RETURNS_BOGUS = YesPlease
NO_SVN_TESTS = YesPlease
RUNTIME_PREFIX = YesPlease
HAVE_WPGMPTR = YesWeDo
NO_REGEX = YesPlease
NO_GETTEXT = YesPlease
NO_PYTHON = YesPlease
- BLK_SHA1 = YesPlease
ETAGS_TARGET = ETAGS
NO_POSIX_GOODIES = UnfortunatelyYes
NATIVE_CRLF = YesPlease
CC = compat/vcbuild/scripts/clink.pl
AR = compat/vcbuild/scripts/lib.pl
CFLAGS =
- BASIC_CFLAGS = -nologo -I. -I../zlib -Icompat/vcbuild -Icompat/vcbuild/include -DWIN32 -D_CONSOLE -DHAVE_STRING_H -D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE
+ BASIC_CFLAGS = -nologo -I. -Icompat/vcbuild/include -DWIN32 -D_CONSOLE -DHAVE_STRING_H -D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE
COMPAT_OBJS = compat/msvc.o compat/winansi.o \
compat/win32/path-utils.o \
compat/win32/pthread.o compat/win32/syslog.o \
compat/win32/trace2_win32_process_info.o \
compat/win32/dirent.o
- COMPAT_CFLAGS = -D__USE_MINGW_ACCESS -DNOGDI -DHAVE_STRING_H -Icompat -Icompat/regex -Icompat/win32 -DSTRIP_EXTENSION=\".exe\"
- BASIC_LDFLAGS = -IGNORE:4217 -IGNORE:4049 -NOLOGO -SUBSYSTEM:CONSOLE
- EXTLIBS = user32.lib advapi32.lib shell32.lib wininet.lib ws2_32.lib invalidcontinue.obj
+ COMPAT_CFLAGS = -D__USE_MINGW_ACCESS -DDETECT_MSYS_TTY -DNOGDI -DHAVE_STRING_H -Icompat -Icompat/regex -Icompat/win32 -DSTRIP_EXTENSION=\".exe\"
+ BASIC_LDFLAGS = -IGNORE:4217 -IGNORE:4049 -NOLOGO -ENTRY:wmainCRTStartup -SUBSYSTEM:CONSOLE
+ # invalidcontinue.obj allows Git's source code to close the same file
+ # handle twice, or to access the osfhandle of an already-closed stdout
+ # See https://msdn.microsoft.com/en-us/library/ms235330.aspx
+ EXTLIBS = user32.lib advapi32.lib shell32.lib wininet.lib ws2_32.lib invalidcontinue.obj kernel32.lib ntdll.lib
PTHREAD_LIBS =
lib =
+ BASIC_CFLAGS += $(vcpkg_inc) $(sdk_includes) $(msvc_includes)
+ifndef DEBUG
+ BASIC_CFLAGS += $(vcpkg_rel_lib)
+else
+ BASIC_CFLAGS += $(vcpkg_dbg_lib)
+endif
+ BASIC_CFLAGS += $(sdk_libs) $(msvc_libs)
+
+ifneq ($(USE_MSVC_CRTDBG),)
+ # Optionally enable memory leak reporting.
+ BASIC_CFLAGS += -DUSE_MSVC_CRTDBG
+endif
BASIC_CFLAGS += -DPROTECT_NTFS_DEFAULT=1
+ # Always give "-Zi" to the compiler and "-debug" to the linker (even in
+ # release mode) to force a PDB to be generated (like RelWithDebInfo).
+ BASIC_CFLAGS += -Zi
+ BASIC_LDFLAGS += -debug -Zf
+
+ifdef NO_SAFESEH
+ LDFLAGS += -SAFESEH:NO
+endif
+
ifndef DEBUG
- BASIC_CFLAGS += -GL -Os -MD
- BASIC_LDFLAGS += -LTCG
+ BASIC_CFLAGS += -GL -Gy -O2 -Oy- -MD -DNDEBUG
+ BASIC_LDFLAGS += -release -LTCG /OPT:REF /OPT:ICF /INCREMENTAL:NO /DEBUGTYPE:CV,FIXUP
AR += -LTCG
else
- BASIC_CFLAGS += -Zi -MDd
+ BASIC_CFLAGS += -MDd -DDEBUG -D_DEBUG
endif
X = .exe
+
+compat/msvc.o: compat/msvc.c compat/mingw.c GIT-CFLAGS
endif
ifeq ($(uname_S),Interix)
NO_INITGROUPS = YesPlease
ETAGS_TARGET = ETAGS
NO_POSIX_GOODIES = UnfortunatelyYes
DEFAULT_HELP_FORMAT = html
+ BASIC_LDFLAGS += -municode
COMPAT_CFLAGS += -DNOGDI -Icompat -Icompat/win32
COMPAT_CFLAGS += -DSTRIP_EXTENSION=\".exe\"
COMPAT_OBJS += compat/mingw.o compat/winansi.o \
INTERNAL_QSORT = YesPlease
HAVE_LIBCHARSET_H = YesPlease
NO_GETTEXT = YesPlease
- COMPAT_CLFAGS += -D__USE_MINGW_ACCESS
+ COMPAT_CFLAGS += -D__USE_MINGW_ACCESS
else
ifneq ($(shell expr "$(uname_R)" : '1\.'),2)
# MSys2
BASIC_LDFLAGS += -Wl,--large-address-aware
endif
CC = gcc
- COMPAT_CFLAGS += -D__USE_MINGW_ANSI_STDIO=0 -DDETECT_MSYS_TTY
+ COMPAT_CFLAGS += -D__USE_MINGW_ANSI_STDIO=0 -DDETECT_MSYS_TTY \
+ -fstack-protector-strong
EXTLIBS += -lntdll
INSTALL = /bin/install
NO_R_TO_GCC_LINKER = YesPlease
NO_STRCASESTR = YesPlease
NO_STRLCPY = YesPlease
endif
+
+vcxproj:
+ # Require clean work tree
+ git update-index -q --refresh && \
+ git diff-files --quiet && \
+ git diff-index --cached --quiet HEAD --
+
+ # Make .vcxproj files and add them
+ unset QUIET_GEN QUIET_BUILT_IN; \
+ perl contrib/buildsystems/generate -g Vcxproj
+ git add -f git.sln {*,*/lib,t/helper/*}/*.vcxproj
+
+ # Generate the LinkOrCopyBuiltins.targets file
+ (echo '<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">' && \
+ echo ' <Target Name="CopyBuiltins_AfterBuild" AfterTargets="AfterBuild">' && \
+ for name in $(BUILT_INS);\
+ do \
+ echo ' <Copy SourceFiles="$$(OutDir)\git.exe" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
+ done && \
+ for name in $(REMOTE_CURL_ALIASES); \
+ do \
+ echo ' <Copy SourceFiles="$$(OutDir)\'"$(REMOTE_CURL_PRIMARY)"'" DestinationFiles="$$(OutDir)\'"$$name"'" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />'; \
+ done && \
+ echo ' </Target>' && \
+ echo '</Project>') >git/LinkOrCopyBuiltins.targets
+ git add -f git/LinkOrCopyBuiltins.targets
+
+ # Add command-list.h
+ $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 command-list.h
+ git add -f command-list.h
+
+ # Add scripts
+ rm -f perl/perl.mak
+ $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 \
+ $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN)
+ # Strip out the sane tool path, needed only for building
+ sed -i '/^git_broken_path_fix ".*/d' git-sh-setup
+ git add -f $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN)
+
+ # Add Perl module
+ $(MAKE) $(LIB_PERL_GEN)
+ git add -f perl/build
+
+ # Add bin-wrappers, for testing
+ rm -rf bin-wrappers/
+ $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(test_bindir_programs)
+ # Ensure that the GIT_EXEC_PATH is a Unix-y one, and that the absolute
+ # path of the repository is not hard-coded (GIT_EXEC_PATH will be set
+ # by test-lib.sh according to the current setup)
+ sed -i -e 's/^\(GIT_EXEC_PATH\)=.*/test -n "$${\1##*:*}" ||\
+ \1="$$(cygpath -u "$$\1")"/' \
+ -e "s|'$$(pwd)|\"\$$GIT_EXEC_PATH\"'|g" bin-wrappers/*
+ # Ensure that test-* helpers find the .dll files copied to top-level
+ sed -i 's|^PATH=.*|&:"$$GIT_EXEC_PATH"|' bin-wrappers/test-*
+ # We do not want to force hard-linking builtins
+ sed -i 's|\(git\)-\([-a-z]*\)\.exe"|\1.exe" \2|g' \
+ bin-wrappers/git-{receive-pack,upload-archive}
+ git add -f $(test_bindir_programs)
+ # remote-ext is a builtin, but invoked as if it were external
+ sed 's|receive-pack|remote-ext|g' \
+ <bin-wrappers/git-receive-pack >bin-wrappers/git-remote-ext
+ git add -f bin-wrappers/git-remote-ext
+
+ # Add templates
+ $(MAKE) -C templates
+ git add -f templates/boilerplates.made templates/blt/
+
+ # Add build options
+ $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 GIT-BUILD-OPTIONS
+ git add -f GIT-BUILD-OPTIONS
+
+ # Commit the whole shebang
+ git commit -m "Generate Visual Studio solution" \
+ -m "Auto-generated by \`$(MAKE)$(MAKEFLAGS) $@\`"
if test "$git_cv_ld_rpath" = "yes"; then
CC_LD_DYNPATH=-rpath
else
- CC_LD_DYNPATH=
- AC_MSG_WARN([linker does not support runtime path to dynamic libraries])
+ AC_CACHE_CHECK([if linker supports -Wl,+b,], git_cv_ld_wl_b, [
+ SAVE_LDFLAGS="${LDFLAGS}"
+ LDFLAGS="${SAVE_LDFLAGS} -Wl,+b,/"
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([], [])], [git_cv_ld_wl_b=yes], [git_cv_ld_wl_b=no])
+ LDFLAGS="${SAVE_LDFLAGS}"
+ ])
+ if test "$git_cv_ld_wl_b" = "yes"; then
+ CC_LD_DYNPATH=-Wl,+b,
+ else
+ CC_LD_DYNPATH=
+ AC_MSG_WARN([linker does not support runtime path to dynamic libraries])
+ fi
fi
fi
fi
argv_array_push(&rev_list.args, "--all");
}
argv_array_push(&rev_list.args, "--quiet");
+ argv_array_push(&rev_list.args, "--alternate-refs");
if (opt->progress)
argv_array_pushf(&rev_list.args, "--progress=%s",
_("Checking connectivity"));
$me = dirname($me);
if (opendir(D,"$me/Generators")) {
foreach my $gen (readdir(D)) {
- next if ($gen =~ /^\.\.?$/);
+ next unless ($gen =~ /\.pm$/);
require "${me}/Generators/$gen";
$gen =~ s,\.pm,,;
push(@AVAILABLE, $gen);
use strict;
use vars qw($VERSION);
+use Digest::SHA qw(sha256_hex);
our $VERSION = '1.00';
our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
push @EXPORT_OK, qw(generate);
}
-my $guid_index = 0;
-my @GUIDS = (
- "{E07B9989-2BF7-4F21-8918-BE22BA467AC3}",
- "{278FFB51-0296-4A44-A81A-22B87B7C3592}",
- "{7346A2C4-F0FD-444F-9EBE-1AF23B2B5650}",
- "{67F421AC-EB34-4D49-820B-3196807B423F}",
- "{385DCFE1-CC8C-4211-A451-80FCFC31CA51}",
- "{97CC46C5-D2CC-4D26-B634-E75792B79916}",
- "{C7CE21FE-6EF8-4012-A5C7-A22BCEDFBA11}",
- "{51575134-3FDF-42D1-BABD-3FB12669C6C9}",
- "{0AE195E4-9823-4B87-8E6F-20C5614AF2FF}",
- "{4B918255-67CA-43BB-A46C-26704B666E6B}",
- "{18CCFEEF-C8EE-4CC1-A265-26F95C9F4649}",
- "{5D5D90FA-01B7-4973-AFE5-CA88C53AC197}",
- "{1F054320-036D-49E1-B384-FB5DF0BC8AC0}",
- "{7CED65EE-F2D9-4171-825B-C7D561FE5786}",
- "{8D341679-0F07-4664-9A56-3BA0DE88B9BC}",
- "{C189FEDC-2957-4BD7-9FA4-7622241EA145}",
- "{66844203-1B9F-4C53-9274-164FFF95B847}",
- "{E4FEA145-DECC-440D-AEEA-598CF381FD43}",
- "{73300A8E-C8AC-41B0-B555-4F596B681BA7}",
- "{873FDEB1-D01D-40BF-A1BF-8BBC58EC0F51}",
- "{7922C8BE-76C5-4AC6-8BF7-885C0F93B782}",
- "{E245D370-308B-4A49-BFC1-1E527827975F}",
- "{F6FA957B-66FC-4ED7-B260-E59BBE4FE813}",
- "{E6055070-0198-431A-BC49-8DB6CEE770AE}",
- "{54159234-C3EB-43DA-906B-CE5DA5C74654}",
- "{594CFC35-0B60-46F6-B8EF-9983ACC1187D}",
- "{D93FCAB7-1F01-48D2-B832-F761B83231A5}",
- "{DBA5E6AC-E7BE-42D3-8703-4E787141526E}",
- "{6171953F-DD26-44C7-A3BE-CC45F86FC11F}",
- "{9E19DDBE-F5E4-4A26-A2FE-0616E04879B8}",
- "{AE81A615-99E3-4885-9CE0-D9CAA193E867}",
- "{FBF4067E-1855-4F6C-8BCD-4D62E801A04D}",
- "{17007948-6593-4AEB-8106-F7884B4F2C19}",
- "{199D4C8D-8639-4DA6-82EF-08668C35DEE0}",
- "{E085E50E-C140-4CF3-BE4B-094B14F0DDD6}",
- "{00785268-A9CC-4E40-AC29-BAC0019159CE}",
- "{4C06F56A-DCDB-46A6-B67C-02339935CF12}",
- "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}",
- "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}",
- "{9392EB58-D7BA-410B-B1F0-B2FAA6BC89A7}",
- "{2ACAB2D5-E0CE-4027-BCA0-D78B2D7A6C66}",
- "{86E216C3-43CE-481A-BCB2-BE5E62850635}",
- "{FB631291-7923-4B91-9A57-7B18FDBB7A42}",
- "{0A176EC9-E934-45B8-B87F-16C7F4C80039}",
- "{DF55CA80-46E8-4C53-B65B-4990A23DD444}",
- "{3A0F9895-55D2-4710-BE5E-AD7498B5BF44}",
- "{294BDC5A-F448-48B6-8110-DD0A81820F8C}",
- "{4B9F66E9-FAC9-47AB-B1EF-C16756FBFD06}",
- "{72EA49C6-2806-48BD-B81B-D4905102E19C}",
- "{5728EB7E-8929-486C-8CD5-3238D060E768}"
-);
+sub generate_guid ($) {
+ my $hex = sha256_hex($_[0]);
+ $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
+ $hex =~ tr/a-z/A-Z/;
+ return $hex;
+}
sub generate {
my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
$target =~ s/\//_/g;
$target =~ s/\.a//;
- my $uuid = $GUIDS[$guid_index];
+ my $uuid = generate_guid($libname);
$$build_structure{"LIBS_${target}_GUID"} = $uuid;
- $guid_index += 1;
my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"LIBS_${libname}_SOURCES"}}));
my @sources;
my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"LIBS_${libname}_INCLUDES"}})));
my $cflags = join(" ", sort(@{$$build_structure{"LIBS_${libname}_CFLAGS"}}));
$cflags =~ s/\"/&quot;/g;
+ $cflags =~ s/</&lt;/g;
+ $cflags =~ s/>/&gt;/g;
my $cflags_debug = $cflags;
$cflags_debug =~ s/-MT/-MTd/;
$defines =~ s/-D//g;
$defines =~ s/\"/\\"/g;
+ $defines =~ s/</</g;
+ $defines =~ s/>/>/g;
$defines =~ s/\'//g;
$includes =~ s/-I//g;
mkdir "$target" || die "Could not create the directory $target for lib project!\n";
<Tool
Name="VCXMLDataGeneratorTool"
/>
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
<Tool
Name="VCMIDLTool"
/>
$target =~ s/\//_/g;
$target =~ s/\.exe//;
- my $uuid = $GUIDS[$guid_index];
+ my $uuid = generate_guid($appname);
$$build_structure{"APPS_${target}_GUID"} = $uuid;
- $guid_index += 1;
my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"APPS_${appname}_SOURCES"}}));
my @sources;
my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"APPS_${appname}_INCLUDES"}})));
my $cflags = join(" ", sort(@{$$build_structure{"APPS_${appname}_CFLAGS"}}));
$cflags =~ s/\"/"/g;
+ $cflags =~ s/</</g;
+ $cflags =~ s/>/>/g;
my $cflags_debug = $cflags;
$cflags_debug =~ s/-MT/-MTd/;
$defines =~ s/-D//g;
$defines =~ s/\"/\\&quot;/g;
+ $defines =~ s/</&lt;/g;
+ $defines =~ s/>/&gt;/g;
$defines =~ s/\'//g;
$defines =~ s/\\\\/\\/g;
$includes =~ s/-I//g;
<Tool
Name="VCXMLDataGeneratorTool"
/>
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
<Tool
Name="VCMIDLTool"
/>
foreach (@apps) {
$_ =~ s/\//_/g;
$_ =~ s/\.exe//;
- push(@tmp, $_);
+ if ($_ eq "git" ) {
+ unshift(@tmp, $_);
+ } else {
+ push(@tmp, $_);
+ }
}
@apps = @tmp;
open F, ">git.sln" || die "Could not open git.sln for writing!\n";
binmode F, ":crlf";
print F "$SLN_HEAD";
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
- print F "$SLN_PRE";
- print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\"";
- print F "$SLN_POST";
- }
+
my $uuid_libgit = $build_structure{"LIBS_libgit_GUID"};
my $uuid_xdiff_lib = $build_structure{"LIBS_xdiff_lib_GUID"};
foreach (@apps) {
print F " EndProjectSection";
print F "$SLN_POST";
}
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ print F "$SLN_PRE";
+ print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\"";
+ print F "$SLN_POST";
+ }
print F << "EOM";
Global
print F << "EOM";
GlobalSection(ProjectConfigurationPlatforms) = postSolution
EOM
- foreach (@libs) {
- my $libname = $_;
- my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ foreach (@apps) {
+ my $appname = $_;
+ my $uuid = $build_structure{"APPS_${appname}_GUID"};
print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
}
- foreach (@apps) {
- my $appname = $_;
- my $uuid = $build_structure{"APPS_${appname}_GUID"};
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
--- /dev/null
+package Generators::Vcxproj;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+use Digest::SHA qw(sha256_hex);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+ push @EXPORT_OK, qw(generate);
+}
+
+sub generate_guid ($) {
+ my $hex = sha256_hex($_[0]);
+ $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
+ $hex =~ tr/a-z/A-Z/;
+ return $hex;
+}
+
+sub generate {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ my @libs = @{$build_structure{"LIBS"}};
+ foreach (@libs) {
+ createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 1);
+ }
+
+ my @apps = @{$build_structure{"APPS"}};
+ foreach (@apps) {
+ createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 0);
+ }
+
+ createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+ return 0;
+}
+
+sub createProject {
+ my ($name, $git_dir, $out_dir, $rel_dir, $build_structure, $static_library) = @_;
+ my $label = $static_library ? "lib" : "app";
+ my $prefix = $static_library ? "LIBS_" : "APPS_";
+ my $config_type = $static_library ? "StaticLibrary" : "Application";
+ print "Generate $name vcxproj $label project\n";
+ my $cdup = $name;
+ $cdup =~ s/[^\/]+/../g;
+ $cdup =~ s/\//\\/g;
+ $rel_dir = $rel_dir eq "." ? $cdup : "$cdup\\$rel_dir";
+ $rel_dir =~ s/\//\\/g;
+
+ my $target = $name;
+ if ($static_library) {
+ $target =~ s/\.a//;
+ } else {
+ $target =~ s/\.exe//;
+ }
+
+ my $uuid = generate_guid($name);
+ $$build_structure{"$prefix${target}_GUID"} = $uuid;
+ my $vcxproj = $target;
+ $vcxproj =~ s/(.*\/)?(.*)/$&\/$2.vcxproj/;
+ $vcxproj =~ s/([^\/]*)(\/lib)\/(lib.vcxproj)/$1$2\/$1_$3/;
+ $$build_structure{"$prefix${target}_VCXPROJ"} = $vcxproj;
+
+ my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"$prefix${name}_SOURCES"}}));
+ my @sources;
+ foreach (@srcs) {
+ $_ =~ s/\//\\/g;
+ push(@sources, $_);
+ }
+ my $defines = join(";", sort(@{$$build_structure{"$prefix${name}_DEFINES"}}));
+ my $includes= join(";", sort(map { s/^-I//; s/\//\\/g; File::Spec->file_name_is_absolute($_) ? $_ : "$rel_dir\\$_" } @{$$build_structure{"$prefix${name}_INCLUDES"}}));
+ my $cflags = join(" ", sort(map { s/^-[GLMOWZ].*//; s/.* .*/"$&"/; $_; } @{$$build_structure{"$prefix${name}_CFLAGS"}}));
+ $cflags =~ s/</&lt;/g;
+ $cflags =~ s/>/&gt;/g;
+
+ my $libs_release = "\n ";
+ my $libs_debug = "\n ";
+ if (!$static_library) {
+ $libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}}));
+ $libs_debug = $libs_release;
+ $libs_debug =~ s/zlib\.lib/zlibd\.lib/;
+ }
+
+ $defines =~ s/-D//g;
+ $defines =~ s/</&lt;/g;
+ $defines =~ s/>/&gt;/g;
+ $defines =~ s/\'//g;
+
+ die "Could not create the directory $target for $label project!\n" unless (-d "$target" || mkdir "$target");
+
+ open F, ">$vcxproj" or die "Could not open $vcxproj for writing!\n";
+ binmode F, ":crlf :utf8";
+ print F chr(0xFEFF);
+ print F << "EOM";
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Debug|Win32">
+ <Configuration>Debug</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|Win32">
+ <Configuration>Release</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Debug|x64">
+ <Configuration>Debug</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|x64">
+ <Configuration>Release</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>$uuid</ProjectGuid>
+ <Keyword>Win32Proj</Keyword>
+ <VCPKGArch Condition="'\$(Platform)'=='Win32'">x86-windows</VCPKGArch>
+ <VCPKGArch Condition="'\$(Platform)'!='Win32'">x64-windows</VCPKGArch>
+ <VCPKGArchDirectory>$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)</VCPKGArchDirectory>
+ <VCPKGBinDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
+ <VCPKGLibDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
+ <VCPKGBinDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
+ <VCPKGLibDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
+ <VCPKGIncludeDirectory>\$(VCPKGArchDirectory)\\include</VCPKGIncludeDirectory>
+ <VCPKGLibs Condition="'\$(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
+ <VCPKGLibs Condition="'\$(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
+ </PropertyGroup>
+ <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'\$(Configuration)'=='Debug'" Label="Configuration">
+ <UseDebugLibraries>true</UseDebugLibraries>
+ <LinkIncremental>true</LinkIncremental>
+ </PropertyGroup>
+ <PropertyGroup Condition="'\$(Configuration)'=='Release'" Label="Configuration">
+ <UseDebugLibraries>false</UseDebugLibraries>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ </PropertyGroup>
+ <PropertyGroup>
+ <ConfigurationType>$config_type</ConfigurationType>
+ <PlatformToolset>v140</PlatformToolset>
+ <!-- <CharacterSet>UTF-8</CharacterSet> -->
+ <OutDir>..\\</OutDir>
+ <!-- <IntDir>\$(ProjectDir)\$(Configuration)\\</IntDir> -->
+ </PropertyGroup>
+ <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Label="Shared">
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets">
+ <Import Project="\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props" Condition="exists('\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+ <GenerateManifest>false</GenerateManifest>
+ <EnableManagedIncrementalBuild>true</EnableManagedIncrementalBuild>
+ </PropertyGroup>
+ <ItemDefinitionGroup>
+ <ClCompile>
+ <AdditionalOptions>$cflags %(AdditionalOptions)</AdditionalOptions>
+ <AdditionalIncludeDirectories>$cdup;$cdup\\compat;$cdup\\compat\\regex;$cdup\\compat\\win32;$cdup\\compat\\poll;$cdup\\compat\\vcbuild\\include;\$(VCPKGIncludeDirectory);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <EnableParallelCodeGeneration />
+ <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion>
+ <PrecompiledHeader />
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ </ClCompile>
+ <Lib>
+ <SuppressStartupBanner>true</SuppressStartupBanner>
+ </Lib>
+ <Link>
+ <AdditionalLibraryDirectories>\$(VCPKGLibDirectory);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <AdditionalDependencies>\$(VCPKGLibs);\$(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalOptions>invalidcontinue.obj %(AdditionalOptions)</AdditionalOptions>
+ <EntryPointSymbol>wmainCRTStartup</EntryPointSymbol>
+ <ManifestFile>$cdup\\compat\\win32\\git.manifest</ManifestFile>
+ <SubSystem>Console</SubSystem>
+ </Link>
+EOM
+ if ($target eq 'libgit') {
+ print F << "EOM";
+ <PreBuildEvent Condition="!Exists('$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)\\include\\openssl\\ssl.h')">
+ <Message>Initialize VCPKG</Message>
+ <Command>del "$cdup\\compat\\vcbuild\\vcpkg"</Command>
+ <Command>call "$cdup\\compat\\vcbuild\\vcpkg_install.bat"</Command>
+ </PreBuildEvent>
+EOM
+ }
+ print F << "EOM";
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'\$(Platform)'=='Win32'">
+ <Link>
+ <TargetMachine>MachineX86</TargetMachine>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'\$(Configuration)'=='Debug'">
+ <ClCompile>
+ <Optimization>Disabled</Optimization>
+ <PreprocessorDefinitions>WIN32;_DEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+ </ClCompile>
+ <Link>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'\$(Configuration)'=='Release'">
+ <ClCompile>
+ <Optimization>MaxSpeed</Optimization>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <PreprocessorDefinitions>WIN32;NDEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
+ </ClCompile>
+ <Link>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <OptimizeReferences>true</OptimizeReferences>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+EOM
+ foreach(@sources) {
+ print F << "EOM";
+ <ClCompile Include="$_" />
+EOM
+ }
+ print F << "EOM";
+ </ItemGroup>
+EOM
+ if (!$static_library || $target =~ 'vcs-svn' || $target =~ 'xdiff') {
+ my $uuid_libgit = $$build_structure{"LIBS_libgit_GUID"};
+ my $uuid_xdiff_lib = $$build_structure{"LIBS_xdiff/lib_GUID"};
+
+ print F << "EOM";
+ <ItemGroup>
+ <ProjectReference Include="$cdup\\libgit\\libgit.vcxproj">
+ <Project>$uuid_libgit</Project>
+ <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+ </ProjectReference>
+EOM
+ if (!($name =~ 'xdiff')) {
+ print F << "EOM";
+ <ProjectReference Include="$cdup\\xdiff\\lib\\xdiff_lib.vcxproj">
+ <Project>$uuid_xdiff_lib</Project>
+ <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+ </ProjectReference>
+EOM
+ }
+ if ($name =~ /(test-(line-buffer|svn-fe)|^git-remote-testsvn)\.exe$/) {
+ my $uuid_vcs_svn_lib = $$build_structure{"LIBS_vcs-svn/lib_GUID"};
+ print F << "EOM";
+ <ProjectReference Include="$cdup\\vcs-svn\\lib\\vcs-svn_lib.vcxproj">
+ <Project>$uuid_vcs_svn_lib</Project>
+ <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+ </ProjectReference>
+EOM
+ }
+ print F << "EOM";
+ </ItemGroup>
+EOM
+ }
+ print F << "EOM";
+ <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.targets" />
+EOM
+ if (!$static_library) {
+ print F << "EOM";
+ <Target Name="${target}_AfterBuild" AfterTargets="AfterBuild">
+ <ItemGroup>
+ <DLLsAndPDBs Include="\$(VCPKGBinDirectory)\\*.dll;\$(VCPKGBinDirectory)\\*.pdb" />
+ </ItemGroup>
+ <Copy SourceFiles="@(DLLsAndPDBs)" DestinationFolder="\$(OutDir)" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />
+ <MakeDir Directories="..\\templates\\blt\\branches" />
+ </Target>
+EOM
+ }
+ if ($target eq 'git') {
+ print F " <Import Project=\"LinkOrCopyBuiltins.targets\" />\n";
+ }
+ print F << "EOM";
+</Project>
+EOM
+ close F;
+}
+
+sub createGlueProject {
+ my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+ print "Generate solutions file\n";
+ $rel_dir = "..\\$rel_dir";
+ $rel_dir =~ s/\//\\/g;
+ my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 11.00\n# Visual Studio 2010\n";
+ my $SLN_PRE = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
+ my $SLN_POST = "\nEndProject\n";
+
+ my @libs = @{$build_structure{"LIBS"}};
+ my @tmp;
+ foreach (@libs) {
+ $_ =~ s/\.a//;
+ push(@tmp, $_);
+ }
+ @libs = @tmp;
+
+ my @apps = @{$build_structure{"APPS"}};
+ @tmp = ();
+ foreach (@apps) {
+ $_ =~ s/\.exe//;
+ if ($_ eq "git" ) {
+ unshift(@tmp, $_);
+ } else {
+ push(@tmp, $_);
+ }
+ }
+ @apps = @tmp;
+
+ open F, ">git.sln" || die "Could not open git.sln for writing!\n";
+ binmode F, ":crlf :utf8";
+ print F chr(0xFEFF);
+ print F "$SLN_HEAD";
+
+ foreach (@apps) {
+ my $appname = $_;
+ my $uuid = $build_structure{"APPS_${appname}_GUID"};
+ print F "$SLN_PRE";
+ my $vcxproj = $build_structure{"APPS_${appname}_VCXPROJ"};
+ $vcxproj =~ s/\//\\/g;
+ $appname =~ s/.*\///;
+ print F "\"${appname}\", \"${vcxproj}\", \"${uuid}\"";
+ print F "$SLN_POST";
+ }
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ print F "$SLN_PRE";
+ my $vcxproj = $build_structure{"LIBS_${libname}_VCXPROJ"};
+ $vcxproj =~ s/\//\\/g;
+ $libname =~ s/\//_/g;
+ print F "\"${libname}\", \"${vcxproj}\", \"${uuid}\"";
+ print F "$SLN_POST";
+ }
+
+ print F << "EOM";
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
+ EndGlobalSection
+EOM
+ print F << "EOM";
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+EOM
+ foreach (@apps) {
+ my $appname = $_;
+ my $uuid = $build_structure{"APPS_${appname}_GUID"};
+ print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
+ print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
+ print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
+ print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
+ print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
+ print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
+ print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
+ print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
+ }
+ foreach (@libs) {
+ my $libname = $_;
+ my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+ print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
+ print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
+ print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
+ print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
+ print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
+ print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
+ print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
+ print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
+ }
+
+ print F << "EOM";
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
+EOM
+ close F;
+}
+
+1;
use File::Spec;
use Cwd;
use Generators;
+use Text::ParseWords;
my (%build_structure, %compile_options, @makedry);
my $out_dir = getcwd();
-g <GENERATOR> --gen <GENERATOR> Specify the buildsystem generator (default: $gen)
Available: $genlist
-o <PATH> --out <PATH> Specify output directory generation (default: .)
+ --make-out <PATH> Write the output of GNU Make into a file
-i <FILE> --in <FILE> Specify input file, instead of running GNU Make
-h,-? --help This help
EOM
}
# Parse command-line options
+my $make_out;
while (@ARGV) {
my $arg = shift @ARGV;
if ("$arg" eq "-h" || "$arg" eq "--help" || "$arg" eq "-?") {
exit(0);
} elsif("$arg" eq "--out" || "$arg" eq "-o") {
$out_dir = shift @ARGV;
+ } elsif("$arg" eq "--make-out") {
+ $make_out = shift @ARGV;
} elsif("$arg" eq "--gen" || "$arg" eq "-g") {
$gen = shift @ARGV;
} elsif("$arg" eq "--in" || "$arg" eq "-i") {
open(F, "<$infile") || die "Couldn't open file $infile";
@makedry = <F>;
close(F);
+ } else {
+ die "Unknown option: " . $arg;
}
}
EOM
# Pipe a make --dry-run into a variable, if not already loaded from file
-@makedry = `cd $git_dir && make -n MSVC=1 V=1 2>/dev/null` if !@makedry;
+# Capture the make dry stderr to file for review (will be empty for a release build).
+
+my $ErrsFile = "msvc-build-makedryerrors.txt";
+@makedry = `make -C $git_dir -n MSVC=1 SKIP_VCPKG=1 V=1 2>$ErrsFile`
+if !@makedry;
+# test for an empty Errors file and remove it
+unlink $ErrsFile if -f -z $ErrsFile;
+
+if (defined $make_out) {
+ open OUT, ">" . $make_out;
+ print OUT @makedry;
+ close OUT;
+}
# Parse the make output into usable info
parseMakeOutput();
next;
}
+ if ($text =~ /^(mkdir|msgfmt) /) {
+ # options to the Portable Object translations
+ # the line "mkdir ... && msgfmt ..." contains no linker options
+ next;
+ }
+
if($text =~ / -c /) {
# compilation
handleCompileLine($text, $line);
sub handleCompileLine
{
my ($line, $lineno) = @_;
- my @parts = split(' ', $line);
+ my @parts = shellwords($line);
my $sourcefile;
shift(@parts); # ignore cmd
while (my $part = shift @parts) {
my (@objfiles, @lflags, $libout, $part);
# kill cmd and rm 'prefix'
$line =~ s/^rm -f .* && .* rcs //;
- my @parts = split(' ', $line);
+ my @parts = shellwords($line);
while ($part = shift @parts) {
if ($part =~ /^-/) {
push(@lflags, $part);
# exit(1);
foreach (@objfiles) {
my $sourcefile = $_;
- $sourcefile =~ s/\.o/.c/;
+ $sourcefile =~ s/\.o$/.c/;
push(@sources, $sourcefile);
push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
{
my ($line, $lineno) = @_;
my (@objfiles, @lflags, @libs, $appout, $part);
- my @parts = split(' ', $line);
+ my @parts = shellwords($line);
shift(@parts); # ignore cmd
while ($part = shift @parts) {
if ($part =~ /^-IGNORE/) {
$appout = shift @parts;
} elsif ("$part" eq "-lz") {
push(@libs, "zlib.lib");
- } elsif ("$part" eq "-lcrypto") {
push(@libs, "libeay32.lib");
} elsif ("$part" eq "-lssl") {
push(@libs, "ssleay32.lib");
- } elsif ($part =~ /^-/) {
+ } elsif ("$part" eq "-lcurl") {
+ push(@libs, "libcurl.lib");
+ } elsif ("$part" eq "-lexpat") {
+ push(@libs, "expat.lib");
+ } elsif ("$part" eq "-liconv") {
+ push(@libs, "libiconv.lib");
+ } elsif ($part =~ /^[-\/]/) {
push(@lflags, $part);
} elsif ($part =~ /\.(a|lib)$/) {
$part =~ s/\.a$/.lib/;
push(@libs, $part);
- } elsif ($part =~ /\.(o|obj)$/) {
+ } elsif ($part eq 'invalidcontinue.obj') {
+ # ignore - known to MSVC
+ } elsif ($part =~ /\.o$/) {
push(@objfiles, $part);
+ } elsif ($part =~ /\.obj$/) {
+ # do nothing, 'make' should not be producing .obj, only .o files
} else {
- die "Unhandled lib option @ line $lineno: $part";
+ die "Unhandled link option @ line $lineno: $part";
}
}
# print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n";
# exit(1);
foreach (@objfiles) {
my $sourcefile = $_;
- $sourcefile =~ s/\.o/.c/;
+ $sourcefile =~ s/\.o$/.c/;
push(@sources, $sourcefile);
push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
# GIT_COMPLETION_CHECKOUT_NO_GUESS
#
# When set to "1", do not include "DWIM" suggestions in git-checkout
-# completion (e.g., completing "foo" when "origin/foo" exists).
+# and git-switch completion (e.g., completing "foo" when "origin/foo"
+# exists).
case "$COMP_WORDBREAKS" in
*:*) : great ;;
__git_complete_index_file "$complete_opt"
}
+_git_switch ()
+{
+ case "$cur" in
+ --conflict=*)
+ __gitcomp "diff3 merge" "" "${cur##--conflict=}"
+ ;;
+ --*)
+ __gitcomp_builtin switch
+ ;;
+ *)
+ # check if --track, --no-track, or --no-guess was specified
+ # if so, disable DWIM mode
+ local track_opt="--track" only_local_ref=n
+ if [ "$GIT_COMPLETION_CHECKOUT_NO_GUESS" = "1" ] ||
+ [ -n "$(__git_find_on_cmdline "--track --no-track --no-guess")" ]; then
+ track_opt=''
+ fi
+ # explicit --guess enables DWIM mode regardless of
+ # $GIT_COMPLETION_CHECKOUT_NO_GUESS
+ if [ -n "$(__git_find_on_cmdline "--guess")" ]; then
+ track_opt='--track'
+ fi
+ if [ -z "$(__git_find_on_cmdline "-d --detach")" ]; then
+ only_local_ref=y
+ else
+ # --guess --detach is invalid combination, no
+ # dwim will be done when --detach is specified
+ track_opt=
+ fi
+ if [ $only_local_ref = y -a -z "$track_opt" ]; then
+ __gitcomp_direct "$(__git_heads "" "$cur" " ")"
+ else
+ __git_complete_refs $track_opt
+ fi
+ ;;
+ esac
+}
+
__git_config_get_set_variables ()
{
local prevword word config_file= c=$cword
__git_complete_refs
}
+_git_restore ()
+{
+ case "$cur" in
+ --conflict=*)
+ __gitcomp "diff3 merge" "" "${cur##--conflict=}"
+ ;;
+ --source=*)
+ __git_complete_refs --cur="${cur##--source=}"
+ ;;
+ --*)
+ __gitcomp_builtin restore
+ ;;
+ esac
+}
+
__git_revert_inprogress_options="--continue --quit --abort"
_git_revert ()
test -r "$1" && IFS=$'\r\n' read "$2" <"$1"
}
+# see if a cherry-pick or revert is in progress, if the user has committed a
+# conflict resolution with 'git commit' in the middle of a sequence of picks or
+# reverts then CHERRY_PICK_HEAD/REVERT_HEAD will not exist so we have to read
+# the todo file.
+__git_sequencer_status ()
+{
+ local todo
+ if test -f "$g/CHERRY_PICK_HEAD"
+ then
+ r="|CHERRY-PICKING"
+ return 0;
+ elif test -f "$g/REVERT_HEAD"
+ then
+ r="|REVERTING"
+ return 0;
+ elif __git_eread "$g/sequencer/todo" todo
+ then
+ case "$todo" in
+ p[\ \ ]|pick[\ \ ]*)
+ r="|CHERRY-PICKING"
+ return 0
+ ;;
+ revert[\ \ ]*)
+ r="|REVERTING"
+ return 0
+ ;;
+ esac
+ fi
+ return 1
+}
+
# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
# when called from PS1 using command substitution
# in this mode it prints text to add to bash PS1 prompt (includes branch name)
fi
elif [ -f "$g/MERGE_HEAD" ]; then
r="|MERGING"
- elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
- r="|CHERRY-PICKING"
- elif [ -f "$g/REVERT_HEAD" ]; then
- r="|REVERTING"
+ elif __git_sequencer_status; then
+ :
elif [ -f "$g/BISECT_LOG" ]; then
r="|BISECTING"
fi
static unsigned int hash_obj(const struct object *obj, unsigned int n)
{
- return sha1hash(obj->oid.hash) % n;
+ return oidhash(&obj->oid) % n;
}
static void *insert_decoration(struct decoration *n, const struct object *base, void *decoration)
KHASH_INIT(str, const char *, void *, 1, kh_str_hash_func, kh_str_hash_equal)
-static khash_sha1 *island_marks;
+static kh_oid_map_t *island_marks;
static unsigned island_counter;
static unsigned island_counter_core;
* If we don't have a bitmap for the target, we can delta it
* against anything -- it's not an important object
*/
- trg_pos = kh_get_sha1(island_marks, trg_oid->hash);
+ trg_pos = kh_get_oid_map(island_marks, *trg_oid);
if (trg_pos >= kh_end(island_marks))
return 1;
* if the source (our delta base) doesn't have a bitmap,
* we don't want to base any deltas on it!
*/
- src_pos = kh_get_sha1(island_marks, src_oid->hash);
+ src_pos = kh_get_oid_map(island_marks, *src_oid);
if (src_pos >= kh_end(island_marks))
return 0;
if (!island_marks)
return 0;
- a_pos = kh_get_sha1(island_marks, a->hash);
+ a_pos = kh_get_oid_map(island_marks, *a);
if (a_pos < kh_end(island_marks))
a_bitmap = kh_value(island_marks, a_pos);
- b_pos = kh_get_sha1(island_marks, b->hash);
+ b_pos = kh_get_oid_map(island_marks, *b);
if (b_pos < kh_end(island_marks))
b_bitmap = kh_value(island_marks, b_pos);
khiter_t pos;
int hash_ret;
- pos = kh_put_sha1(island_marks, obj->oid.hash, &hash_ret);
+ pos = kh_put_oid_map(island_marks, obj->oid, &hash_ret);
if (hash_ret)
kh_value(island_marks, pos) = island_bitmap_new(NULL);
khiter_t pos;
int hash_ret;
- pos = kh_put_sha1(island_marks, obj->oid.hash, &hash_ret);
+ pos = kh_put_oid_map(island_marks, obj->oid, &hash_ret);
if (hash_ret) {
/*
* We don't have one yet; make a copy-on-write of the
struct name_entry entry;
khiter_t pos;
- pos = kh_get_sha1(island_marks, ent->idx.oid.hash);
+ pos = kh_get_oid_map(island_marks, ent->idx.oid);
if (pos >= kh_end(island_marks))
continue;
if (S_ISGITLINK(entry.mode))
continue;
- obj = lookup_object(r, entry.oid.hash);
+ obj = lookup_object(r, &entry.oid);
if (!obj)
continue;
void load_delta_islands(struct repository *r, int progress)
{
- island_marks = kh_init_sha1();
+ island_marks = kh_init_oid_map();
remote_islands = kh_init_str();
git_config(island_config_callback, NULL);
void propagate_island_marks(struct commit *commit)
{
- khiter_t pos = kh_get_sha1(island_marks, commit->object.oid.hash);
+ khiter_t pos = kh_get_oid_map(island_marks, commit->object.oid);
if (pos < kh_end(island_marks)) {
struct commit_list *p;
for (i = 0; i < to_pack->nr_objects; ++i) {
struct object_entry *entry = &to_pack->objects[i];
- khiter_t pos = kh_get_sha1(island_marks, entry->idx.oid.hash);
+ khiter_t pos = kh_get_oid_map(island_marks, entry->idx.oid);
oe_set_layer(to_pack, entry, 1);
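
The delta-islands conversion above swaps the raw-SHA-1 khash (khash_sha1) for the oid-keyed kh_oid_map_t, so keys are now whole struct object_id values passed by value instead of bare hash pointers. A hedged sketch of the lookup/insert pattern the patch now uses; it assumes compilation inside the Git tree, and marks, init_marks, lookup_mark, and set_mark are illustrative names, not part of the patch:

----
#include "cache.h"
#include "khash.h"

/* oid-keyed map from khash.h: struct object_id -> void * */
static kh_oid_map_t *marks;

static void init_marks(void)
{
	marks = kh_init_oid_map();
}

static void *lookup_mark(const struct object_id *oid)
{
	/* the key is passed by value, not as oid->hash */
	khiter_t pos = kh_get_oid_map(marks, *oid);

	if (pos >= kh_end(marks))
		return NULL;	/* not found */
	return kh_value(marks, pos);
}

static void set_mark(const struct object_id *oid, void *data)
{
	int hash_ret;
	khiter_t pos = kh_put_oid_map(marks, *oid, &hash_ret);

	kh_value(marks, pos) = data;
}
----
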
if (ecbdata->opt->flags.dual_color_diffed_diffs)
strbuf_addstr(&msgbuf, reverse);
strbuf_addstr(&msgbuf, frag);
- strbuf_add(&msgbuf, line, ep - line);
+ if (ecbdata->opt->flags.suppress_hunk_header_line_count)
+ strbuf_add(&msgbuf, atat, sizeof(atat));
+ else
+ strbuf_add(&msgbuf, line, ep - line);
strbuf_addstr(&msgbuf, reset);
/*
return dst - line;
}
+void flush_one_hunk(struct object_id *result, git_SHA_CTX *ctx)
+{
+ unsigned char hash[GIT_MAX_RAWSZ];
+ unsigned short carry = 0;
+ int i;
+
+ git_SHA1_Final(hash, ctx);
+ git_SHA1_Init(ctx);
+ /* 20-byte sum, with carry */
+ for (i = 0; i < GIT_SHA1_RAWSZ; ++i) {
+ carry += result->hash[i] + hash[i];
+ result->hash[i] = carry;
+ carry >>= 8;
+ }
+}
+
static void patch_id_consume(void *priv, char *line, unsigned long len)
{
struct patch_id_t *data = priv;
git_SHA1_Update(ctx, buf, len);
}
-/* returns 0 upon success, and writes result into sha1 */
-static int diff_get_patch_id(struct diff_options *options, struct object_id *oid, int diff_header_only)
+/* returns 0 upon success, and writes result into oid */
+static int diff_get_patch_id(struct diff_options *options, struct object_id *oid, int diff_header_only, int stable)
{
struct diff_queue_struct *q = &diff_queued_diff;
int i;
git_SHA1_Init(&ctx);
memset(&data, 0, sizeof(struct patch_id_t));
data.ctx = &ctx;
+ oidclr(oid);
for (i = 0; i < q->nr; i++) {
xpparam_t xpp;
patch_id_consume, &data, &xpp, &xecfg))
return error("unable to generate patch-id diff for %s",
p->one->path);
+
+ if (stable)
+ flush_one_hunk(oid, &ctx);
}
- git_SHA1_Final(oid->hash, &ctx);
+ if (!stable)
+ git_SHA1_Final(oid->hash, &ctx);
+
return 0;
}
-int diff_flush_patch_id(struct diff_options *options, struct object_id *oid, int diff_header_only)
+int diff_flush_patch_id(struct diff_options *options, struct object_id *oid, int diff_header_only, int stable)
{
struct diff_queue_struct *q = &diff_queued_diff;
int i;
- int result = diff_get_patch_id(options, oid, diff_header_only);
+ int result = diff_get_patch_id(options, oid, diff_header_only, stable);
for (i = 0; i < q->nr; i++)
diff_free_filepair(q->queue[i]);
unsigned stat_with_summary;
unsigned suppress_diff_headers;
unsigned dual_color_diffed_diffs;
+ unsigned suppress_hunk_header_line_count;
};
static inline void diff_flags_or(struct diff_flags *a,
int run_diff_index(struct rev_info *revs, int cached);
int do_diff_cache(const struct object_id *, struct diff_options *);
-int diff_flush_patch_id(struct diff_options *, struct object_id *, int);
+int diff_flush_patch_id(struct diff_options *, struct object_id *, int, int);
+void flush_one_hunk(struct object_id *, git_SHA_CTX *);
int diff_result_code(struct diff_options *, int);
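
In the new "stable" mode, diff_get_patch_id() clears the result up front and calls flush_one_hunk() after each chunk of the diff is hashed: the accumulated SHA-1 is finalized, the context is re-initialized, and the digest is folded into the running 20-byte result with a byte-wise carry, which makes the final patch-id independent of the order in which those chunks are processed. A standalone sketch of just that carry-summing step, using made-up 20-byte digests instead of real SHA-1 output:

----
/* Sketch only: the byte-wise "sum with carry" used by flush_one_hunk(). */
#include <stdio.h>
#include <string.h>

#define RAWSZ 20

static void add_digest(unsigned char result[RAWSZ],
		       const unsigned char hash[RAWSZ])
{
	unsigned short carry = 0;
	for (int i = 0; i < RAWSZ; i++) {
		carry += result[i] + hash[i];
		result[i] = carry;	/* keep the low byte */
		carry >>= 8;		/* propagate the overflow */
	}
}

int main(void)
{
	unsigned char sum[RAWSZ] = { 0 };
	unsigned char a[RAWSZ], b[RAWSZ];

	memset(a, 0xff, sizeof(a));	/* placeholder "hunk" digests */
	memset(b, 0x01, sizeof(b));

	/* addition modulo 2^160 is commutative, so order does not matter */
	add_digest(sum, a);
	add_digest(sum, b);

	for (int i = 0; i < RAWSZ; i++)
		printf("%02x", sum[i]);
	printf("\n");
	return 0;
}
----
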
hash_object_file(filespec->data, filespec->size, "blob",
&filespec->oid);
}
- return sha1hash(filespec->oid.hash);
+ return oidhash(&filespec->oid);
}
static int find_identical_files(struct hashmap *srcs,
#include "dir-iterator.h"
struct dir_iterator_level {
- int initialized;
-
DIR *dir;
/*
* (including a trailing '/'):
*/
size_t prefix_len;
-
- /*
- * The last action that has been taken with the current entry
- * (needed for directories, which have to be included in the
- * iteration and also iterated into):
- */
- enum {
- DIR_STATE_ITER,
- DIR_STATE_RECURSE
- } dir_state;
};
/*
struct dir_iterator base;
/*
- * The number of levels currently on the stack. This is always
- * at least 1, because when it becomes zero the iteration is
- * ended and this struct is freed.
+ * The number of levels currently on the stack. After the first
+ * call to dir_iterator_begin(), if it succeeds to open the
+ * first level's dir, this will always be at least 1. Then,
+ * when it comes to zero the iteration is ended and this
+ * struct is freed.
*/
size_t levels_nr;
* that will be included in this iteration.
*/
struct dir_iterator_level *levels;
+
+ /* Combination of flags for this dir-iterator */
+ unsigned int flags;
};
+/*
+ * Push a level in the iter stack and initialize it with information from
+ * the directory pointed by iter->base->path. It is assumed that this
+ * strbuf points to a valid directory path. Return 0 on success and -1
+ * otherwise, setting errno accordingly and leaving the stack unchanged.
+ */
+static int push_level(struct dir_iterator_int *iter)
+{
+ struct dir_iterator_level *level;
+
+ ALLOC_GROW(iter->levels, iter->levels_nr + 1, iter->levels_alloc);
+ level = &iter->levels[iter->levels_nr++];
+
+ if (!is_dir_sep(iter->base.path.buf[iter->base.path.len - 1]))
+ strbuf_addch(&iter->base.path, '/');
+ level->prefix_len = iter->base.path.len;
+
+ level->dir = opendir(iter->base.path.buf);
+ if (!level->dir) {
+ int saved_errno = errno;
+ if (errno != ENOENT) {
+ warning_errno("error opening directory '%s'",
+ iter->base.path.buf);
+ }
+ iter->levels_nr--;
+ errno = saved_errno;
+ return -1;
+ }
+
+ return 0;
+}
+
+/*
+ * Pop the top level on the iter stack, releasing any resources associated
+ * with it. Return the new value of iter->levels_nr.
+ */
+static int pop_level(struct dir_iterator_int *iter)
+{
+ struct dir_iterator_level *level =
+ &iter->levels[iter->levels_nr - 1];
+
+ if (level->dir && closedir(level->dir))
+ warning_errno("error closing directory '%s'",
+ iter->base.path.buf);
+ level->dir = NULL;
+
+ return --iter->levels_nr;
+}
+
+/*
+ * Populate iter->base with the necessary information on the next iteration
+ * entry, represented by the given dirent de. Return 0 on success and -1
+ * otherwise, setting errno accordingly.
+ */
+static int prepare_next_entry_data(struct dir_iterator_int *iter,
+ struct dirent *de)
+{
+ int err, saved_errno;
+
+ strbuf_addstr(&iter->base.path, de->d_name);
+ /*
+ * We have to reset these because the path strbuf might have
+ * been realloc()ed at the previous strbuf_addstr().
+ */
+ iter->base.relative_path = iter->base.path.buf +
+ iter->levels[0].prefix_len;
+ iter->base.basename = iter->base.path.buf +
+ iter->levels[iter->levels_nr - 1].prefix_len;
+
+ if (iter->flags & DIR_ITERATOR_FOLLOW_SYMLINKS)
+ err = stat(iter->base.path.buf, &iter->base.st);
+ else
+ err = lstat(iter->base.path.buf, &iter->base.st);
+
+ saved_errno = errno;
+ if (err && errno != ENOENT)
+ warning_errno("failed to stat '%s'", iter->base.path.buf);
+
+ errno = saved_errno;
+ return err;
+}
+
int dir_iterator_advance(struct dir_iterator *dir_iterator)
{
struct dir_iterator_int *iter =
(struct dir_iterator_int *)dir_iterator;
+ if (S_ISDIR(iter->base.st.st_mode) && push_level(iter)) {
+ if (errno != ENOENT && iter->flags & DIR_ITERATOR_PEDANTIC)
+ goto error_out;
+ if (iter->levels_nr == 0)
+ goto error_out;
+ }
+
+ /* Loop until we find an entry that we can give back to the caller. */
while (1) {
+ struct dirent *de;
struct dir_iterator_level *level =
&iter->levels[iter->levels_nr - 1];
- struct dirent *de;
- if (!level->initialized) {
- /*
- * Note: dir_iterator_begin() ensures that
- * path is not the empty string.
- */
- if (!is_dir_sep(iter->base.path.buf[iter->base.path.len - 1]))
- strbuf_addch(&iter->base.path, '/');
- level->prefix_len = iter->base.path.len;
-
- level->dir = opendir(iter->base.path.buf);
- if (!level->dir && errno != ENOENT) {
- warning("error opening directory %s: %s",
- iter->base.path.buf, strerror(errno));
- /* Popping the level is handled below */
- }
-
- level->initialized = 1;
- } else if (S_ISDIR(iter->base.st.st_mode)) {
- if (level->dir_state == DIR_STATE_ITER) {
- /*
- * The directory was just iterated
- * over; now prepare to iterate into
- * it.
- */
- level->dir_state = DIR_STATE_RECURSE;
- ALLOC_GROW(iter->levels, iter->levels_nr + 1,
- iter->levels_alloc);
- level = &iter->levels[iter->levels_nr++];
- level->initialized = 0;
- continue;
- } else {
- /*
- * The directory has already been
- * iterated over and iterated into;
- * we're done with it.
- */
+ strbuf_setlen(&iter->base.path, level->prefix_len);
+ errno = 0;
+ de = readdir(level->dir);
+
+ if (!de) {
+ if (errno) {
+ warning_errno("error reading directory '%s'",
+ iter->base.path.buf);
+ if (iter->flags & DIR_ITERATOR_PEDANTIC)
+ goto error_out;
+ } else if (pop_level(iter) == 0) {
+ return dir_iterator_abort(dir_iterator);
}
+ continue;
}
- if (!level->dir) {
- /*
- * This level is exhausted (or wasn't opened
- * successfully); pop up a level.
- */
- if (--iter->levels_nr == 0)
- return dir_iterator_abort(dir_iterator);
+ if (is_dot_or_dotdot(de->d_name))
+ continue;
+ if (prepare_next_entry_data(iter, de)) {
+ if (errno != ENOENT && iter->flags & DIR_ITERATOR_PEDANTIC)
+ goto error_out;
continue;
}
- /*
- * Loop until we find an entry that we can give back
- * to the caller:
- */
- while (1) {
- strbuf_setlen(&iter->base.path, level->prefix_len);
- errno = 0;
- de = readdir(level->dir);
-
- if (!de) {
- /* This level is exhausted; pop up a level. */
- if (errno) {
- warning("error reading directory %s: %s",
- iter->base.path.buf, strerror(errno));
- } else if (closedir(level->dir))
- warning("error closing directory %s: %s",
- iter->base.path.buf, strerror(errno));
-
- level->dir = NULL;
- if (--iter->levels_nr == 0)
- return dir_iterator_abort(dir_iterator);
- break;
- }
-
- if (is_dot_or_dotdot(de->d_name))
- continue;
-
- strbuf_addstr(&iter->base.path, de->d_name);
- if (lstat(iter->base.path.buf, &iter->base.st) < 0) {
- if (errno != ENOENT)
- warning("error reading path '%s': %s",
- iter->base.path.buf,
- strerror(errno));
- continue;
- }
-
- /*
- * We have to set these each time because
- * the path strbuf might have been realloc()ed.
- */
- iter->base.relative_path =
- iter->base.path.buf + iter->levels[0].prefix_len;
- iter->base.basename =
- iter->base.path.buf + level->prefix_len;
- level->dir_state = DIR_STATE_ITER;
-
- return ITER_OK;
- }
+ return ITER_OK;
}
+
+error_out:
+ dir_iterator_abort(dir_iterator);
+ return ITER_ERROR;
}
int dir_iterator_abort(struct dir_iterator *dir_iterator)
&iter->levels[iter->levels_nr - 1];
if (level->dir && closedir(level->dir)) {
+ int saved_errno = errno;
strbuf_setlen(&iter->base.path, level->prefix_len);
- warning("error closing directory %s: %s",
- iter->base.path.buf, strerror(errno));
+ errno = saved_errno;
+ warning_errno("error closing directory '%s'",
+ iter->base.path.buf);
}
}
return ITER_DONE;
}
-struct dir_iterator *dir_iterator_begin(const char *path)
+struct dir_iterator *dir_iterator_begin(const char *path, unsigned int flags)
{
struct dir_iterator_int *iter = xcalloc(1, sizeof(*iter));
struct dir_iterator *dir_iterator = &iter->base;
-
- if (!path || !*path)
- BUG("empty path passed to dir_iterator_begin()");
+ int saved_errno;
strbuf_init(&iter->base.path, PATH_MAX);
strbuf_addstr(&iter->base.path, path);
ALLOC_GROW(iter->levels, 10, iter->levels_alloc);
+ iter->levels_nr = 0;
+ iter->flags = flags;
- iter->levels_nr = 1;
- iter->levels[0].initialized = 0;
+ /*
+ * Note: stat already checks for NULL or empty strings and
+ * inexistent paths.
+ */
+ if (stat(iter->base.path.buf, &iter->base.st) < 0) {
+ saved_errno = errno;
+ goto error_out;
+ }
+
+ if (!S_ISDIR(iter->base.st.st_mode)) {
+ saved_errno = ENOTDIR;
+ goto error_out;
+ }
return dir_iterator;
+
+error_out:
+ dir_iterator_abort(dir_iterator);
+ errno = saved_errno;
+ return NULL;
}
*
* Iterate over a directory tree, recursively, including paths of all
* types and hidden paths. Skip "." and ".." entries and don't follow
- * symlinks except for the original path.
+ * symlinks except for the original path. Note that the original path
+ * is not included in the iteration.
*
* Every time dir_iterator_advance() is called, update the members of
* the dir_iterator structure to reflect the next path in the
* iteration. The order that paths are iterated over within a
- * directory is undefined, but directory paths are always iterated
- * over before the subdirectory contents.
+ * directory is undefined, directory paths are always given before
+ * their contents.
*
* A typical iteration looks like this:
*
* int ok;
- * struct iterator *iter = dir_iterator_begin(path);
+ * unsigned int flags = DIR_ITERATOR_PEDANTIC;
+ * struct dir_iterator *iter = dir_iterator_begin(path, flags);
+ *
+ * if (!iter)
+ * goto error_handler;
*
* while ((ok = dir_iterator_advance(iter)) == ITER_OK) {
* if (want_to_stop_iteration()) {
* dir_iterator_advance() again.
*/
+/*
+ * Flags for dir_iterator_begin:
+ *
+ * - DIR_ITERATOR_PEDANTIC: override dir-iterator's default behavior
+ * in case of an error at dir_iterator_advance(), which is to keep
+ * looking for a next valid entry. With this flag, resources are freed
+ * and ITER_ERROR is returned immediately. In both cases, a meaningful
+ * warning is emitted. Note: ENOENT errors are always ignored so that
+ * the API users may remove files during iteration.
+ *
+ * - DIR_ITERATOR_FOLLOW_SYMLINKS: make dir-iterator follow symlinks.
+ * i.e., linked directories' contents will be iterated over and
+ * iter->base.st will contain information on the referred files,
+ * not the symlinks themselves, which is the default behavior. Broken
+ * symlinks are ignored.
+ *
+ * Warning: circular symlinks are also followed when
+ * DIR_ITERATOR_FOLLOW_SYMLINKS is set. The iteration may end up with
+ * an ELOOP if they happen and DIR_ITERATOR_PEDANTIC is set.
+ */
+#define DIR_ITERATOR_PEDANTIC (1 << 0)
+#define DIR_ITERATOR_FOLLOW_SYMLINKS (1 << 1)
+
struct dir_iterator {
/* The current path: */
struct strbuf path;
/* The current basename: */
const char *basename;
- /* The result of calling lstat() on path: */
+ /*
+ * The result of calling lstat() on path; or stat(), if the
+ * DIR_ITERATOR_FOLLOW_SYMLINKS flag was set at
+ * dir_iterator's initialization.
+ */
struct stat st;
};
/*
- * Start a directory iteration over path. Return a dir_iterator that
- * holds the internal state of the iteration.
+ * Start a directory iteration over path with the combination of
+ * options specified by flags. On success, return a dir_iterator
+ * that holds the internal state of the iteration. In case of
+ * failure, return NULL and set errno accordingly.
*
* The iteration includes all paths under path, not including path
* itself and not including "." or ".." entries.
*
- * path is the starting directory. An internal copy will be made.
+ * Parameters are:
+ * - path is the starting directory. An internal copy will be made.
+ * - flags is a combination of the possible flags to initialize a
+ * dir-iterator or 0 for default behavior.
*/
-struct dir_iterator *dir_iterator_begin(const char *path);
+struct dir_iterator *dir_iterator_begin(const char *path, unsigned int flags);
/*
* Advance the iterator to the first or next item and return ITER_OK.
* If the iteration is exhausted, free the dir_iterator and any
- * resources associated with it and return ITER_DONE. On error, free
- * dir_iterator and associated resources and return ITER_ERROR. It is
- * a bug to use iterator or call this function again after it has
- * returned ITER_DONE or ITER_ERROR.
+ * resources associated with it and return ITER_DONE.
+ *
+ * It is a bug to use iterator or call this function again after it
+ * has returned ITER_DONE or ITER_ERROR (which may be returned iff
+ * the DIR_ITERATOR_PEDANTIC flag was set).
*/
int dir_iterator_advance(struct dir_iterator *iterator);
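
Putting the dir-iterator changes together: dir_iterator_begin() can now fail (returning NULL with errno set), behavior is tuned with flags, and ITER_ERROR is only possible in pedantic mode, in which case the iterator has already been freed. A hedged usage sketch against the new API; it assumes compilation inside the Git tree, and list_tree() is a hypothetical caller, not part of the patch:

----
#include "cache.h"
#include "dir-iterator.h"

static int list_tree(const char *path)
{
	struct dir_iterator *iter =
		dir_iterator_begin(path, DIR_ITERATOR_PEDANTIC);
	int ok;

	if (!iter)
		return error_errno("failed to start iteration over '%s'", path);

	while ((ok = dir_iterator_advance(iter)) == ITER_OK)
		printf("%s%s\n", iter->relative_path,
		       S_ISDIR(iter->st.st_mode) ? "/" : "");

	/* on ITER_DONE or (pedantic) ITER_ERROR the iterator is already freed */
	return ok == ITER_DONE ? 0 : -1;
}
----
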
oidcpy(&commit_oid, &commit_oe->idx.oid);
} else if (!get_oid(p, &commit_oid)) {
unsigned long size;
- char *buf = read_object_with_reference(&commit_oid,
+ char *buf = read_object_with_reference(the_repository,
+ &commit_oid,
commit_type, &size,
&commit_oid);
if (!buf || size < the_hash_algo->hexsz + 6)
unsigned long size;
char *buf;
- buf = read_object_with_reference(&b->oid, commit_type, &size,
+ buf = read_object_with_reference(the_repository,
+ &b->oid, commit_type, &size,
&b->oid);
parse_from_commit(b, buf, size);
free(buf);
oidcpy(&n->oid, &oe->idx.oid);
} else if (!get_oid(from, &n->oid)) {
unsigned long size;
- char *buf = read_object_with_reference(&n->oid,
+ char *buf = read_object_with_reference(the_repository,
+ &n->oid,
commit_type,
&size, &n->oid);
if (!buf || size < the_hash_algo->hexsz + 6)
struct branch *b;
char *author = NULL;
char *committer = NULL;
+ const char *encoding = NULL;
struct hash_list *merge_list = NULL;
unsigned int merge_count;
unsigned char prev_fanout, new_fanout;
}
if (!committer)
die("Expected committer but didn't get one");
+ if (skip_prefix(command_buf.buf, "encoding ", &encoding))
+ read_next_command();
parse_data(&msg, 0, NULL);
read_next_command();
parse_from(b);
}
strbuf_addf(&new_data,
"author %s\n"
- "committer %s\n"
- "\n",
+ "committer %s\n",
author ? author : committer, committer);
+ if (encoding)
+ strbuf_addf(&new_data,
+ "encoding %s\n",
+ encoding);
+ strbuf_addch(&new_data, '\n');
strbuf_addbuf(&new_data, &msg);
free(author);
free(committer);
* we cannot trust the object flags).
*/
if (!args->no_dependents &&
- ((o = lookup_object(the_repository, remote->hash)) != NULL) &&
+ ((o = lookup_object(the_repository, remote)) != NULL) &&
(o->flags & COMPLETE)) {
continue;
}
if (skip_prefix(reader.line, "unshallow ", &arg)) {
if (get_oid_hex(arg, &oid))
die(_("invalid unshallow line: %s"), reader.line);
- if (!lookup_object(the_repository, oid.hash))
+ if (!lookup_object(the_repository, &oid))
die(_("object not found: %s"), reader.line);
/* make sure that it is parsed as shallow */
if (!parse_object(the_repository, &oid))
for (ref = *refs; ref; ref = ref->next) {
struct object *o = deref_tag(the_repository,
lookup_object(the_repository,
- ref->old_oid.hash),
+ &ref->old_oid),
NULL, 0);
if (!o || o->type != OBJ_COMMIT || !(o->flags & COMPLETE))
const struct object_id *remote = &ref->old_oid;
struct object *o;
- o = lookup_object(the_repository, remote->hash);
+ o = lookup_object(the_repository, remote);
if (!o || !(o->flags & COMPLETE)) {
retval = 0;
print_verbose(args, "want %s (%s)", oid_to_hex(remote),
sort_ref_list(&ref, ref_compare_name);
QSORT(sought, nr_sought, cmp_ref_by_name);
- if ((args->depth > 0 || is_repository_shallow(the_repository)) && !server_supports("shallow"))
+ if ((agent_feature = server_feature_value("agent", &agent_len))) {
+ agent_supported = 1;
+ if (agent_len)
+ print_verbose(args, _("Server version is %.*s"),
+ agent_len, agent_feature);
+ }
+
+ if (server_supports("shallow"))
+ print_verbose(args, _("Server supports %s"), "shallow");
+ else if (args->depth > 0 || is_repository_shallow(the_repository))
die(_("Server does not support shallow clients"));
if (args->depth > 0 || args->deepen_since || args->deepen_not)
args->deepen = 1;
if (server_supports("multi_ack_detailed")) {
- print_verbose(args, _("Server supports multi_ack_detailed"));
+ print_verbose(args, _("Server supports %s"), "multi_ack_detailed");
multi_ack = 2;
if (server_supports("no-done")) {
- print_verbose(args, _("Server supports no-done"));
+ print_verbose(args, _("Server supports %s"), "no-done");
if (args->stateless_rpc)
no_done = 1;
}
}
else if (server_supports("multi_ack")) {
- print_verbose(args, _("Server supports multi_ack"));
+ print_verbose(args, _("Server supports %s"), "multi_ack");
multi_ack = 1;
}
if (server_supports("side-band-64k")) {
- print_verbose(args, _("Server supports side-band-64k"));
+ print_verbose(args, _("Server supports %s"), "side-band-64k");
use_sideband = 2;
}
else if (server_supports("side-band")) {
- print_verbose(args, _("Server supports side-band"));
+ print_verbose(args, _("Server supports %s"), "side-band");
use_sideband = 1;
}
if (server_supports("allow-tip-sha1-in-want")) {
- print_verbose(args, _("Server supports allow-tip-sha1-in-want"));
+ print_verbose(args, _("Server supports %s"), "allow-tip-sha1-in-want");
allow_unadvertised_object_request |= ALLOW_TIP_SHA1;
}
if (server_supports("allow-reachable-sha1-in-want")) {
- print_verbose(args, _("Server supports allow-reachable-sha1-in-want"));
+ print_verbose(args, _("Server supports %s"), "allow-reachable-sha1-in-want");
allow_unadvertised_object_request |= ALLOW_REACHABLE_SHA1;
}
- if (!server_supports("thin-pack"))
+ if (server_supports("thin-pack"))
+ print_verbose(args, _("Server supports %s"), "thin-pack");
+ else
args->use_thin_pack = 0;
- if (!server_supports("no-progress"))
+ if (server_supports("no-progress"))
+ print_verbose(args, _("Server supports %s"), "no-progress");
+ else
args->no_progress = 0;
- if (!server_supports("include-tag"))
+ if (server_supports("include-tag"))
+ print_verbose(args, _("Server supports %s"), "include-tag");
+ else
args->include_tag = 0;
if (server_supports("ofs-delta"))
- print_verbose(args, _("Server supports ofs-delta"));
+ print_verbose(args, _("Server supports %s"), "ofs-delta");
else
prefer_ofs_delta = 0;
if (server_supports("filter")) {
server_supports_filtering = 1;
- print_verbose(args, _("Server supports filter"));
+ print_verbose(args, _("Server supports %s"), "filter");
} else if (args->filter_options.choice) {
warning("filtering not recognized by server, ignoring");
}
- if ((agent_feature = server_feature_value("agent", &agent_len))) {
- agent_supported = 1;
- if (agent_len)
- print_verbose(args, _("Server version is %.*s"),
- agent_len, agent_feature);
- }
- if (server_supports("deepen-since"))
+ if (server_supports("deepen-since")) {
+ print_verbose(args, _("Server supports %s"), "deepen-since");
deepen_since_ok = 1;
- else if (args->deepen_since)
+ } else if (args->deepen_since)
die(_("Server does not support --shallow-since"));
- if (server_supports("deepen-not"))
+ if (server_supports("deepen-not")) {
+ print_verbose(args, _("Server supports %s"), "deepen-not");
deepen_not_ok = 1;
- else if (args->deepen_not)
+ } else if (args->deepen_not)
die(_("Server does not support --shallow-exclude"));
- if (!server_supports("deepen-relative") && args->deepen_relative)
+ if (server_supports("deepen-relative"))
+ print_verbose(args, _("Server supports %s"), "deepen-relative");
+ else if (args->deepen_relative)
die(_("Server does not support --deepen"));
if (!args->no_dependents) {
* we cannot trust the object flags).
*/
if (!no_dependents &&
- ((o = lookup_object(the_repository, remote->hash)) != NULL) &&
+ ((o = lookup_object(the_repository, remote)) != NULL) &&
(o->flags & COMPLETE)) {
continue;
}
if (skip_prefix(reader->line, "unshallow ", &arg)) {
if (get_oid_hex(arg, &oid))
die(_("invalid unshallow line: %s"), reader->line);
- if (!lookup_object(the_repository, oid.hash))
+ if (!lookup_object(the_repository, &oid))
die(_("object not found: %s"), reader->line);
/* make sure that it is parsed as shallow */
if (!parse_object(the_repository, &oid))
return msg_type;
}
-static void init_skiplist(struct fsck_options *options, const char *path)
-{
- FILE *fp;
- struct strbuf sb = STRBUF_INIT;
- struct object_id oid;
-
- fp = fopen(path, "r");
- if (!fp)
- die("Could not open skip list: %s", path);
- while (!strbuf_getline(&sb, fp)) {
- const char *p;
- const char *hash;
-
- /*
- * Allow trailing comments, leading whitespace
- * (including before commits), and empty or whitespace
- * only lines.
- */
- hash = strchr(sb.buf, '#');
- if (hash)
- strbuf_setlen(&sb, hash - sb.buf);
- strbuf_trim(&sb);
- if (!sb.len)
- continue;
-
- if (parse_oid_hex(sb.buf, &oid, &p) || *p != '\0')
- die("Invalid SHA-1: %s", sb.buf);
- oidset_insert(&options->skiplist, &oid);
- }
- if (ferror(fp))
- die_errno("Could not read '%s'", path);
- fclose(fp);
- strbuf_release(&sb);
-}
-
static int parse_msg_type(const char *str)
{
if (!strcmp(str, "error"))
if (!strcmp(buf, "skiplist")) {
if (equal == len)
die("skiplist requires a path");
- init_skiplist(options, buf + equal + 1);
+ oidset_parse_file(&options->skiplist, buf + equal + 1);
buf += len + 1;
continue;
}
blob = lookup_blob(the_repository, oid);
if (!blob) {
- struct object *obj = lookup_unknown_object(oid->hash);
+ struct object *obj = lookup_unknown_object(oid);
ret |= report(options, obj,
FSCK_MSG_GITMODULES_BLOB,
"non-blob found at .gitmodules");
#ifndef NO_GETTEXT
# include <locale.h>
# include <libintl.h>
-# ifdef HAVE_LIBCHARSET_H
+# ifdef GIT_WINDOWS_NATIVE
+
+static const char *locale_charset(void)
+{
+ const char *env = getenv("LC_ALL"), *dot;
+
+ if (!env || !*env)
+ env = getenv("LC_CTYPE");
+ if (!env || !*env)
+ env = getenv("LANG");
+
+ if (!env)
+ return "UTF-8";
+
+ dot = strchr(env, '.');
+ return !dot ? env : dot + 1;
+}
+
+# elif defined HAVE_LIBCHARSET_H
# include <libcharset.h>
# else
# include <langinfo.h>
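
The Windows-native locale_charset() fallback added above walks the usual locale variables in order of precedence, LC_ALL, then LC_CTYPE, then LANG, and keeps only the part after the first dot, so "en_US.UTF-8" yields "UTF-8"; when nothing is set it assumes UTF-8. A standalone sketch of the same lookup, where guess_charset() and its main() are illustrative only:

----
/* Sketch of the lookup order used by the new locale_charset() fallback. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static const char *guess_charset(void)
{
	const char *env = getenv("LC_ALL"), *dot;

	if (!env || !*env)
		env = getenv("LC_CTYPE");
	if (!env || !*env)
		env = getenv("LANG");
	if (!env)
		return "UTF-8";		/* nothing set: assume UTF-8 */

	dot = strchr(env, '.');		/* "en_US.UTF-8" -> "UTF-8" */
	return !dot ? env : dot + 1;
}

int main(void)
{
	printf("charset: %s\n", guess_charset());
	return 0;
}
----
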
int use_gettext_poison(void)
{
static int poison_requested = -1;
- if (poison_requested == -1) {
- const char *v = getenv("GIT_TEST_GETTEXT_POISON");
- poison_requested = v && strlen(v) ? 1 : 0;
- }
+ if (poison_requested == -1)
+ poison_requested = git_env_bool("GIT_TEST_GETTEXT_POISON", 0);
return poison_requested;
}
FILTER => undef,
IS_REVERSE => 0,
},
+ 'worktree_head' => {
+ DIFF => 'diff-index -p',
+ APPLY => sub { apply_patch 'apply -R', @_ },
+ APPLY_CHECK => 'apply -R',
+ FILTER => undef,
+ IS_REVERSE => 1,
+ },
+ 'worktree_nothead' => {
+ DIFF => 'diff-index -R -p',
+ APPLY => sub { apply_patch 'apply', @_ },
+ APPLY_CHECK => 'apply',
+ FILTER => undef,
+ IS_REVERSE => 0,
+ },
);
$patch_mode = 'stage';
marked for discarding."),
checkout_nothead => N__(
"If the patch applies cleanly, the edited hunk will immediately be
+marked for applying."),
+ worktree_head => N__(
+"If the patch applies cleanly, the edited hunk will immediately be
+marked for discarding."),
+ worktree_nothead => N__(
+"If the patch applies cleanly, the edited hunk will immediately be
marked for applying."),
);
n - do not apply this hunk to index and worktree
q - quit; do not apply this hunk or any of the remaining ones
a - apply this hunk and all later hunks in the file
+d - do not apply this hunk or any of the later hunks in the file"),
+ worktree_head => N__(
+"y - discard this hunk from worktree
+n - do not discard this hunk from worktree
+q - quit; do not discard this hunk or any of the remaining ones
+a - discard this hunk and all later hunks in the file
+d - do not discard this hunk or any of the later hunks in the file"),
+ worktree_nothead => N__(
+"y - apply this hunk to worktree
+n - do not apply this hunk to worktree
+q - quit; do not apply this hunk or any of the remaining ones
+a - apply this hunk and all later hunks in the file
d - do not apply this hunk or any of the later hunks in the file"),
);
deletion => N__("Apply deletion to index and worktree [y,n,q,a,d%s,?]? "),
hunk => N__("Apply this hunk to index and worktree [y,n,q,a,d%s,?]? "),
},
+ worktree_head => {
+ mode => N__("Discard mode change from worktree [y,n,q,a,d%s,?]? "),
+ deletion => N__("Discard deletion from worktree [y,n,q,a,d%s,?]? "),
+ hunk => N__("Discard this hunk from worktree [y,n,q,a,d%s,?]? "),
+ },
+ worktree_nothead => {
+ mode => N__("Apply mode change to worktree [y,n,q,a,d%s,?]? "),
+ deletion => N__("Apply deletion to worktree [y,n,q,a,d%s,?]? "),
+ hunk => N__("Apply this hunk to worktree [y,n,q,a,d%s,?]? "),
+ },
);
sub patch_update_file {
'checkout_head' : 'checkout_nothead');
$arg = shift @ARGV or die __("missing --");
}
+ } elsif ($1 eq 'worktree') {
+ $arg = shift @ARGV or die __("missing --");
+ if ($arg eq '--') {
+ $patch_mode = 'checkout_index';
+ } else {
+ $patch_mode_revision = $arg;
+ $patch_mode = ($arg eq 'HEAD' ?
+ 'worktree_head' : 'worktree_nothead');
+ $arg = shift @ARGV or die __("missing --");
+ }
} elsif ($1 eq 'stage' or $1 eq 'stash') {
$patch_mode = $1;
$arg = shift @ARGV or die __("missing --");
#ifndef GIT_COMPAT_UTIL_H
#define GIT_COMPAT_UTIL_H
+#ifdef USE_MSVC_CRTDBG
+/*
+ * For these to work they must appear very early in each
+ * file -- before most of the standard header files.
+ */
+#include <stdlib.h>
+#include <crtdbg.h>
+#endif
+
#define _FILE_OFFSET_BITS 64
}
checkout_staged_file () {
- tmpfile=$(expr \
- "$(git checkout-index --temp --stage="$1" "$2" 2>/dev/null)" \
- : '\([^ ]*\) ')
+ tmpfile="$(git checkout-index --temp --stage="$1" "$2" 2>/dev/null)" &&
+ tmpfile=${tmpfile%%' '*}
if test $? -eq 0 && test -n "$tmpfile"
then
return 1
fi
- if BASE=$(expr "$MERGED" : '\(.*\)\.[^/]*$')
- then
- ext=$(expr "$MERGED" : '.*\(\.[^/]*\)$')
- else
+ # extract file extension from the last path component
+ case "${MERGED##*/}" in
+ *.*)
+ ext=.${MERGED##*.}
+ BASE=${MERGED%"$ext"}
+ ;;
+ *)
BASE=$MERGED
ext=
- fi
+ esac
mergetool_tmpdir_init
REMOTE="$MERGETOOL_TMPDIR/${BASE}_REMOTE_$$$ext"
BASE="$MERGETOOL_TMPDIR/${BASE}_BASE_$$$ext"
- base_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==1) print $1;}')
- local_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==2) print $1;}')
- remote_mode=$(git ls-files -u -- "$MERGED" | awk '{if ($3==3) print $1;}')
+ base_mode= local_mode= remote_mode=
+
+ # here, $IFS is just a LF
+ for line in $f
+ do
+ mode=${line%% *} # 1st word
+ sha1=${line#"$mode "}
+ sha1=${sha1%% *} # 2nd word
+ case "${line#$mode $sha1 }" in # remainder
+ '1 '*)
+ base_mode=$mode
+ ;;
+ '2 '*)
+ local_mode=$mode local_sha1=$sha1
+ ;;
+ '3 '*)
+ remote_mode=$mode remote_sha1=$sha1
+ ;;
+ esac
+ done
if is_submodule "$local_mode" || is_submodule "$remote_mode"
then
echo "Submodule merge conflict for '$MERGED':"
- local_sha1=$(git ls-files -u -- "$MERGED" | awk '{if ($3==2) print $2;}')
- remote_sha1=$(git ls-files -u -- "$MERGED" | awk '{if ($3==3) print $2;}')
describe_file "$local_mode" "local" "$local_sha1"
describe_file "$remote_mode" "remote" "$remote_sha1"
resolve_submodule_merge
-t|--tool*)
case "$#,$1" in
*,*=*)
- merge_tool=$(expr "z$1" : 'z-[^=]*=\(.*\)')
+ merge_tool=${1#*=}
;;
1,*)
usage ;;
self.needsGit = True
self.verbose = False
- # This is required for the "append" cloneExclude action
+ # This is required for the "append" update_shelve action
def ensure_value(self, attr, value):
if not hasattr(self, attr) or getattr(self, attr) is None:
setattr(self, attr, value)
die( "Error: %s is not found in client spec path" % depot_path )
return ""
+def cloneExcludeCallback(option, opt_str, value, parser):
+ # prepend "/" because the first "/" was consumed as part of the option itself.
+ # ("-//depot/A/..." becomes "/depot/A/..." after option parsing)
+ parser.values.cloneExclude += ["/" + re.sub(r"\.\.\.$", "", value)]
+
class P4Sync(Command, P4UserMap):
def __init__(self):
optparse.make_option("--use-client-spec", dest="useClientSpec", action='store_true',
help="Only sync files that are included in the Perforce Client Spec"),
optparse.make_option("-/", dest="cloneExclude",
- action="append", type="string",
+ action="callback", callback=cloneExcludeCallback, type="string",
help="exclude depot path"),
]
self.description = """Imports from Perforce into a git repository.\n
if self.verbose:
print("checkpoint finished: " + out)
+ def isPathWanted(self, path):
+ for p in self.cloneExclude:
+ if p.endswith("/"):
+ if p4PathStartsWith(path, p):
+ return False
+ # "-//depot/file1" without a trailing "/" should only exclude "file1", but not "file111" or "file1_dir/file2"
+ elif path.lower() == p.lower():
+ return False
+ for p in self.depotPaths:
+ if p4PathStartsWith(path, p):
+ return True
+ return False
+
def extractFilesFromCommit(self, commit, shelved=False, shelved_cl = 0):
- self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
- for path in self.cloneExclude]
files = []
fnum = 0
while "depotFile%s" % fnum in commit:
path = commit["depotFile%s" % fnum]
-
- if [p for p in self.cloneExclude
- if p4PathStartsWith(path, p)]:
- found = False
- else:
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
+ found = self.isPathWanted(path)
if not found:
fnum = fnum + 1
continue
path = self.clientSpecDirs.map_in_client(path)
if self.detectBranches:
for b in self.knownBranches:
- if path.startswith(b + "/"):
+ if p4PathStartsWith(path, b + "/"):
path = path[len(b)+1:]
elif self.keepRepoPath:
fnum = 0
while "depotFile%s" % fnum in commit:
path = commit["depotFile%s" % fnum]
- found = [p for p in self.depotPaths
- if p4PathStartsWith(path, p)]
+ found = self.isPathWanted(path)
if not found:
fnum = fnum + 1
continue
for branch in self.knownBranches.keys():
# add a trailing slash so that a commit into qt/4.2foo
# doesn't end up in qt/4.2, e.g.
- if relPath.startswith(branch + "/"):
+ if p4PathStartsWith(relPath, branch + "/"):
if branch not in branches:
branches[branch] = []
branches[branch].append(file)
if currentChange < change:
earliestCommit = "^%s" % next
else:
- latestCommit = "%s" % next
+ if next == latestCommit:
+ die("Infinite loop while looking in ref %s for change %s. Check your branch mappings" % (ref, change))
+ latestCommit = "%s^@" % next
return ""
self.cloneDestination = depotPaths[-1]
depotPaths = depotPaths[:-1]
- self.cloneExclude = ["/"+p for p in self.cloneExclude]
for p in depotPaths:
if not p.startswith("//"):
sys.stderr.write('Depot paths must start with "//": %s\n' % p)
+++ /dev/null
-# This shell script fragment is sourced by git-rebase to implement
-# its default, fast, patch-based, non-interactive mode.
-#
-# Copyright (c) 2010 Junio C Hamano.
-#
-
-git_rebase__am () {
-
-case "$action" in
-continue)
- git am --resolved --resolvemsg="$resolvemsg" \
- ${gpg_sign_opt:+"$gpg_sign_opt"} &&
- move_to_original_branch
- return
- ;;
-skip)
- git am --skip --resolvemsg="$resolvemsg" &&
- move_to_original_branch
- return
- ;;
-show-current-patch)
- exec git am --show-current-patch
- ;;
-esac
-
-if test -z "$rebase_root"
- # this is now equivalent to ! -z "$upstream"
-then
- revisions=$upstream...$orig_head
-else
- revisions=$onto...$orig_head
-fi
-
-ret=0
-rm -f "$GIT_DIR/rebased-patches"
-
-git format-patch -k --stdout --full-index --cherry-pick --right-only \
- --src-prefix=a/ --dst-prefix=b/ --no-renames --no-cover-letter \
- --pretty=mboxrd --topo-order \
- $git_format_patch_opt \
- "$revisions" ${restrict_revision+^$restrict_revision} \
- >"$GIT_DIR/rebased-patches"
-ret=$?
-
-if test 0 != $ret
-then
- rm -f "$GIT_DIR/rebased-patches"
- case "$head_name" in
- refs/heads/*)
- git checkout -q "$head_name"
- ;;
- *)
- git checkout -q "$orig_head"
- ;;
- esac
-
- cat >&2 <<-EOF
-
- git encountered an error while preparing the patches to replay
- these revisions:
-
- $revisions
-
- As a result, git cannot rebase them.
- EOF
- return $ret
-fi
-
-git am $git_am_opt --rebasing --resolvemsg="$resolvemsg" \
- --patch-format=mboxrd \
- $allow_rerere_autoupdate \
- ${gpg_sign_opt:+"$gpg_sign_opt"} <"$GIT_DIR/rebased-patches"
-ret=$?
-
-rm -f "$GIT_DIR/rebased-patches"
-
-if test 0 != $ret
-then
- test -d "$state_dir" && write_basic_state
- return $ret
-fi
-
-move_to_original_branch
-
-}
+++ /dev/null
-
-resolvemsg="
-$(gettext 'Resolve all conflicts manually, mark them as resolved with
-"git add/rm <conflicted_files>", then run "git rebase --continue".
-You can instead skip this commit: run "git rebase --skip".
-To abort and get back to the state before "git rebase", run "git rebase --abort".')
-"
-
-write_basic_state () {
- echo "$head_name" > "$state_dir"/head-name &&
- echo "$onto" > "$state_dir"/onto &&
- echo "$orig_head" > "$state_dir"/orig-head &&
- test t = "$GIT_QUIET" && : > "$state_dir"/quiet
- test t = "$verbose" && : > "$state_dir"/verbose
- test -n "$strategy" && echo "$strategy" > "$state_dir"/strategy
- test -n "$strategy_opts" && echo "$strategy_opts" > \
- "$state_dir"/strategy_opts
- test -n "$allow_rerere_autoupdate" && echo "$allow_rerere_autoupdate" > \
- "$state_dir"/allow_rerere_autoupdate
- test -n "$gpg_sign_opt" && echo "$gpg_sign_opt" > "$state_dir"/gpg_sign_opt
- test -n "$signoff" && echo "$signoff" >"$state_dir"/signoff
- test -n "$reschedule_failed_exec" && : > "$state_dir"/reschedule-failed-exec
-}
-
-apply_autostash () {
- if test -f "$state_dir/autostash"
- then
- stash_sha1=$(cat "$state_dir/autostash")
- if git stash apply $stash_sha1 >/dev/null 2>&1
- then
- echo "$(gettext 'Applied autostash.')" >&2
- else
- git stash store -m "autostash" -q $stash_sha1 ||
- die "$(eval_gettext "Cannot store \$stash_sha1")"
- gettext 'Applying autostash resulted in conflicts.
-Your changes are safe in the stash.
-You can run "git stash pop" or "git stash drop" at any time.
-' >&2
- fi
- fi
-}
-
-move_to_original_branch () {
- case "$head_name" in
- refs/*)
- message="rebase finished: $head_name onto $onto"
- git update-ref -m "$message" \
- $head_name $(git rev-parse HEAD) $orig_head &&
- git symbolic-ref \
- -m "rebase finished: returning to $head_name" \
- HEAD $head_name ||
- die "$(eval_gettext "Could not move back to \$head_name")"
- ;;
- esac
-}
-
-output () {
- case "$verbose" in
- '')
- output=$("$@" 2>&1 )
- status=$?
- test $status != 0 && printf "%s\n" "$output"
- return $status
- ;;
- *)
- "$@"
- ;;
- esac
-}
# and leaves CR at the end instead.
cr=$(printf "\015")
+resolvemsg="
+$(gettext 'Resolve all conflicts manually, mark them as resolved with
+"git add/rm <conflicted_files>", then run "git rebase --continue".
+You can instead skip this commit: run "git rebase --skip".
+To abort and get back to the state before "git rebase", run "git rebase --abort".')
+"
+
+write_basic_state () {
+ echo "$head_name" > "$state_dir"/head-name &&
+ echo "$onto" > "$state_dir"/onto &&
+ echo "$orig_head" > "$state_dir"/orig-head &&
+ test t = "$GIT_QUIET" && : > "$state_dir"/quiet
+ test t = "$verbose" && : > "$state_dir"/verbose
+ test -n "$strategy" && echo "$strategy" > "$state_dir"/strategy
+ test -n "$strategy_opts" && echo "$strategy_opts" > \
+ "$state_dir"/strategy_opts
+ test -n "$allow_rerere_autoupdate" && echo "$allow_rerere_autoupdate" > \
+ "$state_dir"/allow_rerere_autoupdate
+ test -n "$gpg_sign_opt" && echo "$gpg_sign_opt" > "$state_dir"/gpg_sign_opt
+ test -n "$signoff" && echo "$signoff" >"$state_dir"/signoff
+ test -n "$reschedule_failed_exec" && : > "$state_dir"/reschedule-failed-exec
+}
+
+apply_autostash () {
+ if test -f "$state_dir/autostash"
+ then
+ stash_sha1=$(cat "$state_dir/autostash")
+ if git stash apply $stash_sha1 >/dev/null 2>&1
+ then
+ echo "$(gettext 'Applied autostash.')" >&2
+ else
+ git stash store -m "autostash" -q $stash_sha1 ||
+ die "$(eval_gettext "Cannot store \$stash_sha1")"
+ gettext 'Applying autostash resulted in conflicts.
+Your changes are safe in the stash.
+You can run "git stash pop" or "git stash drop" at any time.
+' >&2
+ fi
+ fi
+}
+
+output () {
+ case "$verbose" in
+ '')
+ output=$("$@" 2>&1 )
+ status=$?
+ test $status != 0 && printf "%s\n" "$output"
+ return $status
+ ;;
+ *)
+ "$@"
+ ;;
+ esac
+}
+
strategy_args=${strategy:+--strategy=$strategy}
test -n "$strategy_opts" &&
eval '
my $re_encoded_word = qr/=\?($re_token)\?($re_token)\?($re_encoded_text)\?=/;
# Variables we fill in automatically, or via prompting:
-my (@to,$no_to,@initial_to,@cc,$no_cc,@initial_cc,@bcclist,$no_bcc,@xh,
+my (@to,@cc,@xh,$envelope_sender,
$initial_in_reply_to,$reply_to,$initial_subject,@files,
- $author,$sender,$smtp_authpass,$annotate,$use_xmailer,$compose,$time);
-
-my $envelope_sender;
+ $author,$sender,$smtp_authpass,$annotate,$compose,$time);
+# Things we either get from config *or* override on the command-line.
+my ($no_cc, $no_to, $no_bcc, $no_identity);
+my (@config_to, @getopt_to);
+my (@config_cc, @getopt_cc);
+my (@config_bcc, @getopt_bcc);
# Example reply to:
#$initial_in_reply_to = ''; #<20050203173208.GA23964@foobar.com>';
}
# Variables with corresponding config settings
-my ($thread, $chain_reply_to, $suppress_from, $signed_off_by_cc);
+my ($suppress_from, $signed_off_by_cc);
my ($cover_cc, $cover_to);
my ($to_cmd, $cc_cmd);
my ($smtp_server, $smtp_server_port, @smtp_server_options);
my ($smtp_authuser, $smtp_encryption, $smtp_ssl_cert_path);
my ($batch_size, $relogin_delay);
my ($identity, $aliasfiletype, @alias_files, $smtp_domain, $smtp_auth);
-my ($validate, $confirm);
+my ($confirm);
my (@suppress_cc);
my ($auto_8bit_encoding);
my ($compose_encoding);
-my $target_xfer_encoding = 'auto';
-
+# Variables with corresponding config settings & hardcoded defaults
my ($debug_net_smtp) = 0; # Net::SMTP, see send_message()
+my $thread = 1;
+my $chain_reply_to = 0;
+my $use_xmailer = 1;
+my $validate = 1;
+my $target_xfer_encoding = 'auto';
my %config_bool_settings = (
- "thread" => [\$thread, 1],
- "chainreplyto" => [\$chain_reply_to, 0],
- "suppressfrom" => [\$suppress_from, undef],
- "signedoffbycc" => [\$signed_off_by_cc, undef],
- "cccover" => [\$cover_cc, undef],
- "tocover" => [\$cover_to, undef],
- "signedoffcc" => [\$signed_off_by_cc, undef], # Deprecated
- "validate" => [\$validate, 1],
- "multiedit" => [\$multiedit, undef],
- "annotate" => [\$annotate, undef],
- "xmailer" => [\$use_xmailer, 1]
+ "thread" => \$thread,
+ "chainreplyto" => \$chain_reply_to,
+ "suppressfrom" => \$suppress_from,
+ "signedoffbycc" => \$signed_off_by_cc,
+ "cccover" => \$cover_cc,
+ "tocover" => \$cover_to,
+ "signedoffcc" => \$signed_off_by_cc,
+ "validate" => \$validate,
+ "multiedit" => \$multiedit,
+ "annotate" => \$annotate,
+ "xmailer" => \$use_xmailer,
);
my %config_settings = (
"smtpauth" => \$smtp_auth,
"smtpbatchsize" => \$batch_size,
"smtprelogindelay" => \$relogin_delay,
- "to" => \@initial_to,
+ "to" => \@config_to,
"tocmd" => \$to_cmd,
- "cc" => \@initial_cc,
+ "cc" => \@config_cc,
"cccmd" => \$cc_cmd,
"aliasfiletype" => \$aliasfiletype,
- "bcc" => \@bcclist,
+ "bcc" => \@config_bcc,
"suppresscc" => \@suppress_cc,
"envelopesender" => \$envelope_sender,
"confirm" => \$confirm,
$SIG{TERM} = \&signal_handler;
$SIG{INT} = \&signal_handler;
+# Read our sendemail.* config
+sub read_config {
+ my ($configured, $prefix) = @_;
+
+ foreach my $setting (keys %config_bool_settings) {
+ my $target = $config_bool_settings{$setting};
+ my $v = Git::config_bool(@repo, "$prefix.$setting");
+ next unless defined $v;
+ next if $configured->{$setting}++;
+ $$target = $v;
+ }
+
+ foreach my $setting (keys %config_path_settings) {
+ my $target = $config_path_settings{$setting};
+ if (ref($target) eq "ARRAY") {
+ my @values = Git::config_path(@repo, "$prefix.$setting");
+ next unless @values;
+ next if $configured->{$setting}++;
+ @$target = @values;
+ }
+ else {
+ my $v = Git::config_path(@repo, "$prefix.$setting");
+ next unless defined $v;
+ next if $configured->{$setting}++;
+ $$target = $v;
+ }
+ }
+
+ foreach my $setting (keys %config_settings) {
+ my $target = $config_settings{$setting};
+ if (ref($target) eq "ARRAY") {
+ my @values = Git::config(@repo, "$prefix.$setting");
+ next unless @values;
+ next if $configured->{$setting}++;
+ @$target = @values;
+ }
+ else {
+ my $v = Git::config(@repo, "$prefix.$setting");
+ next unless defined $v;
+ next if $configured->{$setting}++;
+ $$target = $v;
+ }
+ }
+
+ if (!defined $smtp_encryption) {
+ my $setting = "$prefix.smtpencryption";
+ my $enc = Git::config(@repo, $setting);
+ return unless defined $enc;
+ return if $configured->{$setting}++;
+ if (defined $enc) {
+ $smtp_encryption = $enc;
+ } elsif (Git::config_bool(@repo, "$prefix.smtpssl")) {
+ $smtp_encryption = 'ssl';
+ }
+ }
+}
+
+# sendemail.identity yields to --identity. We must parse this special case
+# before the rest of the config is read.
+$identity = Git::config(@repo, "sendemail.identity");
+my $rc = GetOptions(
+ "identity=s" => \$identity,
+ "no-identity" => \$no_identity,
+);
+usage() unless $rc;
+undef $identity if $no_identity;
+
+# Now we know enough to read the config
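+# (read_config() skips settings already recorded in %configured, so the
+# identity-specific section takes precedence over plain "sendemail")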
+{
+ my %configured;
+ read_config(\%configured, "sendemail.$identity") if defined $identity;
+ read_config(\%configured, "sendemail");
+}
+
# Begin by accumulating all the variables (defined above), that we will end up
# needing, first, from the command line:
my $help;
my $git_completion_helper;
-my $rc = GetOptions("h" => \$help,
- "dump-aliases" => \$dump_aliases);
+$rc = GetOptions("h" => \$help,
+ "dump-aliases" => \$dump_aliases);
usage() unless $rc;
die __("--dump-aliases incompatible with other options\n")
if !$help and $dump_aliases and @ARGV;
"in-reply-to=s" => \$initial_in_reply_to,
"reply-to=s" => \$reply_to,
"subject=s" => \$initial_subject,
- "to=s" => \@initial_to,
+ "to=s" => \@getopt_to,
"to-cmd=s" => \$to_cmd,
"no-to" => \$no_to,
- "cc=s" => \@initial_cc,
+ "cc=s" => \@getopt_cc,
"no-cc" => \$no_cc,
- "bcc=s" => \@bcclist,
+ "bcc=s" => \@getopt_bcc,
"no-bcc" => \$no_bcc,
"chain-reply-to!" => \$chain_reply_to,
"no-chain-reply-to" => sub {$chain_reply_to = 0},
"smtp-domain:s" => \$smtp_domain,
"smtp-auth=s" => \$smtp_auth,
"no-smtp-auth" => sub {$smtp_auth = 'none'},
- "identity=s" => \$identity,
"annotate!" => \$annotate,
"no-annotate" => sub {$annotate = 0},
"compose" => \$compose,
"git-completion-helper" => \$git_completion_helper,
);
+# Munge any "either config or getopt, not both" variables
+my @initial_to = @getopt_to ? @getopt_to : ($no_to ? () : @config_to);
+my @initial_cc = @getopt_cc ? @getopt_cc : ($no_cc ? () : @config_cc);
+my @initial_bcc = @getopt_bcc ? @getopt_bcc : ($no_bcc ? () : @config_bcc);
+
usage() if $help;
completion_helper() if $git_completion_helper;
unless ($rc) {
"(via command-line or configuration option)\n")
if defined $relogin_delay and not defined $batch_size;
-# Now, let's fill any that aren't set in with defaults:
-
-sub read_config {
- my ($prefix) = @_;
-
- foreach my $setting (keys %config_bool_settings) {
- my $target = $config_bool_settings{$setting}->[0];
- $$target = Git::config_bool(@repo, "$prefix.$setting") unless (defined $$target);
- }
-
- foreach my $setting (keys %config_path_settings) {
- my $target = $config_path_settings{$setting};
- if (ref($target) eq "ARRAY") {
- unless (@$target) {
- my @values = Git::config_path(@repo, "$prefix.$setting");
- @$target = @values if (@values && defined $values[0]);
- }
- }
- else {
- $$target = Git::config_path(@repo, "$prefix.$setting") unless (defined $$target);
- }
- }
-
- foreach my $setting (keys %config_settings) {
- my $target = $config_settings{$setting};
- next if $setting eq "to" and defined $no_to;
- next if $setting eq "cc" and defined $no_cc;
- next if $setting eq "bcc" and defined $no_bcc;
- if (ref($target) eq "ARRAY") {
- unless (@$target) {
- my @values = Git::config(@repo, "$prefix.$setting");
- @$target = @values if (@values && defined $values[0]);
- }
- }
- else {
- $$target = Git::config(@repo, "$prefix.$setting") unless (defined $$target);
- }
- }
-
- if (!defined $smtp_encryption) {
- my $enc = Git::config(@repo, "$prefix.smtpencryption");
- if (defined $enc) {
- $smtp_encryption = $enc;
- } elsif (Git::config_bool(@repo, "$prefix.smtpssl")) {
- $smtp_encryption = 'ssl';
- }
- }
-}
-
-# read configuration from [sendemail "$identity"], fall back on [sendemail]
-$identity = Git::config(@repo, "sendemail.identity") unless (defined $identity);
-read_config("sendemail.$identity") if (defined $identity);
-read_config("sendemail");
-
-# fall back on builtin bool defaults
-foreach my $setting (values %config_bool_settings) {
- ${$setting->[0]} = $setting->[1] unless (defined (${$setting->[0]}));
-}
-
# 'default' encryption is none -- this only prevents a warning
$smtp_encryption = '' unless (defined $smtp_encryption);
@initial_to = process_address_list(@initial_to);
@initial_cc = process_address_list(@initial_cc);
-@bcclist = process_address_list(@bcclist);
+@initial_bcc = process_address_list(@initial_bcc);
if ($thread && !defined $initial_in_reply_to && $prompting) {
$initial_in_reply_to = ask(
}
@cc);
my $to = join (",\n\t", @recipients);
- @recipients = unique_email_list(@recipients,@cc,@bcclist);
+ @recipients = unique_email_list(@recipients,@cc,@initial_bcc);
@recipients = (map { extract_valid_address_or_die($_) } @recipients);
my $date = format_2822_time($time++);
my $gitversion = '@@GIT_VERSION@@';
# First decide what scheme to use...
GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough
-if test -n "$GIT_TEST_GETTEXT_POISON"
+if test -n "$GIT_TEST_GETTEXT_POISON" &&
+ git env--helper --type=bool --default=0 --exit-code \
+ GIT_TEST_GETTEXT_POISON
then
GIT_INTERNAL_GETTEXT_SH_SCHEME=poison
elif test -n "@@USE_GETTEXT_SCHEME@@"
const char git_more_info_string[] =
N_("'git help -a' and 'git help -g' list available subcommands and some\n"
"concept guides. See 'git help <command>' or 'git help <concept>'\n"
- "to read about a specific subcommand or concept.");
+ "to read about a specific subcommand or concept.\n"
+ "See 'git help git' for an overview of the system.");
static int use_pager = -1;
{ "diff-index", cmd_diff_index, RUN_SETUP | NO_PARSEOPT },
{ "diff-tree", cmd_diff_tree, RUN_SETUP | NO_PARSEOPT },
{ "difftool", cmd_difftool, RUN_SETUP_GENTLY },
+ { "env--helper", cmd_env__helper },
{ "fast-export", cmd_fast_export, RUN_SETUP },
{ "fetch", cmd_fetch, RUN_SETUP },
{ "fetch-pack", cmd_fetch_pack, RUN_SETUP | NO_PARSEOPT },
{ "push", cmd_push, RUN_SETUP },
{ "range-diff", cmd_range_diff, RUN_SETUP | USE_PAGER },
{ "read-tree", cmd_read_tree, RUN_SETUP | SUPPORT_SUPER_PREFIX},
- /*
- * NEEDSWORK: Until the rebase is independent and needs no redirection
- * to rebase shell script this is kept as is, then should be changed to
- * RUN_SETUP | NEED_WORK_TREE
- */
- { "rebase", cmd_rebase },
+ { "rebase", cmd_rebase, RUN_SETUP | NEED_WORK_TREE },
{ "rebase--interactive", cmd_rebase__interactive, RUN_SETUP | NEED_WORK_TREE },
{ "receive-pack", cmd_receive_pack },
{ "reflog", cmd_reflog, RUN_SETUP },
{ "replace", cmd_replace, RUN_SETUP },
{ "rerere", cmd_rerere, RUN_SETUP },
{ "reset", cmd_reset, RUN_SETUP },
+ { "restore", cmd_restore, RUN_SETUP | NEED_WORK_TREE },
{ "rev-list", cmd_rev_list, RUN_SETUP | NO_PARSEOPT },
{ "rev-parse", cmd_rev_parse, NO_PARSEOPT },
{ "revert", cmd_revert, RUN_SETUP | NEED_WORK_TREE },
{ "status", cmd_status, RUN_SETUP | NEED_WORK_TREE },
{ "stripspace", cmd_stripspace },
{ "submodule--helper", cmd_submodule__helper, RUN_SETUP | SUPPORT_SUPER_PREFIX | NO_PARSEOPT },
+ { "switch", cmd_switch, RUN_SETUP | NEED_WORK_TREE },
{ "symbolic-ref", cmd_symbolic_ref, RUN_SETUP },
{ "tag", cmd_tag, RUN_SETUP | DELAY_PAGER_CONFIG },
{ "unpack-file", cmd_unpack_file, RUN_SETUP | NO_PARSEOPT },
*/
if (!done_alias)
handle_builtin(*argcp, *argv);
-
-#if 0 // TODO In GFW, need to amend a7924b655e940b06cb547c235d6bed9767929673 to include trace2_ and _tr2 lines.
else if (get_builtin(**argv)) {
struct argv_array args = ARGV_ARRAY_INIT;
int i;
exit(i);
die("could not execute builtin %s", **argv);
}
-#endif // a7924b655e940b06cb547c235d6bed9767929673
/* .. then try the external ones */
execv_dashed_external(*argv);
VALUE "Translation", 0x409, 1200
END
END
+
+1 RT_MANIFEST "compat/win32/git.manifest"
pcre2_jit_stack_assign(p->pcre2_match_context, NULL, p->pcre2_jit_stack);
} else if (p->pcre2_jit_on != 0) {
BUG("The pcre2_jit_on variable should be 0 or 1, not %d",
- p->pcre1_jit_on);
+ p->pcre2_jit_on);
}
}
return p - hash_algos;
}
+/* The length in bytes and in hex digits of an object name (SHA-1 value). */
+#define GIT_SHA1_RAWSZ 20
+#define GIT_SHA1_HEXSZ (2 * GIT_SHA1_RAWSZ)
+/* The block size of SHA-1. */
+#define GIT_SHA1_BLKSZ 64
+
+/* The length in bytes and in hex digits of an object name (SHA-256 value). */
+#define GIT_SHA256_RAWSZ 32
+#define GIT_SHA256_HEXSZ (2 * GIT_SHA256_RAWSZ)
+/* The block size of SHA-256. */
+#define GIT_SHA256_BLKSZ 64
+
+/* The length in bytes and in hex digits of the largest possible hash value. */
+#define GIT_MAX_RAWSZ GIT_SHA256_RAWSZ
+#define GIT_MAX_HEXSZ GIT_SHA256_HEXSZ
+/* The largest possible block size for any supported hash. */
+#define GIT_MAX_BLKSZ GIT_SHA256_BLKSZ
+
+struct object_id {
+ unsigned char hash[GIT_MAX_RAWSZ];
+};
+
+#define the_hash_algo the_repository->hash_algo
+
#endif
#ifndef HASHMAP_H
#define HASHMAP_H
+#include "hash.h"
+
/*
* Generic implementation of hash-based key-value mappings.
*
* the results will be different on big-endian and little-endian
* platforms, so they should not be stored or transferred over the net.
*/
-static inline unsigned int sha1hash(const unsigned char *sha1)
+static inline unsigned int oidhash(const struct object_id *oid)
{
/*
- * Equivalent to 'return *(unsigned int *)sha1;', but safe on
+ * Equivalent to 'return *(unsigned int *)oid->hash;', but safe on
* platforms that don't support unaligned reads.
*/
unsigned int hash;
- memcpy(&hash, sha1, sizeof(hash));
+ memcpy(&hash, oid->hash, sizeof(hash));
return hash;
}
{
struct similar_ref_cb *cb = (struct similar_ref_cb *)(cb_data);
char *branch = strrchr(refname, '/') + 1;
- const char *remote;
/* A remote branch of the same name is deemed similar */
- if (skip_prefix(refname, "refs/remotes/", &remote) &&
+ if (starts_with(refname, "refs/remotes/") &&
!strcmp(branch, cb->base_ref))
- string_list_append(cb->similar_refs, remote);
+ string_list_append_nodup(cb->similar_refs,
+ shorten_unambiguous_ref(refname, 1));
return 0;
}
static struct string_list guess_refs(const char *ref)
{
struct similar_ref_cb ref_cb;
- struct string_list similar_refs = STRING_LIST_INIT_NODUP;
+ struct string_list similar_refs = STRING_LIST_INIT_DUP;
ref_cb.base_ref = ref;
ref_cb.similar_refs = &similar_refs;
{
struct object *obj;
- obj = lookup_object(the_repository, oid->hash);
+ obj = lookup_object(the_repository, oid);
if (!obj)
obj = parse_object(the_repository, oid);
* may be required for updating server info later.
*/
if (repo->can_update_info_refs && !has_object_file(&ref->old_oid)) {
- obj = lookup_unknown_object(ref->old_oid.hash);
+ obj = lookup_unknown_object(&ref->old_oid);
fprintf(stderr, " fetch %s for %s\n",
oid_to_hex(&ref->old_oid), refname);
add_fetch_request(obj);
code; \
} }
-#define __kh_oid_cmp(a, b) (hashcmp(a, b) == 0)
-
-KHASH_INIT(sha1, const unsigned char *, void *, 1, sha1hash, __kh_oid_cmp)
-typedef kh_sha1_t khash_sha1;
-
-KHASH_INIT(sha1_pos, const unsigned char *, int, 1, sha1hash, __kh_oid_cmp)
-typedef kh_sha1_pos_t khash_sha1_pos;
-
-static inline unsigned int oid_hash(struct object_id oid)
+static inline unsigned int oidhash_by_value(struct object_id oid)
{
- return sha1hash(oid.hash);
+ return oidhash(&oid);
}
-static inline int oid_equal(struct object_id a, struct object_id b)
+static inline int oideq_by_value(struct object_id a, struct object_id b)
{
return oideq(&a, &b);
}
-KHASH_INIT(oid, struct object_id, int, 0, oid_hash, oid_equal)
+KHASH_INIT(oid_set, struct object_id, int, 0, oidhash_by_value, oideq_by_value)
-KHASH_INIT(oid_map, struct object_id, void *, 1, oid_hash, oid_equal)
-typedef kh_oid_t khash_oid_map;
+KHASH_INIT(oid_map, struct object_id, void *, 1, oidhash_by_value, oideq_by_value)
-KHASH_INIT(oid_pos, struct object_id, int, 1, oid_hash, oid_equal)
-typedef kh_oid_pos_t khash_oid_pos;
+KHASH_INIT(oid_pos, struct object_id, int, 1, oidhash_by_value, oideq_by_value)
#endif /* __AC_KHASH_H */
return (struct commit *) commit;
}
-static void fill_blob_sha1(struct commit *commit, struct diff_filespec *spec)
+static void fill_blob_sha1(struct repository *r, struct commit *commit,
+ struct diff_filespec *spec)
{
unsigned short mode;
struct object_id oid;
- if (get_tree_entry(&commit->object.oid, spec->path, &oid, &mode))
+ if (get_tree_entry(r, &commit->object.oid, spec->path, &oid, &mode))
die("There is no path %s in the commit", spec->path);
fill_filespec(spec, &oid, 1, mode);
name_part);
spec = alloc_filespec(full_name);
- fill_blob_sha1(commit, spec);
+ fill_blob_sha1(r, commit, spec);
fill_line_ends(r, spec, &lines, &ends);
cb_data.spec = spec;
cb_data.lines = lines;
*/
if (errbuf)
- strbuf_addf(errbuf, "invalid filter-spec '%s'", arg);
+ strbuf_addf(errbuf, _("invalid filter-spec '%s'"), arg);
memset(filter_options, 0, sizeof(*filter_options));
return 1;
* other hand, it could cover tree one and we might need to pick a
* subtree of it.
*/
-void shift_tree(const struct object_id *hash1,
+void shift_tree(struct repository *r,
+ const struct object_id *hash1,
const struct object_id *hash2,
struct object_id *shifted,
int depth_limit)
if (!*del_prefix)
return;
- if (get_tree_entry(hash2, del_prefix, shifted, &mode))
+ if (get_tree_entry(r, hash2, del_prefix, shifted, &mode))
die("cannot find path %s in tree %s",
del_prefix, oid_to_hex(hash2));
return;
* Unfortunately we cannot fundamentally tell which one to
* be prefixed, as recursive merge can work in either direction.
*/
-void shift_tree_by(const struct object_id *hash1,
+void shift_tree_by(struct repository *r,
+ const struct object_id *hash1,
const struct object_id *hash2,
struct object_id *shifted,
const char *shift_prefix)
unsigned candidate = 0;
/* Can hash2 be a tree at shift_prefix in tree hash1? */
- if (!get_tree_entry(hash1, shift_prefix, &sub1, &mode1) &&
+ if (!get_tree_entry(r, hash1, shift_prefix, &sub1, &mode1) &&
S_ISDIR(mode1))
candidate |= 1;
/* Can hash1 be a tree at shift_prefix in tree hash2? */
- if (!get_tree_entry(hash2, shift_prefix, &sub2, &mode2) &&
+ if (!get_tree_entry(r, hash2, shift_prefix, &sub2, &mode2) &&
S_ISDIR(mode2))
candidate |= 2;
struct object_id shifted;
if (!*subtree_shift) {
- shift_tree(&one->object.oid, &two->object.oid, &shifted, 0);
+ shift_tree(repo, &one->object.oid, &two->object.oid, &shifted, 0);
} else {
- shift_tree_by(&one->object.oid, &two->object.oid, &shifted,
+ shift_tree_by(repo, &one->object.oid, &two->object.oid, &shifted,
subtree_shift);
}
if (oideq(&two->object.oid, &shifted))
{
struct pathspec match_all;
memset(&match_all, 0, sizeof(match_all));
- read_tree_recursive(the_repository, tree, "", 0, 0,
+ read_tree_recursive(opt->repo, tree, "", 0, 0,
&match_all, save_files_dirs, opt);
}
-static int get_tree_entry_if_blob(const struct object_id *tree,
+static int get_tree_entry_if_blob(struct repository *r,
+ const struct object_id *tree,
const char *path,
struct diff_filespec *dfs)
{
int ret;
- ret = get_tree_entry(tree, path, &dfs->oid, &dfs->mode);
+ ret = get_tree_entry(r, tree, path, &dfs->oid, &dfs->mode);
if (S_ISDIR(dfs->mode)) {
oidcpy(&dfs->oid, &null_oid);
dfs->mode = 0;
* Returns an index_entry instance which doesn't have to correspond to
* a real cache entry in Git's index.
*/
-static struct stage_data *insert_stage_data(const char *path,
+static struct stage_data *insert_stage_data(struct repository *r,
+ const char *path,
struct tree *o, struct tree *a, struct tree *b,
struct string_list *entries)
{
struct string_list_item *item;
struct stage_data *e = xcalloc(1, sizeof(struct stage_data));
- get_tree_entry_if_blob(&o->object.oid, path, &e->stages[1]);
- get_tree_entry_if_blob(&a->object.oid, path, &e->stages[2]);
- get_tree_entry_if_blob(&b->object.oid, path, &e->stages[3]);
+ get_tree_entry_if_blob(r, &o->object.oid, path, &e->stages[1]);
+ get_tree_entry_if_blob(r, &a->object.oid, path, &e->stages[2]);
+ get_tree_entry_if_blob(r, &b->object.oid, path, &e->stages[3]);
item = string_list_insert(entries, path);
item->util = e;
return e;
return ret;
}
-static int tree_has_path(struct tree *tree, const char *path)
+static int tree_has_path(struct repository *r, struct tree *tree,
+ const char *path)
{
struct object_id hashy;
unsigned short mode_o;
- return !get_tree_entry(&tree->object.oid, path,
+ return !get_tree_entry(r,
+ &tree->object.oid, path,
&hashy, &mode_o);
}
*/
if (collision_ent->reported_already) {
clean = 0;
- } else if (tree_has_path(tree, new_path)) {
+ } else if (tree_has_path(opt->repo, tree, new_path)) {
collision_ent->reported_already = 1;
strbuf_add_separated_string_list(&collision_paths, ", ",
&collision_ent->source_files);
string_list_append(&remove_from_merge,
merge_ent->dir)->util = merge_ent;
strbuf_release(&merge_ent->new_dir);
- } else if (tree_has_path(head, head_ent->dir)) {
+ } else if (tree_has_path(opt->repo, head, head_ent->dir)) {
/* 2. This wasn't a directory rename after all */
string_list_append(&remove_from_head,
head_ent->dir)->util = head_ent;
hashmap_iter_init(dir_re_merge, &iter);
while ((merge_ent = hashmap_iter_next(&iter))) {
head_ent = dir_rename_find_entry(dir_re_head, merge_ent->dir);
- if (tree_has_path(merge, merge_ent->dir)) {
+ if (tree_has_path(opt->repo, merge, merge_ent->dir)) {
/* 2. This wasn't a directory rename after all */
string_list_append(&remove_from_merge,
merge_ent->dir)->util = merge_ent;
if (pair->status == 'R')
re->dst_entry->processed = 1;
- re->dst_entry = insert_stage_data(new_path,
+ re->dst_entry = insert_stage_data(opt->repo, new_path,
o_tree, a_tree, b_tree,
entries);
item = string_list_insert(entries, new_path);
* the various handle_rename_*() functions update the index
* explicitly rather than relying on unpack_trees() to have done it.
*/
- get_tree_entry(&tree->object.oid,
+ get_tree_entry(opt->repo,
+ &tree->object.oid,
pair->two->path,
&re->dst_entry->stages[stage].oid,
&re->dst_entry->stages[stage].mode);
re->dir_rename_original_dest = NULL;
item = string_list_lookup(entries, re->pair->one->path);
if (!item)
- re->src_entry = insert_stage_data(re->pair->one->path,
+ re->src_entry = insert_stage_data(opt->repo,
+ re->pair->one->path,
o_tree, a_tree, b_tree, entries);
else
re->src_entry = item->util;
item = string_list_lookup(entries, re->pair->two->path);
if (!item)
- re->dst_entry = insert_stage_data(re->pair->two->path,
+ re->dst_entry = insert_stage_data(opt->repo,
+ re->pair->two->path,
o_tree, a_tree, b_tree, entries);
else
re->dst_entry = item->util;
#include "midx.h"
#include "progress.h"
#include "trace2.h"
+#include "run-command.h"
#define MIDX_SIGNATURE 0x4d494458 /* "MIDX" */
#define MIDX_VERSION 1
#define MIDX_CHUNK_LARGE_OFFSET_WIDTH (sizeof(uint64_t))
#define MIDX_LARGE_OFFSET_NEEDED 0x80000000
+#define PACK_EXPIRED UINT_MAX
+
static char *get_midx_filename(const char *object_dir)
{
return xstrfmt("%s/pack/multi-pack-index", object_dir);
return MIDX_HEADER_SIZE;
}
+struct pack_info {
+ uint32_t orig_pack_int_id;
+ char *pack_name;
+ struct packed_git *p;
+ unsigned expired : 1;
+};
+
+static int pack_info_compare(const void *_a, const void *_b)
+{
+ struct pack_info *a = (struct pack_info *)_a;
+ struct pack_info *b = (struct pack_info *)_b;
+ return strcmp(a->pack_name, b->pack_name);
+}
+
struct pack_list {
- struct packed_git **list;
- char **names;
+ struct pack_info *info;
uint32_t nr;
- uint32_t alloc_list;
- uint32_t alloc_names;
- size_t pack_name_concat_len;
+ uint32_t alloc;
struct multi_pack_index *m;
};
if (packs->m && midx_contains_pack(packs->m, file_name))
return;
- ALLOC_GROW(packs->list, packs->nr + 1, packs->alloc_list);
- ALLOC_GROW(packs->names, packs->nr + 1, packs->alloc_names);
+ ALLOC_GROW(packs->info, packs->nr + 1, packs->alloc);
- packs->list[packs->nr] = add_packed_git(full_path,
- full_path_len,
- 0);
+ packs->info[packs->nr].p = add_packed_git(full_path,
+ full_path_len,
+ 0);
- if (!packs->list[packs->nr]) {
+ if (!packs->info[packs->nr].p) {
warning(_("failed to add packfile '%s'"),
full_path);
return;
}
- if (open_pack_index(packs->list[packs->nr])) {
+ if (open_pack_index(packs->info[packs->nr].p)) {
warning(_("failed to open pack-index '%s'"),
full_path);
- close_pack(packs->list[packs->nr]);
- FREE_AND_NULL(packs->list[packs->nr]);
+ close_pack(packs->info[packs->nr].p);
+ FREE_AND_NULL(packs->info[packs->nr].p);
return;
}
- packs->names[packs->nr] = xstrdup(file_name);
- packs->pack_name_concat_len += strlen(file_name) + 1;
+ packs->info[packs->nr].pack_name = xstrdup(file_name);
+ packs->info[packs->nr].orig_pack_int_id = packs->nr;
+ packs->info[packs->nr].expired = 0;
packs->nr++;
}
}
-struct pack_pair {
- uint32_t pack_int_id;
- char *pack_name;
-};
-
-static int pack_pair_compare(const void *_a, const void *_b)
-{
- struct pack_pair *a = (struct pack_pair *)_a;
- struct pack_pair *b = (struct pack_pair *)_b;
- return strcmp(a->pack_name, b->pack_name);
-}
-
-static void sort_packs_by_name(char **pack_names, uint32_t nr_packs, uint32_t *perm)
-{
- uint32_t i;
- struct pack_pair *pairs;
-
- ALLOC_ARRAY(pairs, nr_packs);
-
- for (i = 0; i < nr_packs; i++) {
- pairs[i].pack_int_id = i;
- pairs[i].pack_name = pack_names[i];
- }
-
- QSORT(pairs, nr_packs, pack_pair_compare);
-
- for (i = 0; i < nr_packs; i++) {
- pack_names[i] = pairs[i].pack_name;
- perm[pairs[i].pack_int_id] = i;
- }
-
- free(pairs);
-}
-
struct pack_midx_entry {
struct object_id oid;
uint32_t pack_int_id;
}
static int nth_midxed_pack_midx_entry(struct multi_pack_index *m,
- uint32_t *pack_perm,
struct pack_midx_entry *e,
uint32_t pos)
{
return 1;
nth_midxed_object_oid(&e->oid, m, pos);
- e->pack_int_id = pack_perm[nth_midxed_pack_int_id(m, pos)];
+ e->pack_int_id = nth_midxed_pack_int_id(m, pos);
e->offset = nth_midxed_offset(m, pos);
/* consider objects in midx to be from "old" packs */
* of a packfile containing the object).
*/
static struct pack_midx_entry *get_sorted_entries(struct multi_pack_index *m,
- struct packed_git **p,
- uint32_t *perm,
+ struct pack_info *info,
uint32_t nr_packs,
uint32_t *nr_objects)
{
uint32_t start_pack = m ? m->num_packs : 0;
for (cur_pack = start_pack; cur_pack < nr_packs; cur_pack++)
- total_objects += p[cur_pack]->num_objects;
+ total_objects += info[cur_pack].p->num_objects;
/*
* As we de-duplicate by fanout value, we expect the fanout
for (cur_object = start; cur_object < end; cur_object++) {
ALLOC_GROW(entries_by_fanout, nr_fanout + 1, alloc_fanout);
- nth_midxed_pack_midx_entry(m, perm,
+ nth_midxed_pack_midx_entry(m,
&entries_by_fanout[nr_fanout],
cur_object);
nr_fanout++;
uint32_t start = 0, end;
if (cur_fanout)
- start = get_pack_fanout(p[cur_pack], cur_fanout - 1);
- end = get_pack_fanout(p[cur_pack], cur_fanout);
+ start = get_pack_fanout(info[cur_pack].p, cur_fanout - 1);
+ end = get_pack_fanout(info[cur_pack].p, cur_fanout);
for (cur_object = start; cur_object < end; cur_object++) {
ALLOC_GROW(entries_by_fanout, nr_fanout + 1, alloc_fanout);
- fill_pack_entry(perm[cur_pack], p[cur_pack], cur_object, &entries_by_fanout[nr_fanout]);
+ fill_pack_entry(cur_pack, info[cur_pack].p, cur_object, &entries_by_fanout[nr_fanout]);
nr_fanout++;
}
}
}
static size_t write_midx_pack_names(struct hashfile *f,
- char **pack_names,
+ struct pack_info *info,
uint32_t num_packs)
{
uint32_t i;
size_t written = 0;
for (i = 0; i < num_packs; i++) {
- size_t writelen = strlen(pack_names[i]) + 1;
+ size_t writelen;
+
+ if (info[i].expired)
+ continue;
- if (i && strcmp(pack_names[i], pack_names[i - 1]) <= 0)
+ if (i && strcmp(info[i].pack_name, info[i - 1].pack_name) <= 0)
BUG("incorrect pack-file order: %s before %s",
- pack_names[i - 1],
- pack_names[i]);
+ info[i - 1].pack_name,
+ info[i].pack_name);
- hashwrite(f, pack_names[i], writelen);
+ writelen = strlen(info[i].pack_name) + 1;
+ hashwrite(f, info[i].pack_name, writelen);
written += writelen;
}
}
static size_t write_midx_object_offsets(struct hashfile *f, int large_offset_needed,
+ uint32_t *perm,
struct pack_midx_entry *objects, uint32_t nr_objects)
{
struct pack_midx_entry *list = objects;
for (i = 0; i < nr_objects; i++) {
struct pack_midx_entry *obj = list++;
- hashwrite_be32(f, obj->pack_int_id);
+ if (perm[obj->pack_int_id] == PACK_EXPIRED)
+ BUG("object %s is in an expired pack with int-id %d",
+ oid_to_hex(&obj->oid),
+ obj->pack_int_id);
+
+ hashwrite_be32(f, perm[obj->pack_int_id]);
if (large_offset_needed && obj->offset >> 31)
hashwrite_be32(f, MIDX_LARGE_OFFSET_NEEDED | nr_large_offset++);
return written;
}
-int write_midx_file(const char *object_dir)
+static int write_midx_internal(const char *object_dir, struct multi_pack_index *m,
+ struct string_list *packs_to_drop)
{
unsigned char cur_chunk, num_chunks = 0;
char *midx_name;
uint32_t nr_entries, num_large_offsets = 0;
struct pack_midx_entry *entries = NULL;
int large_offsets_needed = 0;
+ int pack_name_concat_len = 0;
+ int dropped_packs = 0;
+ int result = 0;
midx_name = get_midx_filename(object_dir);
if (safe_create_leading_directories(midx_name)) {
midx_name);
}
- packs.m = load_multi_pack_index(object_dir, 1);
+ if (m)
+ packs.m = m;
+ else
+ packs.m = load_multi_pack_index(object_dir, 1);
packs.nr = 0;
- packs.alloc_list = packs.m ? packs.m->num_packs : 16;
- packs.alloc_names = packs.alloc_list;
- packs.list = NULL;
- packs.names = NULL;
- packs.pack_name_concat_len = 0;
- ALLOC_ARRAY(packs.list, packs.alloc_list);
- ALLOC_ARRAY(packs.names, packs.alloc_names);
+ packs.alloc = packs.m ? packs.m->num_packs : 16;
+ packs.info = NULL;
+ ALLOC_ARRAY(packs.info, packs.alloc);
if (packs.m) {
for (i = 0; i < packs.m->num_packs; i++) {
- ALLOC_GROW(packs.list, packs.nr + 1, packs.alloc_list);
- ALLOC_GROW(packs.names, packs.nr + 1, packs.alloc_names);
+ ALLOC_GROW(packs.info, packs.nr + 1, packs.alloc);
- packs.list[packs.nr] = NULL;
- packs.names[packs.nr] = xstrdup(packs.m->pack_names[i]);
- packs.pack_name_concat_len += strlen(packs.names[packs.nr]) + 1;
+ packs.info[packs.nr].orig_pack_int_id = i;
+ packs.info[packs.nr].pack_name = xstrdup(packs.m->pack_names[i]);
+ packs.info[packs.nr].p = NULL;
+ packs.info[packs.nr].expired = 0;
packs.nr++;
}
}
for_each_file_in_pack_dir(object_dir, add_pack_to_midx, &packs);
- if (packs.m && packs.nr == packs.m->num_packs)
+ if (packs.m && packs.nr == packs.m->num_packs && !packs_to_drop)
goto cleanup;
- if (packs.pack_name_concat_len % MIDX_CHUNK_ALIGNMENT)
- packs.pack_name_concat_len += MIDX_CHUNK_ALIGNMENT -
- (packs.pack_name_concat_len % MIDX_CHUNK_ALIGNMENT);
-
- ALLOC_ARRAY(pack_perm, packs.nr);
- sort_packs_by_name(packs.names, packs.nr, pack_perm);
-
- entries = get_sorted_entries(packs.m, packs.list, pack_perm, packs.nr, &nr_entries);
+ entries = get_sorted_entries(packs.m, packs.info, packs.nr, &nr_entries);
for (i = 0; i < nr_entries; i++) {
if (entries[i].offset > 0x7fffffff)
large_offsets_needed = 1;
}
+ QSORT(packs.info, packs.nr, pack_info_compare);
+
+ if (packs_to_drop && packs_to_drop->nr) {
+ int drop_index = 0;
+ int missing_drops = 0;
+
+ for (i = 0; i < packs.nr && drop_index < packs_to_drop->nr; i++) {
+ int cmp = strcmp(packs.info[i].pack_name,
+ packs_to_drop->items[drop_index].string);
+
+ if (!cmp) {
+ drop_index++;
+ packs.info[i].expired = 1;
+ } else if (cmp > 0) {
+ error(_("did not see pack-file %s to drop"),
+ packs_to_drop->items[drop_index].string);
+ drop_index++;
+ missing_drops++;
+ i--;
+ } else {
+ packs.info[i].expired = 0;
+ }
+ }
+
+ if (missing_drops) {
+ result = 1;
+ goto cleanup;
+ }
+ }
+
+ /*
+ * pack_perm stores a permutation between pack-int-ids from the
+ * previous multi-pack-index to the new one we are writing:
+ *
+ * pack_perm[old_id] = new_id
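+	 *
+	 * Surviving packs are renumbered consecutively in their new sorted
+	 * order, while expired packs map to PACK_EXPIRED instead.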
+ */
+ ALLOC_ARRAY(pack_perm, packs.nr);
+ for (i = 0; i < packs.nr; i++) {
+ if (packs.info[i].expired) {
+ dropped_packs++;
+ pack_perm[packs.info[i].orig_pack_int_id] = PACK_EXPIRED;
+ } else {
+ pack_perm[packs.info[i].orig_pack_int_id] = i - dropped_packs;
+ }
+ }
+
+ for (i = 0; i < packs.nr; i++) {
+ if (!packs.info[i].expired)
+ pack_name_concat_len += strlen(packs.info[i].pack_name) + 1;
+ }
+
+ if (pack_name_concat_len % MIDX_CHUNK_ALIGNMENT)
+ pack_name_concat_len += MIDX_CHUNK_ALIGNMENT -
+ (pack_name_concat_len % MIDX_CHUNK_ALIGNMENT);
+
hold_lock_file_for_update(&lk, midx_name, LOCK_DIE_ON_ERROR);
f = hashfd(lk.tempfile->fd, lk.tempfile->filename.buf);
FREE_AND_NULL(midx_name);
cur_chunk = 0;
num_chunks = large_offsets_needed ? 5 : 4;
- written = write_midx_header(f, num_chunks, packs.nr);
+ written = write_midx_header(f, num_chunks, packs.nr - dropped_packs);
chunk_ids[cur_chunk] = MIDX_CHUNKID_PACKNAMES;
chunk_offsets[cur_chunk] = written + (num_chunks + 1) * MIDX_CHUNKLOOKUP_WIDTH;
cur_chunk++;
chunk_ids[cur_chunk] = MIDX_CHUNKID_OIDFANOUT;
- chunk_offsets[cur_chunk] = chunk_offsets[cur_chunk - 1] + packs.pack_name_concat_len;
+ chunk_offsets[cur_chunk] = chunk_offsets[cur_chunk - 1] + pack_name_concat_len;
cur_chunk++;
chunk_ids[cur_chunk] = MIDX_CHUNKID_OIDLOOKUP;
switch (chunk_ids[i]) {
case MIDX_CHUNKID_PACKNAMES:
- written += write_midx_pack_names(f, packs.names, packs.nr);
+ written += write_midx_pack_names(f, packs.info, packs.nr);
break;
case MIDX_CHUNKID_OIDFANOUT:
break;
case MIDX_CHUNKID_OBJECTOFFSETS:
- written += write_midx_object_offsets(f, large_offsets_needed, entries, nr_entries);
+ written += write_midx_object_offsets(f, large_offsets_needed, pack_perm, entries, nr_entries);
break;
case MIDX_CHUNKID_LARGEOFFSETS:
cleanup:
for (i = 0; i < packs.nr; i++) {
- if (packs.list[i]) {
- close_pack(packs.list[i]);
- free(packs.list[i]);
+ if (packs.info[i].p) {
+ close_pack(packs.info[i].p);
+ free(packs.info[i].p);
}
- free(packs.names[i]);
+ free(packs.info[i].pack_name);
}
- free(packs.list);
- free(packs.names);
+ free(packs.info);
free(entries);
free(pack_perm);
free(midx_name);
- return 0;
+ return result;
+}
+
+int write_midx_file(const char *object_dir)
+{
+ return write_midx_internal(object_dir, NULL, NULL);
}
void clear_midx_file(struct repository *r)
return verify_midx_error;
}
+
+int expire_midx_packs(struct repository *r, const char *object_dir)
+{
+ uint32_t i, *count, result = 0;
+ struct string_list packs_to_drop = STRING_LIST_INIT_DUP;
+ struct multi_pack_index *m = load_multi_pack_index(object_dir, 1);
+
+ if (!m)
+ return 0;
+
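+	/* count how many objects the multi-pack-index takes from each pack */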
+ count = xcalloc(m->num_packs, sizeof(uint32_t));
+ for (i = 0; i < m->num_objects; i++) {
+ int pack_int_id = nth_midxed_pack_int_id(m, i);
+ count[pack_int_id]++;
+ }
+
+ for (i = 0; i < m->num_packs; i++) {
+ char *pack_name;
+
+ if (count[i])
+ continue;
+
+ if (prepare_midx_pack(r, m, i))
+ continue;
+
+ if (m->packs[i]->pack_keep)
+ continue;
+
+ pack_name = xstrdup(m->packs[i]->pack_name);
+ close_pack(m->packs[i]);
+
+ string_list_insert(&packs_to_drop, m->pack_names[i]);
+ unlink_pack_path(pack_name, 0);
+ free(pack_name);
+ }
+
+ free(count);
+
+ if (packs_to_drop.nr)
+ result = write_midx_internal(object_dir, m, &packs_to_drop);
+
+ string_list_clear(&packs_to_drop, 0);
+ return result;
+}
+
+struct repack_info {
+ timestamp_t mtime;
+ uint32_t referenced_objects;
+ uint32_t pack_int_id;
+};
+
+static int compare_by_mtime(const void *a_, const void *b_)
+{
+ const struct repack_info *a, *b;
+
+ a = (const struct repack_info *)a_;
+ b = (const struct repack_info *)b_;
+
+ if (a->mtime < b->mtime)
+ return -1;
+ if (a->mtime > b->mtime)
+ return 1;
+ return 0;
+}
+
+static int fill_included_packs_all(struct multi_pack_index *m,
+ unsigned char *include_pack)
+{
+ uint32_t i;
+
+ for (i = 0; i < m->num_packs; i++)
+ include_pack[i] = 1;
+
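+	/* with fewer than two packs there is nothing to consolidate */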
+ return m->num_packs < 2;
+}
+
+static int fill_included_packs_batch(struct repository *r,
+ struct multi_pack_index *m,
+ unsigned char *include_pack,
+ size_t batch_size)
+{
+ uint32_t i, packs_to_repack;
+ size_t total_size;
+ struct repack_info *pack_info = xcalloc(m->num_packs, sizeof(struct repack_info));
+
+ for (i = 0; i < m->num_packs; i++) {
+ pack_info[i].pack_int_id = i;
+
+ if (prepare_midx_pack(r, m, i))
+ continue;
+
+ pack_info[i].mtime = m->packs[i]->mtime;
+ }
+
+ for (i = 0; batch_size && i < m->num_objects; i++) {
+ uint32_t pack_int_id = nth_midxed_pack_int_id(m, i);
+ pack_info[pack_int_id].referenced_objects++;
+ }
+
+ QSORT(pack_info, m->num_packs, compare_by_mtime);
+
+ total_size = 0;
+ packs_to_repack = 0;
+ for (i = 0; total_size < batch_size && i < m->num_packs; i++) {
+ int pack_int_id = pack_info[i].pack_int_id;
+ struct packed_git *p = m->packs[pack_int_id];
+ size_t expected_size;
+
+ if (!p)
+ continue;
+ if (open_pack_index(p) || !p->num_objects)
+ continue;
+
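+		/*
+		 * Estimate the live portion of the pack: scale its on-disk
+		 * size by the fraction of its objects that the
+		 * multi-pack-index still references.
+		 */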
+ expected_size = (size_t)(p->pack_size
+ * pack_info[i].referenced_objects);
+ expected_size /= p->num_objects;
+
+ if (expected_size >= batch_size)
+ continue;
+
+ packs_to_repack++;
+ total_size += expected_size;
+ include_pack[pack_int_id] = 1;
+ }
+
+ free(pack_info);
+
+ if (total_size < batch_size || packs_to_repack < 2)
+ return 1;
+
+ return 0;
+}
+
+int midx_repack(struct repository *r, const char *object_dir, size_t batch_size)
+{
+ int result = 0;
+ uint32_t i;
+ unsigned char *include_pack;
+ struct child_process cmd = CHILD_PROCESS_INIT;
+ struct strbuf base_name = STRBUF_INIT;
+ struct multi_pack_index *m = load_multi_pack_index(object_dir, 1);
+
+ if (!m)
+ return 0;
+
+ include_pack = xcalloc(m->num_packs, sizeof(unsigned char));
+
+ if (batch_size) {
+ if (fill_included_packs_batch(r, m, include_pack, batch_size))
+ goto cleanup;
+ } else if (fill_included_packs_all(m, include_pack))
+ goto cleanup;
+
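+	/* hand the object names from the chosen packs to `git pack-objects` */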
+ argv_array_push(&cmd.args, "pack-objects");
+
+ strbuf_addstr(&base_name, object_dir);
+ strbuf_addstr(&base_name, "/pack/pack");
+ argv_array_push(&cmd.args, base_name.buf);
+ strbuf_release(&base_name);
+
+ cmd.git_cmd = 1;
+ cmd.in = cmd.out = -1;
+
+ if (start_command(&cmd)) {
+ error(_("could not start pack-objects"));
+ result = 1;
+ goto cleanup;
+ }
+
+ for (i = 0; i < m->num_objects; i++) {
+ struct object_id oid;
+ uint32_t pack_int_id = nth_midxed_pack_int_id(m, i);
+
+ if (!include_pack[pack_int_id])
+ continue;
+
+ nth_midxed_object_oid(&oid, m, i);
+ xwrite(cmd.in, oid_to_hex(&oid), the_hash_algo->hexsz);
+ xwrite(cmd.in, "\n", 1);
+ }
+ close(cmd.in);
+
+ if (finish_command(&cmd)) {
+ error(_("could not finish pack-objects"));
+ result = 1;
+ goto cleanup;
+ }
+
+ result = write_midx_internal(object_dir, m, NULL);
+ m = NULL;
+
+cleanup:
+ if (m)
+ close_midx(m);
+ free(include_pack);
+ return result;
+}
int write_midx_file(const char *object_dir);
void clear_midx_file(struct repository *r);
int verify_midx_file(struct repository *r, const char *object_dir);
+int expire_midx_packs(struct repository *r, const char *object_dir);
+int midx_repack(struct repository *r, const char *object_dir, size_t batch_size);
void close_midx(struct multi_pack_index *m);
struct name_entry entry;
const unsigned hashsz = the_hash_algo->rawsz;
- buf = fill_tree_descriptor(&desc, &subtree->val_oid);
+ buf = fill_tree_descriptor(the_repository, &desc, &subtree->val_oid);
if (!buf)
die("Could not read %s for notes-index",
oid_to_hex(&subtree->val_oid));
return;
if (flags & NOTES_INIT_WRITABLE && read_ref(notes_ref, &object_oid))
die("Cannot use notes ref %s", notes_ref);
- if (get_tree_entry(&object_oid, "", &oid, &mode))
+ if (get_tree_entry(the_repository, &object_oid, "", &oid, &mode))
die("Failed to read notes tree referenced by %s (%s)",
notes_ref, oid_to_hex(&object_oid));
char *compute_alternate_path(const char *path, struct strbuf *err);
typedef int alt_odb_fn(struct object_directory *, void *);
int foreach_alt_odb(alt_odb_fn, void*);
+typedef void alternate_ref_fn(const struct object_id *oid, void *);
+void for_each_alternate_ref(alternate_ref_fn, void *);
/*
* Add the directory to the on-disk alternates file; the new entry will also
* the specified sha1. n must be a power of 2. Please note that the
* return value is *not* consistent across computer architectures.
*/
-static unsigned int hash_obj(const unsigned char *sha1, unsigned int n)
+static unsigned int hash_obj(const struct object_id *oid, unsigned int n)
{
- return sha1hash(sha1) & (n - 1);
+ return oidhash(oid) & (n - 1);
}
/*
*/
static void insert_obj_hash(struct object *obj, struct object **hash, unsigned int size)
{
- unsigned int j = hash_obj(obj->oid.hash, size);
+ unsigned int j = hash_obj(&obj->oid, size);
while (hash[j]) {
j++;
* Look up the record for the given sha1 in the hash map stored in
* obj_hash. Return NULL if it was not found.
*/
-struct object *lookup_object(struct repository *r, const unsigned char *sha1)
+struct object *lookup_object(struct repository *r, const struct object_id *oid)
{
unsigned int i, first;
struct object *obj;
if (!r->parsed_objects->obj_hash)
return NULL;
- first = i = hash_obj(sha1, r->parsed_objects->obj_hash_size);
+ first = i = hash_obj(oid, r->parsed_objects->obj_hash_size);
while ((obj = r->parsed_objects->obj_hash[i]) != NULL) {
- if (hasheq(sha1, obj->oid.hash))
+ if (oideq(oid, &obj->oid))
break;
i++;
if (i == r->parsed_objects->obj_hash_size)
r->parsed_objects->obj_hash_size = new_hash_size;
}
-void *create_object(struct repository *r, const unsigned char *sha1, void *o)
+void *create_object(struct repository *r, const struct object_id *oid, void *o)
{
struct object *obj = o;
obj->parsed = 0;
obj->flags = 0;
- hashcpy(obj->oid.hash, sha1);
+ oidcpy(&obj->oid, oid);
if (r->parsed_objects->obj_hash_size - 1 <= r->parsed_objects->nr_objs * 2)
grow_object_hash(r);
}
}
-struct object *lookup_unknown_object(const unsigned char *sha1)
+struct object *lookup_unknown_object(const struct object_id *oid)
{
- struct object *obj = lookup_object(the_repository, sha1);
+ struct object *obj = lookup_object(the_repository, oid);
if (!obj)
- obj = create_object(the_repository, sha1,
+ obj = create_object(the_repository, oid,
alloc_object_node(the_repository));
return obj;
}
void *buffer;
struct object *obj;
- obj = lookup_object(r, oid->hash);
+ obj = lookup_object(r, oid);
if (obj && obj->parsed)
return obj;
return NULL;
}
parse_blob_buffer(lookup_blob(r, oid), NULL, 0);
- return lookup_object(r, oid->hash);
+ return lookup_object(r, oid);
}
buffer = repo_read_object_file(r, oid, &type, &size);
* half-initialised objects, the caller is expected to initialize them
* by calling parse_object() on them.
*/
-struct object *lookup_object(struct repository *r, const unsigned char *sha1);
+struct object *lookup_object(struct repository *r, const struct object_id *oid);
-void *create_object(struct repository *r, const unsigned char *sha1, void *obj);
+void *create_object(struct repository *r, const struct object_id *oid, void *obj);
void *object_as_type(struct repository *r, struct object *obj, enum object_type type, int quiet);
struct object *parse_object_buffer(struct repository *r, const struct object_id *oid, enum object_type type, unsigned long size, void *buffer, int *eaten_p);
/** Returns the object, with potentially excess memory allocated. **/
-struct object *lookup_unknown_object(const unsigned char *sha1);
+struct object *lookup_unknown_object(const struct object_id *oid);
struct object_list *object_list_insert(struct object *item,
struct object_list **list_p);
&((const struct oidmap_entry *) entry_or_key)->oid);
}
-static int hash(const struct object_id *oid)
-{
- int hash;
- memcpy(&hash, oid->hash, sizeof(hash));
- return hash;
-}
-
void oidmap_init(struct oidmap *map, size_t initial_size)
{
hashmap_init(&map->map, oidmap_neq, NULL, initial_size);
if (!map->map.cmpfn)
return NULL;
- return hashmap_get_from_hash(&map->map, hash(key), key);
+ return hashmap_get_from_hash(&map->map, oidhash(key), key);
}
void *oidmap_remove(struct oidmap *map, const struct object_id *key)
if (!map->map.cmpfn)
oidmap_init(map, 0);
- hashmap_entry_init(&entry, hash(key));
+ hashmap_entry_init(&entry, oidhash(key));
return hashmap_remove(&map->map, &entry, key);
}
if (!map->map.cmpfn)
oidmap_init(map, 0);
- hashmap_entry_init(&to_put->internal_entry, hash(&to_put->oid));
+ hashmap_entry_init(&to_put->internal_entry, oidhash(&to_put->oid));
return hashmap_put(&map->map, to_put);
}
{
memset(&set->set, 0, sizeof(set->set));
if (initial_size)
- kh_resize_oid(&set->set, initial_size);
+ kh_resize_oid_set(&set->set, initial_size);
}
int oidset_contains(const struct oidset *set, const struct object_id *oid)
{
- khiter_t pos = kh_get_oid(&set->set, *oid);
+ khiter_t pos = kh_get_oid_set(&set->set, *oid);
return pos != kh_end(&set->set);
}
int oidset_insert(struct oidset *set, const struct object_id *oid)
{
int added;
- kh_put_oid(&set->set, *oid, &added);
+ kh_put_oid_set(&set->set, *oid, &added);
return !added;
}
int oidset_remove(struct oidset *set, const struct object_id *oid)
{
- khiter_t pos = kh_get_oid(&set->set, *oid);
+ khiter_t pos = kh_get_oid_set(&set->set, *oid);
if (pos == kh_end(&set->set))
return 0;
- kh_del_oid(&set->set, pos);
+ kh_del_oid_set(&set->set, pos);
return 1;
}
void oidset_clear(struct oidset *set)
{
- kh_release_oid(&set->set);
+ kh_release_oid_set(&set->set);
oidset_init(set, 0);
}
+
+void oidset_parse_file(struct oidset *set, const char *path)
+{
+ FILE *fp;
+ struct strbuf sb = STRBUF_INIT;
+ struct object_id oid;
+
+ fp = fopen(path, "r");
+ if (!fp)
+ die("could not open object name list: %s", path);
+ while (!strbuf_getline(&sb, fp)) {
+ const char *p;
+ const char *name;
+
+ /*
+ * Allow trailing comments, leading whitespace
+ * (including before commits), and empty or whitespace
+ * only lines.
+ */
+ name = strchr(sb.buf, '#');
+ if (name)
+ strbuf_setlen(&sb, name - sb.buf);
+ strbuf_trim(&sb);
+ if (!sb.len)
+ continue;
+
+ if (parse_oid_hex(sb.buf, &oid, &p) || *p != '\0')
+ die("invalid object name: %s", sb.buf);
+ oidset_insert(set, &oid);
+ }
+ if (ferror(fp))
+ die_errno("Could not read '%s'", path);
+ fclose(fp);
+ strbuf_release(&sb);
+}
* A single oidset; should be zero-initialized (or use OIDSET_INIT).
*/
struct oidset {
- kh_oid_t set;
+ kh_oid_set_t set;
};
#define OIDSET_INIT { { 0 } }
*/
void oidset_clear(struct oidset *set);
+/**
+ * Add the contents of the file 'path' to an initialized oidset. Each line is
+ * an unabbreviated object name. Comments begin with '#', and trailing comments
+ * are allowed. Leading whitespace and empty or white-space only lines are
+ * ignored.
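+ *
+ * For example, a line may look like:
+ *
+ *   <full hex object name>   # optional trailing comment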
+ */
+void oidset_parse_file(struct oidset *set, const char *path);
+
struct oidset_iter {
- kh_oid_t *set;
+ kh_oid_set_t *set;
khiter_t iter;
};
struct ewah_bitmap *blobs;
struct ewah_bitmap *tags;
- khash_sha1 *bitmaps;
- khash_sha1 *reused;
+ kh_oid_map_t *bitmaps;
+ kh_oid_map_t *reused;
struct packing_data *to_pack;
struct bitmapped_commit *selected;
seen_objects_nr = 0;
}
-static uint32_t find_object_pos(const unsigned char *hash)
+static uint32_t find_object_pos(const struct object_id *oid)
{
- struct object_entry *entry = packlist_find(writer.to_pack, hash, NULL);
+ struct object_entry *entry = packlist_find(writer.to_pack, oid, NULL);
if (!entry) {
die("Failed to write bitmap index. Packfile doesn't have full closure "
- "(object %s is missing)", hash_to_hex(hash));
+ "(object %s is missing)", oid_to_hex(oid));
}
return oe_in_pack_pos(writer.to_pack, entry);
static void show_object(struct object *object, const char *name, void *data)
{
struct bitmap *base = data;
- bitmap_set(base, find_object_pos(object->oid.hash));
+ bitmap_set(base, find_object_pos(&object->oid));
mark_as_seen(object);
}
add_to_include_set(struct bitmap *base, struct commit *commit)
{
khiter_t hash_pos;
- uint32_t bitmap_pos = find_object_pos(commit->object.oid.hash);
+ uint32_t bitmap_pos = find_object_pos(&commit->object.oid);
if (bitmap_get(base, bitmap_pos))
return 0;
- hash_pos = kh_get_sha1(writer.bitmaps, commit->object.oid.hash);
+ hash_pos = kh_get_oid_map(writer.bitmaps, commit->object.oid);
if (hash_pos < kh_end(writer.bitmaps)) {
struct bitmapped_commit *bc = kh_value(writer.bitmaps, hash_pos);
bitmap_or_ewah(base, bc->bitmap);
struct bitmap *base = bitmap_new();
struct rev_info revs;
- writer.bitmaps = kh_init_sha1();
+ writer.bitmaps = kh_init_oid_map();
writer.to_pack = to_pack;
if (writer.show_progress)
if (i >= reuse_after)
stored->flags |= BITMAP_FLAG_REUSE;
- hash_pos = kh_put_sha1(writer.bitmaps, object->oid.hash, &hash_ret);
+ hash_pos = kh_put_oid_map(writer.bitmaps, object->oid, &hash_ret);
if (hash_ret == 0)
die("Duplicate entry when writing index: %s",
oid_to_hex(&object->oid));
if (!(bitmap_git = prepare_bitmap_git(to_pack->repo)))
return;
- writer.reused = kh_init_sha1();
+ writer.reused = kh_init_oid_map();
rebuild_existing_bitmaps(bitmap_git, to_pack, writer.reused,
writer.show_progress);
/*
*/
}
-static struct ewah_bitmap *find_reused_bitmap(const unsigned char *sha1)
+static struct ewah_bitmap *find_reused_bitmap(const struct object_id *oid)
{
khiter_t hash_pos;
if (!writer.reused)
return NULL;
- hash_pos = kh_get_sha1(writer.reused, sha1);
+ hash_pos = kh_get_oid_map(writer.reused, *oid);
if (hash_pos >= kh_end(writer.reused))
return NULL;
if (next == 0) {
chosen = indexed_commits[i];
- reused_bitmap = find_reused_bitmap(chosen->object.oid.hash);
+ reused_bitmap = find_reused_bitmap(&chosen->object.oid);
} else {
chosen = indexed_commits[i + next];
for (j = 0; j <= next; ++j) {
struct commit *cm = indexed_commits[i + j];
- reused_bitmap = find_reused_bitmap(cm->object.oid.hash);
+ reused_bitmap = find_reused_bitmap(&cm->object.oid);
if (reused_bitmap || (cm->object.flags & NEEDS_BITMAP) != 0) {
chosen = cm;
break;
static inline int bitmap_position_extended(struct bitmap_index *bitmap_git,
const struct object_id *oid)
{
- khash_oid_pos *positions = bitmap_git->ext_index.positions;
+ kh_oid_pos_t *positions = bitmap_git->ext_index.positions;
khiter_t pos = kh_get_oid_pos(positions, *oid);
if (pos < kh_end(positions)) {
int rebuild_existing_bitmaps(struct bitmap_index *bitmap_git,
struct packing_data *mapping,
- khash_sha1 *reused_bitmaps,
+ kh_oid_map_t *reused_bitmaps,
int show_progress)
{
uint32_t i, num_objects;
reposition = xcalloc(num_objects, sizeof(uint32_t));
for (i = 0; i < num_objects; ++i) {
- const unsigned char *sha1;
+ struct object_id oid;
struct revindex_entry *entry;
struct object_entry *oe;
entry = &bitmap_git->pack->revindex[i];
- sha1 = nth_packed_object_sha1(bitmap_git->pack, entry->nr);
- oe = packlist_find(mapping, sha1, NULL);
+ nth_packed_object_oid(&oid, bitmap_git->pack, entry->nr);
+ oe = packlist_find(mapping, &oid, NULL);
if (oe)
reposition[i] = oe_in_pack_pos(mapping, oe) + 1;
if (!rebuild_bitmap(reposition,
lookup_stored_bitmap(stored),
rebuild)) {
- hash_pos = kh_put_sha1(reused_bitmaps,
- stored->oid.hash,
- &hash_ret);
+ hash_pos = kh_put_oid_map(reused_bitmaps,
+ stored->oid,
+ &hash_ret);
kh_value(reused_bitmaps, hash_pos) =
bitmap_to_ewah(rebuild);
}
struct packed_git **packfile,
uint32_t *entries, off_t *up_to);
int rebuild_existing_bitmaps(struct bitmap_index *, struct packing_data *mapping,
- khash_sha1 *reused_bitmaps, int show_progress);
+ kh_oid_map_t *reused_bitmaps, int show_progress);
void free_bitmap_index(struct bitmap_index *);
/*
#include "config.h"
static uint32_t locate_object_entry_hash(struct packing_data *pdata,
- const unsigned char *sha1,
+ const struct object_id *oid,
int *found)
{
uint32_t i, mask = (pdata->index_size - 1);
- i = sha1hash(sha1) & mask;
+ i = oidhash(oid) & mask;
while (pdata->index[i] > 0) {
uint32_t pos = pdata->index[i] - 1;
- if (hasheq(sha1, pdata->objects[pos].idx.oid.hash)) {
+ if (oideq(oid, &pdata->objects[pos].idx.oid)) {
*found = 1;
return i;
}
for (i = 0; i < pdata->nr_objects; i++) {
int found;
uint32_t ix = locate_object_entry_hash(pdata,
- entry->idx.oid.hash,
+ &entry->idx.oid,
&found);
if (found)
}
struct object_entry *packlist_find(struct packing_data *pdata,
- const unsigned char *sha1,
+ const struct object_id *oid,
uint32_t *index_pos)
{
uint32_t i;
if (!pdata->index_size)
return NULL;
- i = locate_object_entry_hash(pdata, sha1, &found);
+ i = locate_object_entry_hash(pdata, oid, &found);
if (index_pos)
*index_pos = i;
uint32_t index_pos);
struct object_entry *packlist_find(struct packing_data *pdata,
- const unsigned char *sha1,
+ const struct object_id *oid,
uint32_t *index_pos);
static inline uint32_t pack_name_hash(const char *name)
close_commit_graph(o);
}
+void unlink_pack_path(const char *pack_name, int force_delete)
+{
+ static const char *exts[] = {".pack", ".idx", ".keep", ".bitmap", ".promisor"};
+ int i;
+ struct strbuf buf = STRBUF_INIT;
+ size_t plen;
+
+ strbuf_addstr(&buf, pack_name);
+ strip_suffix_mem(buf.buf, &buf.len, ".pack");
+ plen = buf.len;
+
+ if (!force_delete) {
+ strbuf_addstr(&buf, ".keep");
+ if (!access(buf.buf, F_OK)) {
+ strbuf_release(&buf);
+ return;
+ }
+ }
+
+ for (i = 0; i < ARRAY_SIZE(exts); i++) {
+ strbuf_setlen(&buf, plen);
+ strbuf_addstr(&buf, exts[i]);
+ unlink(buf.buf);
+ }
+
+ strbuf_release(&buf);
+}
+
/*
* The LRU pack is the one with the oldest MRU window, preferring packs
* with no used windows, or the oldest mtime if it has no windows allocated.
void clear_delta_base_cache(void);
struct packed_git *add_packed_git(const char *path, size_t path_len, int local);
+/*
+ * Unlink the .pack and associated extension files.
+ * Does nothing if 'force_delete' is false and the pack has a
+ * corresponding ".keep" file.
+ */
+extern void unlink_pack_path(const char *pack_name, int force_delete);
+
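/*
 * Editorial sketch, not part of the patch: a caller removing a redundant
 * pack together with its companion files. The pack path is a hypothetical
 * example; with force_delete == 0 an existing ".keep" file keeps it alive.
 */
static void drop_pack_example(const char *pack_path)
{
	unlink_pack_path(pack_path, 0);
}
/* e.g. drop_pack_example(".git/objects/pack/pack-0123abcd.pack"); */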
/*
* Make sure that a pointer access into an mmap'd index file is within bounds,
* and can provide at least 8 bytes of data.
return 0;
}
+struct option *parse_options_dup(const struct option *o)
+{
+ struct option *opts;
+ int nr = 0;
+
+ while (o && o->type != OPTION_END) {
+ nr++;
+ o++;
+ }
+
+ ALLOC_ARRAY(opts, nr + 1);
+ memcpy(opts, o - nr, sizeof(*o) * nr);
+ memset(opts + nr, 0, sizeof(*opts));
+ opts[nr].type = OPTION_END;
+ return opts;
+}
+
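/*
 * Editorial sketch, not part of the patch: parse_options_dup() lets a
 * caller take a writable copy of a (typically const) OPTION_END-terminated
 * array before adjusting it.
 */
static struct option *writable_copy_example(const struct option *builtin_opts)
{
	struct option *copy = parse_options_dup(builtin_opts);

	/* ... adjust the copy as needed; it ends with its own OPTION_END ... */
	return copy;	/* caller owns the array and eventually free()s it */
}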
struct option *parse_options_concat(struct option *a, struct option *b)
{
struct option *ret;
int parse_options_end(struct parse_opt_ctx_t *ctx);
+struct option *parse_options_dup(const struct option *a);
struct option *parse_options_concat(struct option *a, struct option *b);
/*----- some often used options -----*/
}
int commit_patch_id(struct commit *commit, struct diff_options *options,
- struct object_id *oid, int diff_header_only)
+ struct object_id *oid, int diff_header_only, int stable)
{
if (!patch_id_defined(commit))
return -1;
else
diff_root_tree_oid(&commit->object.oid, "", options);
diffcore_std(options);
- return diff_flush_patch_id(options, oid, diff_header_only);
+ return diff_flush_patch_id(options, oid, diff_header_only, stable);
}
/*
struct patch_id *b = (void *)entry_or_key;
if (is_null_oid(&a->patch_id) &&
- commit_patch_id(a->commit, opt, &a->patch_id, 0))
+ commit_patch_id(a->commit, opt, &a->patch_id, 0, 0))
return error("Could not get patch ID for %s",
oid_to_hex(&a->commit->object.oid));
if (is_null_oid(&b->patch_id) &&
- commit_patch_id(b->commit, opt, &b->patch_id, 0))
+ commit_patch_id(b->commit, opt, &b->patch_id, 0, 0))
return error("Could not get patch ID for %s",
oid_to_hex(&b->commit->object.oid));
return !oideq(&a->patch_id, &b->patch_id);
struct object_id header_only_patch_id;
patch->commit = commit;
- if (commit_patch_id(commit, &ids->diffopts, &header_only_patch_id, 1))
+ if (commit_patch_id(commit, &ids->diffopts, &header_only_patch_id, 1, 0))
return -1;
- hashmap_entry_init(patch, sha1hash(header_only_patch_id.hash));
+ hashmap_entry_init(patch, oidhash(&header_only_patch_id));
return 0;
}
};
int commit_patch_id(struct commit *commit, struct diff_options *options,
- struct object_id *oid, int);
+ struct object_id *oid, int, int);
int init_patch_ids(struct repository *, struct patch_ids *);
int free_patch_ids(struct patch_ids *);
struct patch_id *add_commit_patch_id(struct commit *, struct patch_ids *);
emits gibberish on every call to gettext. To use it run the test suite
with it, e.g.:
- cd t && GIT_TEST_GETTEXT_POISON=YesPlease prove -j 9 ./t[0-9]*.sh
+ cd t && GIT_TEST_GETTEXT_POISON=true prove -j 9 ./t[0-9]*.sh
If tests break with it you should inspect them manually and see if
what you're translating is sane, i.e. that you're not translating
strbuf_addstr(buf, ", ");
strbuf_humanise_bytes(buf, total);
strbuf_addstr(buf, " | ");
- strbuf_humanise_bytes(buf, rate * 1024);
- strbuf_addstr(buf, "/s");
+ strbuf_humanise_rate(buf, rate * 1024);
}
void display_throughput(struct progress *progress, uint64_t total)
#include "commit.h"
#include "pretty.h"
#include "userdiff.h"
+#include "apply.h"
struct patch_util {
/* For the search for an exact match */
struct object_id oid;
};
+static size_t find_end_of_line(char *buffer, unsigned long size)
+{
+ char *eol = memchr(buffer, '\n', size);
+
+ if (!eol)
+ return size;
+
+ *eol = '\0';
+ return eol + 1 - buffer;
+}
+
/*
* Reads the patches into a string list, with the `util` field being populated
* as struct object_id (will need to be free()d).
static int read_patches(const char *range, struct string_list *list)
{
struct child_process cp = CHILD_PROCESS_INIT;
- FILE *in;
- struct strbuf buf = STRBUF_INIT, line = STRBUF_INIT;
+ struct strbuf buf = STRBUF_INIT, contents = STRBUF_INIT;
struct patch_util *util = NULL;
int in_header = 1;
+ char *line, *current_filename = NULL;
+ int offset, len;
+ size_t size;
argv_array_pushl(&cp.args, "log", "--no-color", "-p", "--no-merges",
"--reverse", "--date-order", "--decorate=no",
if (start_command(&cp))
return error_errno(_("could not start `log`"));
- in = fdopen(cp.out, "r");
- if (!in) {
+ if (strbuf_read(&contents, cp.out, 0) < 0) {
error_errno(_("could not read `log` output"));
finish_command(&cp);
return -1;
}
- while (strbuf_getline(&line, in) != EOF) {
+ line = contents.buf;
+ size = contents.len;
+ for (offset = 0; size > 0; offset += len, size -= len, line += len) {
const char *p;
- if (skip_prefix(line.buf, "commit ", &p)) {
+ len = find_end_of_line(line, size);
+ line[len - 1] = '\0';
+ if (skip_prefix(line, "commit ", &p)) {
if (util) {
string_list_append(list, buf.buf)->util = util;
strbuf_reset(&buf);
free(util);
string_list_clear(list, 1);
strbuf_release(&buf);
- strbuf_release(&line);
- fclose(in);
+ strbuf_release(&contents);
finish_command(&cp);
return -1;
}
continue;
}
- if (starts_with(line.buf, "diff --git")) {
+ if (starts_with(line, "diff --git")) {
+ struct patch patch = { 0 };
+ struct strbuf root = STRBUF_INIT;
+ int linenr = 0;
+
in_header = 0;
strbuf_addch(&buf, '\n');
if (!util->diff_offset)
util->diff_offset = buf.len;
- strbuf_addch(&buf, ' ');
- strbuf_addbuf(&buf, &line);
+ line[len - 1] = '\n';
+ len = parse_git_diff_header(&root, &linenr, 1, line,
+ len, size, &patch);
+ if (len < 0)
+ die(_("could not parse git header '%.*s'"), (int)len, line);
+ strbuf_addstr(&buf, " ## ");
+ if (patch.is_new > 0)
+ strbuf_addf(&buf, "%s (new)", patch.new_name);
+ else if (patch.is_delete > 0)
+ strbuf_addf(&buf, "%s (deleted)", patch.old_name);
+ else if (patch.is_rename)
+ strbuf_addf(&buf, "%s => %s", patch.old_name, patch.new_name);
+ else
+ strbuf_addstr(&buf, patch.new_name);
+
+ free(current_filename);
+ if (patch.is_delete > 0)
+ current_filename = xstrdup(patch.old_name);
+ else
+ current_filename = xstrdup(patch.new_name);
+
+ if (patch.new_mode && patch.old_mode &&
+ patch.old_mode != patch.new_mode)
+ strbuf_addf(&buf, " (mode change %06o => %06o)",
+ patch.old_mode, patch.new_mode);
+
+ strbuf_addstr(&buf, " ##");
} else if (in_header) {
- if (starts_with(line.buf, "Author: ")) {
- strbuf_addbuf(&buf, &line);
+ if (starts_with(line, "Author: ")) {
+ strbuf_addstr(&buf, " ## Metadata ##\n");
+ strbuf_addstr(&buf, line);
strbuf_addstr(&buf, "\n\n");
- } else if (starts_with(line.buf, " ")) {
- strbuf_rtrim(&line);
- strbuf_addbuf(&buf, &line);
+ strbuf_addstr(&buf, " ## Commit message ##\n");
+ } else if (starts_with(line, " ")) {
+ p = line + len - 2;
+ while (p >= line && isspace(*p))
+ p--;
+ strbuf_add(&buf, line, p - line + 1);
strbuf_addch(&buf, '\n');
}
continue;
- } else if (starts_with(line.buf, "@@ "))
+ } else if (skip_prefix(line, "@@ ", &p)) {
+ p = strstr(p, "@@");
strbuf_addstr(&buf, "@@");
- else if (!line.buf[0] || starts_with(line.buf, "index "))
+ if (p && current_filename && p[2])
+ strbuf_addf(&buf, " %s:", current_filename);
+ if (p)
+ strbuf_addstr(&buf, p + 2);
+ } else if (!line[0])
/*
* A completely blank (not ' \n', which is context)
* line is not valid in a diff. We skip it
* silently, because this neatly handles the blank
* separator line between commits in git-log
* output.
- *
- * We also want to ignore the diff's `index` lines
- * because they contain exact blob hashes in which
- * we are not interested.
*/
continue;
- else if (line.buf[0] == '>') {
+ else if (line[0] == '>') {
strbuf_addch(&buf, '+');
- strbuf_add(&buf, line.buf + 1, line.len - 1);
- } else if (line.buf[0] == '<') {
+ strbuf_addstr(&buf, line + 1);
+ } else if (line[0] == '<') {
strbuf_addch(&buf, '-');
- strbuf_add(&buf, line.buf + 1, line.len - 1);
- } else if (line.buf[0] == '#') {
+ strbuf_addstr(&buf, line + 1);
+ } else if (line[0] == '#') {
strbuf_addch(&buf, ' ');
- strbuf_add(&buf, line.buf + 1, line.len - 1);
+ strbuf_addstr(&buf, line + 1);
} else {
strbuf_addch(&buf, ' ');
- strbuf_addbuf(&buf, &line);
+ strbuf_addstr(&buf, line);
}
strbuf_addch(&buf, '\n');
util->diffsize++;
}
- fclose(in);
- strbuf_release(&line);
+ strbuf_release(&contents);
if (util)
string_list_append(list, buf.buf)->util = util;
strbuf_release(&buf);
+ free(current_filename);
if (finish_command(&cp))
return -1;
}
static int patch_util_cmp(const void *dummy, const struct patch_util *a,
- const struct patch_util *b, const char *keydata)
+ const struct patch_util *b, const char *keydata)
{
return strcmp(a->diff, keydata ? keydata : b->diff);
}
fwrite(buf->buf, buf->len, 1, diffopt->file);
}
-static struct userdiff_driver no_func_name = {
- .funcname = { "$^", 0 }
+static struct userdiff_driver section_headers = {
+ .funcname = { "^ ## (.*) ##$\n"
+ "^.?@@ (.*)$", REG_EXTENDED }
};
static struct diff_filespec *get_filespec(const char *name, const char *p)
spec->size = strlen(p);
spec->should_munmap = 0;
spec->is_stdin = 1;
- spec->driver = &no_func_name;
+ spec->driver = &section_headers;
return spec;
}
static void patch_diff(const char *a, const char *b,
- struct diff_options *diffopt)
+ struct diff_options *diffopt)
{
diff_queue(&diff_queued_diff,
get_filespec("a", a), get_filespec("b", b));
opts.output_format = DIFF_FORMAT_PATCH;
opts.flags.suppress_diff_headers = 1;
opts.flags.dual_color_diffed_diffs = dual_color;
+ opts.flags.suppress_hunk_header_line_count = 1;
opts.output_prefix = output_prefix_cb;
strbuf_addstr(&indent, " ");
opts.output_prefix_data = &indent;
const char *path, void *data)
{
struct stat st;
- struct object *obj = lookup_object(the_repository, oid->hash);
+ struct object *obj = lookup_object(the_repository, oid);
if (obj && obj->flags & SEEN)
return 0;
struct packed_git *p, uint32_t pos,
void *data)
{
- struct object *obj = lookup_object(the_repository, oid->hash);
+ struct object *obj = lookup_object(the_repository, oid);
if (obj && obj->flags & SEEN)
return 0;
}
static unsigned long load_cache_entries_threaded(struct index_state *istate, const char *mmap, size_t mmap_size,
- unsigned long src_offset, int nr_threads, struct index_entry_offset_table *ieot)
+ int nr_threads, struct index_entry_offset_table *ieot)
{
int i, offset, ieot_blocks, ieot_start, err;
struct load_cache_entries_thread_data *data;
ieot = read_ieot_extension(mmap, mmap_size, extension_offset);
if (ieot) {
- src_offset += load_cache_entries_threaded(istate, mmap, mmap_size, src_offset, nr_threads, ieot);
+ src_offset += load_cache_entries_threaded(istate, mmap, mmap_size, nr_threads, ieot);
free(ieot);
} else {
src_offset += load_all_cache_entries(istate, mmap, mmap_size, src_offset);
#include "commit-slab.h"
#include "commit-graph.h"
#include "commit-reach.h"
+#include "worktree.h"
+#include "hashmap.h"
+#include "argv-array.h"
static struct ref_msg {
const char *gone;
struct object_info info;
} oi, oi_deref;
+struct ref_to_worktree_entry {
+ struct hashmap_entry ent; /* must be the first member! */
+ struct worktree *wt; /* key is wt->head_ref */
+};
+
+static int ref_to_worktree_map_cmpfnc(const void *unused_lookupdata,
+ const void *existing_hashmap_entry_to_test,
+ const void *key,
+ const void *keydata_aka_refname)
+{
+ const struct ref_to_worktree_entry *e = existing_hashmap_entry_to_test;
+ const struct ref_to_worktree_entry *k = key;
+ return strcmp(e->wt->head_ref,
+ keydata_aka_refname ? keydata_aka_refname : k->wt->head_ref);
+}
+
+static struct ref_to_worktree_map {
+ struct hashmap map;
+ struct worktree **worktrees;
+} ref_to_worktree_map;
+
/*
* An atom is a valid field atom listed below, possibly prefixed with
* a "*" to denote deref_tag().
{ "flag", SOURCE_NONE },
{ "HEAD", SOURCE_NONE, FIELD_STR, head_atom_parser },
{ "color", SOURCE_NONE, FIELD_STR, color_atom_parser },
+ { "worktreepath", SOURCE_NONE },
{ "align", SOURCE_NONE, FIELD_STR, align_atom_parser },
{ "end", SOURCE_NONE },
{ "if", SOURCE_NONE, FIELD_STR, if_atom_parser },
struct wt_status_state state;
memset(&state, 0, sizeof(state));
wt_status_get_state(the_repository, &state, 1);
+
+ /*
+ * The ( character must be hard-coded and not part of a localizable
+ * string, since the description is used as a sort key and compared
+ * with ref names.
+ */
+ strbuf_addch(&desc, '(');
if (state.rebase_in_progress ||
state.rebase_interactive_in_progress) {
if (state.branch)
- strbuf_addf(&desc, _("(no branch, rebasing %s)"),
+ strbuf_addf(&desc, _("no branch, rebasing %s"),
state.branch);
else
- strbuf_addf(&desc, _("(no branch, rebasing detached HEAD %s)"),
+ strbuf_addf(&desc, _("no branch, rebasing detached HEAD %s"),
state.detached_from);
} else if (state.bisect_in_progress)
- strbuf_addf(&desc, _("(no branch, bisect started on %s)"),
+ strbuf_addf(&desc, _("no branch, bisect started on %s"),
state.branch);
else if (state.detached_from) {
if (state.detached_at)
- /*
- * TRANSLATORS: make sure this matches "HEAD
- * detached at " in wt-status.c
- */
- strbuf_addf(&desc, _("(HEAD detached at %s)"),
- state.detached_from);
+ strbuf_addstr(&desc, HEAD_DETACHED_AT);
else
- /*
- * TRANSLATORS: make sure this matches "HEAD
- * detached from " in wt-status.c
- */
- strbuf_addf(&desc, _("(HEAD detached from %s)"),
- state.detached_from);
+ strbuf_addstr(&desc, HEAD_DETACHED_FROM);
+ strbuf_addstr(&desc, state.detached_from);
}
else
- strbuf_addstr(&desc, _("(no branch)"));
+ strbuf_addstr(&desc, _("no branch"));
+ strbuf_addch(&desc, ')');
+
free(state.branch);
free(state.onto);
free(state.detached_from);
return 0;
}
+static void populate_worktree_map(struct hashmap *map, struct worktree **worktrees)
+{
+ int i;
+
+ for (i = 0; worktrees[i]; i++) {
+ if (worktrees[i]->head_ref) {
+ struct ref_to_worktree_entry *entry;
+ entry = xmalloc(sizeof(*entry));
+ entry->wt = worktrees[i];
+ hashmap_entry_init(entry, strhash(worktrees[i]->head_ref));
+
+ hashmap_add(map, entry);
+ }
+ }
+}
+
+static void lazy_init_worktree_map(void)
+{
+ if (ref_to_worktree_map.worktrees)
+ return;
+
+ ref_to_worktree_map.worktrees = get_worktrees(0);
+ hashmap_init(&(ref_to_worktree_map.map), ref_to_worktree_map_cmpfnc, NULL, 0);
+ populate_worktree_map(&(ref_to_worktree_map.map), ref_to_worktree_map.worktrees);
+}
+
+static char *get_worktree_path(const struct used_atom *atom, const struct ref_array_item *ref)
+{
+ struct hashmap_entry entry;
+ struct ref_to_worktree_entry *lookup_result;
+
+ lazy_init_worktree_map();
+
+ hashmap_entry_init(&entry, strhash(ref->refname));
+ lookup_result = hashmap_get(&(ref_to_worktree_map.map), &entry, ref->refname);
+
+ if (lookup_result)
+ return xstrdup(lookup_result->wt->path);
+ else
+ return xstrdup("");
+}
+
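/*
 * Editorial note, not part of the patch: with the new atom in place,
 * something like
 *
 *     git for-each-ref --format='%(worktreepath) %(refname:short)' refs/heads
 *
 * prints the checkout directory of every branch that is checked out in
 * some worktree, and an empty string for the others.
 */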
/*
* Parse the object referred by ref, and grab needed value.
*/
if (starts_with(name, "refname"))
refname = get_refname(atom, ref);
+ else if (!strcmp(name, "worktreepath")) {
+ if (ref->kind == FILTER_REFS_BRANCHES)
+ v->s = get_worktree_path(atom, ref);
+ else
+ v->s = xstrdup("");
+ continue;
+ }
else if (starts_with(name, "symref"))
refname = get_symref(atom, ref);
else if (starts_with(name, "upstream")) {
return match_pattern(filter, refname);
}
-/*
- * Find the longest prefix of pattern we can pass to
- * `for_each_fullref_in()`, namely the part of pattern preceding the
- * first glob character. (Note that `for_each_fullref_in()` is
- * perfectly happy working with a prefix that doesn't end at a
- * pathname component boundary.)
- */
-static void find_longest_prefix(struct strbuf *out, const char *pattern)
+static int qsort_strcmp(const void *va, const void *vb)
{
- const char *p;
+ const char *a = *(const char **)va;
+ const char *b = *(const char **)vb;
- for (p = pattern; *p && !is_glob_special(*p); p++)
- ;
+ return strcmp(a, b);
+}
+
+static void find_longest_prefixes_1(struct string_list *out,
+ struct strbuf *prefix,
+ const char **patterns, size_t nr)
+{
+ size_t i;
+
+ for (i = 0; i < nr; i++) {
+ char c = patterns[i][prefix->len];
+ if (!c || is_glob_special(c)) {
+ string_list_append(out, prefix->buf);
+ return;
+ }
+ }
+
+ i = 0;
+ while (i < nr) {
+ size_t end;
+
+ /*
+ * Set "end" to the index of the element _after_ the last one
+ * in our group.
+ */
+ for (end = i + 1; end < nr; end++) {
+ if (patterns[i][prefix->len] != patterns[end][prefix->len])
+ break;
+ }
- strbuf_add(out, pattern, p - pattern);
+ strbuf_addch(prefix, patterns[i][prefix->len]);
+ find_longest_prefixes_1(out, prefix, patterns + i, end - i);
+ strbuf_setlen(prefix, prefix->len - 1);
+
+ i = end;
+ }
+}
+
+static void find_longest_prefixes(struct string_list *out,
+ const char **patterns)
+{
+ struct argv_array sorted = ARGV_ARRAY_INIT;
+ struct strbuf prefix = STRBUF_INIT;
+
+ argv_array_pushv(&sorted, patterns);
+ QSORT(sorted.argv, sorted.argc, qsort_strcmp);
+
+ find_longest_prefixes_1(out, &prefix, sorted.argv, sorted.argc);
+
+ argv_array_clear(&sorted);
+ strbuf_release(&prefix);
}
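/*
 * Editorial note, not part of the patch: a worked example. For the
 * patterns
 *
 *     refs/heads/maint  refs/heads/ma*  refs/tags/v*
 *
 * find_longest_prefixes() yields "refs/heads/ma" and "refs/tags/v", so
 * the caller below only walks those two subtrees instead of all of
 * "refs/".
 */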
/*
void *cb_data,
int broken)
{
- struct strbuf prefix = STRBUF_INIT;
+ struct string_list prefixes = STRING_LIST_INIT_DUP;
+ struct string_list_item *prefix;
int ret;
if (!filter->match_as_path) {
return for_each_fullref_in("", cb, cb_data, broken);
}
- if (filter->name_patterns[1]) {
- /*
- * multiple patterns; in theory this could still work as long
- * as the patterns are disjoint. We'd just make multiple calls
- * to for_each_ref(). But if they're not disjoint, we'd end up
- * reporting the same ref multiple times. So let's punt on that
- * for now.
- */
- return for_each_fullref_in("", cb, cb_data, broken);
- }
+ find_longest_prefixes(&prefixes, filter->name_patterns);
- find_longest_prefix(&prefix, filter->name_patterns[0]);
+ for_each_string_list_item(prefix, &prefixes) {
+ ret = for_each_fullref_in(prefix->string, cb, cb_data, broken);
+ if (ret)
+ break;
+ }
- ret = for_each_fullref_in(prefix.buf, cb, cb_data, broken);
- strbuf_release(&prefix);
+ string_list_clear(&prefixes, 0);
return ret;
}
{
free((char *)item->symref);
if (item->value) {
- free((char *)item->value->s);
+ int i;
+ for (i = 0; i < used_atom_cnt; i++)
+ free((char *)item->value[i].s);
free(item->value);
}
free(item);
{
int i;
- for (i = 0; i < used_atom_cnt; i++)
- free((char *)used_atom[i].name);
- FREE_AND_NULL(used_atom);
- used_atom_cnt = 0;
for (i = 0; i < array->nr; i++)
free_array_item(array->items[i]);
FREE_AND_NULL(array->items);
array->nr = array->alloc = 0;
+
+ for (i = 0; i < used_atom_cnt; i++)
+ free((char *)used_atom[i].name);
+ FREE_AND_NULL(used_atom);
+ used_atom_cnt = 0;
+
+ if (ref_to_worktree_map.worktrees) {
+ hashmap_free(&(ref_to_worktree_map.map), 1);
+ free_worktrees(ref_to_worktree_map.worktrees);
+ ref_to_worktree_map.worktrees = NULL;
+ }
}
static void do_merge_filter(struct ref_filter_cbdata *ref_cbdata)
* not legal. It is legal if it is something reasonable to have under
* ".git/refs/"; We do not like it if:
*
- * - any path component of it begins with ".", or
+ * - it begins with ".", or
* - it has double dots "..", or
* - it has ASCII control characters, or
* - it has ":", "?", "[", "\", "^", "~", SP, or TAB anywhere, or
* - it ends with a "/", or
* - it ends with ".lock", or
* - it contains a "@{" portion
+ *
+ * When sanitized is not NULL, instead of rejecting the input refname
+ * as an error, try to come up with a usable replacement for the input
+ * refname in it.
*/
-static int check_refname_component(const char *refname, int *flags)
+static int check_refname_component(const char *refname, int *flags,
+ struct strbuf *sanitized)
{
const char *cp;
char last = '\0';
+ size_t component_start = 0; /* garbage - not a reasonable initial value */
+
+ if (sanitized)
+ component_start = sanitized->len;
for (cp = refname; ; cp++) {
int ch = *cp & 255;
unsigned char disp = refname_disposition[ch];
+
+ if (sanitized && disp != 1)
+ strbuf_addch(sanitized, ch);
+
switch (disp) {
case 1:
goto out;
case 2:
- if (last == '.')
- return -1; /* Refname contains "..". */
+ if (last == '.') { /* Refname contains "..". */
+ if (sanitized)
+ /* collapse ".." to single "." */
+ strbuf_setlen(sanitized, sanitized->len - 1);
+ else
+ return -1;
+ }
break;
case 3:
- if (last == '@')
- return -1; /* Refname contains "@{". */
+ if (last == '@') { /* Refname contains "@{". */
+ if (sanitized)
+ sanitized->buf[sanitized->len-1] = '-';
+ else
+ return -1;
+ }
break;
case 4:
- return -1;
+ /* forbidden char */
+ if (sanitized)
+ sanitized->buf[sanitized->len-1] = '-';
+ else
+ return -1;
+ break;
case 5:
- if (!(*flags & REFNAME_REFSPEC_PATTERN))
- return -1; /* refspec can't be a pattern */
+ if (!(*flags & REFNAME_REFSPEC_PATTERN)) {
+ /* refspec can't be a pattern */
+ if (sanitized)
+ sanitized->buf[sanitized->len-1] = '-';
+ else
+ return -1;
+ }
/*
* Unset the pattern flag so that we only accept
out:
if (cp == refname)
return 0; /* Component has zero length. */
- if (refname[0] == '.')
- return -1; /* Component starts with '.'. */
+
+ if (refname[0] == '.') { /* Component starts with '.'. */
+ if (sanitized)
+ sanitized->buf[component_start] = '-';
+ else
+ return -1;
+ }
if (cp - refname >= LOCK_SUFFIX_LEN &&
- !memcmp(cp - LOCK_SUFFIX_LEN, LOCK_SUFFIX, LOCK_SUFFIX_LEN))
- return -1; /* Refname ends with ".lock". */
+ !memcmp(cp - LOCK_SUFFIX_LEN, LOCK_SUFFIX, LOCK_SUFFIX_LEN)) {
+ if (!sanitized)
+ return -1;
+ /* Refname ends with ".lock". */
+ while (strbuf_strip_suffix(sanitized, LOCK_SUFFIX)) {
+ /* try again in case we have .lock.lock */
+ }
+ }
return cp - refname;
}
-int check_refname_format(const char *refname, int flags)
+static int check_or_sanitize_refname(const char *refname, int flags,
+ struct strbuf *sanitized)
{
int component_len, component_count = 0;
- if (!strcmp(refname, "@"))
+ if (!strcmp(refname, "@")) {
/* Refname is a single character '@'. */
- return -1;
+ if (sanitized)
+ strbuf_addch(sanitized, '-');
+ else
+ return -1;
+ }
while (1) {
+ if (sanitized && sanitized->len)
+ strbuf_complete(sanitized, '/');
+
/* We are at the start of a path component. */
- component_len = check_refname_component(refname, &flags);
- if (component_len <= 0)
+ component_len = check_refname_component(refname, &flags,
+ sanitized);
+ if (sanitized && component_len == 0)
+ ; /* OK, omit empty component */
+ else if (component_len <= 0)
return -1;
component_count++;
refname += component_len + 1;
}
- if (refname[component_len - 1] == '.')
- return -1; /* Refname ends with '.'. */
+ if (refname[component_len - 1] == '.') {
+ /* Refname ends with '.'. */
+ if (sanitized)
+ ; /* omit ending dot */
+ else
+ return -1;
+ }
if (!(flags & REFNAME_ALLOW_ONELEVEL) && component_count < 2)
return -1; /* Refname has only one component. */
return 0;
}
+int check_refname_format(const char *refname, int flags)
+{
+ return check_or_sanitize_refname(refname, flags, NULL);
+}
+
+void sanitize_refname_component(const char *refname, struct strbuf *out)
+{
+ if (check_or_sanitize_refname(refname, REFNAME_ALLOW_ONELEVEL, out))
+ BUG("sanitizing refname '%s' check returned error", refname);
+}
+
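/*
 * Editorial note, not part of the patch: a few examples of the sanitizing
 * behaviour for single components:
 *
 *     "foo..bar"    -> "foo.bar"    (".." collapsed)
 *     ".hidden"     -> "-hidden"    (leading "." replaced)
 *     "a:b"         -> "a-b"        (forbidden character replaced)
 *     "topic.lock"  -> "topic"      (".lock" suffix stripped)
 */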
int refname_is_safe(const char *refname)
{
const char *rest;
enum peel_status peel_object(const struct object_id *name, struct object_id *oid)
{
- struct object *o = lookup_unknown_object(name->hash);
+ struct object *o = lookup_unknown_object(name);
if (o->type == OBJ_NONE) {
int type = oid_object_info(the_repository, name, NULL);
*/
int check_refname_format(const char *refname, int flags);
+/*
+ * Apply the rules from check_refname_format, but mutate the result until it
+ * is acceptable, and place the result in "out".
+ */
+void sanitize_refname_component(const char *refname, struct strbuf *out);
+
const char *prettify_refname(const char *refname);
char *refs_shorten_unambiguous_ref(struct ref_store *refs,
static struct ref_iterator *reflog_iterator_begin(struct ref_store *ref_store,
const char *gitdir)
{
- struct files_reflog_iterator *iter = xcalloc(1, sizeof(*iter));
- struct ref_iterator *ref_iterator = &iter->base;
+ struct dir_iterator *diter;
+ struct files_reflog_iterator *iter;
+ struct ref_iterator *ref_iterator;
struct strbuf sb = STRBUF_INIT;
- base_ref_iterator_init(ref_iterator, &files_reflog_iterator_vtable, 0);
strbuf_addf(&sb, "%s/logs", gitdir);
- iter->dir_iterator = dir_iterator_begin(sb.buf);
+
+ diter = dir_iterator_begin(sb.buf, 0);
+ if (!diter)
+ return empty_ref_iterator_begin();
+
+ iter = xcalloc(1, sizeof(*iter));
+ ref_iterator = &iter->base;
+
+ base_ref_iterator_init(ref_iterator, &files_reflog_iterator_vtable, 0);
+ iter->dir_iterator = diter;
iter->ref_store = ref_store;
strbuf_release(&sb);
die("unable to parse commit %s", name);
if (flags & UNINTERESTING) {
mark_parents_uninteresting(commit);
- revs->limited = 1;
+
+ if (!revs->topo_order || !generation_numbers_enabled(the_repository))
+ revs->limited = 1;
}
if (revs->sources) {
char **slot = revision_sources_at(revs->sources, commit);
free_worktrees(worktrees);
}
+struct add_alternate_refs_data {
+ struct rev_info *revs;
+ unsigned int flags;
+};
+
+static void add_one_alternate_ref(const struct object_id *oid,
+ void *vdata)
+{
+ const char *name = ".alternate";
+ struct add_alternate_refs_data *data = vdata;
+ struct object *obj;
+
+ obj = get_reference(data->revs, name, oid, data->flags);
+ add_rev_cmdline(data->revs, obj, name, REV_CMD_REV, data->flags);
+ add_pending_object(data->revs, obj, name);
+}
+
+static void add_alternate_refs_to_pending(struct rev_info *revs,
+ unsigned int flags)
+{
+ struct add_alternate_refs_data data;
+ data.revs = revs;
+ data.flags = flags;
+ for_each_alternate_ref(add_one_alternate_ref, &data);
+}
+
static int add_parents_only(struct rev_info *revs, const char *arg_, int flags,
int exclude_parent)
{
!strcmp(arg, "--no-walk") || !strcmp(arg, "--do-walk") ||
!strcmp(arg, "--bisect") || starts_with(arg, "--glob=") ||
!strcmp(arg, "--indexed-objects") ||
+ !strcmp(arg, "--alternate-refs") ||
starts_with(arg, "--exclude=") ||
starts_with(arg, "--branches=") || starts_with(arg, "--tags=") ||
starts_with(arg, "--remotes=") || starts_with(arg, "--no-walk="))
add_reflogs_to_pending(revs, *flags);
} else if (!strcmp(arg, "--indexed-objects")) {
add_index_objects_to_pending(revs, *flags);
+ } else if (!strcmp(arg, "--alternate-refs")) {
+ add_alternate_refs_to_pending(revs, *flags);
} else if (!strcmp(arg, "--not")) {
*flags ^= UNINTERESTING | BOTTOM;
} else if (!strcmp(arg, "--no-walk")) {
struct commit *parent = p->item;
int *pi;
+ if (parent->object.flags & UNINTERESTING)
+ continue;
+
if (parse_commit_gently(parent, 1) < 0)
continue;
* GIT_AUTHOR_DATE='$author_date'
*
* where $author_name, $author_email and $author_date are quoted. We are strict
- * with our parsing, as the file was meant to be eval'd in the old
+ * with our parsing, as the file was meant to be eval'd in the now-removed
* git-am.sh/git-rebase--interactive.sh scripts, and thus if the file differs
* from what this function expects, it is better to bail out than to do
* something that the user does not expect.
return todo_list->buf.buf + item->arg_offset;
}
+static int is_command(enum todo_command command, const char **bol)
+{
+ const char *str = todo_command_info[command].str;
+ const char nick = todo_command_info[command].c;
+ const char *p = *bol + 1;
+
+ return skip_prefix(*bol, str, bol) ||
+ ((nick && **bol == nick) &&
+ (*p == ' ' || *p == '\t' || *p == '\n' || *p == '\r' || !*p) &&
+ (*bol = p));
+}
+
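/*
 * Editorial note, not part of the patch: is_command() accepts both the
 * spelled-out command and its one-letter nickname, so "pick abc123" and
 * "p abc123" both match TODO_PICK; the nickname form only counts when it
 * is followed by whitespace or the end of the line.
 */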
static int parse_insn_line(struct repository *r, struct todo_item *item,
const char *buf, const char *bol, char *eol)
{
}
for (i = 0; i < TODO_COMMENT; i++)
- if (skip_prefix(bol, todo_command_info[i].str, &bol)) {
- item->command = i;
- break;
- } else if ((bol + 1 == eol || bol[1] == ' ') &&
- *bol == todo_command_info[i].c) {
- bol++;
+ if (is_command(i, &bol)) {
item->command = i;
break;
}
int sequencer_get_last_command(struct repository *r, enum replay_action *action)
{
- struct todo_item item;
- char *eol;
- const char *todo_file;
+ const char *todo_file, *bol;
struct strbuf buf = STRBUF_INIT;
- int ret = -1;
+ int ret = 0;
todo_file = git_path_todo_file();
if (strbuf_read_file(&buf, todo_file, 0) < 0) {
- if (errno == ENOENT)
+ if (errno == ENOENT || errno == ENOTDIR)
return -1;
else
return error_errno("unable to open '%s'", todo_file);
}
- eol = strchrnul(buf.buf, '\n');
- if (buf.buf != eol && eol[-1] == '\r')
- eol--; /* strip Carriage Return */
- if (parse_insn_line(r, &item, buf.buf, buf.buf, eol))
- goto fail;
- if (item.command == TODO_PICK)
+ bol = buf.buf + strspn(buf.buf, " \t\r\n");
+ if (is_command(TODO_PICK, &bol) && (*bol == ' ' || *bol == '\t'))
*action = REPLAY_PICK;
- else if (item.command == TODO_REVERT)
+ else if (is_command(TODO_REVERT, &bol) &&
+ (*bol == ' ' || *bol == '\t'))
*action = REPLAY_REVERT;
else
- goto fail;
-
- ret = 0;
+ ret = -1;
- fail:
strbuf_release(&buf);
return ret;
return ret;
}
-void sequencer_post_commit_cleanup(struct repository *r)
+void sequencer_post_commit_cleanup(struct repository *r, int verbose)
{
struct replay_opts opts = REPLAY_OPTS_INIT;
int need_cleanup = 0;
if (file_exists(git_path_cherry_pick_head(r))) {
- unlink(git_path_cherry_pick_head(r));
+ if (!unlink(git_path_cherry_pick_head(r)) && verbose)
+ warning(_("cancelling a cherry-pick in progress"));
opts.action = REPLAY_PICK;
need_cleanup = 1;
}
if (file_exists(git_path_revert_head(r))) {
- unlink(git_path_revert_head(r));
+ if (!unlink(git_path_revert_head(r)) && verbose)
+ warning(_("cancelling a revert in progress"));
opts.action = REPLAY_REVERT;
need_cleanup = 1;
}
return 0;
}
-static int create_seq_dir(void)
+static int create_seq_dir(struct repository *r)
{
- if (file_exists(git_path_seq_dir())) {
- error(_("a cherry-pick or revert is already in progress"));
- advise(_("try \"git cherry-pick (--continue | --quit | --abort)\""));
+ enum replay_action action;
+ const char *in_progress_error = NULL;
+ const char *in_progress_advice = NULL;
+ unsigned int advise_skip = file_exists(git_path_revert_head(r)) ||
+ file_exists(git_path_cherry_pick_head(r));
+
+ if (!sequencer_get_last_command(r, &action)) {
+ switch (action) {
+ case REPLAY_REVERT:
+ in_progress_error = _("revert is already in progress");
+ in_progress_advice =
+ _("try \"git revert (--continue | %s--abort | --quit)\"");
+ break;
+ case REPLAY_PICK:
+ in_progress_error = _("cherry-pick is already in progress");
+ in_progress_advice =
+ _("try \"git cherry-pick (--continue | %s--abort | --quit)\"");
+ break;
+ default:
+ BUG("unexpected action in create_seq_dir");
+ }
+ }
+ if (in_progress_error) {
+ error("%s", in_progress_error);
+ if (advice_sequencer_in_use)
+ advise(in_progress_advice,
+ advise_skip ? "--skip | " : "");
return -1;
- } else if (mkdir(git_path_seq_dir(), 0777) < 0)
+ }
+ if (mkdir(git_path_seq_dir(), 0777) < 0)
return error_errno(_("could not create sequencer directory '%s'"),
git_path_seq_dir());
+
return 0;
}
return oideq(&actual_head, &expected_head);
}
-static int reset_for_rollback(const struct object_id *oid)
+static int reset_merge(const struct object_id *oid)
{
- const char *argv[4]; /* reset --merge <arg> + NULL */
+ int ret;
+ struct argv_array argv = ARGV_ARRAY_INIT;
- argv[0] = "reset";
- argv[1] = "--merge";
- argv[2] = oid_to_hex(oid);
- argv[3] = NULL;
- return run_command_v_opt(argv, RUN_GIT_CMD);
+ argv_array_pushl(&argv, "reset", "--merge", NULL);
+
+ if (!is_null_oid(oid))
+ argv_array_push(&argv, oid_to_hex(oid));
+
+ ret = run_command_v_opt(argv.argv, RUN_GIT_CMD);
+ argv_array_clear(&argv);
+
+ return ret;
}
static int rollback_single_pick(struct repository *r)
return error(_("cannot resolve HEAD"));
if (is_null_oid(&head_oid))
return error(_("cannot abort from a branch yet to be born"));
- return reset_for_rollback(&head_oid);
+ return reset_merge(&head_oid);
+}
+
+static int skip_single_pick(void)
+{
+ struct object_id head;
+
+ if (read_ref_full("HEAD", 0, &head, NULL))
+ return error(_("cannot resolve HEAD"));
+ return reset_merge(&head);
}
int sequencer_rollback(struct repository *r, struct replay_opts *opts)
warning(_("You seem to have moved HEAD. "
"Not rewinding, check your HEAD!"));
} else
- if (reset_for_rollback(&oid))
+ if (reset_merge(&oid))
goto fail;
strbuf_release(&buf);
return sequencer_remove_state(opts);
return -1;
}
+int sequencer_skip(struct repository *r, struct replay_opts *opts)
+{
+ enum replay_action action = -1;
+ sequencer_get_last_command(r, &action);
+
+ /*
+ * Check whether the subcommand requested to skip the commit is actually
+ * in progress and that it's safe to skip the commit.
+ *
+ * opts->action tells us which subcommand requested to skip the commit.
+ * If the corresponding .git/<ACTION>_HEAD exists, we know that the
+ * action is in progress and we can skip the commit.
+ *
+ * Otherwise we check that the last instruction was related to the
+ * particular subcommand we're trying to execute and barf if that's not
+ * the case.
+ *
+ * Finally we check that the rollback is "safe", i.e., has the HEAD
+ * moved? In this case, it doesn't make sense to "reset the merge" and
+ * "skip the commit" as the user already handled this by committing. But
+ * we'd not want to barf here, instead give advice on how to proceed. We
+ * only need to check that when .git/<ACTION>_HEAD doesn't exist because
+ * it gets removed when the user commits, so if it still exists we're
+ * sure the user can't have committed before.
+ */
+ switch (opts->action) {
+ case REPLAY_REVERT:
+ if (!file_exists(git_path_revert_head(r))) {
+ if (action != REPLAY_REVERT)
+ return error(_("no revert in progress"));
+ if (!rollback_is_safe())
+ goto give_advice;
+ }
+ break;
+ case REPLAY_PICK:
+ if (!file_exists(git_path_cherry_pick_head(r))) {
+ if (action != REPLAY_PICK)
+ return error(_("no cherry-pick in progress"));
+ if (!rollback_is_safe())
+ goto give_advice;
+ }
+ break;
+ default:
+ BUG("unexpected action in sequencer_skip");
+ }
+
+ if (skip_single_pick())
+ return error(_("failed to skip the commit"));
+ if (!is_directory(git_path_seq_dir()))
+ return 0;
+
+ return sequencer_continue(r, opts);
+
+give_advice:
+ error(_("there is nothing to skip"));
+
+ if (advice_resolve_conflict) {
+ advise(_("have you committed already?\n"
+ "try \"git %s --continue\""),
+ action == REPLAY_REVERT ? "revert" : "cherry-pick");
+ }
+ return -1;
+}
+
static int save_todo(struct todo_list *todo_list, struct replay_opts *opts)
{
struct lock_file todo_lock = LOCK_INIT;
return error_resolve_conflict(_(action_name(opts)));
}
- if (!fill_tree_descriptor(&desc, &oid)) {
+ if (!fill_tree_descriptor(r, &desc, &oid)) {
error(_("failed to find tree of %s"), oid_to_hex(&oid));
rollback_lock_file(&lock);
free((void *)desc.buffer);
rollback_lock_file(&lock);
ret = fast_forward_to(r, &commit->object.oid,
&head_commit->object.oid, 0, opts);
+ if (flags & TODO_EDIT_MERGE_MSG) {
+ run_commit_flags |= AMEND_MSG;
+ goto fast_forward_edit;
+ }
goto leave_merge;
}
* value (a negative one would indicate that the `merge`
* command needs to be rescheduled).
*/
+ fast_forward_edit:
ret = !!run_git_commit(r, git_path_merge_msg(r), opts,
run_commit_flags);
unlink(rebase_path_author_script());
unlink(rebase_path_stopped_sha());
unlink(rebase_path_amend());
- unlink(git_path_merge_head(the_repository));
+ unlink(git_path_merge_head(r));
delete_ref(NULL, "REBASE_HEAD", NULL, REF_NO_DEREF);
if (item->command == TODO_BREAK) {
opts, flags))
return error(_("could not commit staged changes."));
unlink(rebase_path_amend());
- unlink(git_path_merge_head(the_repository));
+ unlink(git_path_merge_head(r));
if (final_fixup) {
unlink(rebase_path_fixup_msg());
unlink(rebase_path_squash_msg());
*/
if (walk_revs_populate_todo(&todo_list, opts) ||
- create_seq_dir() < 0)
+ create_seq_dir(r) < 0)
return -1;
if (get_oid("HEAD", &oid) && (opts->action == REPLAY_REVERT))
return error(_("can't revert as initial commit"));
struct replay_opts *opts);
int sequencer_continue(struct repository *repo, struct replay_opts *opts);
int sequencer_rollback(struct repository *repo, struct replay_opts *opts);
+int sequencer_skip(struct repository *repo, struct replay_opts *opts);
int sequencer_remove_state(struct replay_opts *opts);
#define TODO_LIST_KEEP_EMPTY (1U << 0)
void parse_strategy_opts(struct replay_opts *opts, char *raw_opts);
int write_basic_state(struct replay_opts *opts, const char *head_name,
struct commit *onto, const char *orig_head);
-void sequencer_post_commit_cleanup(struct repository *r);
+void sequencer_post_commit_cleanup(struct repository *r, int verbose);
int sequencer_get_last_command(struct repository* r,
enum replay_action *action);
#include "tag.h"
#include "packfile.h"
#include "object-store.h"
+#include "strbuf.h"
+
+struct update_info_ctx {
+ FILE *cur_fp;
+ FILE *old_fp; /* becomes NULL if it differs from cur_fp */
+ struct strbuf cur_sb;
+ struct strbuf old_sb;
+};
+
+static void uic_mark_stale(struct update_info_ctx *uic)
+{
+ fclose(uic->old_fp);
+ uic->old_fp = NULL;
+}
+
+static int uic_is_stale(const struct update_info_ctx *uic)
+{
+ return uic->old_fp == NULL;
+}
+
+static int uic_printf(struct update_info_ctx *uic, const char *fmt, ...)
+{
+ va_list ap;
+ int ret = -1;
+
+ va_start(ap, fmt);
+
+ if (uic_is_stale(uic)) {
+ ret = vfprintf(uic->cur_fp, fmt, ap);
+ } else {
+ ssize_t r;
+ struct strbuf *cur = &uic->cur_sb;
+ struct strbuf *old = &uic->old_sb;
+
+ strbuf_reset(cur);
+ strbuf_vinsertf(cur, 0, fmt, ap);
+
+ strbuf_reset(old);
+ strbuf_grow(old, cur->len);
+ r = fread(old->buf, 1, cur->len, uic->old_fp);
+ if (r != cur->len || memcmp(old->buf, cur->buf, r))
+ uic_mark_stale(uic);
+
+ if (fwrite(cur->buf, 1, cur->len, uic->cur_fp) == cur->len)
+ ret = 0;
+ }
+
+ va_end(ap);
+
+ return ret;
+}
/*
* Create the file "path" by writing to a temporary file and renaming
* it into place. The contents of the file come from "generate", which
* should return non-zero if it encounters an error.
*/
-static int update_info_file(char *path, int (*generate)(FILE *))
+static int update_info_file(char *path,
+ int (*generate)(struct update_info_ctx *),
+ int force)
{
char *tmp = mkpathdup("%s_XXXXXX", path);
int ret = -1;
int fd = -1;
- FILE *fp = NULL, *to_close;
+ FILE *to_close;
+ struct update_info_ctx uic = {
+ .cur_fp = NULL,
+ .old_fp = NULL,
+ .cur_sb = STRBUF_INIT,
+ .old_sb = STRBUF_INIT
+ };
safe_create_leading_directories(path);
fd = git_mkstemp_mode(tmp, 0666);
if (fd < 0)
goto out;
- to_close = fp = fdopen(fd, "w");
- if (!fp)
+ to_close = uic.cur_fp = fdopen(fd, "w");
+ if (!uic.cur_fp)
goto out;
fd = -1;
- ret = generate(fp);
+
+ /* if the old file is missing, old_fp stays NULL and the info counts as stale */
+ if (!force)
+ uic.old_fp = fopen_or_warn(path, "r");
+
+ /*
+ * uic_printf will do an incremental comparison against old_fp
+ * and mark uic as stale if needed
+ */
+ ret = generate(&uic);
if (ret)
goto out;
- fp = NULL;
+
+ /* new file may be shorter than the old one, check here */
+ if (!uic_is_stale(&uic)) {
+ struct stat st;
+ long new_len = ftell(uic.cur_fp);
+ int old_fd = fileno(uic.old_fp);
+
+ if (new_len < 0) {
+ ret = -1;
+ goto out;
+ }
+ if (fstat(old_fd, &st) || (st.st_size != (size_t)new_len))
+ uic_mark_stale(&uic);
+ }
+
+ uic.cur_fp = NULL;
if (fclose(to_close))
goto out;
- if (adjust_shared_perm(tmp) < 0)
- goto out;
- if (rename(tmp, path) < 0)
- goto out;
+
+ if (uic_is_stale(&uic)) {
+ if (adjust_shared_perm(tmp) < 0)
+ goto out;
+ if (rename(tmp, path) < 0)
+ goto out;
+ } else {
+ unlink(tmp);
+ }
ret = 0;
out:
if (ret) {
error_errno("unable to update %s", path);
- if (fp)
- fclose(fp);
+ if (uic.cur_fp)
+ fclose(uic.cur_fp);
else if (fd >= 0)
close(fd);
unlink(tmp);
}
free(tmp);
+ if (uic.old_fp)
+ fclose(uic.old_fp);
+ strbuf_release(&uic.old_sb);
+ strbuf_release(&uic.cur_sb);
return ret;
}
static int add_info_ref(const char *path, const struct object_id *oid,
int flag, void *cb_data)
{
- FILE *fp = cb_data;
+ struct update_info_ctx *uic = cb_data;
struct object *o = parse_object(the_repository, oid);
if (!o)
return -1;
- if (fprintf(fp, "%s %s\n", oid_to_hex(oid), path) < 0)
+ if (uic_printf(uic, "%s %s\n", oid_to_hex(oid), path) < 0)
return -1;
if (o->type == OBJ_TAG) {
o = deref_tag(the_repository, o, path, 0);
if (o)
- if (fprintf(fp, "%s %s^{}\n",
+ if (uic_printf(uic, "%s %s^{}\n",
oid_to_hex(&o->oid), path) < 0)
return -1;
}
return 0;
}
-static int generate_info_refs(FILE *fp)
+static int generate_info_refs(struct update_info_ctx *uic)
{
- return for_each_ref(add_info_ref, fp);
+ return for_each_ref(add_info_ref, uic);
}
-static int update_info_refs(void)
+static int update_info_refs(int force)
{
char *path = git_pathdup("info/refs");
- int ret = update_info_file(path, generate_info_refs);
+ int ret = update_info_file(path, generate_info_refs, force);
free(path);
return ret;
}
free(info);
}
-static int write_pack_info_file(FILE *fp)
+static int write_pack_info_file(struct update_info_ctx *uic)
{
int i;
for (i = 0; i < num_pack; i++) {
- if (fprintf(fp, "P %s\n", pack_basename(info[i]->p)) < 0)
+ if (uic_printf(uic, "P %s\n", pack_basename(info[i]->p)) < 0)
return -1;
}
- if (fputc('\n', fp) == EOF)
+ if (uic_printf(uic, "\n") < 0)
return -1;
return 0;
}
int ret;
init_pack_info(infofile, force);
- ret = update_info_file(infofile, write_pack_info_file);
+ ret = update_info_file(infofile, write_pack_info_file, force);
free_pack_info();
free(infofile);
return ret;
*/
int errs = 0;
- errs = errs | update_info_refs();
+ errs = errs | update_info_refs(force);
errs = errs | update_info_packs(force);
/* remove leftover rev-cache file if there is any */
return ref_git;
}
+static void fill_alternate_refs_command(struct child_process *cmd,
+ const char *repo_path)
+{
+ const char *value;
+
+ if (!git_config_get_value("core.alternateRefsCommand", &value)) {
+ cmd->use_shell = 1;
+
+ argv_array_push(&cmd->args, value);
+ argv_array_push(&cmd->args, repo_path);
+ } else {
+ cmd->git_cmd = 1;
+
+ argv_array_pushf(&cmd->args, "--git-dir=%s", repo_path);
+ argv_array_push(&cmd->args, "for-each-ref");
+ argv_array_push(&cmd->args, "--format=%(objectname)");
+
+ if (!git_config_get_value("core.alternateRefsPrefixes", &value)) {
+ argv_array_push(&cmd->args, "--");
+ argv_array_split(&cmd->args, value);
+ }
+ }
+
+ cmd->env = local_repo_env;
+ cmd->out = -1;
+}
+
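/*
 * Editorial note, not part of the patch: without any configuration this
 * runs, per alternate,
 *
 *     git --git-dir=<alternate> for-each-ref --format='%(objectname)'
 *
 * core.alternateRefsPrefixes appends "-- <prefixes>" to limit the refs
 * listed; core.alternateRefsCommand replaces the command entirely and is
 * passed the alternate's path as its first argument.
 */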
+static void read_alternate_refs(const char *path,
+ alternate_ref_fn *cb,
+ void *data)
+{
+ struct child_process cmd = CHILD_PROCESS_INIT;
+ struct strbuf line = STRBUF_INIT;
+ FILE *fh;
+
+ fill_alternate_refs_command(&cmd, path);
+
+ if (start_command(&cmd))
+ return;
+
+ fh = xfdopen(cmd.out, "r");
+ while (strbuf_getline_lf(&line, fh) != EOF) {
+ struct object_id oid;
+ const char *p;
+
+ if (parse_oid_hex(line.buf, &oid, &p) || *p) {
+ warning(_("invalid line while parsing alternate refs: %s"),
+ line.buf);
+ break;
+ }
+
+ cb(&oid, data);
+ }
+
+ fclose(fh);
+ finish_command(&cmd);
+}
+
+struct alternate_refs_data {
+ alternate_ref_fn *fn;
+ void *data;
+};
+
+static int refs_from_alternate_cb(struct object_directory *e,
+ void *data)
+{
+ struct strbuf path = STRBUF_INIT;
+ size_t base_len;
+ struct alternate_refs_data *cb = data;
+
+ if (!strbuf_realpath(&path, e->path, 0))
+ goto out;
+ if (!strbuf_strip_suffix(&path, "/objects"))
+ goto out;
+ base_len = path.len;
+
+ /* Is this a git repository with refs? */
+ strbuf_addstr(&path, "/refs");
+ if (!is_directory(path.buf))
+ goto out;
+ strbuf_setlen(&path, base_len);
+
+ read_alternate_refs(path.buf, cb->fn, cb->data);
+
+out:
+ strbuf_release(&path);
+ return 0;
+}
+
+void for_each_alternate_ref(alternate_ref_fn fn, void *data)
+{
+ struct alternate_refs_data cb;
+ cb.fn = fn;
+ cb.data = data;
+ foreach_alt_odb(refs_from_alternate_cb, &cb);
+}
+
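/*
 * Editorial sketch, not part of the patch: a caller collecting every ref
 * tip advertised by the alternates into an oidset (assumes "oidset.h" is
 * included; the callback matches the alternate_ref_fn signature used above).
 */
static void collect_one_alternate_example(const struct object_id *oid, void *data)
{
	oidset_insert(data, oid);
}

static void collect_alternate_tips_example(struct oidset *tips)
{
	for_each_alternate_ref(collect_one_alternate_example, tips);
}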
int foreach_alt_odb(alt_odb_fn fn, void *cb)
{
struct object_directory *ent;
return NULL;
}
-void *read_object_with_reference(const struct object_id *oid,
+void *read_object_with_reference(struct repository *r,
+ const struct object_id *oid,
const char *required_type_name,
unsigned long *size,
struct object_id *actual_oid_return)
int ref_length = -1;
const char *ref_type = NULL;
- buffer = read_object_file(&actual_oid, &type, &isize);
+ buffer = repo_read_object_file(r, &actual_oid, &type, &isize);
if (!buffer)
return NULL;
if (type == required_type) {
* or migrated from loose to packed.
*/
if (status == MISSING_OBJECT) {
- reprepare_packed_git(the_repository);
+ reprepare_packed_git(r);
find_short_object_filename(&ds);
find_short_packed_object(&ds);
status = finish_object_disambiguation(&ds, oid);
"because it will be ignored when you just specify 40-hex. These refs\n"
"may be created by mistake. For example,\n"
"\n"
- " git checkout -b $br $(git rev-parse ...)\n"
+ " git switch -c $br $(git rev-parse ...)\n"
"\n"
"where \"$br\" is somehow empty and a 40-hex ref is created. Please\n"
"examine these refs and maybe delete them. Turn this message off by\n"
two = lookup_commit_reference_gently(r, &oid_tmp, 0);
if (!two)
return -1;
- if (r != the_repository)
- BUG("sorry get_merge_bases() can't take struct repository yet");
- mbs = get_merge_bases(one, two);
+ mbs = repo_get_merge_bases(r, one, two);
if (!mbs || mbs->next)
st = -1;
else {
}
/* Must be called only when object_name:filename doesn't exist. */
-static void diagnose_invalid_oid_path(const char *prefix,
+static void diagnose_invalid_oid_path(struct repository *r,
+ const char *prefix,
const char *filename,
const struct object_id *tree_oid,
const char *object_name,
if (is_missing_file_error(errno)) {
char *fullname = xstrfmt("%s%s", prefix, filename);
- if (!get_tree_entry(tree_oid, fullname, &oid, &mode)) {
+ if (!get_tree_entry(r, tree_oid, fullname, &oid, &mode)) {
die("Path '%s' exists, but not '%s'.\n"
"Did you mean '%.*s:%s' aka '%.*s:./%s'?",
fullname,
new_filename = resolve_relative_path(repo, filename);
if (new_filename)
filename = new_filename;
- /*
- * NEEDSWORK: Eventually get_tree_entry*() should
- * learn to take struct repository directly and we
- * would not need to inject submodule odb to the
- * in-core odb.
- */
- if (repo != the_repository)
- add_to_alternates_memory(repo->objects->odb->path);
if (flags & GET_OID_FOLLOW_SYMLINKS) {
- ret = get_tree_entry_follow_symlinks(&tree_oid,
+ ret = get_tree_entry_follow_symlinks(repo, &tree_oid,
filename, oid, &oc->symlink_path,
&oc->mode);
} else {
- ret = get_tree_entry(&tree_oid, filename, oid,
+ ret = get_tree_entry(repo, &tree_oid, filename, oid,
&oc->mode);
if (ret && only_to_die) {
- diagnose_invalid_oid_path(prefix,
+ diagnose_invalid_oid_path(repo, prefix,
filename,
&tree_oid,
name, len);
if (r->parsed_objects->is_shallow == -1)
BUG("shallow must be initialized by now");
- if (!stat_validity_check(r->parsed_objects->shallow_stat, git_path_shallow(the_repository)))
+ if (!stat_validity_check(r->parsed_objects->shallow_stat,
+ git_path_shallow(r)))
die("shallow file has changed since we read it");
}
strbuf_add_urlencode(sb, s, strlen(s), reserved);
}
-void strbuf_humanise_bytes(struct strbuf *buf, off_t bytes)
+static void strbuf_humanise(struct strbuf *buf, off_t bytes,
+ int humanise_rate)
{
if (bytes > 1 << 30) {
- strbuf_addf(buf, "%u.%2.2u GiB",
+ strbuf_addf(buf,
+ humanise_rate == 0 ?
+ /* TRANSLATORS: IEC 80000-13:2008 gibibyte */
+ _("%u.%2.2u GiB") :
+ /* TRANSLATORS: IEC 80000-13:2008 gibibyte/second */
+ _("%u.%2.2u GiB/s"),
(unsigned)(bytes >> 30),
(unsigned)(bytes & ((1 << 30) - 1)) / 10737419);
} else if (bytes > 1 << 20) {
unsigned x = bytes + 5243; /* for rounding */
- strbuf_addf(buf, "%u.%2.2u MiB",
+ strbuf_addf(buf,
+ humanise_rate == 0 ?
+ /* TRANSLATORS: IEC 80000-13:2008 mebibyte */
+ _("%u.%2.2u MiB") :
+ /* TRANSLATORS: IEC 80000-13:2008 mebibyte/second */
+ _("%u.%2.2u MiB/s"),
x >> 20, ((x & ((1 << 20) - 1)) * 100) >> 20);
} else if (bytes > 1 << 10) {
unsigned x = bytes + 5; /* for rounding */
- strbuf_addf(buf, "%u.%2.2u KiB",
+ strbuf_addf(buf,
+ humanise_rate == 0 ?
+ /* TRANSLATORS: IEC 80000-13:2008 kibibyte */
+ _("%u.%2.2u KiB") :
+ /* TRANSLATORS: IEC 80000-13:2008 kibibyte/second */
+ _("%u.%2.2u KiB/s"),
x >> 10, ((x & ((1 << 10) - 1)) * 100) >> 10);
} else {
- strbuf_addf(buf, "%u bytes", (unsigned)bytes);
+ strbuf_addf(buf,
+ humanise_rate == 0 ?
+ /* TRANSLATORS: IEC 80000-13:2008 byte */
+ Q_("%u byte", "%u bytes", (unsigned)bytes) :
+ /* TRANSLATORS: IEC 80000-13:2008 byte/second */
+ Q_("%u byte/s", "%u bytes/s", (unsigned)bytes),
+ (unsigned)bytes);
}
}
+void strbuf_humanise_bytes(struct strbuf *buf, off_t bytes)
+{
+ strbuf_humanise(buf, bytes, 0);
+}
+
+void strbuf_humanise_rate(struct strbuf *buf, off_t bytes)
+{
+ strbuf_humanise(buf, bytes, 1);
+}
+
void strbuf_add_absolute_path(struct strbuf *sb, const char *path)
{
if (!*path)
*/
void strbuf_humanise_bytes(struct strbuf *buf, off_t bytes);
+/**
+ * Append the given byte rate as a human-readable string (i.e. 12.23 KiB/s,
+ * 3.50 MiB/s).
+ */
+void strbuf_humanise_rate(struct strbuf *buf, off_t bytes);
+
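/*
 * Editorial sketch, not part of the patch: the rate variant differs from
 * strbuf_humanise_bytes() only in the unit suffix.
 */
static void humanise_example(void)
{
	struct strbuf buf = STRBUF_INIT;

	strbuf_humanise_bytes(&buf, 3 << 20);	/* e.g. "3.00 MiB"   */
	strbuf_reset(&buf);
	strbuf_humanise_rate(&buf, 3 << 20);	/* e.g. "3.00 MiB/s" */
	strbuf_release(&buf);
}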
/**
* Add a formatted string to the buffer.
*/
if (!(flags & SUBMODULE_MOVE_HEAD_DRY_RUN)) {
if (old_head) {
if (!submodule_uses_gitfile(path))
- absorb_git_dir_into_superproject("", path,
+ absorb_git_dir_into_superproject(path,
ABSORB_GITDIR_RECURSE_SUBMODULES);
} else {
char *gitdir = xstrfmt("%s/modules/%s",
* Embeds a single submodules git directory into the superprojects git dir,
* non recursively.
*/
-static void relocate_single_git_dir_into_superproject(const char *prefix,
- const char *path)
+static void relocate_single_git_dir_into_superproject(const char *path)
{
char *old_git_dir = NULL, *real_old_git_dir = NULL, *real_new_git_dir = NULL;
const char *new_git_dir;
* having its git directory within the working tree to the git dir nested
* in its superprojects git dir under modules/.
*/
-void absorb_git_dir_into_superproject(const char *prefix,
- const char *path,
+void absorb_git_dir_into_superproject(const char *path,
unsigned flags)
{
int err_code;
char *real_common_git_dir = real_pathdup(get_git_common_dir(), 1);
if (!starts_with(real_sub_git_dir, real_common_git_dir))
- relocate_single_git_dir_into_superproject(prefix, path);
+ relocate_single_git_dir_into_superproject(path);
free(real_sub_git_dir);
free(real_common_git_dir);
void prepare_submodule_repo_env(struct argv_array *out);
#define ABSORB_GITDIR_RECURSE_SUBMODULES (1<<0)
-void absorb_git_dir_into_superproject(const char *prefix,
- const char *path,
+void absorb_git_dir_into_superproject(const char *path,
unsigned flags);
/*
could be enabled by running the test suite with correct GIT_TEST_
environment set.
-GIT_TEST_GETTEXT_POISON=<non-empty?> turns all strings marked for
-translation into gibberish if non-empty (think "test -n"). Used for
-spotting those tests that need to be marked with a C_LOCALE_OUTPUT
-prerequisite when adding more strings for translation. See "Testing
-marked strings" in po/README for details.
+GIT_TEST_FAIL_PREREQS=<boolean> fails all prerequisites. This is
+useful for discovering issues with tests where, say, a later test
+implicitly depends on an optional earlier test.
+
+There's a "FAIL_PREREQS" prerequisite that can be used to test whether
+this mode is active, and, e.g., to skip some tests that are hard to
+refactor to deal with it. The "SYMLINKS" prerequisite is currently
+excluded as so much relies on it, but this might change in the future.
+
+GIT_TEST_GETTEXT_POISON=<boolean> turns all strings marked for
+translation into gibberish if true. Used for spotting those tests that
+need to be marked with a C_LOCALE_OUTPUT prerequisite when adding more
+strings for translation. See "Testing marked strings" in po/README for
+details.
GIT_TEST_SPLIT_INDEX=<boolean> forces split-index mode on the whole
test suite. Accept any boolean values that are accepted by git-config.
--- /dev/null
+#include "test-tool.h"
+#include "git-compat-util.h"
+#include "strbuf.h"
+#include "iterator.h"
+#include "dir-iterator.h"
+
+static const char *error_name(int error_number)
+{
+ switch (error_number) {
+ case ENOENT: return "ENOENT";
+ case ENOTDIR: return "ENOTDIR";
+ default: return "ESOMETHINGELSE";
+ }
+}
+
+/*
+ * usage:
+ * tool-test dir-iterator [--follow-symlinks] [--pedantic] directory_path
+ */
+int cmd__dir_iterator(int argc, const char **argv)
+{
+ struct strbuf path = STRBUF_INIT;
+ struct dir_iterator *diter;
+ unsigned int flags = 0;
+ int iter_status;
+
+ for (++argv, --argc; *argv && starts_with(*argv, "--"); ++argv, --argc) {
+ if (strcmp(*argv, "--follow-symlinks") == 0)
+ flags |= DIR_ITERATOR_FOLLOW_SYMLINKS;
+ else if (strcmp(*argv, "--pedantic") == 0)
+ flags |= DIR_ITERATOR_PEDANTIC;
+ else
+ die("invalid option '%s'", *argv);
+ }
+
+ if (!*argv || argc != 1)
+ die("dir-iterator needs exactly one non-option argument");
+
+ strbuf_add(&path, *argv, strlen(*argv));
+ diter = dir_iterator_begin(path.buf, flags);
+
+ if (!diter) {
+ printf("dir_iterator_begin failure: %s\n", error_name(errno));
+ exit(EXIT_FAILURE);
+ }
+
+ while ((iter_status = dir_iterator_advance(diter)) == ITER_OK) {
+ if (S_ISDIR(diter->st.st_mode))
+ printf("[d] ");
+ else if (S_ISREG(diter->st.st_mode))
+ printf("[f] ");
+ else if (S_ISLNK(diter->st.st_mode))
+ printf("[s] ");
+ else
+ printf("[?] ");
+
+ printf("(%s) [%s] %s\n", diter->relative_path, diter->basename,
+ diter->path.buf);
+ }
+
+ if (iter_status != ITER_DONE) {
+ printf("dir_iterator_advance failure\n");
+ return 1;
+ }
+
+ return 0;
+}
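To make the printf format above concrete, here is a sketch of a run against the `dir2` layout used by the pre-order test later in this series (it assumes the test-tool binary is on PATH; sibling order is filesystem-dependent, which is why the other dir-iterator tests sort the output):

----
$ mkdir -p dir2/a/b/c && >dir2/a/b/c/d
$ test-tool dir-iterator ./dir2
[d] (a) [a] ./dir2/a
[d] (a/b) [b] ./dir2/a/b
[d] (a/b/c) [c] ./dir2/a/b/c
[f] (a/b/c/d) [d] ./dir2/a/b/c/d
----
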
* Add 2 objects, one with a non-NULL decoration and one with a NULL
* decoration.
*/
- one = lookup_unknown_object(one_oid.hash);
- two = lookup_unknown_object(two_oid.hash);
+ one = lookup_unknown_object(&one_oid);
+ two = lookup_unknown_object(&two_oid);
ret = add_decoration(&n, one, &decoration_a);
if (ret)
BUG("when adding a brand-new object, NULL should be returned");
ret = lookup_decoration(&n, two);
if (ret != &decoration_b)
BUG("lookup should return added decoration");
- three = lookup_unknown_object(three_oid.hash);
+ three = lookup_unknown_object(&three_oid);
ret = lookup_decoration(&n, three);
if (ret)
BUG("lookup for unknown object should return NULL");
p2 = strtok(NULL, DELIM);
}
- if (!strcmp("hash", cmd) && p1) {
-
- /* print results of different hash functions */
- printf("%u %u %u %u\n",
- strhash(p1), memhash(p1, strlen(p1)),
- strihash(p1), memihash(p1, strlen(p1)));
-
- } else if (!strcmp("add", cmd) && p1 && p2) {
+ if (!strcmp("add", cmd) && p1 && p2) {
/* create entry with key = p1, value = p2 */
entry = alloc_test_entry(hash, p1, p2);
if (!two)
die("not a tree-ish %s", av[2]);
- shift_tree(&one->object.oid, &two->object.oid, &shifted, -1);
+ shift_tree(the_repository, &one->object.oid, &two->object.oid, &shifted, -1);
printf("shifted: %s\n", oid_to_hex(&shifted));
exit(0);
--- /dev/null
+#include "test-tool.h"
+#include "cache.h"
+#include "oidmap.h"
+#include "strbuf.h"
+
+/* key is an oid and value is a name (could be a refname for example) */
+struct test_entry {
+ struct oidmap_entry entry;
+ char name[FLEX_ARRAY];
+};
+
+#define DELIM " \t\r\n"
+
+/*
+ * Read stdin line by line and print result of commands to stdout:
+ *
+ * put oidkey namevalue -> NULL / old namevalue
+ * get oidkey -> NULL / namevalue
+ * remove oidkey -> NULL / old namevalue
+ * iterate -> oidkey1 namevalue1\noidkey2 namevalue2\n...
+ *
+ */
+int cmd__oidmap(int argc, const char **argv)
+{
+ struct strbuf line = STRBUF_INIT;
+ struct oidmap map = OIDMAP_INIT;
+
+ setup_git_directory();
+
+ /* init oidmap */
+ oidmap_init(&map, 0);
+
+ /* process commands from stdin */
+ while (strbuf_getline(&line, stdin) != EOF) {
+ char *cmd, *p1 = NULL, *p2 = NULL;
+ struct test_entry *entry;
+ struct object_id oid;
+
+ /* break line into command and up to two parameters */
+ cmd = strtok(line.buf, DELIM);
+ /* ignore empty and comment lines */
+ if (!cmd || *cmd == '#')
+ continue;
+
+ p1 = strtok(NULL, DELIM);
+ if (p1)
+ p2 = strtok(NULL, DELIM);
+
+ if (!strcmp("put", cmd) && p1 && p2) {
+
+ if (get_oid(p1, &oid)) {
+ printf("Unknown oid: %s\n", p1);
+ continue;
+ }
+
+ /* create entry with oid_key = p1, name_value = p2 */
+ FLEX_ALLOC_STR(entry, name, p2);
+ oidcpy(&entry->entry.oid, &oid);
+
+ /* add / replace entry */
+ entry = oidmap_put(&map, entry);
+
+ /* print and free replaced entry, if any */
+ puts(entry ? entry->name : "NULL");
+ free(entry);
+
+ } else if (!strcmp("get", cmd) && p1) {
+
+ if (get_oid(p1, &oid)) {
+ printf("Unknown oid: %s\n", p1);
+ continue;
+ }
+
+ /* lookup entry in oidmap */
+ entry = oidmap_get(&map, &oid);
+
+ /* print result */
+ puts(entry ? entry->name : "NULL");
+
+ } else if (!strcmp("remove", cmd) && p1) {
+
+ if (get_oid(p1, &oid)) {
+ printf("Unknown oid: %s\n", p1);
+ continue;
+ }
+
+ /* remove entry from oidmap */
+ entry = oidmap_remove(&map, &oid);
+
+ /* print result and free entry */
+ puts(entry ? entry->name : "NULL");
+ free(entry);
+
+ } else if (!strcmp("iterate", cmd)) {
+
+ struct oidmap_iter iter;
+ oidmap_iter_init(&map, &iter);
+ while ((entry = oidmap_iter_next(&iter)))
+ printf("%s %s\n", oid_to_hex(&entry->entry.oid), entry->name);
+
+ } else {
+
+ printf("Unknown command %s\n", cmd);
+
+ }
+ }
+
+ strbuf_release(&line);
+ oidmap_free(&map, 1);
+ return 0;
+}
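A brief sketch of a session with this helper, assuming it runs inside a repository where `one` resolves to an object (for instance a tag created by `test_commit one`, as in the t0016 script later in this series):

----
$ printf "put one 1\nget one\nremove one\nget one\n" | test-tool oidmap
NULL
1
1
NULL
----
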
{ "ctype", cmd__ctype },
{ "date", cmd__date },
{ "delta", cmd__delta },
+ { "dir-iterator", cmd__dir_iterator },
{ "drop-caches", cmd__drop_caches },
{ "dump-cache-tree", cmd__dump_cache_tree },
{ "dump-fsmonitor", cmd__dump_fsmonitor },
{ "match-trees", cmd__match_trees },
{ "mergesort", cmd__mergesort },
{ "mktemp", cmd__mktemp },
+ { "oidmap", cmd__oidmap },
{ "online-cpus", cmd__online_cpus },
{ "parse-options", cmd__parse_options },
{ "path-utils", cmd__path_utils },
int cmd__ctype(int argc, const char **argv);
int cmd__date(int argc, const char **argv);
int cmd__delta(int argc, const char **argv);
+int cmd__dir_iterator(int argc, const char **argv);
int cmd__drop_caches(int argc, const char **argv);
int cmd__dump_cache_tree(int argc, const char **argv);
int cmd__dump_fsmonitor(int argc, const char **argv);
int cmd__match_trees(int argc, const char **argv);
int cmd__mergesort(int argc, const char **argv);
int cmd__mktemp(int argc, const char **argv);
+int cmd__oidmap(int argc, const char **argv);
int cmd__online_cpus(int argc, const char **argv);
int cmd__parse_options(int argc, const char **argv);
int cmd__path_utils(int argc, const char **argv);
#
# test_done
-test_tristate GIT_TEST_GIT_DAEMON
-if test "$GIT_TEST_GIT_DAEMON" = false
+if ! git env--helper --type=bool --default=true --exit-code GIT_TEST_GIT_DAEMON
then
skip_all="git-daemon testing disabled (unset GIT_TEST_GIT_DAEMON to enable)"
test_done
if test_have_prereq !PIPE
then
- test_skip_or_die $GIT_TEST_GIT_DAEMON "file system does not support FIFOs"
+ test_skip_or_die GIT_TEST_GIT_DAEMON "file system does not support FIFOs"
fi
test_set_port LIB_GIT_DAEMON_PORT
kill "$GIT_DAEMON_PID"
wait "$GIT_DAEMON_PID"
unset GIT_DAEMON_PID
- test_skip_or_die $GIT_TEST_GIT_DAEMON \
+ test_skip_or_die GIT_TEST_GIT_DAEMON \
"git daemon failed to start"
fi
}
maybe_start_httpd () {
loc=${1-svn}
- test_tristate GIT_SVN_TEST_HTTPD
- case $GIT_SVN_TEST_HTTPD in
- true)
+ if git env--helper --type=bool --default=false --exit-code GIT_TEST_HTTPD
+ then
. "$TEST_DIRECTORY"/lib-httpd.sh
LIB_HTTPD_SVN="$loc"
start_httpd
- ;;
- esac
+ fi
}
convert_to_rev_db () {
}
require_svnserve () {
- test_tristate GIT_TEST_SVNSERVE
- if ! test "$GIT_TEST_SVNSERVE" = true
+ if ! git env--helper --type=bool --default=false --exit-code GIT_TEST_SVNSERVE
then
skip_all='skipping svnserve test. (set $GIT_TEST_SVNSERVE to enable)'
test_done
test_done
fi
-test_tristate GIT_TEST_HTTPD
-if test "$GIT_TEST_HTTPD" = false
+if ! git env--helper --type=bool --default=true --exit-code GIT_TEST_HTTPD
then
skip_all="Network testing disabled (unset GIT_TEST_HTTPD to enable)"
test_done
fi
if ! test_have_prereq NOT_ROOT; then
- test_skip_or_die $GIT_TEST_HTTPD \
+ test_skip_or_die GIT_TEST_HTTPD \
"Cannot run httpd tests as root"
fi
if ! test -x "$LIB_HTTPD_PATH"
then
- test_skip_or_die $GIT_TEST_HTTPD "no web server found at '$LIB_HTTPD_PATH'"
+ test_skip_or_die GIT_TEST_HTTPD "no web server found at '$LIB_HTTPD_PATH'"
fi
HTTPD_VERSION=$($LIB_HTTPD_PATH -v | \
then
if ! test $HTTPD_VERSION -ge 2
then
- test_skip_or_die $GIT_TEST_HTTPD \
+ test_skip_or_die GIT_TEST_HTTPD \
"at least Apache version 2 is required"
fi
if ! test -d "$DEFAULT_HTTPD_MODULE_PATH"
then
- test_skip_or_die $GIT_TEST_HTTPD \
+ test_skip_or_die GIT_TEST_HTTPD \
"Apache module directory not found"
fi
LIB_HTTPD_MODULE_PATH="$DEFAULT_HTTPD_MODULE_PATH"
fi
else
- test_skip_or_die $GIT_TEST_HTTPD \
+ test_skip_or_die GIT_TEST_HTTPD \
"Could not identify web server at '$LIB_HTTPD_PATH'"
fi
if test $? -ne 0
then
cat "$HTTPD_ROOT_PATH"/error.log >&4 2>/dev/null
- test_skip_or_die $GIT_TEST_HTTPD "web server setup failed"
+ test_skip_or_die GIT_TEST_HTTPD "web server setup failed"
fi
}
. ./test-lib.sh
+# set_state <path> <worktree-content> <index-content>
+#
+# Prepare the content for path in worktree and the index as specified.
set_state () {
echo "$3" > "$1" &&
git add "$1" &&
echo "$2" > "$1"
}
+# save_state <path>
+#
+# Save index/worktree content of <path> in the files _worktree_<path>
+# and _index_<path>
save_state () {
noslash="$(echo "$1" | tr / _)" &&
cat "$1" > _worktree_"$noslash" &&
git show :"$1" > _index_"$noslash"
}
+# set_and_save_state <path> <worktree-content> <index-content>
set_and_save_state () {
set_state "$@" &&
save_state "$1"
}
+# verify_state <path> <expected-worktree-content> <expected-index-content>
verify_state () {
test "$(cat "$1")" = "$2" &&
test "$(git show :"$1")" = "$3"
}
+# verify_saved_state <path>
+#
+# Call verify_state with expected contents from the last save_state
verify_saved_state () {
noslash="$(echo "$1" | tr / _)" &&
verify_state "$1" "$(cat _worktree_"$noslash")" "$(cat _index_"$noslash")"
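For reference, this is how the helpers combine in the patch-mode tests added later in this series: record a known worktree/index state, drive the interactive command, then check that nothing changed:

----
set_and_save_state dir/foo work head &&
test_write_lines n n | git restore -p &&
verify_saved_state bar &&
verify_saved_state dir/foo
----
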
--- /dev/null
+#!/bin/sh
+
+test_description='speed of clone --reference'
+. ./perf-lib.sh
+
+test_perf_default_repo
+
+test_expect_success 'create shareable repository' '
+ git clone --bare . shared.git
+'
+
+test_expect_success 'advance base repository' '
+ # Do not use test_commit here; its test_tick will
+ # use some ancient hard-coded date. The resulting clock
+ # skew will cause pack-objects to traverse in a very
+ # sub-optimal order, skewing the results.
+ echo content >new-file-that-does-not-exist &&
+ git add new-file-that-does-not-exist &&
+ git commit -m "new commit"
+'
+
+test_perf 'clone --reference' '
+ rm -rf dst.git &&
+ git clone --no-local --bare --reference shared.git . dst.git
+'
+
+test_done
test_expect_success DONTHAVEIT 'unmet prerequisite causes test to be skipped' '
donthaveit=no
'
-if test $haveit$donthaveit != yesyes
+if test -z "$GIT_TEST_FAIL_PREREQS_INTERNAL" -a $haveit$donthaveit != yesyes
then
say "bug in test framework: prerequisite tags do not work reliably"
exit 1
test_expect_success DONTHAVEIT,HAVEIT 'unmet prerequisites causes test to be skipped' '
donthaveiteither=no
'
-if test $haveit$donthaveit$donthaveiteither != yesyesyes
+if test -z "$GIT_TEST_FAIL_PREREQS_INTERNAL" -a $haveit$donthaveit$donthaveiteither != yesyesyes
then
say "bug in test framework: multiple prerequisite tags do not work reliably"
exit 1
donthavetrue=no
'
-if test "$havetrue$donthavetrue" != yesyes
+if test -z "$GIT_TEST_FAIL_PREREQS_INTERNAL" -a "$havetrue$donthavetrue" != yesyes
then
say 'bug in test framework: lazy prerequisites do not work'
exit 1
havefalse=no
'
-if test "$nothavefalse$havefalse" != yesyes
+if test -z "$GIT_TEST_FAIL_PREREQS_INTERNAL" -a "$nothavefalse$havefalse" != yesyes
then
say 'bug in test framework: negative lazy prerequisites do not work'
exit 1
test_when_finished clean=yes
'
-if test $clean != yes
+if test -z "$GIT_TEST_FAIL_PREREQS_INTERNAL" -a $clean != yes
then
say "bug in test framework: basic cleanup command does not work reliably"
exit 1
GIT_REDIRECT_STDOUT=output.txt \
GIT_REDIRECT_STDERR="2>&1" \
git rev-parse --git-dir --verify refs/invalid &&
- printf ".git\nfatal: Needed a single revision\n" >expect &&
- test_cmp expect output.txt
+ grep "^\\.git\$" output.txt &&
+ grep "Needed a single revision" output.txt
'
test_done
test_cmp expect actual
'
-test_expect_success !AUTOIDENT 'requested identities are strict' '
+test_expect_success !FAIL_PREREQS,!AUTOIDENT 'requested identities are strict' '
(
sane_unset GIT_COMMITTER_NAME &&
sane_unset GIT_COMMITTER_EMAIL &&
test_cmp expect actual
}
-test_expect_success 'hash functions' '
-
-test_hashmap "hash key1" "2215982743 2215982743 116372151 116372151" &&
-test_hashmap "hash key2" "2215982740 2215982740 116372148 116372148" &&
-test_hashmap "hash fooBarFrotz" "1383912807 1383912807 3189766727 3189766727" &&
-test_hashmap "hash foobarfrotz" "2862305959 2862305959 3189766727 3189766727"
-
-'
-
test_expect_success 'put' '
test_hashmap "put key1 value1
'
test_expect_success 'iterate' '
-
-test_hashmap "put key1 value1
-put key2 value2
-put fooBarFrotz value3
-iterate" "NULL
-NULL
-NULL
-key2 value2
-key1 value1
-fooBarFrotz value3"
-
+ test-tool hashmap >actual.raw <<-\EOF &&
+ put key1 value1
+ put key2 value2
+ put fooBarFrotz value3
+ iterate
+ EOF
+
+ cat >expect <<-\EOF &&
+ NULL
+ NULL
+ NULL
+ fooBarFrotz value3
+ key1 value1
+ key2 value2
+ EOF
+
+ sort <actual.raw >actual &&
+ test_cmp expect actual
'
test_expect_success 'iterate (case insensitive)' '
-
-test_hashmap "put key1 value1
-put key2 value2
-put fooBarFrotz value3
-iterate" "NULL
-NULL
-NULL
-fooBarFrotz value3
-key2 value2
-key1 value1" ignorecase
-
+ test-tool hashmap ignorecase >actual.raw <<-\EOF &&
+ put key1 value1
+ put key2 value2
+ put fooBarFrotz value3
+ iterate
+ EOF
+
+ cat >expect <<-\EOF &&
+ NULL
+ NULL
+ NULL
+ fooBarFrotz value3
+ key1 value1
+ key2 value2
+ EOF
+
+ sort <actual.raw >actual &&
+ test_cmp expect actual
'
test_expect_success 'grow / shrink' '
--- /dev/null
+#!/bin/sh
+
+test_description='test oidmap'
+. ./test-lib.sh
+
+# This purposefully is very similar to t0011-hashmap.sh
+
+test_oidmap () {
+ echo "$1" | test-tool oidmap $3 >actual &&
+ echo "$2" >expect &&
+ test_cmp expect actual
+}
+
+
+test_expect_success 'setup' '
+
+ test_commit one &&
+ test_commit two &&
+ test_commit three &&
+ test_commit four
+
+'
+
+test_expect_success 'put' '
+
+test_oidmap "put one 1
+put two 2
+put invalidOid 4
+put three 3" "NULL
+NULL
+Unknown oid: invalidOid
+NULL"
+
+'
+
+test_expect_success 'replace' '
+
+test_oidmap "put one 1
+put two 2
+put three 3
+put invalidOid 4
+put two deux
+put one un" "NULL
+NULL
+NULL
+Unknown oid: invalidOid
+2
+1"
+
+'
+
+test_expect_success 'get' '
+
+test_oidmap "put one 1
+put two 2
+put three 3
+get two
+get four
+get invalidOid
+get one" "NULL
+NULL
+NULL
+2
+NULL
+Unknown oid: invalidOid
+1"
+
+'
+
+test_expect_success 'remove' '
+
+test_oidmap "put one 1
+put two 2
+put three 3
+remove one
+remove two
+remove invalidOid
+remove four" "NULL
+NULL
+NULL
+1
+2
+Unknown oid: invalidOid
+NULL"
+
+'
+
+test_expect_success 'iterate' '
+ test-tool oidmap >actual.raw <<-\EOF &&
+ put one 1
+ put two 2
+ put three 3
+ iterate
+ EOF
+
+ # sort "expect" too so we do not rely on the order of particular oids
+ sort >expect <<-EOF &&
+ NULL
+ NULL
+ NULL
+ $(git rev-parse one) 1
+ $(git rev-parse two) 2
+ $(git rev-parse three) 3
+ EOF
+
+ sort <actual.raw >actual &&
+ test_cmp expect actual
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='test env--helper'
+
+. ./test-lib.sh
+
+
+test_expect_success 'env--helper usage' '
+ test_must_fail git env--helper &&
+ test_must_fail git env--helper --type=bool &&
+ test_must_fail git env--helper --type=ulong &&
+ test_must_fail git env--helper --type=bool &&
+ test_must_fail git env--helper --type=bool --default &&
+ test_must_fail git env--helper --type=bool --default= &&
+ test_must_fail git env--helper --defaultxyz
+'
+
+test_expect_success 'env--helper bad default values' '
+ test_must_fail git env--helper --type=bool --default=1xyz MISSING &&
+ test_must_fail git env--helper --type=ulong --default=1xyz MISSING
+'
+
+test_expect_success 'env--helper --type=bool' '
+ # Test various --default bool values
+ echo true >expected &&
+ git env--helper --type=bool --default=1 MISSING >actual &&
+ test_cmp expected actual &&
+ git env--helper --type=bool --default=yes MISSING >actual &&
+ test_cmp expected actual &&
+ git env--helper --type=bool --default=true MISSING >actual &&
+ test_cmp expected actual &&
+ echo false >expected &&
+ test_must_fail git env--helper --type=bool --default=0 MISSING >actual &&
+ test_cmp expected actual &&
+ test_must_fail git env--helper --type=bool --default=no MISSING >actual &&
+ test_cmp expected actual &&
+ test_must_fail git env--helper --type=bool --default=false MISSING >actual &&
+ test_cmp expected actual &&
+
+ # No output with --exit-code
+ git env--helper --type=bool --default=true --exit-code MISSING >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err &&
+ test_must_fail git env--helper --type=bool --default=false --exit-code MISSING >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err &&
+
+ # Existing variable
+ EXISTS=true git env--helper --type=bool --default=false --exit-code EXISTS >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err &&
+ test_must_fail \
+ env EXISTS=false \
+ git env--helper --type=bool --default=true --exit-code EXISTS >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err
+'
+
+test_expect_success 'env--helper --type=ulong' '
+ echo 1234567890 >expected &&
+ git env--helper --type=ulong --default=1234567890 MISSING >actual.out 2>actual.err &&
+ test_cmp expected actual.out &&
+ test_must_be_empty actual.err &&
+
+ echo 0 >expected &&
+ test_must_fail git env--helper --type=ulong --default=0 MISSING >actual &&
+ test_cmp expected actual &&
+
+ git env--helper --type=ulong --default=1234567890 --exit-code MISSING >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err &&
+
+ EXISTS=1234567890 git env--helper --type=ulong --default=0 EXISTS --exit-code >actual.out 2>actual.err &&
+ test_must_be_empty actual.out &&
+ test_must_be_empty actual.err &&
+
+ echo 1234567890 >expected &&
+ EXISTS=1234567890 git env--helper --type=ulong --default=0 EXISTS >actual.out 2>actual.err &&
+ test_cmp expected actual.out &&
+ test_must_be_empty actual.err
+'
+
+test_expect_success 'env--helper reads config thanks to trace2' '
+ mkdir home &&
+ git config -f home/.gitconfig include.path cycle &&
+ git config -f home/cycle include.path .gitconfig &&
+
+ test_must_fail \
+ env HOME="$(pwd)/home" GIT_TEST_GETTEXT_POISON=false \
+ git config -l 2>err &&
+ grep "exceeded maximum include depth" err &&
+
+ test_must_fail \
+ env HOME="$(pwd)/home" GIT_TEST_GETTEXT_POISON=true \
+ git -C cycle env--helper --type=bool --default=0 --exit-code GIT_TEST_GETTEXT_POISON 2>err &&
+ grep "# GETTEXT POISON #" err
+'
+
+test_done
pfx=$1
exp=$2.expect
act=$pfx.actual.$3
- tr '\015\000abcdef0123456789' QN00000000000000000 <"$2" >"$exp" &&
- tr '\015\000abcdef0123456789' QN00000000000000000 <"$3" >"$act" &&
+ tr '\015\000abcdef0123456789' QN00000000000000000 <"$2" |
+ sed -e "s/0000*/$ZERO_OID/" >"$exp" &&
+ tr '\015\000abcdef0123456789' QN00000000000000000 <"$3" |
+ sed -e "s/0000*/$ZERO_OID/" >"$act" &&
test_cmp "$exp" "$act" &&
rm "$exp" "$act"
}
--- /dev/null
+#!/bin/sh
+
+test_description='Test the dir-iterator functionality'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ mkdir -p dir &&
+ mkdir -p dir/a/b/c/ &&
+ >dir/b &&
+ >dir/c &&
+ mkdir -p dir/d/e/d/ &&
+ >dir/a/b/c/d &&
+ >dir/a/e &&
+ >dir/d/e/d/a &&
+
+ mkdir -p dir2/a/b/c/ &&
+ >dir2/a/b/c/d
+'
+
+test_expect_success 'dir-iterator should iterate through all files' '
+ cat >expected-iteration-sorted-output <<-EOF &&
+ [d] (a) [a] ./dir/a
+ [d] (a/b) [b] ./dir/a/b
+ [d] (a/b/c) [c] ./dir/a/b/c
+ [d] (d) [d] ./dir/d
+ [d] (d/e) [e] ./dir/d/e
+ [d] (d/e/d) [d] ./dir/d/e/d
+ [f] (a/b/c/d) [d] ./dir/a/b/c/d
+ [f] (a/e) [e] ./dir/a/e
+ [f] (b) [b] ./dir/b
+ [f] (c) [c] ./dir/c
+ [f] (d/e/d/a) [a] ./dir/d/e/d/a
+ EOF
+
+ test-tool dir-iterator ./dir >out &&
+ sort out >./actual-iteration-sorted-output &&
+
+ test_cmp expected-iteration-sorted-output actual-iteration-sorted-output
+'
+
+test_expect_success 'dir-iterator should list files in the correct order' '
+ cat >expected-pre-order-output <<-EOF &&
+ [d] (a) [a] ./dir2/a
+ [d] (a/b) [b] ./dir2/a/b
+ [d] (a/b/c) [c] ./dir2/a/b/c
+ [f] (a/b/c/d) [d] ./dir2/a/b/c/d
+ EOF
+
+ test-tool dir-iterator ./dir2 >actual-pre-order-output &&
+
+ test_cmp expected-pre-order-output actual-pre-order-output
+'
+
+test_expect_success 'begin should fail upon inexistent paths' '
+ test_must_fail test-tool dir-iterator ./inexistent-path \
+ >actual-inexistent-path-output &&
+ echo "dir_iterator_begin failure: ENOENT" >expected-inexistent-path-output &&
+ test_cmp expected-inexistent-path-output actual-inexistent-path-output
+'
+
+test_expect_success 'begin should fail upon non-directory paths' '
+ test_must_fail test-tool dir-iterator ./dir/b >actual-non-dir-output &&
+ echo "dir_iterator_begin failure: ENOTDIR" >expected-non-dir-output &&
+ test_cmp expected-non-dir-output actual-non-dir-output
+'
+
+test_expect_success POSIXPERM,SANITY 'advance should not fail on errors by default' '
+ cat >expected-no-permissions-output <<-EOF &&
+ [d] (a) [a] ./dir3/a
+ EOF
+
+ mkdir -p dir3/a &&
+ >dir3/a/b &&
+ chmod 0 dir3/a &&
+
+ test-tool dir-iterator ./dir3 >actual-no-permissions-output &&
+ test_cmp expected-no-permissions-output actual-no-permissions-output &&
+ chmod 755 dir3/a &&
+ rm -rf dir3
+'
+
+test_expect_success POSIXPERM,SANITY 'advance should fail on errors, w/ pedantic flag' '
+ cat >expected-no-permissions-pedantic-output <<-EOF &&
+ [d] (a) [a] ./dir3/a
+ dir_iterator_advance failure
+ EOF
+
+ mkdir -p dir3/a &&
+ >dir3/a/b &&
+ chmod 0 dir3/a &&
+
+ test_must_fail test-tool dir-iterator --pedantic ./dir3 \
+ >actual-no-permissions-pedantic-output &&
+ test_cmp expected-no-permissions-pedantic-output \
+ actual-no-permissions-pedantic-output &&
+ chmod 755 dir3/a &&
+ rm -rf dir3
+'
+
+test_expect_success SYMLINKS 'setup dirs with symlinks' '
+ mkdir -p dir4/a &&
+ mkdir -p dir4/b/c &&
+ >dir4/a/d &&
+ ln -s d dir4/a/e &&
+ ln -s ../b dir4/a/f &&
+
+ mkdir -p dir5/a/b &&
+ mkdir -p dir5/a/c &&
+ ln -s ../c dir5/a/b/d &&
+ ln -s ../ dir5/a/b/e &&
+ ln -s ../../ dir5/a/b/f
+'
+
+test_expect_success SYMLINKS 'dir-iterator should not follow symlinks by default' '
+ cat >expected-no-follow-sorted-output <<-EOF &&
+ [d] (a) [a] ./dir4/a
+ [d] (b) [b] ./dir4/b
+ [d] (b/c) [c] ./dir4/b/c
+ [f] (a/d) [d] ./dir4/a/d
+ [s] (a/e) [e] ./dir4/a/e
+ [s] (a/f) [f] ./dir4/a/f
+ EOF
+
+ test-tool dir-iterator ./dir4 >out &&
+ sort out >actual-no-follow-sorted-output &&
+
+ test_cmp expected-no-follow-sorted-output actual-no-follow-sorted-output
+'
+
+test_expect_success SYMLINKS 'dir-iterator should follow symlinks w/ follow flag' '
+ cat >expected-follow-sorted-output <<-EOF &&
+ [d] (a) [a] ./dir4/a
+ [d] (a/f) [f] ./dir4/a/f
+ [d] (a/f/c) [c] ./dir4/a/f/c
+ [d] (b) [b] ./dir4/b
+ [d] (b/c) [c] ./dir4/b/c
+ [f] (a/d) [d] ./dir4/a/d
+ [f] (a/e) [e] ./dir4/a/e
+ EOF
+
+ test-tool dir-iterator --follow-symlinks ./dir4 >out &&
+ sort out >actual-follow-sorted-output &&
+
+ test_cmp expected-follow-sorted-output actual-follow-sorted-output
+'
+
+test_done
'
test_expect_success PERL 'commit -p with shrinking cache-tree' '
- mkdir -p deep/subdir &&
- echo content >deep/subdir/file &&
+ mkdir -p deep/very-long-subdir &&
+ echo content >deep/very-long-subdir/file &&
git add deep &&
git commit -m add &&
git rm -r deep &&
test_description='Gettext Shell poison'
-GIT_TEST_GETTEXT_POISON=YesPlease
+GIT_TEST_GETTEXT_POISON=true
export GIT_TEST_GETTEXT_POISON
. ./lib-gettext.sh
test_cmp expect actual
'
+test_expect_success "gettext: invalid GIT_TEST_GETTEXT_POISON value doesn't infinitely loop" "
+ test_must_fail env GIT_TEST_GETTEXT_POISON=xyz git version 2>error &&
+ grep \"fatal: bad numeric config value 'xyz' for 'GIT_TEST_GETTEXT_POISON': invalid unit\" error
+"
+
test_done
}
test_blob_does_not_exist() {
- test_expect_success SHA1 'blob does not exist in database' "
+ test_expect_success 'blob does not exist in database' "
test_must_fail git cat-file blob $1
"
}
test_blob_exists() {
- test_expect_success SHA1 'blob exists in database' "
+ test_expect_success 'blob exists in database' "
git cat-file blob $1
"
}
hello_content="Hello World"
-hello_sha1=5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689
-
example_content="This is an example"
-example_sha1=ddd3f836d3e3fbb7ae289aa9ae83536f76956399
setup_repo() {
echo_without_newline "$hello_content" > hello
rm -rf $test_repo
}
-setup_repo
+test_expect_success 'setup' '
+ setup_repo &&
+ test_oid_cache <<-EOF
+ hello sha1:5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689
+ hello sha256:1e3b6c04d2eeb2b3e45c8a330445404c0b7cc7b257e2b097167d26f5230090c4
+
+ example sha1:ddd3f836d3e3fbb7ae289aa9ae83536f76956399
+ example sha256:b44fe1fe65589848253737db859bd490453510719d7424daab03daf0767b85ae
+ EOF
+'
# Argument checking
push_repo
-test_expect_success SHA1 'hash a file' '
- test $hello_sha1 = $(git hash-object hello)
+test_expect_success 'hash a file' '
+ test "$(test_oid hello)" = $(git hash-object hello)
'
-test_blob_does_not_exist $hello_sha1
+test_blob_does_not_exist "$(test_oid hello)"
-test_expect_success SHA1 'hash from stdin' '
- test $example_sha1 = $(git hash-object --stdin < example)
+test_expect_success 'hash from stdin' '
+ test "$(test_oid example)" = $(git hash-object --stdin < example)
'
-test_blob_does_not_exist $example_sha1
+test_blob_does_not_exist "$(test_oid example)"
-test_expect_success SHA1 'hash a file and write to database' '
- test $hello_sha1 = $(git hash-object -w hello)
+test_expect_success 'hash a file and write to database' '
+ test "$(test_oid hello)" = $(git hash-object -w hello)
'
-test_blob_exists $hello_sha1
+test_blob_exists "$(test_oid hello)"
test_expect_success 'git hash-object --stdin file1 <file0 first operates on file0, then file1' '
echo foo > file1 &&
for args in "-w --stdin" "--stdin -w"; do
push_repo
- test_expect_success SHA1 "hash from stdin and write to database ($args)" '
- test $example_sha1 = $(git hash-object $args < example)
+ test_expect_success "hash from stdin and write to database ($args)" '
+ test "$(test_oid example)" = $(git hash-object $args < example)
'
- test_blob_exists $example_sha1
+ test_blob_exists "$(test_oid example)"
pop_repo
done
filenames="hello
example"
-sha1s="$hello_sha1
-$example_sha1"
+oids="$(test_oid hello)
+$(test_oid example)"
-test_expect_success SHA1 "hash two files with names on stdin" '
- test "$sha1s" = "$(echo_without_newline "$filenames" | git hash-object --stdin-paths)"
+test_expect_success "hash two files with names on stdin" '
+ test "$oids" = "$(echo_without_newline "$filenames" | git hash-object --stdin-paths)"
'
for args in "-w --stdin-paths" "--stdin-paths -w"; do
push_repo
- test_expect_success SHA1 "hash two files with names on stdin and write to database ($args)" '
- test "$sha1s" = "$(echo_without_newline "$filenames" | git hash-object $args)"
+ test_expect_success "hash two files with names on stdin and write to database ($args)" '
+ test "$oids" = "$(echo_without_newline "$filenames" | git hash-object $args)"
'
- test_blob_exists $hello_sha1
- test_blob_exists $example_sha1
+ test_blob_exists "$(test_oid hello)"
+ test_blob_exists "$(test_oid example)"
pop_repo
done
test_path_is_file c
'
-test_expect_success 'checkout -b checkout.optimizeNewBranch interaction' '
- cp .git/info/sparse-checkout .git/info/sparse-checkout.bak &&
- test_when_finished "
- mv -f .git/info/sparse-checkout.bak .git/info/sparse-checkout
- git checkout master
- " &&
- echo "/b" >>.git/info/sparse-checkout &&
- test "$(git ls-files -t b)" = "S b" &&
- git -c checkout.optimizeNewBranch=true checkout -b fast &&
- test "$(git ls-files -t b)" = "S b" &&
- git checkout -b slow &&
- test "$(git ls-files -t b)" = "H b"
-'
-
test_expect_success 'merge feature branch into sparse checkout of master' '
git merge feature &&
test_path_is_file a &&
)
'
+test_expect_success 'conditional include, onbranch' '
+ echo "[includeIf \"onbranch:foo-branch\"]path=bar9" >>.git/config &&
+ echo "[test]nine=9" >.git/bar9 &&
+ git checkout -b master &&
+ test_must_fail git config test.nine &&
+ git checkout -b foo-branch &&
+ echo 9 >expect &&
+ git config test.nine >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'conditional include, onbranch, wildcard' '
+ echo "[includeIf \"onbranch:?oo-*/**\"]path=bar10" >>.git/config &&
+ echo "[test]ten=10" >.git/bar10 &&
+ git checkout -b not-foo-branch/a &&
+ test_must_fail git config test.ten &&
+
+ echo 10 >expect &&
+ git checkout -b foo-branch/a/b/c &&
+ git config test.ten >actual &&
+ test_cmp expect actual &&
+
+ git checkout -b moo-bar/a &&
+ git config test.ten >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'conditional include, onbranch, implicit /** for /' '
+ echo "[includeIf \"onbranch:foo-dir/\"]path=bar11" >>.git/config &&
+ echo "[test]eleven=11" >.git/bar11 &&
+ git checkout -b not-foo-dir/a &&
+ test_must_fail git config test.eleven &&
+
+ echo 11 >expect &&
+ git checkout -b foo-dir/a/b/c &&
+ git config test.eleven >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'include cycles are detected' '
- cat >.gitconfig <<-\EOF &&
- [test]value = gitconfig
- [include]path = cycle
- EOF
- cat >cycle <<-\EOF &&
- [test]value = cycle
- [include]path = .gitconfig
- EOF
- cat >expect <<-\EOF &&
- gitconfig
- cycle
- EOF
- test_must_fail git config --get-all test.value 2>stderr &&
- test_i18ngrep "exceeded maximum include depth" stderr
+ git init --bare cycle &&
+ git -C cycle config include.path cycle &&
+ git config -f cycle/cycle include.path config &&
+ test_must_fail \
+ env GIT_TEST_GETTEXT_POISON=false \
+ git -C cycle config --get-all test.value 2>stderr &&
+ grep "exceeded maximum include depth" stderr
'
test_done
test_with_config "["
'
+test_expect_success 'early config and onbranch' '
+ echo "[broken" >broken &&
+ test_with_config "[includeif \"onbranch:refs/heads/master\"]path=../broken"
+'
+
test_done
}
corrupt () {
- aa=${1%??????????????????????????????????????} zz=${1#??}
- mv .git/objects/$aa/$zz .git/$aa$zz
+ mv .git/objects/$(test_oid_to_path $1) .git/$1
}
recover () {
- aa=${1%??????????????????????????????????????} zz=${1#??}
+ aa=$(echo $1 | cut -c 1-2)
mkdir -p .git/objects/$aa
- mv .git/$aa$zz .git/objects/$aa/$zz
+ mv .git/$1 .git/objects/$(test_oid_to_path $1)
}
check_dont_have () {
}
test_expect_success setup '
+ test_oid_init &&
mkdir -p A/B &&
echo rat >C &&
echo ox >A/D &&
# Each line is 114 characters, so we need 75 to still have a few before the
# last 8K. The 89-character padding on the final entry lines up our
# newline exactly.
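(As a quick sanity check of that arithmetic, assuming the conventional 8192-byte BUFSIZ: 75 entries of 114 bytes come to 8550 bytes, so only the first few entries fall outside the final 8K chunk that the reverse reflog parser reads.)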
-test_expect_success 'parsing reverse reflogs at BUFSIZ boundaries' '
+test_expect_success SHA1 'parsing reverse reflogs at BUFSIZ boundaries' '
git checkout -b reflogskip &&
- z38=00000000000000000000000000000000000000 &&
+ zf=$(test_oid zero_2) &&
ident="abc <xyz> 0000000001 +0000" &&
for i in $(test_seq 1 75); do
- printf "$z38%02d $z38%02d %s\t" $i $(($i+1)) "$ident" &&
+ printf "$zf%02d $zf%02d %s\t" $i $(($i+1)) "$ident" &&
if test $i = 75; then
for j in $(test_seq 1 89); do
printf X
printf "\n"
done >.git/logs/refs/heads/reflogskip &&
git rev-parse reflogskip@{73} >actual &&
- echo ${z38}03 >expect &&
+ echo ${zf}03 >expect &&
test_cmp expect actual
'
. ./test-lib.sh
test_expect_success setup '
+ test_oid_init &&
git config gc.auto 0 &&
git config i18n.commitencoding ISO-8859-1 &&
test_commit A fileA one &&
test_expect_success 'object with bad sha1' '
sha=$(echo blob | git hash-object -w --stdin) &&
- old=$(echo $sha | sed "s+^..+&/+") &&
- new=$(dirname $old)/ffffffffffffffffffffffffffffffffffffff &&
+ old=$(test_oid_to_path "$sha") &&
+ new=$(dirname $old)/$(test_oid ff_2) &&
sha="$(dirname $new)$(basename $new)" &&
mv .git/objects/$old .git/objects/$new &&
test_when_finished "remove_object $sha" &&
test_expect_success 'HEAD link pointing at a funny object' '
test_when_finished "mv .git/SAVED_HEAD .git/HEAD" &&
mv .git/HEAD .git/SAVED_HEAD &&
- echo 0000000000000000000000000000000000000000 >.git/HEAD &&
+ echo $ZERO_OID >.git/HEAD &&
# avoid corrupt/broken HEAD from interfering with repo discovery
test_must_fail env GIT_DIR=.git git fsck 2>out &&
cat out &&
'
test_expect_success 'unparseable tree object' '
+ test_oid_cache <<-\EOF &&
+ junk sha1:twenty-bytes-of-junk
+ junk sha256:twenty-bytes-of-junk-twelve-more
+ EOF
+
test_when_finished "git update-ref -d refs/heads/wrong" &&
test_when_finished "remove_object \$tree_sha1" &&
test_when_finished "remove_object \$commit_sha1" &&
- tree_sha1=$(printf "100644 \0twenty-bytes-of-junk" | git hash-object -t tree --stdin -w --literally) &&
+ junk=$(test_oid junk) &&
+ tree_sha1=$(printf "100644 \0$junk" | git hash-object -t tree --stdin -w --literally) &&
commit_sha1=$(git commit-tree $tree_sha1) &&
git update-ref refs/heads/wrong $commit_sha1 &&
test_must_fail git fsck 2>out &&
'
test_expect_success 'tag pointing to nonexistent' '
- cat >invalid-tag <<-\EOF &&
- object ffffffffffffffffffffffffffffffffffffffff
+ badoid=$(test_oid deadbeef) &&
+ cat >invalid-tag <<-EOF &&
+ object $badoid
type commit
tag invalid
tagger T A Gger <tagger@example.com> 1234567890 -0000
test_expect_success 'rev-list --verify-objects with bad sha1' '
sha=$(echo blob | git hash-object -w --stdin) &&
- old=$(echo $sha | sed "s+^..+&/+") &&
- new=$(dirname $old)/ffffffffffffffffffffffffffffffffffffff &&
+ old=$(test_oid_to_path $sha) &&
+ new=$(dirname $old)/$(test_oid ff_2) &&
sha="$(dirname $new)$(basename $new)" &&
mv .git/objects/$old .git/objects/$new &&
test_when_finished "remove_object $sha" &&
test_might_fail git rev-list --verify-objects refs/heads/bogus >/dev/null 2>out &&
cat out &&
- test_i18ngrep -q "error: hash mismatch 63ffffffffffffffffffffffffffffffffffffff" out
+ test_i18ngrep -q "error: hash mismatch $(dirname $new)$(test_oid ff_2)" out
'
test_expect_success 'force fsck to ignore double author' '
'
_bz='\0'
-_bz5="$_bz$_bz$_bz$_bz$_bz"
-_bz20="$_bz5$_bz5$_bz5$_bz5"
+_bzoid=$(printf $ZERO_OID | sed -e 's/00/\\0/g')
test_expect_success 'fsck notices blob entry pointing to null sha1' '
(git init null-blob &&
cd null-blob &&
- sha=$(printf "100644 file$_bz$_bz20" |
+ sha=$(printf "100644 file$_bz$_bzoid" |
git hash-object -w --stdin -t tree) &&
git fsck 2>out &&
cat out &&
test_expect_success 'fsck notices submodule entry pointing to null sha1' '
(git init null-commit &&
cd null-commit &&
- sha=$(printf "160000 submodule$_bz$_bz20" |
+ sha=$(printf "160000 submodule$_bz$_bzoid" |
git hash-object -w --stdin -t tree) &&
git fsck 2>out &&
cat out &&
# its type. That lets us see that --connectivity-only is
# not actually looking at the contents, but leaves it
# free to examine the type if it chooses.
- empty=.git/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 &&
+ empty=.git/objects/$(test_oid_to_path $EMPTY_BLOB) &&
blob=$(echo unrelated | git hash-object -w --stdin) &&
mv -f $(sha1_file $blob) $empty &&
test_expect_success 'alternate objects are correctly blamed' '
test_when_finished "rm -rf alt.git .git/objects/info/alternates" &&
+ name=$(test_oid numeric) &&
+ path=$(test_oid_to_path "$name") &&
git init --bare alt.git &&
echo "../../alt.git/objects" >.git/objects/info/alternates &&
- mkdir alt.git/objects/12 &&
- >alt.git/objects/12/34567890123456789012345678901234567890 &&
+ mkdir alt.git/objects/$(dirname $path) &&
+ >alt.git/objects/$(dirname $path)/$(basename $path) &&
test_must_fail git fsck >out 2>&1 &&
test_i18ngrep alt.git out
'
test-tool chmtime =-5 "$1"
}
+test_expect_success 'setup' '
+ test_oid_cache <<-EOF
+ own_v3 sha1:8299b0bcd1ac364e5f1d7768efb62fa2da79a339
+ own_v3 sha256:38a6d2925e3eceec33ad7b34cbff4e0086caa0daf28f31e51f5bd94b4a7af86b
+
+ base_v3 sha1:39d890139ee5356c7ef572216cebcd27aa41f9df
+ base_v3 sha256:c9baeadf905112bf6c17aefbd7d02267afd70ded613c30cafed2d40cb506e1ed
+
+ own_v4 sha1:432ef4b63f32193984f339431fd50ca796493569
+ own_v4 sha256:6738ac6319c25b694afa7bcc313deb182d1a59b68bf7a47b4296de83478c0420
+
+ base_v4 sha1:508851a7f0dfa8691e9f69c7f055865389012491
+ base_v4 sha256:3177d4adfdd4b6904f7e921d91d715a471c0dde7cf6a4bba574927f02b699508
+ EOF
+'
+
test_expect_success 'enable split index' '
git config splitIndex.maxPercentChange 100 &&
git update-index --split-index &&
# NEEDSWORK: Stop hard-coding checksums.
if test "$indexversion" = "4"
then
- own=432ef4b63f32193984f339431fd50ca796493569
- base=508851a7f0dfa8691e9f69c7f055865389012491
+ own=$(test_oid own_v4)
+ base=$(test_oid base_v4)
else
- own=8299b0bcd1ac364e5f1d7768efb62fa2da79a339
- base=39d890139ee5356c7ef572216cebcd27aa41f9df
+ own=$(test_oid own_v3)
+ base=$(test_oid base_v3)
fi &&
cat >expect <<-EOF &&
test_expect_success 'modify original file, base index untouched' '
echo modified | create_non_racy_file one &&
+ file1_blob=$(git hash-object one) &&
git update-index one &&
git ls-files --stage >ls-files.actual &&
cat >ls-files.expect <<-EOF &&
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0 one
+ 100644 $file1_blob 0 one
EOF
test_cmp ls-files.expect ls-files.actual &&
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
q_to_tab >expect <<-EOF &&
$BASE
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0Q
+ 100644 $file1_blob 0Q
replacements: 0
deletions:
EOF
git update-index --add two &&
git ls-files --stage >ls-files.actual &&
cat >ls-files.expect <<-EOF &&
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0 one
+ 100644 $file1_blob 0 one
100644 $EMPTY_BLOB 0 two
EOF
test_cmp ls-files.expect ls-files.actual &&
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
q_to_tab >expect <<-EOF &&
$BASE
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0Q
+ 100644 $file1_blob 0Q
100644 $EMPTY_BLOB 0 two
replacements: 0
deletions:
git update-index --force-remove two &&
git ls-files --stage >ls-files.actual &&
cat >ls-files.expect <<-EOF &&
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0 one
+ 100644 $file1_blob 0 one
EOF
test_cmp ls-files.expect ls-files.actual &&
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
q_to_tab >expect <<-EOF &&
$BASE
- 100644 2e0996000b7e9019eabcad29391bf0f5c7702f0b 0Q
+ 100644 $file1_blob 0Q
replacements: 0
deletions:
EOF
git update-index --add three &&
git ls-files --stage >ls-files.actual &&
cat >ls-files.expect <<-EOF &&
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 one
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 three
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 two
+ 100644 $EMPTY_BLOB 0 one
+ 100644 $EMPTY_BLOB 0 three
+ 100644 $EMPTY_BLOB 0 two
EOF
test_cmp ls-files.expect ls-files.actual &&
BASE=$(test-tool dump-split-index .git/index | grep "^base") &&
git update-index --force-remove three &&
git ls-files --stage >ls-files.actual &&
cat >ls-files.expect <<-EOF &&
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 one
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 two
+ 100644 $EMPTY_BLOB 0 one
+ 100644 $EMPTY_BLOB 0 two
EOF
test_cmp ls-files.expect ls-files.actual &&
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
cat >expect <<-EOF &&
$BASE
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 four
+ 100644 $EMPTY_BLOB 0 four
replacements:
deletions:
EOF
test-tool dump-split-index .git/index | sed "/^own/d" >actual &&
cat >expect <<-EOF &&
$BASE
- 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 0 six
+ 100644 $EMPTY_BLOB 0 six
replacements:
deletions:
EOF
--- /dev/null
+#!/bin/sh
+
+test_description='Peter MacMillan'
+. ./test-lib.sh
+
+test_expect_success setup '
+ echo Hello >file &&
+ git add file &&
+ test_tick &&
+ git commit -m V1 &&
+ echo Hello world >file &&
+ git add file &&
+ git checkout -b other
+'
+
+test_expect_success 'check all changes are staged' '
+ git diff --exit-code
+'
+
+test_expect_success 'second commit' '
+ git commit -m V2
+'
+
+test_expect_success 'check' '
+ git diff --cached --exit-code
+'
+
+test_done
+++ /dev/null
-#!/bin/sh
-
-test_description='Peter MacMillan'
-. ./test-lib.sh
-
-test_expect_success setup '
- echo Hello >file &&
- git add file &&
- test_tick &&
- git commit -m V1 &&
- echo Hello world >file &&
- git add file &&
- git checkout -b other
-'
-
-test_expect_success 'check all changes are staged' '
- git diff --exit-code
-'
-
-test_expect_success 'second commit' '
- git commit -m V2
-'
-
-test_expect_success 'check' '
- git diff --cached --exit-code
-'
-
-test_done
# The first detach operation is more chatty than the following ones.
cat >1st_detach <<-EOF &&
- Note: checking out 'HEAD^'.
+ Note: switching to 'HEAD^'.
You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
- state without impacting any branches by performing another checkout.
+ state without impacting any branches by switching back to a branch.
If you want to create a new branch to retain commits you create, you may
- do so (now or later) by using -b with the checkout command again. Example:
+ do so (now or later) by using -c with the switch command. Example:
- git checkout -b <new-branch-name>
+ git switch -c <new-branch-name>
+
+ Or undo this operation with:
+
+ git switch -
+
+ Turn off this advice by setting config variable advice.detachedHead to false
HEAD is now at \$commit three
EOF
# The first detach operation is more chatty than the following ones.
cat >1st_detach <<-EOF &&
- Note: checking out 'HEAD^'.
+ Note: switching to 'HEAD^'.
You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
- state without impacting any branches by performing another checkout.
+ state without impacting any branches by switching back to a branch.
If you want to create a new branch to retain commits you create, you may
- do so (now or later) by using -b with the checkout command again. Example:
+ do so (now or later) by using -c with the switch command. Example:
+
+ git switch -c <new-branch-name>
+
+ Or undo this operation with:
+
+ git switch -
- git checkout -b <new-branch-name>
+ Turn off this advice by setting config variable advice.detachedHead to false
HEAD is now at \$commit... three
EOF
--- /dev/null
+#!/bin/sh
+
+test_description='switch basic functionality'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ test_commit first &&
+ git branch first-branch &&
+ test_commit second &&
+ test_commit third &&
+ git remote add origin nohost:/nopath &&
+ git update-ref refs/remotes/origin/foo first-branch
+'
+
+test_expect_success 'switch branch no arguments' '
+ test_must_fail git switch
+'
+
+test_expect_success 'switch branch' '
+ git switch first-branch &&
+ test_path_is_missing second.t
+'
+
+test_expect_success 'switch and detach' '
+ test_when_finished git switch master &&
+ test_must_fail git switch master^{commit} &&
+ git switch --detach master^{commit} &&
+ test_must_fail git symbolic-ref HEAD
+'
+
+test_expect_success 'switch and detach current branch' '
+ test_when_finished git switch master &&
+ git switch master &&
+ git switch --detach &&
+ test_must_fail git symbolic-ref HEAD
+'
+
+test_expect_success 'switch and create branch' '
+ test_when_finished git switch master &&
+ git switch -c temp master^ &&
+ test_cmp_rev master^ refs/heads/temp &&
+ echo refs/heads/temp >expected-branch &&
+ git symbolic-ref HEAD >actual-branch &&
+ test_cmp expected-branch actual-branch
+'
+
+test_expect_success 'force create branch from HEAD' '
+ test_when_finished git switch master &&
+ git switch --detach master &&
+ test_must_fail git switch -c temp &&
+ git switch -C temp &&
+ test_cmp_rev master refs/heads/temp &&
+ echo refs/heads/temp >expected-branch &&
+ git symbolic-ref HEAD >actual-branch &&
+ test_cmp expected-branch actual-branch
+'
+
+test_expect_success 'new orphan branch from empty' '
+ test_when_finished git switch master &&
+ test_must_fail git switch --orphan new-orphan HEAD &&
+ git switch --orphan new-orphan &&
+ test_commit orphan &&
+ git cat-file commit refs/heads/new-orphan >commit &&
+ ! grep ^parent commit &&
+ git ls-files >tracked-files &&
+ echo orphan.t >expected &&
+ test_cmp expected tracked-files
+'
+
+test_expect_success 'switching ignores file of same branch name' '
+ test_when_finished git switch master &&
+ : >first-branch &&
+ git switch first-branch &&
+ echo refs/heads/first-branch >expected &&
+ git symbolic-ref HEAD >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'guess and create branch ' '
+ test_when_finished git switch master &&
+ test_must_fail git switch --no-guess foo &&
+ git switch foo &&
+ echo refs/heads/foo >expected &&
+ git symbolic-ref HEAD >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'not switching when something is in progress' '
+ test_when_finished rm -f .git/MERGE_HEAD &&
+ # fake a merge-in-progress
+ cp .git/HEAD .git/MERGE_HEAD &&
+ test_must_fail git switch -d @^
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='restore basic functionality'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ test_commit first &&
+ echo first-and-a-half >>first.t &&
+ git add first.t &&
+ test_commit second &&
+ echo one >one &&
+ echo two >two &&
+ echo untracked >untracked &&
+ echo ignored >ignored &&
+ echo /ignored >.gitignore &&
+ git add one two .gitignore &&
+ git update-ref refs/heads/one master
+'
+
+test_expect_success 'restore without pathspec is not ok' '
+ test_must_fail git restore &&
+ test_must_fail git restore --source=first
+'
+
+test_expect_success 'restore a file, ignoring branch of same name' '
+ cat one >expected &&
+ echo dirty >>one &&
+ git restore one &&
+ test_cmp expected one
+'
+
+test_expect_success 'restore a file on worktree from another ref' '
+ test_when_finished git reset --hard &&
+ git cat-file blob first:./first.t >expected &&
+ git restore --source=first first.t &&
+ test_cmp expected first.t &&
+ git cat-file blob HEAD:./first.t >expected &&
+ git show :first.t >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'restore a file in the index from another ref' '
+ test_when_finished git reset --hard &&
+ git cat-file blob first:./first.t >expected &&
+ git restore --source=first --staged first.t &&
+ git show :first.t >actual &&
+ test_cmp expected actual &&
+ git cat-file blob HEAD:./first.t >expected &&
+ test_cmp expected first.t
+'
+
+test_expect_success 'restore a file in both the index and worktree from another ref' '
+ test_when_finished git reset --hard &&
+ git cat-file blob first:./first.t >expected &&
+ git restore --source=first --staged --worktree first.t &&
+ git show :first.t >actual &&
+ test_cmp expected actual &&
+ test_cmp expected first.t
+'
+
+test_expect_success 'restore --staged uses HEAD as source' '
+ test_when_finished git reset --hard &&
+ git cat-file blob :./first.t >expected &&
+ echo index-dirty >>first.t &&
+ git add first.t &&
+ git restore --staged first.t &&
+ git cat-file blob :./first.t >actual &&
+ test_cmp expected actual
+'
+
+test_expect_success 'restore --ignore-unmerged ignores unmerged entries' '
+ git init unmerged &&
+ (
+ cd unmerged &&
+ echo one >unmerged &&
+ echo one >common &&
+ git add unmerged common &&
+ git commit -m common &&
+ git switch -c first &&
+ echo first >unmerged &&
+ git commit -am first &&
+ git switch -c second master &&
+ echo second >unmerged &&
+ git commit -am second &&
+ test_must_fail git merge first &&
+
+ echo dirty >>common &&
+ test_must_fail git restore . &&
+
+ git restore --ignore-unmerged --quiet . >output 2>&1 &&
+ git diff common >diff-output &&
+ test_must_be_empty output &&
+ test_must_be_empty diff-output
+ )
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='git restore --patch'
+
+. ./lib-patch-mode.sh
+
+test_expect_success PERL 'setup' '
+ mkdir dir &&
+ echo parent >dir/foo &&
+ echo dummy >bar &&
+ git add bar dir/foo &&
+ git commit -m initial &&
+ test_tick &&
+ test_commit second dir/foo head &&
+ set_and_save_state bar bar_work bar_index &&
+ save_head
+'
+
+test_expect_success PERL 'restore -p without pathspec is fine' '
+ echo q >cmd &&
+ git restore -p <cmd
+'
+
+# note: bar sorts before dir/foo, so the first 'n' is always to skip 'bar'
+
+test_expect_success PERL 'saying "n" does nothing' '
+ set_and_save_state dir/foo work head &&
+ test_write_lines n n | git restore -p &&
+ verify_saved_state bar &&
+ verify_saved_state dir/foo
+'
+
+test_expect_success PERL 'git restore -p' '
+ set_and_save_state dir/foo work head &&
+ test_write_lines n y | git restore -p &&
+ verify_saved_state bar &&
+ verify_state dir/foo head head
+'
+
+test_expect_success PERL 'git restore -p with staged changes' '
+ set_state dir/foo work index &&
+ test_write_lines n y | git restore -p &&
+ verify_saved_state bar &&
+ verify_state dir/foo index index
+'
+
+test_expect_success PERL 'git restore -p --source=HEAD' '
+ set_state dir/foo work index &&
+ # the third n is to get out in case it mistakenly does not apply
+ test_write_lines n y n | git restore -p --source=HEAD &&
+ verify_saved_state bar &&
+ verify_state dir/foo head index
+'
+
+test_expect_success PERL 'git restore -p --source=HEAD^' '
+ set_state dir/foo work index &&
+ # the third n is to get out in case it mistakenly does not apply
+ test_write_lines n y n | git restore -p --source=HEAD^ &&
+ verify_saved_state bar &&
+ verify_state dir/foo parent index
+'
+
+test_expect_success PERL 'git restore -p handles deletion' '
+ set_state dir/foo work index &&
+ rm dir/foo &&
+ test_write_lines n y | git restore -p &&
+ verify_saved_state bar &&
+ verify_state dir/foo index index
+'
+
+# The idea in the rest is that bar sorts first, so we always say 'y'
+# first and if the path limiter fails it'll apply to bar instead of
+# dir/foo. There's always an extra 'n' to reject edits to dir/foo in
+# the failure case (and thus get out of the loop).
+
+test_expect_success PERL 'path limiting works: dir' '
+ set_state dir/foo work head &&
+ test_write_lines y n | git restore -p dir &&
+ verify_saved_state bar &&
+ verify_state dir/foo head head
+'
+
+test_expect_success PERL 'path limiting works: -- dir' '
+ set_state dir/foo work head &&
+ test_write_lines y n | git restore -p -- dir &&
+ verify_saved_state bar &&
+ verify_state dir/foo head head
+'
+
+test_expect_success PERL 'path limiting works: HEAD^ -- dir' '
+ set_state dir/foo work head &&
+ # the third n is to get out in case it mistakenly does not apply
+ test_write_lines y n n | git restore -p --source=HEAD^ -- dir &&
+ verify_saved_state bar &&
+ verify_state dir/foo parent head
+'
+
+test_expect_success PERL 'path limiting works: foo inside dir' '
+ set_state dir/foo work head &&
+ # the third n is to get out in case it mistakenly does not apply
+ test_write_lines y n n | (cd dir && git restore -p foo) &&
+ verify_saved_state bar &&
+ verify_state dir/foo head head
+'
+
+test_expect_success PERL 'none of this moved HEAD' '
+ verify_saved_head
+'
+
+test_done
test_expect_success '"diff HEAD" includes ita as new files' '
git reset --hard &&
echo new >new-ita &&
+ oid=$(git hash-object new-ita) &&
+ oid=$(git rev-parse --short $oid) &&
git add -N new-ita &&
git diff HEAD >actual &&
- cat >expected <<-\EOF &&
+ cat >expected <<-EOF &&
diff --git a/new-ita b/new-ita
new file mode 100644
- index 0000000..3e75765
+ index 0000000..$oid
--- /dev/null
+++ b/new-ita
@@ -0,0 +1 @@
git worktree add --force --force --detach gnoo
'
+test_expect_success FUNNYNAMES 'sanitize generated worktree name' '
+ git worktree add --detach ". weird*..?.lock.lock" &&
+ test -d .git/worktrees/---weird-.-
+'
+
test_expect_success '"add" should not fail because of another bad worktree' '
git init add-fail &&
(
git worktree add -f bazdir2 baz &&
git branch -M baz bam &&
test $(git -C bazdir rev-parse --abbrev-ref HEAD) = bam &&
- test $(git -C bazdir2 rev-parse --abbrev-ref HEAD) = bam
+ test $(git -C bazdir2 rev-parse --abbrev-ref HEAD) = bam &&
+ rm -r bazdir bazdir2 &&
+ git worktree prune
'
test_expect_success 'git branch -M baz bam should succeed within a worktree in which baz is checked out' '
git checkout -b baz &&
- git worktree add -f bazdir3 baz &&
+ git worktree add -f bazdir baz &&
(
- cd bazdir3 &&
+ cd bazdir &&
git branch -M baz bam &&
test $(git rev-parse --abbrev-ref HEAD) = bam
) &&
- test $(git rev-parse --abbrev-ref HEAD) = bam
+ test $(git rev-parse --abbrev-ref HEAD) = bam &&
+ rm -r bazdir &&
+ git worktree prune
'
test_expect_success 'git branch -M master should work when master is checked out' '
test_expect_success 'deleting currently checked out branch fails' '
git worktree add -b my7 my7 &&
test_must_fail git -C my7 branch -d my7 &&
- test_must_fail git branch -d my7
+ test_must_fail git branch -d my7 &&
+ rm -r my7 &&
+ git worktree prune
'
test_expect_success 'test --track without .fetch entries' '
branch-two
EOF
git checkout branch-one &&
- git worktree add worktree branch-two &&
+ test_when_finished "
+ git worktree remove worktree_dir
+ " &&
+ git worktree add worktree_dir branch-two &&
{
git branch --show-current &&
- git -C worktree branch --show-current
+ git -C worktree_dir branch --show-current
} >actual &&
test_cmp expect actual
'
test_i18ncmp expect actual
'
+test_expect_success 'worktree colors correct' '
+ cat >expect <<-EOF &&
+ * <GREEN>(HEAD detached from fromtag)<RESET>
+ ambiguous<RESET>
+ branch-one<RESET>
+ + <CYAN>branch-two<RESET>
+ master<RESET>
+ ref-to-branch<RESET> -> branch-one
+ ref-to-remote<RESET> -> origin/branch-one
+ EOF
+ git worktree add worktree_dir branch-two &&
+ git branch --color >actual.raw &&
+ rm -r worktree_dir &&
+ git worktree prune &&
+ test_decode_color <actual.raw >actual &&
+ test_i18ncmp expect actual
+'
+
test_expect_success "set up color tests" '
echo "<RED>master<RESET>" >expect.color &&
echo "master" >expect.bare &&
test_cmp expect.color actual
'
+test_expect_success 'verbose output lists worktree path' '
+ one=$(git rev-parse --short HEAD) &&
+ two=$(git rev-parse --short master) &&
+ cat >expect <<-EOF &&
+ * (HEAD detached from fromtag) $one one
+ ambiguous $one one
+ branch-one $two two
+ + branch-two $one ($(pwd)/worktree_dir) one
+ master $two two
+ ref-to-branch $two two
+ ref-to-remote $two two
+ EOF
+ git worktree add worktree_dir branch-two &&
+ git branch -vv >actual &&
+ rm -r worktree_dir &&
+ git worktree prune &&
+ test_i18ncmp expect actual
+'
+
test_done
1: 4de457d = 1: a4b3333 s/5/A/
2: fccce22 = 2: f51d370 s/4/A/
3: 147e64e ! 3: 0559556 s/11/B/
- @@ -10,7 +10,7 @@
+ @@ file: A
9
10
-11
13
14
4: a63e992 ! 4: d966c5c s/12/B/
- @@ -8,7 +8,7 @@
- @@
+ @@ file
+ @@ file: A
9
10
- B
1: 4de457d = 1: a4b3333 s/5/A/
2: fccce22 = 2: f51d370 s/4/A/
3: 147e64e ! 3: 0559556 s/11/B/
- @@ -10,7 +10,7 @@
+ @@ file: A
9
10
-11
13
14
4: a63e992 ! 4: d966c5c s/12/B/
- @@ -8,7 +8,7 @@
- @@
+ @@ file
+ @@ file: A
9
10
- B
test_cmp expected actual
'
+test_expect_success 'renamed file' '
+ git range-diff --no-color --submodule=log topic...renamed-file >actual &&
+ sed s/Z/\ /g >expected <<-EOF &&
+ 1: 4de457d = 1: f258d75 s/5/A/
+ 2: fccce22 ! 2: 017b62d s/4/A/
+ @@ Metadata
+ ZAuthor: Thomas Rast <trast@inf.ethz.ch>
+ Z
+ Z ## Commit message ##
+ - s/4/A/
+ + s/4/A/ + rename file
+ Z
+ - ## file ##
+ + ## file => renamed-file ##
+ Z@@
+ Z 1
+ Z 2
+ 3: 147e64e ! 3: 3ce7af6 s/11/B/
+ @@ Metadata
+ Z ## Commit message ##
+ Z s/11/B/
+ Z
+ - ## file ##
+ -@@ file: A
+ + ## renamed-file ##
+ +@@ renamed-file: A
+ Z 8
+ Z 9
+ Z 10
+ 4: a63e992 ! 4: 1e6226b s/12/B/
+ @@ Metadata
+ Z ## Commit message ##
+ Z s/12/B/
+ Z
+ - ## file ##
+ -@@ file: A
+ + ## renamed-file ##
+ +@@ renamed-file: A
+ Z 9
+ Z 10
+ Z B
+ EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'file added and later removed' '
+ git range-diff --no-color --submodule=log topic...added-removed >actual &&
+ sed s/Z/\ /g >expected <<-EOF &&
+ 1: 4de457d = 1: 096b1ba s/5/A/
+ 2: fccce22 ! 2: d92e698 s/4/A/
+ @@ Metadata
+ ZAuthor: Thomas Rast <trast@inf.ethz.ch>
+ Z
+ Z ## Commit message ##
+ - s/4/A/
+ + s/4/A/ + new-file
+ Z
+ Z ## file ##
+ Z@@
+ @@ file
+ Z A
+ Z 6
+ Z 7
+ +
+ + ## new-file (new) ##
+ 3: 147e64e ! 3: 9a1db4d s/11/B/
+ @@ Metadata
+ ZAuthor: Thomas Rast <trast@inf.ethz.ch>
+ Z
+ Z ## Commit message ##
+ - s/11/B/
+ + s/11/B/ + remove file
+ Z
+ Z ## file ##
+ Z@@ file: A
+ @@ file: A
+ Z 12
+ Z 13
+ Z 14
+ +
+ + ## new-file (deleted) ##
+ 4: a63e992 = 4: fea3b5c s/12/B/
+ EOF
+ test_cmp expected actual
+'
+
test_expect_success 'no commits on one side' '
git commit --amend -m "new message" &&
git range-diff master HEAD@{1} HEAD
sed s/Z/\ /g >expected <<-EOF &&
1: 4de457d = 1: f686024 s/5/A/
2: fccce22 ! 2: 4ab067d s/4/A/
- @@ -2,6 +2,8 @@
- Z
+ @@ Metadata
+ Z ## Commit message ##
Z s/4/A/
Z
+ Also a silly comment here!
+
- Z diff --git a/file b/file
- Z --- a/file
- Z +++ b/file
+ Z ## file ##
+ Z@@
+ Z 1
3: 147e64e = 3: b9cb956 s/11/B/
4: a63e992 = 4: 8add5f1 s/12/B/
EOF
sed -e "s|^:||" >expect <<-\EOF &&
:<YELLOW>1: a4b3333 = 1: f686024 s/5/A/<RESET>
:<RED>2: f51d370 <RESET><YELLOW>!<RESET><GREEN> 2: 4ab067d<RESET><YELLOW> s/4/A/<RESET>
- : <REVERSE><CYAN>@@ -2,6 +2,8 @@<RESET>
- : <RESET>
+ : <REVERSE><CYAN>@@<RESET> <RESET>Metadata<RESET>
+ : ## Commit message ##<RESET>
: s/4/A/<RESET>
: <RESET>
: <REVERSE><GREEN>+<RESET><BOLD> Also a silly comment here!<RESET>
: <REVERSE><GREEN>+<RESET>
- : diff --git a/file b/file<RESET>
- : --- a/file<RESET>
- : +++ b/file<RESET>
+ : ## file ##<RESET>
+ : <CYAN> @@<RESET>
+ : 1<RESET>
:<RED>3: 0559556 <RESET><YELLOW>!<RESET><GREEN> 3: b9cb956<RESET><YELLOW> s/11/B/<RESET>
- : <REVERSE><CYAN>@@ -10,7 +10,7 @@<RESET>
+ : <REVERSE><CYAN>@@<RESET> <RESET>file: A<RESET>
: 9<RESET>
: 10<RESET>
: <RED> -11<RESET>
: 13<RESET>
: 14<RESET>
:<RED>4: d966c5c <RESET><YELLOW>!<RESET><GREEN> 4: 8add5f1<RESET><YELLOW> s/12/B/<RESET>
- : <REVERSE><CYAN>@@ -8,7 +8,7 @@<RESET>
- : <CYAN> @@<RESET>
+ : <REVERSE><CYAN>@@<RESET> <RESET>file<RESET>
+ : <CYAN> @@ file: A<RESET>
: 9<RESET>
: 10<RESET>
: <REVERSE><RED>-<RESET><FAINT> BB<RESET>
19
20
-reset refs/heads/removed
-commit refs/heads/removed
+reset refs/heads/renamed-file
+commit refs/heads/renamed-file
mark :2
author Thomas Rast <trast@inf.ethz.ch> 1374424921 +0200
committer Thomas Rast <trast@inf.ethz.ch> 1374484724 +0200
from :46
M 100644 :28 file
-reset refs/heads/removed
-from :47
+commit refs/heads/added-removed
+mark :48
+author Thomas Rast <trast@inf.ethz.ch> 1374485014 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574151 +0100
+data 7
+s/5/A/
+from :2
+M 100644 :3 file
+
+blob
+mark :49
+data 0
+
+commit refs/heads/added-removed
+mark :50
+author Thomas Rast <trast@inf.ethz.ch> 1374485024 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574177 +0100
+data 18
+s/4/A/ + new-file
+from :48
+M 100644 :5 file
+M 100644 :49 new-file
+
+commit refs/heads/added-removed
+mark :51
+author Thomas Rast <trast@inf.ethz.ch> 1374485036 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574177 +0100
+data 22
+s/11/B/ + remove file
+from :50
+M 100644 :7 file
+D new-file
+
+commit refs/heads/added-removed
+mark :52
+author Thomas Rast <trast@inf.ethz.ch> 1374485044 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574177 +0100
+data 8
+s/12/B/
+from :51
+M 100644 :9 file
+
+commit refs/heads/renamed-file
+mark :53
+author Thomas Rast <trast@inf.ethz.ch> 1374485014 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574309 +0100
+data 7
+s/5/A/
+from :2
+M 100644 :3 file
+
+commit refs/heads/renamed-file
+mark :54
+author Thomas Rast <trast@inf.ethz.ch> 1374485024 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574312 +0100
+data 21
+s/4/A/ + rename file
+from :53
+D file
+M 100644 :5 renamed-file
+
+commit refs/heads/renamed-file
+mark :55
+author Thomas Rast <trast@inf.ethz.ch> 1374485036 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574319 +0100
+data 8
+s/11/B/
+from :54
+M 100644 :7 renamed-file
+
+commit refs/heads/renamed-file
+mark :56
+author Thomas Rast <trast@inf.ethz.ch> 1374485044 +0200
+committer Thomas Gummerer <t.gummerer@gmail.com> 1556574319 +0100
+data 8
+s/12/B/
+from :55
+M 100644 :9 renamed-file
test_expect_success 'Add a few hundred commits w/notes to trigger fanout (x -> y)' '
git update-ref refs/notes/y refs/notes/x &&
git config core.notesRef refs/notes/y &&
- i=5 &&
- while test $i -lt $num
+ test_commit_bulk --start=6 --id=commit $((num - 5)) &&
+ i=0 &&
+ while test $i -lt $((num - 5))
do
- i=$(($i + 1)) &&
- test_commit "commit$i" >/dev/null &&
- git notes add -m "notes for commit$i" || return 1
+ git notes add -m "notes for commit$i" HEAD~$i || return 1
+ i=$((i + 1))
done &&
test "$(git rev-parse refs/notes/y)" != "$(git rev-parse refs/notes/x)" &&
# Expected number of commits and notes
test_cmp From_.msg out
'
-test_expect_success 'rebase--am.sh and --show-current-patch' '
+test_expect_success 'rebase --am and --show-current-patch' '
test_create_repo conflict-apply &&
(
cd conflict-apply &&
echo conflicting-change >file2 &&
git add . &&
test_tick &&
- git commit -m "related commit"
+ git commit -m "related commit" &&
+ remove_progress_re="$(printf "s/.*\\r//")"
'
create_expected_success_am () {
q_to_cr >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
HEAD is now at $(git rev-parse --short feature-branch) third commit
- Rebasing (1/2)QRebasing (2/2)QApplied autostash.
- Q QSuccessfully rebased and updated refs/heads/rebased-feature-branch.
+ Applied autostash.
+ Successfully rebased and updated refs/heads/rebased-feature-branch.
EOF
}
}
create_expected_failure_interactive () {
- q_to_cr >expected <<-EOF
+ cat >expected <<-EOF
$(grep "^Created autostash: [0-9a-f][0-9a-f]*\$" actual)
HEAD is now at $(git rev-parse --short feature-branch) third commit
- Rebasing (1/2)QRebasing (2/2)QApplying autostash resulted in conflicts.
+ Applying autostash resulted in conflicts.
Your changes are safe in the stash.
You can run "git stash pop" or "git stash drop" at any time.
- Q QSuccessfully rebased and updated refs/heads/rebased-feature-branch.
+ Successfully rebased and updated refs/heads/rebased-feature-branch.
EOF
}
suffix=interactive
fi &&
create_expected_success_$suffix &&
- test_i18ncmp expected actual
+ sed "$remove_progress_re" <actual >actual2 &&
+ test_i18ncmp expected actual2
'
test_expect_success "rebase$type: dirty index, non-conflicting rebase" '
suffix=interactive
fi &&
create_expected_failure_$suffix &&
- test_i18ncmp expected actual
+ sed "$remove_progress_re" <actual >actual2 &&
+ test_i18ncmp expected actual2
'
}
grep "^Merge branch ${SQ}G${SQ}$" .git/rebase-merge/message
'
+test_expect_success 'fast-forward merge -c still rewords' '
+ git checkout -b fast-forward-merge-c H &&
+ (
+ set_fake_editor &&
+ FAKE_COMMIT_MESSAGE=edited \
+ GIT_SEQUENCE_EDITOR="echo merge -c H G >" \
+ git rebase -ir @^
+ ) &&
+ echo edited >expected &&
+ git log --pretty=format:%B -1 >actual &&
+ test_cmp expected actual
+'
+
test_expect_success 'with a branch tip that was cherry-picked already' '
git checkout -b already-upstream master &&
base="$(git rev-parse --verify HEAD)" &&
test_path_is_missing .git/sequencer
'
+test_expect_success 'cherry-pick --skip requires cherry-pick in progress' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick --skip
+'
+
+test_expect_success 'revert --skip requires revert in progress' '
+ pristine_detach initial &&
+ test_must_fail git revert --skip
+'
+
+test_expect_success 'cherry-pick --skip to skip commit' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick anotherpick &&
+ test_must_fail git revert --skip &&
+ git cherry-pick --skip &&
+ test_cmp_rev initial HEAD &&
+ test_path_is_missing .git/CHERRY_PICK_HEAD
+'
+
+test_expect_success 'revert --skip to skip commit' '
+ pristine_detach anotherpick &&
+ test_must_fail git revert anotherpick~1 &&
+ test_must_fail git cherry-pick --skip &&
+ git revert --skip &&
+ test_cmp_rev anotherpick HEAD
+'
+
+test_expect_success 'skip "empty" commit' '
+ pristine_detach picked &&
+ test_commit dummy foo d &&
+ test_must_fail git cherry-pick anotherpick &&
+ git cherry-pick --skip &&
+ test_cmp_rev dummy HEAD
+'
+
+test_expect_success 'skip a commit and check if rest of sequence is correct' '
+ pristine_detach initial &&
+ echo e >expect &&
+ cat >expect.log <<-EOF &&
+ OBJID
+ :100644 100644 OBJID OBJID M foo
+ OBJID
+ :100644 100644 OBJID OBJID M foo
+ OBJID
+ :100644 100644 OBJID OBJID M unrelated
+ OBJID
+ :000000 100644 OBJID OBJID A foo
+ :000000 100644 OBJID OBJID A unrelated
+ EOF
+ test_must_fail git cherry-pick base..yetanotherpick &&
+ test_must_fail git cherry-pick --skip &&
+ echo d >foo &&
+ git add foo &&
+ git cherry-pick --continue &&
+ {
+ git rev-list HEAD |
+ git diff-tree --root --stdin |
+ sed "s/$OID_REGEX/OBJID/g"
+ } >actual.log &&
+ test_cmp expect foo &&
+ test_cmp expect.log actual.log
+'
+
+test_expect_success 'check advice when we move HEAD by committing' '
+ pristine_detach initial &&
+ cat >expect <<-EOF &&
+ error: there is nothing to skip
+ hint: have you committed already?
+ hint: try "git cherry-pick --continue"
+ fatal: cherry-pick failed
+ EOF
+ test_must_fail git cherry-pick base..yetanotherpick &&
+ echo c >foo &&
+ git commit -a &&
+ test_path_is_missing .git/CHERRY_PICK_HEAD &&
+ test_must_fail git cherry-pick --skip 2>advice &&
+ test_i18ncmp expect advice
+'
+
+test_expect_success 'selectively advise --skip while launching another sequence' '
+ pristine_detach initial &&
+ cat >expect <<-EOF &&
+ error: cherry-pick is already in progress
+ hint: try "git cherry-pick (--continue | --skip | --abort | --quit)"
+ fatal: cherry-pick failed
+ EOF
+ test_must_fail git cherry-pick picked..yetanotherpick &&
+ test_must_fail git cherry-pick picked..yetanotherpick 2>advice &&
+ test_i18ncmp expect advice &&
+ cat >expect <<-EOF &&
+ error: cherry-pick is already in progress
+ hint: try "git cherry-pick (--continue | --abort | --quit)"
+ fatal: cherry-pick failed
+ EOF
+ git reset --merge &&
+ test_must_fail git cherry-pick picked..yetanotherpick 2>advice &&
+ test_i18ncmp expect advice
+'
+
+test_expect_success 'allow skipping commit but not abort for a new history' '
+ pristine_detach initial &&
+ cat >expect <<-EOF &&
+ error: cannot abort from a branch yet to be born
+ fatal: cherry-pick failed
+ EOF
+ git checkout --orphan new_disconnected &&
+ git reset --hard &&
+ test_must_fail git cherry-pick anotherpick &&
+ test_must_fail git cherry-pick --abort 2>advice &&
+ git cherry-pick --skip &&
+ test_i18ncmp expect advice
+'
+
+test_expect_success 'allow skipping stopped cherry-pick because of untracked file modifications' '
+ pristine_detach initial &&
+ git rm --cached unrelated &&
+ git commit -m "untrack unrelated" &&
+ test_must_fail git cherry-pick initial base &&
+ test_path_is_missing .git/CHERRY_PICK_HEAD &&
+ git cherry-pick --skip
+'
+
test_expect_success '--quit does not complain when no cherry-pick is in progress' '
pristine_detach initial &&
git cherry-pick --quit
)
'
+test_expect_success 'stash --keep-index with file deleted in index does not resurrect it on disk' '
+ test_commit to-remove to-remove &&
+ git rm to-remove &&
+ git stash --keep-index &&
+ test_path_is_missing to-remove
+'
+
test_done
git checkout master &&
git diff-tree -p C2 | git apply --index &&
test_tick &&
- git commit -m "Master accepts moral equivalent of #2"
+ git commit -m "Master accepts moral equivalent of #2" &&
+ git checkout side &&
+ git checkout -b patchid &&
+ for i in 5 6 1 2 3 A 4 B C 7 8 9 10 D E F; do echo "$i"; done >file2 &&
+ for i in 1 2 3 A 4 B C 7 8 9 10 D E F 5 6; do echo "$i"; done >file3 &&
+ for i in 8 9 10; do echo "$i"; done >file &&
+ git add file file2 file3 &&
+ test_tick &&
+ git commit -m "patchid 1" &&
+ for i in 4 A B 7 8 9 10; do echo "$i"; done >file2 &&
+ for i in 8 9 10 5 6; do echo "$i"; done >file3 &&
+ git add file2 file3 &&
+ test_tick &&
+ git commit -m "patchid 2" &&
+ for i in 10 5 6; do echo "$i"; done >file &&
+ git add file &&
+ test_tick &&
+ git commit -m "patchid 3" &&
+
+ git checkout master
'
test_expect_success "format-patch --ignore-if-in-upstream" '
sed "1,/^---$/d" out | grep "test message"
'
+test_expect_success 'format-patch notes output control' '
+ git notes add -m "notes config message" HEAD &&
+ test_when_finished git notes remove HEAD &&
+
+ git format-patch -1 --stdout >out &&
+ ! grep "notes config message" out &&
+ git format-patch -1 --stdout --notes >out &&
+ grep "notes config message" out &&
+ git format-patch -1 --stdout --no-notes >out &&
+ ! grep "notes config message" out &&
+ git format-patch -1 --stdout --notes --no-notes >out &&
+ ! grep "notes config message" out &&
+ git format-patch -1 --stdout --no-notes --notes >out &&
+ grep "notes config message" out &&
+
+ test_config format.notes true &&
+ git format-patch -1 --stdout >out &&
+ grep "notes config message" out &&
+ git format-patch -1 --stdout --notes >out &&
+ grep "notes config message" out &&
+ git format-patch -1 --stdout --no-notes >out &&
+ ! grep "notes config message" out &&
+ git format-patch -1 --stdout --notes --no-notes >out &&
+ ! grep "notes config message" out &&
+ git format-patch -1 --stdout --no-notes --notes >out &&
+ grep "notes config message" out
+'
+
+test_expect_success 'format-patch with multiple notes refs' '
+ git notes --ref note1 add -m "this is note 1" HEAD &&
+ test_when_finished git notes --ref note1 remove HEAD &&
+ git notes --ref note2 add -m "this is note 2" HEAD &&
+ test_when_finished git notes --ref note2 remove HEAD &&
+
+ git format-patch -1 --stdout >out &&
+ ! grep "this is note 1" out &&
+ ! grep "this is note 2" out &&
+ git format-patch -1 --stdout --notes=note1 >out &&
+ grep "this is note 1" out &&
+ ! grep "this is note 2" out &&
+ git format-patch -1 --stdout --notes=note2 >out &&
+ ! grep "this is note 1" out &&
+ grep "this is note 2" out &&
+ git format-patch -1 --stdout --notes=note1 --notes=note2 >out &&
+ grep "this is note 1" out &&
+ grep "this is note 2" out &&
+
+ test_config format.notes note1 &&
+ git format-patch -1 --stdout >out &&
+ grep "this is note 1" out &&
+ ! grep "this is note 2" out &&
+ git format-patch -1 --stdout --no-notes >out &&
+ ! grep "this is note 1" out &&
+ ! grep "this is note 2" out &&
+ git format-patch -1 --stdout --notes=note2 >out &&
+ grep "this is note 1" out &&
+ grep "this is note 2" out &&
+ git format-patch -1 --stdout --no-notes --notes=note2 >out &&
+ ! grep "this is note 1" out &&
+ grep "this is note 2" out &&
+
+ git config --add format.notes note2 &&
+ git format-patch -1 --stdout >out &&
+ grep "this is note 1" out &&
+ grep "this is note 2" out &&
+ git format-patch -1 --stdout --no-notes >out &&
+ ! grep "this is note 1" out &&
+ ! grep "this is note 2" out
+'
+
echo "fatal: --name-only does not make sense" > expect.name-only
echo "fatal: --name-status does not make sense" > expect.name-status
echo "fatal: --check does not make sense" > expect.check
'
test_expect_success 'format-patch --base' '
- git checkout side &&
+ git checkout patchid &&
git format-patch --stdout --base=HEAD~3 -1 | tail -n 7 >actual1 &&
git format-patch --stdout --base=HEAD~3 HEAD~.. | tail -n 7 >actual2 &&
echo >expected &&
echo "prerequisite-patch-id: $(git show --patch HEAD~1 | git patch-id --stable | awk "{print \$1}")" >>expected &&
signature >> expected &&
test_cmp expected actual1 &&
- test_cmp expected actual2
+ test_cmp expected actual2 &&
+ echo >fail &&
+ echo "base-commit: $(git rev-parse HEAD~3)" >>fail &&
+ echo "prerequisite-patch-id: $(git show --patch HEAD~2 | git patch-id --unstable | awk "{print \$1}")" >>fail &&
+ echo "prerequisite-patch-id: $(git show --patch HEAD~1 | git patch-id --unstable | awk "{print \$1}")" >>fail &&
+ signature >> fail &&
+ ! test_cmp fail actual1 &&
+ ! test_cmp fail actual2
'
test_expect_success 'format-patch --base errors out when base commit is in revision list' '
test_cmp expected actual
'
+# Note that the "6" in the expected hunk header below is funny, since we only
+# show 5 lines (the missing one was blank and thus ignored). This is how
+# --ignore-blank-lines behaves even without --function-context, and this test
+# is just checking the interaction of the two features. Don't take it as an
+# endorsement of that output.
+test_expect_success 'combine --ignore-blank-lines with --function-context' '
+ test_write_lines 1 "" 2 3 4 5 >a &&
+ test_write_lines 1 2 3 4 >b &&
+ test_must_fail git diff --no-index \
+ --ignore-blank-lines --function-context a b >actual.raw &&
+ sed -n "/@@/,\$p" <actual.raw >actual &&
+ cat <<-\EOF >expect &&
+ @@ -1,6 +1,4 @@
+ 1
+ 2
+ 3
+ 4
+ -5
+ EOF
+ test_cmp expect actual
+'
+
test_done
php
python
ruby
+ rust
tex
custom1
custom2
--- /dev/null
+classdef RIGHT
+ properties
+ ChangeMe
+ end
+end
--- /dev/null
+function y = RIGHT()
+x = 5;
+y = ChangeMe + x;
+end
--- /dev/null
+%%% RIGHT section
+# this is octave script
+ChangeMe = 1;
--- /dev/null
+## RIGHT section
+# this is octave script
+ChangeMe = 1;
--- /dev/null
+%% RIGHT section
+% this is understood by both matlab and octave
+ChangeMe = 1;
--- /dev/null
+pub(self) fn RIGHT<T>(x: &[T]) where T: Debug {
+ let _ = x;
+ // a comment
+ let a = ChangeMe;
+}
--- /dev/null
+impl<'a, T: AsRef<[u8]>> std::RIGHT for Git<'a> {
+
+ pub fn ChangeMe(&self) -> () {
+ }
+}
--- /dev/null
+#[derive(Debug)]
+pub(super) struct RIGHT<'a> {
+ name: &'a str,
+ age: ChangeMe,
+}
--- /dev/null
+unsafe trait RIGHT<T> {
+ fn len(&self) -> u32;
+ fn ChangeMe(&self, n: u32) -> T;
+ fn iter<F>(&self, f: F) where F: Fn(T);
+}
test_cmp expect actual
'
-test_expect_success 'log with various grep.patternType configurations & command-lines' '
+test_expect_success !FAIL_PREREQS 'log with various grep.patternType configurations & command-lines' '
git init pattern-type &&
(
cd pattern-type &&
test_cmp expect actual
'
+test_expect_success 'log.mailmap=false disables mailmap' '
+ cat >expect <<-\EOF &&
+ Author: CTO <cto@coompany.xx>
+ Author: claus <me@company.xx>
+ Author: santa <me@company.xx>
+ Author: nick2 <nick2@company.xx>
+ Author: nick2 <bugs@company.xx>
+ Author: nick1 <bugs@company.xx>
+ Author: A U Thor <author@example.com>
+ EOF
+ git -c log.mailmap=False log | grep Author > actual &&
+ test_cmp expect actual
+'
+
+test_expect_success '--no-use-mailmap disables mailmap' '
+ cat >expect <<-\EOF &&
+ Author: CTO <cto@coompany.xx>
+ Author: claus <me@company.xx>
+ Author: santa <me@company.xx>
+ Author: nick2 <nick2@company.xx>
+ Author: nick2 <bugs@company.xx>
+ Author: nick1 <bugs@company.xx>
+ Author: A U Thor <author@example.com>
+ EOF
+ git log --no-use-mailmap | grep Author > actual &&
+ test_cmp expect actual
+'
+
cat >expect <<\EOF
Author: Santa Claus <santa.claus@northpole.xx>
Author: Santa Claus <santa.claus@northpole.xx>
test_cmp expect actual
'
+test_expect_success 'log.mailmap is true by default these days' '
+ git log --author Santa | grep Author >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'Only grep replaced author with --use-mailmap' '
git log --use-mailmap --author "<cto@coompany.xx>" >actual &&
test_must_be_empty actual
'
}
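+# The pre-generated huge object shipped in t5000/ has a different object name
+# under SHA-1 and SHA-256; cache both so the test below can look it up with
+# test_oid.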
+test_expect_success 'setup' '
+ test_oid_cache <<-EOF
+ obj sha1:19f9c8273ec45a8938e6999cb59b3ff66739902a
+ obj sha256:3c666f798798601571f5cec0adb57ce4aba8546875e7693177e0535f34d2c49b
+ EOF
+'
+
test_expect_success \
'populate workdir' \
'mkdir a &&
'
test_expect_success LONG_IS_64BIT 'set up repository with huge blob' '
- obj_d=19 &&
- obj_f=f9c8273ec45a8938e6999cb59b3ff66739902a &&
- obj=${obj_d}${obj_f} &&
- mkdir -p .git/objects/$obj_d &&
- cp "$TEST_DIRECTORY"/t5000/$obj .git/objects/$obj_d/$obj_f &&
+ obj=$(test_oid obj) &&
+ path=$(test_oid_to_path $obj) &&
+ mkdir -p .git/objects/$(dirname $path) &&
+ cp "$TEST_DIRECTORY"/t5000/huge-object .git/objects/$path &&
rm -f .git/index &&
git update-index --add --cacheinfo 100644,$obj,huge &&
git commit -m huge
--- /dev/null
+#!/bin/sh
+
+test_description='Test git update-server-info'
+
+. ./test-lib.sh
+
+test_expect_success 'setup' 'test_commit file'
+
+test_expect_success 'create info/refs' '
+ git update-server-info &&
+ test_path_is_file .git/info/refs
+'
+
+test_expect_success 'modify and store mtime' '
+ test-tool chmtime =0 .git/info/refs &&
+ test-tool chmtime --get .git/info/refs >a
+'
+
+test_expect_success 'info/refs is not needlessly overwritten' '
+ git update-server-info &&
+ test-tool chmtime --get .git/info/refs >b &&
+ test_cmp a b
+'
+
+test_expect_success 'info/refs can be forced to update' '
+ git update-server-info -f &&
+ test-tool chmtime --get .git/info/refs >b &&
+ ! test_cmp a b
+'
+
+test_expect_success 'info/refs updates when changes are made' '
+ test-tool chmtime =0 .git/info/refs &&
+ test-tool chmtime --get .git/info/refs >b &&
+ test_cmp a b &&
+ git update-ref refs/heads/foo HEAD &&
+ git update-server-info &&
+ test-tool chmtime --get .git/info/refs >b &&
+ ! test_cmp a b
+'
+
+test_done
}
test_expect_success 'setup repo with moderate-sized history' '
- for i in $(test_seq 1 10)
- do
- test_commit $i
- done &&
+ test_commit_bulk --id=file 100 &&
git checkout -b other HEAD~5 &&
- for i in $(test_seq 1 10)
- do
- test_commit side-$i
- done &&
+ test_commit_bulk --id=side 10 &&
git checkout master &&
bitmaptip=$(git rev-parse master) &&
blob=$(echo tagged-blob | git hash-object -w --stdin) &&
'
test_expect_success 'setup further non-bitmapped commits' '
- for i in $(test_seq 1 10)
- do
- test_commit further-$i
- done
+ test_commit_bulk --id=further 10
'
rev_list_tests 'partial bitmap'
test_expect_success 'write graph with no packs' '
cd "$TRASH_DIRECTORY/full" &&
git commit-graph write --object-dir . &&
- test_path_is_file info/commit-graph
+ test_path_is_missing info/commit-graph
'
test_expect_success 'close with correct error on bad input' '
"incorrect object offset"
'
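+# Set up five packs (pack-A through pack-E), each covering the range between
+# two of the branches A..E, plus a multi-pack-index; keep a backup copy of the
+# pack directory so a later test can restore it.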
+test_expect_success 'setup expire tests' '
+ mkdir dup &&
+ (
+ cd dup &&
+ git init &&
+ test-tool genrandom "data" 4096 >large_file.txt &&
+ git update-index --add large_file.txt &&
+ for i in $(test_seq 1 20)
+ do
+ test_commit $i
+ done &&
+ git branch A HEAD &&
+ git branch B HEAD~8 &&
+ git branch C HEAD~13 &&
+ git branch D HEAD~16 &&
+ git branch E HEAD~18 &&
+ git pack-objects --revs .git/objects/pack/pack-A <<-EOF &&
+ refs/heads/A
+ ^refs/heads/B
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-B <<-EOF &&
+ refs/heads/B
+ ^refs/heads/C
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-C <<-EOF &&
+ refs/heads/C
+ ^refs/heads/D
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-D <<-EOF &&
+ refs/heads/D
+ ^refs/heads/E
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-E <<-EOF &&
+ refs/heads/E
+ EOF
+ git multi-pack-index write &&
+ cp -r .git/objects/pack .git/objects/pack-backup
+ )
+'
+
+test_expect_success 'expire does not remove any packs' '
+ (
+ cd dup &&
+ ls .git/objects/pack >expect &&
+ git multi-pack-index expire &&
+ ls .git/objects/pack >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'expire removes unreferenced packs' '
+ (
+ cd dup &&
+ git pack-objects --revs .git/objects/pack/pack-combined <<-EOF &&
+ refs/heads/A
+ ^refs/heads/C
+ EOF
+ git multi-pack-index write &&
+ ls .git/objects/pack | grep -v -e pack-[AB] >expect &&
+ git multi-pack-index expire &&
+ ls .git/objects/pack >actual &&
+ test_cmp expect actual &&
+ ls .git/objects/pack/ | grep idx >expect-idx &&
+ test-tool read-midx .git/objects | grep idx >actual-midx &&
+ test_cmp expect-idx actual-midx &&
+ git multi-pack-index verify &&
+ git fsck
+ )
+'
+
+test_expect_success 'repack with minimum size does not alter existing packs' '
+ (
+ cd dup &&
+ rm -rf .git/objects/pack &&
+ mv .git/objects/pack-backup .git/objects/pack &&
+ touch -m -t 201901010000 .git/objects/pack/pack-D* &&
+ touch -m -t 201901010001 .git/objects/pack/pack-C* &&
+ touch -m -t 201901010002 .git/objects/pack/pack-B* &&
+ touch -m -t 201901010003 .git/objects/pack/pack-A* &&
+ ls .git/objects/pack >expect &&
+ MINSIZE=$(test-tool path-utils file-size .git/objects/pack/*pack | sort -n | head -n 1) &&
+ git multi-pack-index repack --batch-size=$MINSIZE &&
+ ls .git/objects/pack >actual &&
+ test_cmp expect actual
+ )
+'
+
+test_expect_success 'repack creates a new pack' '
+ (
+ cd dup &&
+ ls .git/objects/pack/*idx >idx-list &&
+ test_line_count = 5 idx-list &&
+ THIRD_SMALLEST_SIZE=$(test-tool path-utils file-size .git/objects/pack/*pack | sort -n | head -n 3 | tail -n 1) &&
+ BATCH_SIZE=$(($THIRD_SMALLEST_SIZE + 1)) &&
+ git multi-pack-index repack --batch-size=$BATCH_SIZE &&
+ ls .git/objects/pack/*idx >idx-list &&
+ test_line_count = 6 idx-list &&
+ test-tool read-midx .git/objects | grep idx >midx-list &&
+ test_line_count = 6 midx-list
+ )
+'
+
+test_expect_success 'expire removes repacked packs' '
+ (
+ cd dup &&
+ ls -al .git/objects/pack/*pack &&
+ ls -S .git/objects/pack/*pack | head -n 4 >expect &&
+ git multi-pack-index expire &&
+ ls -S .git/objects/pack/*pack >actual &&
+ test_cmp expect actual &&
+ test-tool read-midx .git/objects | grep idx >midx-list &&
+ test_line_count = 4 midx-list
+ )
+'
+
+test_expect_success 'expire works when adding new packs' '
+ (
+ cd dup &&
+ git pack-objects --revs .git/objects/pack/pack-combined <<-EOF &&
+ refs/heads/A
+ ^refs/heads/B
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-combined <<-EOF &&
+ refs/heads/B
+ ^refs/heads/C
+ EOF
+ git pack-objects --revs .git/objects/pack/pack-combined <<-EOF &&
+ refs/heads/C
+ ^refs/heads/D
+ EOF
+ git multi-pack-index write &&
+ git pack-objects --revs .git/objects/pack/a-pack <<-EOF &&
+ refs/heads/D
+ ^refs/heads/E
+ EOF
+ git multi-pack-index write &&
+ git pack-objects --revs .git/objects/pack/z-pack <<-EOF &&
+ refs/heads/E
+ EOF
+ git multi-pack-index expire &&
+ ls .git/objects/pack/ | grep idx >expect &&
+ test-tool read-midx .git/objects | grep idx >actual &&
+ test_cmp expect actual &&
+ git multi-pack-index verify
+ )
+'
+
+test_expect_success 'expire respects .keep files' '
+ (
+ cd dup &&
+ git pack-objects --revs .git/objects/pack/pack-all <<-EOF &&
+ refs/heads/A
+ EOF
+ git multi-pack-index write &&
+ PACKA=$(ls .git/objects/pack/a-pack*\.pack | sed s/\.pack\$//) &&
+ touch $PACKA.keep &&
+ git multi-pack-index expire &&
+ ls -S .git/objects/pack/a-pack* | grep $PACKA >a-pack-files &&
+ test_line_count = 3 a-pack-files &&
+ test-tool read-midx .git/objects | grep idx >midx-list &&
+ test_line_count = 2 midx-list
+ )
+'
+
+test_expect_success 'repack --batch-size=0 repacks everything' '
+ (
+ cd dup &&
+ rm .git/objects/pack/*.keep &&
+ ls .git/objects/pack/*idx >idx-list &&
+ test_line_count = 2 idx-list &&
+ git multi-pack-index repack --batch-size=0 &&
+ ls .git/objects/pack/*idx >idx-list &&
+ test_line_count = 3 idx-list &&
+ test-tool read-midx .git/objects | grep idx >midx-list &&
+ test_line_count = 3 midx-list &&
+ git multi-pack-index expire &&
+ ls -al .git/objects/pack/*idx >idx-list &&
+ test_line_count = 1 idx-list &&
+ git multi-pack-index repack --batch-size=0 &&
+ ls -al .git/objects/pack/*idx >new-idx-list &&
+ test_cmp idx-list new-idx-list
+ )
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='split commit graph'
+. ./test-lib.sh
+
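+# Override any GIT_TEST_COMMIT_GRAPH=1 from the environment; these tests
+# write commit-graph files explicitly.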
+GIT_TEST_COMMIT_GRAPH=0
+
+test_expect_success 'setup repo' '
+ git init &&
+ git config core.commitGraph true &&
+ infodir=".git/objects/info" &&
+ graphdir="$infodir/commit-graphs" &&
+ test_oid_init
+'
+
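+# graph_read_expect <num-commits> [<num-base-graphs>]: check that
+# "git commit-graph read" reports the expected number of commits and
+# base graphs for the current graph file.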
+graph_read_expect() {
+ NUM_BASE=0
+ if test -n "$2"
+ then
+ NUM_BASE=$2
+ fi
+ cat >expect <<- EOF
+ header: 43475048 1 1 3 $NUM_BASE
+ num_commits: $1
+ chunks: oid_fanout oid_lookup commit_metadata
+ EOF
+ git commit-graph read >output &&
+ test_cmp expect output
+}
+
+test_expect_success 'create commits and write commit-graph' '
+ for i in $(test_seq 3)
+ do
+ test_commit $i &&
+ git branch commits/$i || return 1
+ done &&
+ git commit-graph write --reachable &&
+ test_path_is_file $infodir/commit-graph &&
+ graph_read_expect 3
+'
+
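+# Run a command with core.commitGraph enabled and again with it disabled,
+# and check that the output is identical.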
+graph_git_two_modes() {
+ git -c core.commitGraph=true $1 >output &&
+ git -c core.commitGraph=false $1 >expect &&
+ test_cmp expect output
+}
+
+graph_git_behavior() {
+ MSG=$1
+ BRANCH=$2
+ COMPARE=$3
+ test_expect_success "check normal git operations: $MSG" '
+ graph_git_two_modes "log --oneline $BRANCH" &&
+ graph_git_two_modes "log --topo-order $BRANCH" &&
+ graph_git_two_modes "log --graph $COMPARE..$BRANCH" &&
+ graph_git_two_modes "branch -vv" &&
+ graph_git_two_modes "merge-base -a $BRANCH $COMPARE"
+ '
+}
+
+graph_git_behavior 'graph exists' commits/3 commits/1
+
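+# Check that every graph file named in the commit-graph-chain file exists.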
+verify_chain_files_exist() {
+ for hash in $(cat $1/commit-graph-chain)
+ do
+ test_path_is_file $1/graph-$hash.graph || return 1
+ done
+}
+
+test_expect_success 'add more commits, and write a new base graph' '
+ git reset --hard commits/1 &&
+ for i in $(test_seq 4 5)
+ do
+ test_commit $i &&
+ git branch commits/$i || return 1
+ done &&
+ git reset --hard commits/2 &&
+ for i in $(test_seq 6 10)
+ do
+ test_commit $i &&
+ git branch commits/$i || return 1
+ done &&
+ git reset --hard commits/2 &&
+ git merge commits/4 &&
+ git branch merge/1 &&
+ git reset --hard commits/4 &&
+ git merge commits/6 &&
+ git branch merge/2 &&
+ git commit-graph write --reachable &&
+ graph_read_expect 12
+'
+
+test_expect_success 'fork and fail to base a chain on a commit-graph file' '
+ test_when_finished rm -rf fork &&
+ git clone . fork &&
+ (
+ cd fork &&
+ rm .git/objects/info/commit-graph &&
+ echo "$(pwd)/../.git/objects" >.git/objects/info/alternates &&
+ test_commit new-commit &&
+ git commit-graph write --reachable --split &&
+ test_path_is_file $graphdir/commit-graph-chain &&
+ test_line_count = 1 $graphdir/commit-graph-chain &&
+ verify_chain_files_exist $graphdir
+ )
+'
+
+test_expect_success 'add three more commits, write a tip graph' '
+ git reset --hard commits/3 &&
+ git merge merge/1 &&
+ git merge commits/5 &&
+ git merge merge/2 &&
+ git branch merge/3 &&
+ git commit-graph write --reachable --split &&
+ test_path_is_missing $infodir/commit-graph &&
+ test_path_is_file $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 2 graph-files &&
+ verify_chain_files_exist $graphdir
+'
+
+graph_git_behavior 'split commit-graph: merge 3 vs 2' merge/3 merge/2
+
+test_expect_success 'add one commit, write a tip graph' '
+ test_commit 11 &&
+ git branch commits/11 &&
+ git commit-graph write --reachable --split &&
+ test_path_is_missing $infodir/commit-graph &&
+ test_path_is_file $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 3 graph-files &&
+ verify_chain_files_exist $graphdir
+'
+
+graph_git_behavior 'three-layer commit-graph: commit 11 vs 6' commits/11 commits/6
+
+test_expect_success 'add one commit, write a merged graph' '
+ test_commit 12 &&
+ git branch commits/12 &&
+ git commit-graph write --reachable --split &&
+ test_path_is_file $graphdir/commit-graph-chain &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 2 graph-files &&
+ verify_chain_files_exist $graphdir
+'
+
+graph_git_behavior 'merged commit-graph: commit 12 vs 6' commits/12 commits/6
+
+test_expect_success 'create fork and chain across alternate' '
+ git clone . fork &&
+ (
+ cd fork &&
+ git config core.commitGraph true &&
+ rm -rf $graphdir &&
+ echo "$(pwd)/../.git/objects" >.git/objects/info/alternates &&
+ test_commit 13 &&
+ git branch commits/13 &&
+ git commit-graph write --reachable --split &&
+ test_path_is_file $graphdir/commit-graph-chain &&
+ test_line_count = 3 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 1 graph-files &&
+ git -c core.commitGraph=true rev-list HEAD >expect &&
+ git -c core.commitGraph=false rev-list HEAD >actual &&
+ test_cmp expect actual &&
+ test_commit 14 &&
+ git commit-graph write --reachable --split --object-dir=.git/objects/ &&
+ test_line_count = 3 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 1 graph-files
+ )
+'
+
+graph_git_behavior 'alternate: commit 13 vs 6' commits/13 commits/6
+
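+# The clones below exercise the --size-multiple, --expire-time and
+# --max-commits options, which control when split commit-graph layers are
+# merged together and when superseded graph files are removed.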
+test_expect_success 'test merge strategy constants' '
+ git clone . merge-2 &&
+ (
+ cd merge-2 &&
+ git config core.commitGraph true &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ test_commit 14 &&
+ git commit-graph write --reachable --split --size-multiple=2 &&
+ test_line_count = 3 $graphdir/commit-graph-chain
+
+ ) &&
+ git clone . merge-10 &&
+ (
+ cd merge-10 &&
+ git config core.commitGraph true &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ test_commit 14 &&
+ git commit-graph write --reachable --split --size-multiple=10 &&
+ test_line_count = 1 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 1 graph-files
+ ) &&
+ git clone . merge-10-expire &&
+ (
+ cd merge-10-expire &&
+ git config core.commitGraph true &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ test_commit 15 &&
+ git commit-graph write --reachable --split --size-multiple=10 --expire-time=1980-01-01 &&
+ test_line_count = 1 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 3 graph-files
+ ) &&
+ git clone --no-hardlinks . max-commits &&
+ (
+ cd max-commits &&
+ git config core.commitGraph true &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ test_commit 16 &&
+ test_commit 17 &&
+ git commit-graph write --reachable --split --max-commits=1 &&
+ test_line_count = 1 $graphdir/commit-graph-chain &&
+ ls $graphdir/graph-*.graph >graph-files &&
+ test_line_count = 1 graph-files
+ )
+'
+
+test_expect_success 'remove commit-graph-chain file after flattening' '
+ git clone . flatten &&
+ (
+ cd flatten &&
+ test_line_count = 2 $graphdir/commit-graph-chain &&
+ git commit-graph write --reachable &&
+ test_path_is_missing $graphdir/commit-graph-chain &&
+ ls $graphdir >graph-files &&
+ test_line_count = 0 graph-files
+ )
+'
+
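+# corrupt_file <file> <pos> [<data>]: overwrite the byte at the given offset
+# (with NUL by default) to simulate corruption.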
+corrupt_file() {
+ file=$1
+ pos=$2
+ data="${3:-\0}"
+ chmod a+w "$file" &&
+ printf "$data" | dd of="$file" bs=1 seek="$pos" conv=notrunc
+}
+
+test_expect_success 'verify hashes along chain, even in shallow' '
+ git clone --no-hardlinks . verify &&
+ (
+ cd verify &&
+ git commit-graph verify &&
+ base_file=$graphdir/graph-$(head -n 1 $graphdir/commit-graph-chain).graph &&
+ corrupt_file "$base_file" 1760 "\01" &&
+ test_must_fail git commit-graph verify --shallow 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "incorrect checksum" err
+ )
+'
+
+test_expect_success 'verify --shallow does not check base contents' '
+ git clone --no-hardlinks . verify-shallow &&
+ (
+ cd verify-shallow &&
+ git commit-graph verify &&
+ base_file=$graphdir/graph-$(head -n 1 $graphdir/commit-graph-chain).graph &&
+ corrupt_file "$base_file" 1000 "\01" &&
+ git commit-graph verify --shallow &&
+ test_must_fail git commit-graph verify 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "incorrect checksum" err
+ )
+'
+
+test_expect_success 'warn on base graph chunk incorrect' '
+ git clone --no-hardlinks . base-chunk &&
+ (
+ cd base-chunk &&
+ git commit-graph verify &&
+ base_file=$graphdir/graph-$(tail -n 1 $graphdir/commit-graph-chain).graph &&
+ corrupt_file "$base_file" 1376 "\01" &&
+ git commit-graph verify --shallow 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "commit-graph chain does not match" err
+ )
+'
+
+test_expect_success 'verify after commit-graph-chain corruption' '
+ git clone --no-hardlinks . verify-chain &&
+ (
+ cd verify-chain &&
+ corrupt_file "$graphdir/commit-graph-chain" 60 "G" &&
+ git commit-graph verify 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "invalid commit-graph chain" err &&
+ corrupt_file "$graphdir/commit-graph-chain" 60 "A" &&
+ git commit-graph verify 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "unable to find all commit-graph files" err
+ )
+'
+
+test_expect_success 'verify across alternates' '
+ git clone --no-hardlinks . verify-alt &&
+ (
+ cd verify-alt &&
+ rm -rf $graphdir &&
+ altdir="$(pwd)/../.git/objects" &&
+ echo "$altdir" >.git/objects/info/alternates &&
+ git commit-graph verify --object-dir="$altdir/" &&
+ test_commit extra &&
+ git commit-graph write --reachable --split &&
+ tip_file=$graphdir/graph-$(tail -n 1 $graphdir/commit-graph-chain).graph &&
+ corrupt_file "$tip_file" 100 "\01" &&
+ test_must_fail git commit-graph verify --shallow 2>test_err &&
+ grep -v "^+" test_err >err &&
+ test_i18ngrep "commit-graph has incorrect fanout value" err
+ )
+'
+
+test_expect_success 'add octopus merge' '
+ git reset --hard commits/10 &&
+ git merge commits/3 commits/4 &&
+ git branch merge/octopus &&
+ git commit-graph write --reachable --split &&
+ git commit-graph verify 2>err &&
+ test_line_count = 3 err &&
+ test_i18ngrep ! warning err &&
+ test_line_count = 3 $graphdir/commit-graph-chain
+'
+
+graph_git_behavior 'graph exists' merge/octopus commits/12
+
+test_expect_success 'split across alternate where alternate is not split' '
+ git commit-graph write --reachable &&
+ test_path_is_file .git/objects/info/commit-graph &&
+ cp .git/objects/info/commit-graph . &&
+ git clone --no-hardlinks . alt-split &&
+ (
+ cd alt-split &&
+ echo "$(pwd)"/../.git/objects >.git/objects/info/alternates &&
+ test_commit 18 &&
+ git commit-graph write --reachable --split &&
+ test_line_count = 1 $graphdir/commit-graph-chain
+ ) &&
+ test_cmp commit-graph .git/objects/info/commit-graph
+'
+
+test_done
test_expect_success 'fsck with invalid or bogus skipList input' '
git -c fsck.skipList=/dev/null -c fsck.missingEmail=ignore fsck &&
test_must_fail git -c fsck.skipList=does-not-exist -c fsck.missingEmail=ignore fsck 2>err &&
- test_i18ngrep "Could not open skip list: does-not-exist" err &&
+ test_i18ngrep "could not open.*: does-not-exist" err &&
test_must_fail git -c fsck.skipList=.git/config -c fsck.missingEmail=ignore fsck 2>err &&
- test_i18ngrep "Invalid SHA-1: \[core\]" err
+ test_i18ngrep "invalid object name: \[core\]" err
'
test_expect_success 'fsck with other accepted skipList input (comments & empty lines)' '
test_expect_success 'fsck with invalid abbreviated skipList input' '
echo $commit | test_copy_bytes 20 >SKIP.abbreviated &&
test_must_fail git -c fsck.skipList=SKIP.abbreviated fsck 2>err-abbreviated &&
- test_i18ngrep "^fatal: Invalid SHA-1: " err-abbreviated
+ test_i18ngrep "^fatal: invalid object name: " err-abbreviated
'
test_expect_success 'fsck with exhaustive accepted skipList input (various types of comments etc.)' '
test_must_fail git push --porcelain dst bogus &&
git --git-dir=dst/.git config receive.fsck.skipList does-not-exist &&
test_must_fail git push --porcelain dst bogus 2>err &&
- test_i18ngrep "Could not open skip list: does-not-exist" err &&
+ test_i18ngrep "could not open.*: does-not-exist" err &&
git --git-dir=dst/.git config receive.fsck.skipList config &&
test_must_fail git push --porcelain dst bogus 2>err &&
- test_i18ngrep "Invalid SHA-1: \[core\]" err &&
+ test_i18ngrep "invalid object name: \[core\]" err &&
git --git-dir=dst/.git config receive.fsck.skipList SKIP &&
git push --porcelain dst bogus
test_must_fail git --git-dir=dst/.git fetch "file://$(pwd)" $refspec &&
git --git-dir=dst/.git config fetch.fsck.skipList does-not-exist &&
test_must_fail git --git-dir=dst/.git fetch "file://$(pwd)" $refspec 2>err &&
- test_i18ngrep "Could not open skip list: does-not-exist" err &&
+ test_i18ngrep "could not open.*: does-not-exist" err &&
git --git-dir=dst/.git config fetch.fsck.skipList dst/.git/config &&
test_must_fail git --git-dir=dst/.git fetch "file://$(pwd)" $refspec 2>err &&
- test_i18ngrep "Invalid SHA-1: \[core\]" err &&
+ test_i18ngrep "invalid object name: \[core\]" err &&
git --git-dir=dst/.git config fetch.fsck.skipList dst/.git/SKIP &&
git --git-dir=dst/.git fetch "file://$(pwd)" $refspec
check_negotiation_tip
'
+test_expect_success '--no-show-forced-updates' '
+ mkdir forced-updates &&
+ (
+ cd forced-updates &&
+ git init &&
+ test_commit 1 &&
+ test_commit 2
+ ) &&
+ git clone forced-updates forced-update-clone &&
+ git clone forced-updates no-forced-update-clone &&
+ git -C forced-updates reset --hard HEAD~1 &&
+ (
+ cd forced-update-clone &&
+ git fetch --show-forced-updates origin 2>output &&
+ test_i18ngrep "(forced update)" output
+ ) &&
+ (
+ cd no-forced-update-clone &&
+ git fetch --no-show-forced-updates origin 2>output &&
+ ! test_i18ngrep "(forced update)" output
+ )
+'
+
test_done
'
test_lazy_prereq GIT_DAEMON '
- test_tristate GIT_TEST_GIT_DAEMON &&
- test "$GIT_TEST_GIT_DAEMON" != false
+ git env--helper --type=bool --default=true --exit-code GIT_TEST_GIT_DAEMON
'
# This test spawns a daemon, so run it only if the user would be OK with
git remote rm origin &&
git remote add one ../one &&
git remote add two ../two &&
- git fetch --multiple one two &&
+ GIT_TRACE=1 git fetch --multiple one two 2>trace &&
git branch -r > output &&
- test_cmp ../expect output)
+ test_cmp ../expect output &&
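+ # gc should be launched only once for the whole --multiple fetch,
+ # not once per remote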
+ grep "built-in: git gc" trace >gc &&
+ test_line_count = 1 gc
+ )
'
test_expect_success 'git fetch --multiple (bad remote names)' '
( cd C && git fsck )
'
+test_expect_success 'setup repo with garbage in objects/*' '
+ git init S &&
+ (
+ cd S &&
+ test_commit A &&
+
+ cd .git/objects &&
+ >.some-hidden-file &&
+ >some-file &&
+ mkdir .some-hidden-dir &&
+ >.some-hidden-dir/some-file &&
+ >.some-hidden-dir/.some-dot-file &&
+ mkdir some-dir &&
+ >some-dir/some-file &&
+ >some-dir/.some-dot-file
+ )
+'
+
+test_expect_success 'clone a repo with garbage in objects/*' '
+ for option in --local --no-hardlinks --shared --dissociate
+ do
+ git clone $option S S$option || return 1 &&
+ git -C S$option fsck || return 1
+ done &&
+ find S-* -name "*some*" | sort >actual &&
+ cat >expected <<-EOF &&
+ S--dissociate/.git/objects/.some-hidden-dir
+ S--dissociate/.git/objects/.some-hidden-dir/.some-dot-file
+ S--dissociate/.git/objects/.some-hidden-dir/some-file
+ S--dissociate/.git/objects/.some-hidden-file
+ S--dissociate/.git/objects/some-dir
+ S--dissociate/.git/objects/some-dir/.some-dot-file
+ S--dissociate/.git/objects/some-dir/some-file
+ S--dissociate/.git/objects/some-file
+ S--local/.git/objects/.some-hidden-dir
+ S--local/.git/objects/.some-hidden-dir/.some-dot-file
+ S--local/.git/objects/.some-hidden-dir/some-file
+ S--local/.git/objects/.some-hidden-file
+ S--local/.git/objects/some-dir
+ S--local/.git/objects/some-dir/.some-dot-file
+ S--local/.git/objects/some-dir/some-file
+ S--local/.git/objects/some-file
+ S--no-hardlinks/.git/objects/.some-hidden-dir
+ S--no-hardlinks/.git/objects/.some-hidden-dir/.some-dot-file
+ S--no-hardlinks/.git/objects/.some-hidden-dir/some-file
+ S--no-hardlinks/.git/objects/.some-hidden-file
+ S--no-hardlinks/.git/objects/some-dir
+ S--no-hardlinks/.git/objects/some-dir/.some-dot-file
+ S--no-hardlinks/.git/objects/some-dir/some-file
+ S--no-hardlinks/.git/objects/some-file
+ EOF
+ test_cmp expected actual
+'
+
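+# Replace parts of objects/ with symlinks (the pack directory and one loose
+# object directory), add an unknown file, and check how each local clone mode
+# copies them.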
+test_expect_success SYMLINKS 'setup repo with manually symlinked or unknown files at objects/' '
+ git init T &&
+ (
+ cd T &&
+ git config gc.auto 0 &&
+ test_commit A &&
+ git gc &&
+ test_commit B &&
+
+ cd .git/objects &&
+ mv pack packs &&
+ ln -s packs pack &&
+ find ?? -type d >loose-dirs &&
+ last_loose=$(tail -n 1 loose-dirs) &&
+ mv $last_loose a-loose-dir &&
+ ln -s a-loose-dir $last_loose &&
+ first_loose=$(head -n 1 loose-dirs) &&
+ rm -f loose-dirs &&
+
+ cd $first_loose &&
+ obj=$(ls *) &&
+ mv $obj ../an-object &&
+ ln -s ../an-object $obj &&
+
+ cd ../ &&
+ find . -type f | sort >../../../T.objects-files.raw &&
+ find . -type l | sort >../../../T.objects-symlinks.raw &&
+ echo unknown_content >unknown_file
+ ) &&
+ git -C T fsck &&
+ git -C T rev-list --all --objects >T.objects
+'
+
+
+test_expect_success SYMLINKS 'clone repo with symlinked or unknown files at objects/' '
+ for option in --local --no-hardlinks --shared --dissociate
+ do
+ git clone $option T T$option || return 1 &&
+ git -C T$option fsck || return 1 &&
+ git -C T$option rev-list --all --objects >T$option.objects &&
+ test_cmp T.objects T$option.objects &&
+ (
+ cd T$option/.git/objects &&
+ find . -type f | sort >../../../T$option.objects-files.raw &&
+ find . -type l | sort >../../../T$option.objects-symlinks.raw
+ )
+ done &&
+
+ for raw in $(ls T*.raw)
+ do
+ sed -e "s!/../!/Y/!; s![0-9a-f]\{38,\}!Z!" -e "/commit-graph/d" \
+ -e "/multi-pack-index/d" <$raw >$raw.de-sha || return 1
+ done &&
+
+ cat >expected-files <<-EOF &&
+ ./Y/Z
+ ./Y/Z
+ ./a-loose-dir/Z
+ ./an-object
+ ./Y/Z
+ ./info/packs
+ ./pack/pack-Z.idx
+ ./pack/pack-Z.pack
+ ./packs/pack-Z.idx
+ ./packs/pack-Z.pack
+ ./unknown_file
+ EOF
+
+ for option in --local --no-hardlinks --dissociate
+ do
+ test_cmp expected-files T$option.objects-files.raw.de-sha || return 1 &&
+ test_must_be_empty T$option.objects-symlinks.raw.de-sha || return 1
+ done &&
+
+ echo ./info/alternates >expected-files &&
+ test_cmp expected-files T--shared.objects-files.raw &&
+ test_must_be_empty T--shared.objects-symlinks.raw
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='Test cloning repos with submodules using remote-tracking branches'
+
+. ./test-lib.sh
+
+pwd=$(pwd)
+
+test_expect_success 'setup' '
+ git checkout -b master &&
+ test_commit commit1 &&
+ mkdir sub &&
+ (
+ cd sub &&
+ git init &&
+ test_commit subcommit1 &&
+ git tag sub_when_added_to_super
+ ) &&
+ git submodule add "file://$pwd/sub" sub &&
+ git commit -m "add submodule" &&
+ (
+ cd sub &&
+ test_commit subcommit2
+ )
+'
+
+test_expect_success 'clone with --no-remote-submodules' '
+ test_when_finished "rm -rf super_clone" &&
+ git clone --recurse-submodules --no-remote-submodules "file://$pwd/." super_clone &&
+ (
+ cd super_clone/sub &&
+ git diff --exit-code sub_when_added_to_super
+ )
+'
+
+test_expect_success 'clone with --remote-submodules' '
+ test_when_finished "rm -rf super_clone" &&
+ git clone --recurse-submodules --remote-submodules "file://$pwd/." super_clone &&
+ (
+ cd super_clone/sub &&
+ git diff --exit-code remotes/origin/master
+ )
+'
+
+test_expect_success 'check the default is --no-remote-submodules' '
+ test_when_finished "rm -rf super_clone" &&
+ git clone --recurse-submodules "file://$pwd/." super_clone &&
+ (
+ cd super_clone/sub &&
+ git diff --exit-code sub_when_added_to_super
+ )
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='test handling of --alternate-refs traversal'
+. ./test-lib.sh
+
+# Avoid test_commit because we want a specific and known set of refs:
+#
+# base -- one
+# \ \
+# two -- merged
+#
+# where "one" and "two" are on separate refs, and "merged" is available only in
+# the dependent child repository.
+test_expect_success 'set up local refs' '
+ git checkout -b one &&
+ test_tick &&
+ git commit --allow-empty -m base &&
+ test_tick &&
+ git commit --allow-empty -m one &&
+ git checkout -b two HEAD^ &&
+ test_tick &&
+ git commit --allow-empty -m two
+'
+
+# We'll enter the child repository after it's set up since that's where
+# all of the subsequent tests will want to run (and it's easy to forget a
+# "-C child" and get nonsense results).
+test_expect_success 'set up shared clone' '
+ git clone -s . child &&
+ cd child &&
+ git merge origin/one
+'
+
+test_expect_success 'rev-list --alternate-refs' '
+ git rev-list --remotes=origin >expect &&
+ git rev-list --alternate-refs >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'rev-list --not --alternate-refs' '
+ git rev-parse HEAD >expect &&
+ git rev-list HEAD --not --alternate-refs >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'limiting with alternateRefsPrefixes' '
+ test_config core.alternateRefsPrefixes refs/heads/one &&
+ git rev-list origin/one >expect &&
+ git rev-list --alternate-refs >actual &&
+ test_cmp expect actual
+'
+
+test_expect_success 'log --source shows .alternate marker' '
+ git log --oneline --source --remotes=origin >expect.orig &&
+ sed "s/origin.* /.alternate /" <expect.orig >expect &&
+ git log --oneline --source --alternate-refs >actual &&
+ test_cmp expect actual
+'
+
+test_done
# Add extra commits to the client so that the whole fetch takes more
# than 1 request (due to negotiation)
- for i in $(test_seq 1 32)
- do
- test_commit -C client c$i
- done &&
+ test_commit_bulk -C client --id=c 32 &&
git -C server checkout -b newbranch base &&
test_commit -C server client_wants &&
# Create many commits to extend the negotiation phase across multiple
# requests, so that the server does not send "ready" in the first
# request.
- for i in $(test_seq 1 32)
- do
- test_commit -C http_child c$i
- done &&
+ test_commit_bulk -C http_child --id=c 32 &&
# After the acknowledgments section, pretend that a DELIM
# (0001) was sent instead of a FLUSH (0000).
git clone "http://127.0.0.1:$LIB_HTTPD_PORT/smart/repo" "$LOCAL_PRISTINE" &&
cd "$LOCAL_PRISTINE" &&
git checkout -b side &&
- for i in $(test_seq 1 33); do test_commit s$i; done &&
+ test_commit_bulk --id=s 33 &&
# Add novel commits to upstream
git checkout master &&
git clone "file://$REPO" "$LOCAL_PRISTINE" &&
cd "$LOCAL_PRISTINE" &&
git checkout -b side &&
- for i in $(test_seq 1 33); do test_commit s$i; done &&
+ test_commit_bulk --id=s 33 &&
# Add novel commits to upstream
git checkout master &&
! grep one output
'
+test_expect_success 'rev-list --objects --no-object-names has no space/names' '
+ git rev-list --objects --no-object-names HEAD >output &&
+ ! grep wanted_file output &&
+ ! grep unwanted_file output &&
+ ! grep " " output
+'
+
+test_expect_success 'rev-list --objects --no-object-names works with cat-file' '
+ git rev-list --objects --no-object-names --all >list-output &&
+ git cat-file --batch-check <list-output >cat-output &&
+ ! grep missing cat-output
+'
+
+test_expect_success '--no-object-names and --object-names are last-one-wins' '
+ git rev-list --objects --no-object-names --object-names --all >output &&
+ grep wanted_file output &&
+ git rev-list --objects --object-names --no-object-names --all >output &&
+ ! grep wanted_file output
+'
+
test_expect_success 'rev-list A..B and rev-list ^A B are the same' '
git commit --allow-empty -m another &&
git tag -a -m "annotated" v1.0 &&
git add missing/MISSING &&
git commit -m "6(broken): Added file that will be deleted" &&
git tag BROKEN_HASH6 &&
+ deleted=$(git rev-parse --verify HEAD:missing) &&
add_line_into_file "7(broken): second line on a broken branch" hello2 &&
git tag BROKEN_HASH7 &&
add_line_into_file "8(broken): third line on a broken branch" hello2 &&
git rm missing/MISSING &&
git commit -m "9(broken): Remove missing file" &&
git tag BROKEN_HASH9 &&
- rm .git/objects/39/f7e61a724187ab767d2e08442d9b6b9dab587d
+ rm .git/objects/$(test_oid_to_path $deleted)
'
echo "" > expected.ok
cat > expected.missing-tree.default <<EOF
-fatal: unable to read tree 39f7e61a724187ab767d2e08442d9b6b9dab587d
+fatal: unable to read tree $deleted
EOF
test_expect_success 'bisect fails if tree is broken on start commit' '
"
cat > expected.bisect-log <<EOF
-# bad: [32a594a3fdac2d57cf6d02987e30eec68511498c] Add <4: Ciao for now> into <hello>.
-# good: [7b7f204a749c3125d5224ed61ea2ae1187ad046f] Add <2: A new day for git> into <hello>.
-git bisect start '32a594a3fdac2d57cf6d02987e30eec68511498c' '7b7f204a749c3125d5224ed61ea2ae1187ad046f'
-# good: [3de952f2416b6084f557ec417709eac740c6818c] Add <3: Another new day for git> into <hello>.
-git bisect good 3de952f2416b6084f557ec417709eac740c6818c
-# first bad commit: [32a594a3fdac2d57cf6d02987e30eec68511498c] Add <4: Ciao for now> into <hello>.
+# bad: [$HASH4] Add <4: Ciao for now> into <hello>.
+# good: [$HASH2] Add <2: A new day for git> into <hello>.
+git bisect start '$HASH4' '$HASH2'
+# good: [$HASH3] Add <3: Another new day for git> into <hello>.
+git bisect good $HASH3
+# first bad commit: [$HASH4] Add <4: Ciao for now> into <hello>.
EOF
test_expect_success 'bisect log: successful result' '
'
cat > expected.bisect-skip-log <<EOF
-# bad: [32a594a3fdac2d57cf6d02987e30eec68511498c] Add <4: Ciao for now> into <hello>.
-# good: [7b7f204a749c3125d5224ed61ea2ae1187ad046f] Add <2: A new day for git> into <hello>.
-git bisect start '32a594a3fdac2d57cf6d02987e30eec68511498c' '7b7f204a749c3125d5224ed61ea2ae1187ad046f'
-# skip: [3de952f2416b6084f557ec417709eac740c6818c] Add <3: Another new day for git> into <hello>.
-git bisect skip 3de952f2416b6084f557ec417709eac740c6818c
+# bad: [$HASH4] Add <4: Ciao for now> into <hello>.
+# good: [$HASH2] Add <2: A new day for git> into <hello>.
+git bisect start '$HASH4' '$HASH2'
+# skip: [$HASH3] Add <3: Another new day for git> into <hello>.
+git bisect skip $HASH3
# only skipped commits left to test
-# possible first bad commit: [32a594a3fdac2d57cf6d02987e30eec68511498c] Add <4: Ciao for now> into <hello>.
-# possible first bad commit: [3de952f2416b6084f557ec417709eac740c6818c] Add <3: Another new day for git> into <hello>.
+# possible first bad commit: [$HASH4] Add <4: Ciao for now> into <hello>.
+# possible first bad commit: [$HASH3] Add <3: Another new day for git> into <hello>.
EOF
test_expect_success 'bisect log: only skip commits left' '
advance h
'
-script='s/^..\(b.\) *[0-9a-f]* \(.*\)$/\1 \2/p'
+t6040_script='s/^..\(b.\) *[0-9a-f]* \(.*\)$/\1 \2/p'
cat >expect <<\EOF
b1 [ahead 1, behind 1] d
b2 [ahead 1, behind 1] d
cd test &&
git branch -v
) |
- sed -n -e "$script" >actual &&
+ sed -n -e "$t6040_script" >actual &&
test_i18ncmp expect actual
'
cd test &&
git branch -vv
) |
- sed -n -e "$script" >actual &&
+ sed -n -e "$t6040_script" >actual &&
test_i18ncmp expect actual
'
test_i18ncmp expect actual
'
+cat >expect <<\EOF
+## b1...origin/master [different]
+EOF
+
+test_expect_success 'status.aheadbehind=false status -s -b (diverged from upstream)' '
+ (
+ cd test &&
+ git checkout b1 >/dev/null &&
+ git -c status.aheadbehind=false status -s -b | head -1
+ ) >actual &&
+ test_i18ncmp expect actual
+'
+
cat >expect <<\EOF
On branch b1
Your branch and 'origin/master' have diverged,
test_i18ncmp expect actual
'
+test_expect_success 'status --long --branch' '
+ (
+ cd test &&
+ git checkout b1 >/dev/null &&
+ git -c status.aheadbehind=true status --long -b | head -3
+ ) >actual &&
+ test_i18ncmp expect actual
+'
+
cat >expect <<\EOF
On branch b1
Your branch and 'origin/master' refer to different commits.
test_i18ncmp expect actual
'
+test_expect_success 'status.aheadbehind=false status --long --branch' '
+ (
+ cd test &&
+ git checkout b1 >/dev/null &&
+ git -c status.aheadbehind=false status --long -b | head -2
+ ) >actual &&
+ test_i18ncmp expect actual
+'
+
cat >expect <<\EOF
## b5...brokenbase [gone]
EOF
git commit -a -m "Right #5" &&
git checkout -b long &&
- i=0 &&
- while test $i -lt 30
- do
- test_commit $i one &&
- i=$(($i+1))
- done &&
+ test_commit_bulk --start=0 --message=%s --filename=one 30 &&
git show-branch &&
test_cmp expected actual
'
+cat >expected <<\EOF
+refs/tags/testtag
+refs/tags/testtag-2
+EOF
+
+test_expect_success 'exercise patterns with prefixes' '
+ git tag testtag-2 &&
+ test_when_finished "git tag -d testtag-2" &&
+ git for-each-ref --format="%(refname)" \
+ refs/tags/testtag refs/tags/testtag-2 >actual &&
+ test_cmp expected actual
+'
+
+cat >expected <<\EOF
+refs/tags/testtag
+refs/tags/testtag-2
+EOF
+
+test_expect_success 'exercise glob patterns with prefixes' '
+ git tag testtag-2 &&
+ test_when_finished "git tag -d testtag-2" &&
+ git for-each-ref --format="%(refname)" \
+ refs/tags/testtag "refs/tags/testtag-*" >actual &&
+ test_cmp expected actual
+'
+
cat >expected <<\EOF
'refs/heads/master'
'refs/remotes/origin/master'
test_must_fail git for-each-ref --merged HEAD --no-merged HEAD
'
+test_expect_success 'validate worktree atom' '
+ cat >expect <<-EOF &&
+ master: $(pwd)
+ master_worktree: $(pwd)/worktree_dir
+ side: not checked out
+ EOF
+ git worktree add -b master_worktree worktree_dir master &&
+ git for-each-ref --format="%(refname:short): %(if)%(worktreepath)%(then)%(worktreepath)%(else)not checked out%(end)" refs/heads/ >actual &&
+ rm -r worktree_dir &&
+ git worktree prune &&
+ test_cmp expect actual
+'
+
test_done
test_cmp expect actual
'
+get_tag_header gpgsign-enabled $commit commit $time >expect
+echo "A message" >>expect
+echo '-----BEGIN PGP SIGNATURE-----' >>expect
+test_expect_success GPG \
+ 'git tag configured tag.gpgsign enables GPG sign' \
+ 'test_config tag.gpgsign true &&
+ git tag -m "A message" gpgsign-enabled &&
+ get_tag_msg gpgsign-enabled >actual &&
+ test_cmp expect actual
+'
+
+get_tag_header no-sign $commit commit $time >expect
+echo "A message" >>expect
+test_expect_success GPG \
+ 'git tag --no-sign with tag.gpgsign configured skips GPG signing' \
+ 'test_config tag.gpgsign true &&
+ git tag -a --no-sign -m "A message" no-sign &&
+ get_tag_msg no-sign >actual &&
+ test_cmp expect actual
+'
+
test_expect_success GPG \
'trying to create a signed tag with non-existing -F file should fail' '
! test -f nonexistingfile &&
Unmerged paths:
(use "git add/rm <file>..." as appropriate to mark resolution)
-
deleted by us: foo
no changes added to commit (use "git add" and/or "git commit -a")
Unmerged paths:
(use "git add/rm <file>..." as appropriate to mark resolution)
-
both added: conflict.txt
deleted by them: main.txt
Unmerged paths:
(use "git add/rm <file>..." as appropriate to mark resolution)
-
both deleted: main.txt
added by them: sub_master.txt
added by us: sub_second.txt
(use "git merge --abort" to abort the merge)
Changes to be committed:
-
new file: sub_master.txt
Unmerged paths:
(use "git rm <file>..." to mark resolution)
-
both deleted: main.txt
Untracked files not listed (use -u option to show untracked files)
EOF
git status --ahead-behind --porcelain=v2 --branch --untracked-files=all >actual &&
+ test_cmp expect actual &&
+
+ # Confirm that "status.aheadbehind" DOES NOT work on V2 format.
+ git -c status.aheadbehind=false status --porcelain=v2 --branch --untracked-files=all >actual &&
+ test_cmp expect actual &&
+
+ # Confirm that "status.aheadbehind" DOES NOT work on V2 format.
+ git -c status.aheadbehind=true status --porcelain=v2 --branch --untracked-files=all >actual &&
test_cmp expect actual
)
'
test_expect_success 'checkout to detach HEAD' '
git config advice.detachedHead true &&
git checkout -f renamer && git clean -f &&
- GIT_TEST_GETTEXT_POISON= git checkout renamer^ 2>messages &&
+ GIT_TEST_GETTEXT_POISON=false git checkout renamer^ 2>messages &&
grep "HEAD is now at 7329388" messages &&
test_line_count -gt 1 messages &&
H=$(git rev-parse --verify HEAD) &&
)
'
-test_expect_failure 'directory/submodule conflict; should not treat submodule files as untracked or in the way' '
+test_expect_failure !FAIL_PREREQS 'directory/submodule conflict; should not treat submodule files as untracked or in the way' '
test_when_finished "git -C directory-submodule/path reset --hard" &&
test_when_finished "git -C directory-submodule reset --hard" &&
(
exit 0
EOF
-test_expect_success !AUTOIDENT 'do not fire editor when committer is bogus' '
+test_expect_success !FAIL_PREREQS,!AUTOIDENT 'do not fire editor when committer is bogus' '
>.git/result &&
echo >>negative &&
# (use "git pull" to merge the remote branch into yours)
#
# Changes to be committed:
-# (use "git reset HEAD <file>..." to unstage)
-#
+# (use "git restore --staged <file>..." to unstage)
# new file: dir2/added
#
# Changes not staged for commit:
# (use "git add <file>..." to update what will be committed)
-# (use "git checkout -- <file>..." to discard changes in working directory)
-#
+# (use "git restore <file>..." to discard changes in working directory)
# modified: dir1/modified
#
# Untracked files:
# (use "git add <file>..." to include in what will be committed)
-#
# dir1/untracked dir2/untracked
# dir2/modified untracked
#
# (use "git pull" to merge the remote branch into yours)
#
# Changes to be committed:
-# (use "git reset HEAD <file>..." to unstage)
-#
+# (use "git restore --staged <file>..." to unstage)
# new file: dir2/added
#
# Changes not staged for commit:
# (use "git add <file>..." to update what will be committed)
-# (use "git checkout -- <file>..." to discard changes in working directory)
-#
+# (use "git restore <file>..." to discard changes in working directory)
# modified: dir1/modified
#
# Untracked files:
# (use "git add <file>..." to include in what will be committed)
-#
# dir1/untracked
# dir2/modified
# dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir2/modified
Ignored files:
(use "git add -f <file>..." to include in what will be committed)
-
.gitignore
dir1/untracked
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Ignored files:
(use "git add -f <file>..." to include in what will be committed)
-
.gitignore
dir1/untracked
dir2/modified
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files not listed (use -u option to show untracked files)
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: ../dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
untracked
../dir2/modified
../dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
<GREEN>new file: dir2/added<RESET>
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
<RED>modified: dir1/modified<RESET>
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
<BLUE>dir1/untracked<RESET>
<BLUE>dir2/modified<RESET>
<BLUE>dir2/untracked<RESET>
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/
untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
new file: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
new file: dir2/added
new file: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Submodule changes to be committed:
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD^1 <file>..." to unstage)
-
+ (use "git restore --source=HEAD^1 --staged <file>..." to unstage)
new file: dir2/added
new file: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Submodule changes to be committed:
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
dir1/untracked
dir2/modified
dir2/untracked
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Submodule changes to be committed:
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
.gitmodules
dir1/untracked
dir2/modified
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
+ (use "git restore <file>..." to discard changes in working directory)
(commit or discard the untracked or modified content in submodules)
-
modified: dir1/modified
modified: sm (modified content)
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
.gitmodules
dir1/untracked
dir2/modified
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
modified: sm (new commits)
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
.gitmodules
dir1/untracked
dir2/modified
; (use "git pull" to merge the remote branch into yours)
;
; Changes to be committed:
-; (use "git reset HEAD <file>..." to unstage)
-;
+; (use "git restore --staged <file>..." to unstage)
; modified: sm
;
; Changes not staged for commit:
; (use "git add <file>..." to update what will be committed)
-; (use "git checkout -- <file>..." to discard changes in working directory)
-;
+; (use "git restore <file>..." to discard changes in working directory)
; modified: dir1/modified
; modified: sm (new commits)
;
;
; Untracked files:
; (use "git add <file>..." to include in what will be committed)
-;
; .gitmodules
; dir1/untracked
; dir2/modified
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
.gitmodules
dir1/untracked
dir2/modified
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files:
(use "git add <file>..." to include in what will be committed)
-
.gitmodules
dir1/untracked
dir2/modified
(use "git pull" to merge the remote branch into yours)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: sm
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: dir1/modified
Untracked files not listed (use -u option to show untracked files)
Unmerged paths:
(use "git add <file>..." to mark resolution)
-
both modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
(use "git commit" to conclude merge)
Changes to be committed:
-
modified: main.txt
Untracked files not listed (use -u option to show untracked files)
(use "git rebase --abort" to check out the original branch)
Unmerged paths:
- (use "git reset HEAD <file>..." to unstage)
+ (use "git restore --staged <file>..." to unstage)
(use "git add <file>..." to mark resolution)
-
both modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
(all conflicts fixed: run "git rebase --continue")
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: main.txt
Untracked files not listed (use -u option to show untracked files)
(use "git rebase --abort" to check out the original branch)
Unmerged paths:
- (use "git reset HEAD <file>..." to unstage)
+ (use "git restore --staged <file>..." to unstage)
(use "git add <file>..." to mark resolution)
-
both modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
(all conflicts fixed: run "git rebase --continue")
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: main.txt
Untracked files not listed (use -u option to show untracked files)
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
- (use "git checkout -- <file>..." to discard changes in working directory)
-
+ (use "git restore <file>..." to discard changes in working directory)
modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
Unmerged paths:
(use "git add <file>..." to mark resolution)
-
both modified: main.txt
no changes added to commit (use "git add" and/or "git commit -a")
(use "git cherry-pick --abort" to cancel the cherry-pick operation)
Changes to be committed:
-
modified: main.txt
Untracked files not listed (use -u option to show untracked files)
test_i18ncmp expected actual
'
+test_expect_success 'status shows cherry-pick with invalid oid' '
+ mkdir .git/sequencer &&
+ test_write_lines "pick invalid-oid" >.git/sequencer/todo &&
+ git status --untracked-files=no >actual 2>err &&
+ git cherry-pick --quit &&
+ test_must_be_empty err &&
+ test_i18ncmp expected actual
+'
+
+test_expect_success 'status does not show error if .git/sequencer is a file' '
+ test_when_finished "rm .git/sequencer" &&
+ test_write_lines hello >.git/sequencer &&
+ git status --untracked-files=no 2>err &&
+ test_must_be_empty err
+'
+
test_expect_success 'status showing detached at and from a tag' '
test_commit atag tagging &&
git checkout atag &&
(use "git revert --abort" to cancel the revert operation)
Unmerged paths:
- (use "git reset HEAD <file>..." to unstage)
+ (use "git restore --staged <file>..." to unstage)
(use "git add <file>..." to mark resolution)
-
both modified: to-revert.txt
no changes added to commit (use "git add" and/or "git commit -a")
(use "git revert --abort" to cancel the revert operation)
Changes to be committed:
- (use "git reset HEAD <file>..." to unstage)
-
+ (use "git restore --staged <file>..." to unstage)
modified: to-revert.txt
Untracked files not listed (use -u option to show untracked files)
verify_parents $c0 $c1
'
+test_expect_success 'merge --quit' '
+ git init merge-quit &&
+ (
+ cd merge-quit &&
+ test_commit base &&
+ echo one >>base.t &&
+ git commit -am one &&
+ git branch one &&
+ git checkout base &&
+ echo two >>base.t &&
+ git commit -am two &&
+ test_must_fail git -c rerere.enabled=true merge one &&
+ test_path_is_file .git/MERGE_HEAD &&
+ test_path_is_file .git/MERGE_MODE &&
+ test_path_is_file .git/MERGE_MSG &&
+ git rerere status >rerere.before &&
+ git merge --quit &&
+ test_path_is_missing .git/MERGE_HEAD &&
+ test_path_is_missing .git/MERGE_MODE &&
+ test_path_is_missing .git/MERGE_MSG &&
+ git rerere status >rerere.after &&
+ test_must_be_empty rerere.after &&
+ ! test_cmp rerere.after rerere.before
+ )
+'
+
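In everyday terms, a sketch of the distinction the test above relies on (illustrative alternatives, not a transcript of the test):

----
# After a conflicted merge, two different ways out:
git merge --abort   # restore the pre-merge index and working tree
git merge --quit    # forget MERGE_HEAD/MERGE_MODE/MERGE_MSG (and, as the
                    # test checks, any rerere state recorded for the
                    # conflict), but keep the index and working tree as-is
----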
+test_expect_success 'merge suggests matching remote refname' '
+ git commit --allow-empty -m not-local &&
+ git update-ref refs/remotes/origin/not-local HEAD &&
+ git reset --hard HEAD^ &&
+
+ # This is white-box testing hackery; we happen to know
+ # that reading packed refs is more picky about the memory
+ # ownership of strings we pass to for_each_ref() callbacks.
+ git pack-refs --all --prune &&
+
+ test_must_fail git merge not-local 2>stderr &&
+ grep origin/not-local stderr
+'
+
+test_expect_success 'suggested names are not ambiguous' '
+ git update-ref refs/heads/origin/not-local HEAD &&
+ test_must_fail git merge not-local 2>stderr &&
+ grep remotes/origin/not-local stderr
+'
+
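The user-facing behavior these two tests pin down, sketched with a placeholder branch name: merging a name that exists only as a remote-tracking ref fails, but the error output points at the matching `origin/...` ref, and the suggestion stays unambiguous when a local branch shares the short name.

----
# "topic" exists only as refs/remotes/origin/topic in this sketch; the
# failed merge names the remote-tracking ref in its error message.
git merge topic 2>&1 | grep "origin/topic"
----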
test_done
git checkout -b test$test_count branch1 &&
git submodule update -N &&
test_must_fail git merge master &&
- ( yes "" | git mergetool both ) &&
- ( yes "" | git mergetool file1 file1 ) &&
- ( yes "" | git mergetool file2 "spaced name" ) &&
- ( yes "" | git mergetool subdir/file3 ) &&
- ( yes "d" | git mergetool file11 ) &&
- ( yes "d" | git mergetool file12 ) &&
- ( yes "l" | git mergetool submod ) &&
- test "$(cat file1)" = "master updated" &&
- test "$(cat file2)" = "master new" &&
- test "$(cat subdir/file3)" = "master new sub" &&
- test "$(cat submod/bar)" = "branch1 submodule" &&
+ yes "" | git mergetool both &&
+ yes "" | git mergetool file1 file1 &&
+ yes "" | git mergetool file2 "spaced name" &&
+ yes "" | git mergetool subdir/file3 &&
+ yes "d" | git mergetool file11 &&
+ yes "d" | git mergetool file12 &&
+ yes "l" | git mergetool submod &&
+ echo "master updated" >expect &&
+ test_cmp expect file1 &&
+ echo "master new" >expect &&
+ test_cmp expect file2 &&
+ echo "master new sub" >expect &&
+ test_cmp expect subdir/file3 &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect submod/bar &&
git commit -m "branch1 resolved with mergetool"
'
git checkout -b test$test_count branch1 &&
git submodule update -N &&
test_must_fail git merge master &&
- ( yes "" | git mergetool --gui both ) &&
- ( yes "" | git mergetool -g file1 file1 ) &&
- ( yes "" | git mergetool --gui file2 "spaced name" ) &&
- ( yes "" | git mergetool --gui subdir/file3 ) &&
- ( yes "d" | git mergetool --gui file11 ) &&
- ( yes "d" | git mergetool --gui file12 ) &&
- ( yes "l" | git mergetool --gui submod ) &&
- test "$(cat file1)" = "gui master updated" &&
- test "$(cat file2)" = "gui master new" &&
- test "$(cat subdir/file3)" = "gui master new sub" &&
- test "$(cat submod/bar)" = "branch1 submodule" &&
+ yes "" | git mergetool --gui both &&
+ yes "" | git mergetool -g file1 file1 &&
+ yes "" | git mergetool --gui file2 "spaced name" &&
+ yes "" | git mergetool --gui subdir/file3 &&
+ yes "d" | git mergetool --gui file11 &&
+ yes "d" | git mergetool --gui file12 &&
+ yes "l" | git mergetool --gui submod &&
+ echo "gui master updated" >expect &&
+ test_cmp expect file1 &&
+ echo "gui master new" >expect &&
+ test_cmp expect file2 &&
+ echo "gui master new sub" >expect &&
+ test_cmp expect subdir/file3 &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect submod/bar &&
git commit -m "branch1 resolved with mergetool"
'
git checkout -b test$test_count branch1 &&
git submodule update -N &&
test_must_fail git merge master &&
- ( yes "" | git mergetool --gui both ) &&
- ( yes "" | git mergetool -g file1 file1 ) &&
- ( yes "" | git mergetool --gui file2 "spaced name" ) &&
- ( yes "" | git mergetool --gui subdir/file3 ) &&
- ( yes "d" | git mergetool --gui file11 ) &&
- ( yes "d" | git mergetool --gui file12 ) &&
- ( yes "l" | git mergetool --gui submod ) &&
- test "$(cat file1)" = "master updated" &&
- test "$(cat file2)" = "master new" &&
- test "$(cat subdir/file3)" = "master new sub" &&
- test "$(cat submod/bar)" = "branch1 submodule" &&
+ yes "" | git mergetool --gui both &&
+ yes "" | git mergetool -g file1 file1 &&
+ yes "" | git mergetool --gui file2 "spaced name" &&
+ yes "" | git mergetool --gui subdir/file3 &&
+ yes "d" | git mergetool --gui file11 &&
+ yes "d" | git mergetool --gui file12 &&
+ yes "l" | git mergetool --gui submod &&
+ echo "master updated" >expect &&
+ test_cmp expect file1 &&
+ echo "master new" >expect &&
+ test_cmp expect file2 &&
+ echo "master new sub" >expect &&
+ test_cmp expect subdir/file3 &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect submod/bar &&
git commit -m "branch1 resolved with mergetool"
'
test_config core.autocrlf true &&
git checkout -b test$test_count branch1 &&
test_must_fail git merge master &&
- ( yes "" | git mergetool file1 ) &&
- ( yes "" | git mergetool file2 ) &&
- ( yes "" | git mergetool "spaced name" ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "" | git mergetool subdir/file3 ) &&
- ( yes "d" | git mergetool file11 ) &&
- ( yes "d" | git mergetool file12 ) &&
- ( yes "r" | git mergetool submod ) &&
+ yes "" | git mergetool file1 &&
+ yes "" | git mergetool file2 &&
+ yes "" | git mergetool "spaced name" &&
+ yes "" | git mergetool both &&
+ yes "" | git mergetool subdir/file3 &&
+ yes "d" | git mergetool file11 &&
+ yes "d" | git mergetool file12 &&
+ yes "r" | git mergetool submod &&
test "$(printf x | cat file1 -)" = "$(printf "master updated\r\nx")" &&
test "$(printf x | cat file2 -)" = "$(printf "master new\r\nx")" &&
test "$(printf x | cat subdir/file3 -)" = "$(printf "master new sub\r\nx")" &&
git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
git commit -m "branch1 resolved with mergetool - autocrlf"
'
(
cd subdir &&
test_must_fail git merge master &&
- ( yes "" | git mergetool file3 ) &&
- test "$(cat file3)" = "master new sub"
+ yes "" | git mergetool file3 &&
+ echo "master new sub" >expect &&
+ test_cmp expect file3
)
'
(
cd subdir &&
test_must_fail git merge master &&
- ( yes "" | git mergetool file3 ) &&
- ( yes "" | git mergetool ../file1 ) &&
- ( yes "" | git mergetool ../file2 ../spaced\ name ) &&
- ( yes "" | git mergetool ../both ) &&
- ( yes "d" | git mergetool ../file11 ) &&
- ( yes "d" | git mergetool ../file12 ) &&
- ( yes "l" | git mergetool ../submod ) &&
- test "$(cat ../file1)" = "master updated" &&
- test "$(cat ../file2)" = "master new" &&
- test "$(cat ../submod/bar)" = "branch1 submodule" &&
+ yes "" | git mergetool file3 &&
+ yes "" | git mergetool ../file1 &&
+ yes "" | git mergetool ../file2 ../spaced\ name &&
+ yes "" | git mergetool ../both &&
+ yes "d" | git mergetool ../file11 &&
+ yes "d" | git mergetool ../file12 &&
+ yes "l" | git mergetool ../submod &&
+ echo "master updated" >expect &&
+ test_cmp expect ../file1 &&
+ echo "master new" >expect &&
+ test_cmp expect ../file2 &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect ../submod/bar &&
git commit -m "branch1 resolved with mergetool - subdir"
)
'
git submodule update -N &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "d" | git mergetool file11 ) &&
- ( yes "d" | git mergetool file12 ) &&
- ( yes "l" | git mergetool submod ) &&
+ yes "d" | git mergetool file11 &&
+ yes "d" | git mergetool file12 &&
+ yes "l" | git mergetool submod &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging"
'
(
cd subdir &&
test_must_fail git merge master &&
- ( yes "r" | git mergetool ../submod ) &&
- ( yes "d" "d" | git mergetool --no-prompt ) &&
- test "$(cat ../file1)" = "master updated" &&
- test "$(cat ../file2)" = "master new" &&
- test "$(cat file3)" = "master new sub" &&
+ yes "r" | git mergetool ../submod &&
+ yes "d" "d" | git mergetool --no-prompt &&
+ echo "master updated" >expect &&
+ test_cmp expect ../file1 &&
+ echo "master new" >expect &&
+ test_cmp expect ../file2 &&
+ echo "master new sub" >expect &&
+ test_cmp expect file3 &&
( cd .. && git submodule update -N ) &&
- test "$(cat ../submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect ../submod/bar &&
git commit -m "branch2 resolved by mergetool from subdir"
)
'
(
cd subdir &&
test_must_fail git merge master &&
- ( yes "r" | git mergetool ../submod ) &&
- ( yes "d" "d" | git mergetool --no-prompt ) &&
- test "$(cat ../file1)" = "master updated" &&
- test "$(cat ../file2)" = "master new" &&
- test "$(cat file3)" = "master new sub" &&
+ yes "r" | git mergetool ../submod &&
+ yes "d" "d" | git mergetool --no-prompt &&
+ echo "master updated" >expect &&
+ test_cmp expect ../file1 &&
+ echo "master new" >expect &&
+ test_cmp expect ../file2 &&
+ echo "master new sub" >expect &&
+ test_cmp expect file3 &&
( cd .. && git submodule update -N ) &&
- test "$(cat ../submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect ../submod/bar &&
git commit -m "branch2 resolved by mergetool from subdir"
)
'
git checkout -b test$test_count branch1 &&
git submodule update -N &&
test_must_fail git merge master &&
- ( yes "l" | git mergetool --no-prompt submod ) &&
- ( yes "d" "d" | git mergetool --no-prompt ) &&
+ yes "l" | git mergetool --no-prompt submod &&
+ yes "d" "d" | git mergetool --no-prompt &&
git submodule update -N &&
output="$(yes "n" | git mergetool --no-prompt)" &&
test "$output" = "No files need merging"
git submodule update -N &&
test_must_fail git merge master &&
-	( yes "" | git mergetool subdir ) &&
-	test "$(cat subdir/file3)" = "master new sub"
+	yes "" | git mergetool subdir &&
+	echo "master new sub" >expect &&
+	test_cmp expect subdir/file3
'
test_expect_success 'mergetool delete/delete conflict' '
git checkout -b test$test_count.a test$test_count &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "r" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "r" | git mergetool submod &&
rmdir submod && mv submod-movedaside submod &&
- test "$(cat submod/bar)" = "branch1 submodule" &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect submod/bar &&
git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping module" &&
git submodule update -N &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "l" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "l" | git mergetool submod &&
test ! -e submod &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "r" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "r" | git mergetool submod &&
test ! -e submod &&
test -d submod.orig &&
git submodule update -N &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "l" | git mergetool submod ) &&
- test "$(cat submod/bar)" = "master submodule" &&
- git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "l" | git mergetool submod &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
+ git submodule update -N &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping module"
git checkout -b test$test_count.a branch1 &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "r" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "r" | git mergetool submod &&
rmdir submod && mv submod-movedaside submod &&
- test "$(cat submod/bar)" = "branch1 submodule" &&
+ echo "branch1 submodule" >expect &&
+ test_cmp expect submod/bar &&
git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping module" &&
git checkout -b test$test_count.b test$test_count &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "l" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "l" | git mergetool submod &&
git submodule update -N &&
- test "$(cat submod)" = "not a submodule" &&
+ echo "not a submodule" >expect &&
+ test_cmp expect submod &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping file" &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "r" | git mergetool submod ) &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "r" | git mergetool submod &&
test -d submod.orig &&
git submodule update -N &&
- test "$(cat submod)" = "not a submodule" &&
+ echo "not a submodule" >expect &&
+ test_cmp expect submod &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping file" &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
- ( yes "" | git mergetool file1 file2 spaced\ name subdir/file3 ) &&
- ( yes "" | git mergetool both ) &&
- ( yes "d" | git mergetool file11 file12 ) &&
- ( yes "l" | git mergetool submod ) &&
- test "$(cat submod/bar)" = "master submodule" &&
- git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ yes "" | git mergetool file1 file2 spaced\ name subdir/file3 &&
+ yes "" | git mergetool both &&
+ yes "d" | git mergetool file11 file12 &&
+ yes "l" | git mergetool submod &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
+ git submodule update -N &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
output="$(git mergetool --no-prompt)" &&
test "$output" = "No files need merging" &&
git commit -m "Merge resolved by keeping module"
test_must_fail git merge test$test_count.a &&
(
cd subdir &&
- ( yes "l" | git mergetool subdir_module )
+ yes "l" | git mergetool subdir_module
) &&
- test "$(cat subdir/subdir_module/file15)" = "test$test_count.b" &&
+ echo "test$test_count.b" >expect &&
+ test_cmp expect subdir/subdir_module/file15 &&
git submodule update -N &&
- test "$(cat subdir/subdir_module/file15)" = "test$test_count.b" &&
+ echo "test$test_count.b" >expect &&
+ test_cmp expect subdir/subdir_module/file15 &&
git reset --hard &&
git submodule update -N &&
test_must_fail git merge test$test_count.a &&
- ( yes "r" | git mergetool subdir/subdir_module ) &&
- test "$(cat subdir/subdir_module/file15)" = "test$test_count.b" &&
+ yes "r" | git mergetool subdir/subdir_module &&
+ echo "test$test_count.b" >expect &&
+ test_cmp expect subdir/subdir_module/file15 &&
git submodule update -N &&
- test "$(cat subdir/subdir_module/file15)" = "test$test_count.a" &&
+ echo "test$test_count.a" >expect &&
+ test_cmp expect subdir/subdir_module/file15 &&
git commit -m "branch1 resolved with mergetool"
'
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
- ( yes "l" | git mergetool submod ) &&
- test "$(cat submod/file16)" = "not a submodule" &&
+ yes "l" | git mergetool submod &&
+ echo "not a submodule" >expect &&
+ test_cmp expect submod/file16 &&
rm -rf submod.orig &&
git reset --hard &&
test_must_fail git merge master &&
test -n "$(git ls-files -u)" &&
test ! -e submod.orig &&
- ( yes "r" | git mergetool submod ) &&
+ yes "r" | git mergetool submod &&
test -d submod.orig &&
- test "$(cat submod.orig/file16)" = "not a submodule" &&
+ echo "not a submodule" >expect &&
+ test_cmp expect submod.orig/file16 &&
rm -r submod.orig &&
mv submod-movedaside/.git submod &&
( cd submod && git clean -f && git reset --hard ) &&
git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
git reset --hard &&
rm -rf submod-movedaside &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
- ( yes "l" | git mergetool submod ) &&
+ yes "l" | git mergetool submod &&
git submodule update -N &&
- test "$(cat submod/bar)" = "master submodule" &&
+ echo "master submodule" >expect &&
+ test_cmp expect submod/bar &&
git reset --hard &&
git submodule update -N &&
test_must_fail git merge test$test_count &&
test -n "$(git ls-files -u)" &&
test ! -e submod.orig &&
- ( yes "r" | git mergetool submod ) &&
- test "$(cat submod/file16)" = "not a submodule" &&
+ yes "r" | git mergetool submod &&
+ echo "not a submodule" >expect &&
+ test_cmp expect submod/file16 &&
git reset --hard master &&
( cd submod && git clean -f && git reset --hard ) &&
test -z "$bitmap"
'
+test_expect_success 'no bitmaps created if .keep files present' '
+ pack=$(ls bare.git/objects/pack/*.pack) &&
+ test_path_is_file "$pack" &&
+ keep=${pack%.pack}.keep &&
+ test_when_finished "rm -f \"\$keep\"" &&
+ >"$keep" &&
+ git -C bare.git repack -ad 2>stderr &&
+ test_must_be_empty stderr &&
+ find bare.git/objects/pack/ -type f -name "*.bitmap" >actual &&
+ test_must_be_empty actual
+'
+
+test_expect_success 'auto-bitmaps do not complain if unavailable' '
+ test_config -C bare.git pack.packSizeLimit 1M &&
+ blob=$(test-tool genrandom big $((1024*1024)) |
+ git -C bare.git hash-object -w --stdin) &&
+ git -C bare.git update-ref refs/tags/big $blob &&
+ git -C bare.git repack -ad 2>stderr &&
+ test_must_be_empty stderr &&
+ find bare.git/objects/pack -type f -name "*.bitmap" >actual &&
+ test_must_be_empty actual
+'
+
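Some hedged context for the two tests above: bare repositories write a pack bitmap during `repack -ad` by default, but a bitmap only makes sense when everything lands in a single pack; a `.keep` file or a `pack.packSizeLimit` split rules that out, and the automatic path is expected to skip the bitmap silently.

----
# Automatic case: a bitmap is written only if repacking results in a
# single pack; otherwise it is skipped without a warning.
git -C bare.git repack -ad
# Explicit request (-b / --write-bitmap-index) still asks for a bitmap.
git -C bare.git repack -adb
----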
test_done
test_cmp expected actual
'
- test_expect_success !PCRE "grep $L with grep.patterntype=perl errors without PCRE" '
+ test_expect_success !FAIL_PREREQS,!PCRE "grep $L with grep.patterntype=perl errors without PCRE" '
test_must_fail git -c grep.patterntype=perl grep "foo.*bar"
'
test_cmp expected actual
'
-test_expect_success !PCRE 'grep --perl-regexp pattern errors without PCRE' '
+test_expect_success !FAIL_PREREQS,!PCRE 'grep --perl-regexp pattern errors without PCRE' '
test_must_fail git grep --perl-regexp "foo.*bar"
'
'
-test_expect_success !PCRE 'grep -P pattern errors without PCRE' '
+test_expect_success !FAIL_PREREQS,!PCRE 'grep -P pattern errors without PCRE' '
test_must_fail git grep -P "foo.*bar"
'
echo "(3|4)" >b/b &&
git add a b &&
git commit -m "add a and b" &&
+ test_tick &&
git init submodule &&
echo "(1|2)d(3|4)" >submodule/a &&
git -C submodule add a &&
git -C submodule commit -m "add a" &&
git submodule add ./submodule &&
- git commit -m "added submodule"
+ git commit -m "added submodule" &&
+ test_tick
'
test_expect_success 'grep correctly finds patterns in a submodule' '
echo "(1|2)d(3|4)" >submodule/sub/a &&
git -C submodule/sub add a &&
git -C submodule/sub commit -m "add a" &&
+ test_tick &&
git -C submodule submodule add ./sub &&
git -C submodule add sub &&
git -C submodule commit -m "added sub" &&
+ test_tick &&
git add submodule &&
git commit -m "updated submodule" &&
+ test_tick &&
cat >expect <<-\EOF &&
a:(1|2)d(3|4)
echo "(1|2)d(3|4)" >"parent/fi:le" &&
git -C parent add "fi:le" &&
git -C parent commit -m "add fi:le" &&
+ test_tick &&
git init "su:b" &&
test_when_finished "rm -rf su:b" &&
echo "(1|2)d(3|4)" >"su:b/fi:le" &&
git -C "su:b" add "fi:le" &&
git -C "su:b" commit -m "add fi:le" &&
+ test_tick &&
git -C parent submodule add "../su:b" "su:b" &&
git -C parent commit -m "add submodule" &&
+ test_tick &&
cat >expect <<-\EOF &&
fi:le:(1|2)d(3|4)
echo "(1|2)d(3|4)" >parent/file &&
git -C parent add file &&
git -C parent commit -m "add file" &&
+ test_tick &&
git init sub &&
test_when_finished "rm -rf sub" &&
echo "(1|2)d(3|4)" >sub/file &&
git -C sub add file &&
git -C sub commit -m "add file" &&
+ test_tick &&
git -C parent submodule add ../sub dir/sub &&
git -C parent commit -m "add submodule" &&
+ test_tick &&
cat >expect <<-\EOF &&
dir/sub/file:(1|2)d(3|4)
git -C parent mv dir/sub sub-moved &&
git -C parent commit -m "moved submodule" &&
+ test_tick &&
cat >expect <<-\EOF &&
file:(1|2)d(3|4)
echo "(1|2)d(3|4)" >sub/file &&
git -C sub add file &&
git -C sub commit -m "add file" &&
+ test_tick &&
git init parent &&
echo "(1|2)d(3|4)" >parent/file &&
git -C parent add src/file2 &&
git -C parent submodule add ../sub &&
git -C parent commit -m "add files and submodule" &&
+ test_tick &&
# From top works
cat >expect <<-\EOF &&
echo "(1|2)d(3|4)" >sub/file &&
git -C sub add file &&
git -C sub commit -m "add file" &&
+ test_tick &&
git init parent &&
mkdir parent/src &&
git -C parent submodule add ../sub src/sub &&
git -C parent submodule add ../sub sub &&
git -C parent commit -m "add files and submodules" &&
+ test_tick &&
# Verify grep from root works
cat >expect <<-\EOF &&
grep "A U Thor" actual
'
+# Tests the splitting and merging of blame entries in blame_coalesce().
+# The output of blame is the same, regardless of whether blame_coalesce() runs
+# or not, so we'd likely only notice a problem if blame crashes or assigns
+# blame to the "splitting" commit ('SPLIT' below).
+test_expect_success 'blame coalesce' '
+ cat >giraffe <<-\EOF &&
+ ABC
+ DEF
+ EOF
+ git add giraffe &&
+ git commit -m "original file" &&
+ oid=$(git rev-parse HEAD) &&
+
+ cat >giraffe <<-\EOF &&
+ ABC
+ SPLIT
+ DEF
+ EOF
+ git add giraffe &&
+ git commit -m "interior SPLIT line" &&
+
+ cat >giraffe <<-\EOF &&
+ ABC
+ DEF
+ EOF
+ git add giraffe &&
+ git commit -m "same contents as original" &&
+
+ cat >expect <<-EOF &&
+ $oid 1) ABC
+ $oid 2) DEF
+ EOF
+ git -c core.abbrev=40 blame -s giraffe >actual &&
+ test_cmp expect actual
+'
+
test_done
--- /dev/null
+#!/bin/sh
+
+test_description='ignore revisions when blaming'
+. ./test-lib.sh
+
+# Creates:
+# A--B--X
+# A added line 1 and B added line 2. X makes changes to those lines. Sanity
+# check that X is blamed for both lines.
+test_expect_success setup '
+ test_commit A file line1 &&
+
+ echo line2 >>file &&
+ git add file &&
+ test_tick &&
+ git commit -m B &&
+ git tag B &&
+
+ test_write_lines line-one line-two >file &&
+ git add file &&
+ test_tick &&
+ git commit -m X &&
+ git tag X &&
+
+ git blame --line-porcelain file >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse X >expect &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 2" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse X >expect &&
+ test_cmp expect actual
+ '
+
+# Ignore X, make sure A is blamed for line 1 and B for line 2.
+test_expect_success ignore_rev_changing_lines '
+ git blame --line-porcelain --ignore-rev X file >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse A >expect &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 2" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse B >expect &&
+ test_cmp expect actual
+ '
+
+# For ignored revs that have added 'unblamable' lines, attribute those to the
+# ignored commit.
+# A--B--X--Y
+# Where Y changes lines 1 and 2, and adds lines 3 and 4. The added lines ought
+# to have nothing in common with "line-one" or "line-two", to keep any
+# heuristics from matching them with any lines in the parent.
+test_expect_success ignore_rev_adding_unblamable_lines '
+ test_write_lines line-one-change line-two-changed y3 y4 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m Y &&
+ git tag Y &&
+
+ git rev-parse Y >expect &&
+ git blame --line-porcelain file --ignore-rev Y >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 3" blame_raw | sed -e "s/ .*//" >actual &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 4" blame_raw | sed -e "s/ .*//" >actual &&
+ test_cmp expect actual
+ '
+
+# Ignore X and Y, both in separate files. Lines 1 == A, 2 == B.
+test_expect_success ignore_revs_from_files '
+ git rev-parse X >ignore_x &&
+ git rev-parse Y >ignore_y &&
+ git blame --line-porcelain file --ignore-revs-file ignore_x --ignore-revs-file ignore_y >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse A >expect &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 2" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse B >expect &&
+ test_cmp expect actual
+ '
+
+# Ignore X from the config option, Y from a file.
+test_expect_success ignore_revs_from_configs_and_files '
+ git config --add blame.ignoreRevsFile ignore_x &&
+ git blame --line-porcelain file --ignore-revs-file ignore_y >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse A >expect &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 2" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse B >expect &&
+ test_cmp expect actual
+ '
+
+# Override blame.ignoreRevsFile (ignore_x) with an empty string. X should be
+# blamed now for lines 1 and 2, since we are no longer ignoring X.
+test_expect_success override_ignore_revs_file '
+ git blame --line-porcelain file --ignore-revs-file "" --ignore-revs-file ignore_y >blame_raw &&
+ git rev-parse X >expect &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 2" blame_raw | sed -e "s/ .*//" >actual &&
+ test_cmp expect actual
+ '
+test_expect_success bad_files_and_revs '
+ test_must_fail git blame file --ignore-rev NOREV 2>err &&
+ test_i18ngrep "cannot find revision NOREV to ignore" err &&
+
+ test_must_fail git blame file --ignore-revs-file NOFILE 2>err &&
+ test_i18ngrep "could not open.*: NOFILE" err &&
+
+ echo NOREV >ignore_norev &&
+ test_must_fail git blame file --ignore-revs-file ignore_norev 2>err &&
+ test_i18ngrep "invalid object name: NOREV" err
+ '
+
+# For ignored revs that have added 'unblamable' lines, mark those lines with a
+# '*'
+# A--B--X--Y
+# Lines 3 and 4 are from Y and unblamable. This was set up in
+# ignore_rev_adding_unblamable_lines.
+test_expect_success mark_unblamable_lines '
+ git config --add blame.markUnblamableLines true &&
+
+ git blame --ignore-rev Y file >blame_raw &&
+ echo "*" >expect &&
+
+ sed -n "3p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual &&
+
+ sed -n "4p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual
+ '
+
+# Commit Z will touch the first two lines. Y touched all four.
+# A--B--X--Y--Z
+# The blame output when ignoring Z should be:
+# ?Y ... 1)
+# ?Y ... 2)
+# Y ... 3)
+# Y ... 4)
+# We're checking only the first character
+test_expect_success mark_ignored_lines '
+ git config --add blame.markIgnoredLines true &&
+
+ test_write_lines line-one-Z line-two-Z y3 y4 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m Z &&
+ git tag Z &&
+
+ git blame --ignore-rev Z file >blame_raw &&
+ echo "?" >expect &&
+
+ sed -n "1p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual &&
+
+ sed -n "2p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual &&
+
+ sed -n "3p" blame_raw | cut -c1 >actual &&
+ ! test_cmp expect actual &&
+
+ sed -n "4p" blame_raw | cut -c1 >actual &&
+ ! test_cmp expect actual
+ '
+
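Putting the two marker options together, a small sketch (`<noisy-commit>` and `file` are placeholders):

----
# '?' prefixes lines whose blame was only guessed because their true
# commit is ignored; '*' prefixes lines that could not be attributed at all.
git config blame.markIgnoredLines true
git config blame.markUnblamableLines true
git blame --ignore-rev <noisy-commit> file
----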
+# For ignored revs that added 'unblamable' lines and more recent commits changed
+# the blamable lines, mark the unblamable lines with a
+# '*'
+# A--B--X--Y--Z
+# Lines 3 and 4 are from Y and unblamable, as set up in
+# ignore_rev_adding_unblamable_lines. Z changed lines 1 and 2.
+test_expect_success mark_unblamable_lines_intermediate '
+ git config --add blame.markUnblamableLines true &&
+
+ git blame --ignore-rev Y file >blame_raw 2>stderr &&
+ echo "*" >expect &&
+
+ sed -n "3p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual &&
+
+ sed -n "4p" blame_raw | cut -c1 >actual &&
+ test_cmp expect actual
+ '
+
+# The heuristic called by guess_line_blames() tries to find the size of a
+# blame_entry 'e' in the parent's address space. Those calculations need to
+# check for negative or zero values for when a blame entry is completely outside
+# the window of the parent's version of a file.
+#
+# This happens when one commit adds several lines (commit B below). A later
+# commit (C) changes one line in the middle of B's change. Commit C gets blamed
+# for its change, and that breaks up B's change into multiple blame entries.
+# When processing B, one of the blame_entries is outside A's window (which was
+# zero - it had no lines added on its side of the diff).
+#
+# A--B--C, ignore B to test the ignore heuristic's boundary checks.
+test_expect_success ignored_chunk_negative_parent_size '
+ rm -rf .git/ &&
+ git init &&
+
+ test_write_lines L1 L2 L7 L8 L9 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m A &&
+ git tag A &&
+
+ test_write_lines L1 L2 L3 L4 L5 L6 L7 L8 L9 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m B &&
+ git tag B &&
+
+ test_write_lines L1 L2 L3 L4 xxx L6 L7 L8 L9 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m C &&
+ git tag C &&
+
+ git blame file --ignore-rev B >blame_raw
+ '
+
+# Resetting the repo and creating:
+#
+# A--B--M
+# \ /
+# C-+
+#
+# 'A' creates a file. B changes line 1, and C changes line 9. M merges.
+test_expect_success ignore_merge '
+ rm -rf .git/ &&
+ git init &&
+
+ test_write_lines L1 L2 L3 L4 L5 L6 L7 L8 L9 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m A &&
+ git tag A &&
+
+ test_write_lines BB L2 L3 L4 L5 L6 L7 L8 L9 >file &&
+ git add file &&
+ test_tick &&
+ git commit -m B &&
+ git tag B &&
+
+ git reset --hard A &&
+ test_write_lines L1 L2 L3 L4 L5 L6 L7 L8 CC >file &&
+ git add file &&
+ test_tick &&
+ git commit -m C &&
+ git tag C &&
+
+ test_merge M B &&
+ git blame --line-porcelain file --ignore-rev M >blame_raw &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 1" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse B >expect &&
+ test_cmp expect actual &&
+
+ grep -E "^[0-9a-f]+ [0-9]+ 9" blame_raw | sed -e "s/ .*//" >actual &&
+ git rev-parse C >expect &&
+ test_cmp expect actual
+ '
+
+test_done
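A sketch of how these options are typically combined in day-to-day use; the file name `.git-blame-ignore-revs` is only a common convention, and `<reformat-commit>` is a placeholder:

----
# Record a "noise" commit (e.g. a mass reformatting) once, then have
# every blame ignore it by default.
git rev-parse <reformat-commit> >>.git-blame-ignore-revs
git config blame.ignoreRevsFile .git-blame-ignore-revs
git blame file                       # ignores the listed revisions
git blame --ignore-rev HEAD~3 file   # one-off ignore on the command line
----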
--- /dev/null
+#!/bin/sh
+
+test_description='git blame ignore fuzzy heuristic'
+. ./test-lib.sh
+
+pick_author='s/^[0-9a-f^]* *(\([^ ]*\) .*/\1/'
+
+# Each test is composed of 4 variables:
+# titleN - the test name
+# aN - the initial content
+# bN - the final content
+# expectedN - the line numbers from aN that we expect git blame
+# on bN to identify, or "Final" if bN itself should
+# be identified as the origin of that line.
+
+# We start at test 2 because setup will show as test 1
+title2="Regression test for partially overlapping search ranges"
+cat <<EOF >a2
+1
+2
+3
+abcdef
+5
+6
+7
+ijkl
+9
+10
+11
+pqrs
+13
+14
+15
+wxyz
+17
+18
+19
+EOF
+cat <<EOF >b2
+abcde
+ijk
+pqr
+wxy
+EOF
+cat <<EOF >expected2
+4
+8
+12
+16
+EOF
+
+title3="Combine 3 lines into 2"
+cat <<EOF >a3
+if ((maxgrow==0) ||
+ ( single_line_field && (field->dcols < maxgrow)) ||
+ (!single_line_field && (field->drows < maxgrow)))
+EOF
+cat <<EOF >b3
+if ((maxgrow == 0) || (single_line_field && (field->dcols < maxgrow)) ||
+ (!single_line_field && (field->drows < maxgrow))) {
+EOF
+cat <<EOF >expected3
+2
+3
+EOF
+
+title4="Add curly brackets"
+cat <<EOF >a4
+ if (rows) *rows = field->rows;
+ if (cols) *cols = field->cols;
+ if (frow) *frow = field->frow;
+ if (fcol) *fcol = field->fcol;
+EOF
+cat <<EOF >b4
+ if (rows) {
+ *rows = field->rows;
+ }
+ if (cols) {
+ *cols = field->cols;
+ }
+ if (frow) {
+ *frow = field->frow;
+ }
+ if (fcol) {
+ *fcol = field->fcol;
+ }
+EOF
+cat <<EOF >expected4
+1
+1
+Final
+2
+2
+Final
+3
+3
+Final
+4
+4
+Final
+EOF
+
+
+title5="Combine many lines and change case"
+cat <<EOF >a5
+for(row=0,pBuffer=field->buf;
+ row<height;
+ row++,pBuffer+=width )
+{
+ if ((len = (int)( After_End_Of_Data( pBuffer, width ) - pBuffer )) > 0)
+ {
+ wmove( win, row, 0 );
+ waddnstr( win, pBuffer, len );
+EOF
+cat <<EOF >b5
+for (Row = 0, PBuffer = field->buf; Row < Height; Row++, PBuffer += Width) {
+ if ((Len = (int)(afterEndOfData(PBuffer, Width) - PBuffer)) > 0) {
+ wmove(win, Row, 0);
+ waddnstr(win, PBuffer, Len);
+EOF
+cat <<EOF >expected5
+1
+5
+7
+8
+EOF
+
+title6="Rename and combine lines"
+cat <<EOF >a6
+bool need_visual_update = ((form != (FORM *)0) &&
+ (form->status & _POSTED) &&
+ (form->current==field));
+
+if (need_visual_update)
+ Synchronize_Buffer(form);
+
+if (single_line_field)
+{
+ growth = field->cols * amount;
+ if (field->maxgrow)
+ growth = Minimum(field->maxgrow - field->dcols,growth);
+ field->dcols += growth;
+ if (field->dcols == field->maxgrow)
+EOF
+cat <<EOF >b6
+bool NeedVisualUpdate = ((Form != (FORM *)0) && (Form->status & _POSTED) &&
+ (Form->current == field));
+
+if (NeedVisualUpdate) {
+ synchronizeBuffer(Form);
+}
+
+if (SingleLineField) {
+ Growth = field->cols * amount;
+ if (field->maxgrow) {
+ Growth = Minimum(field->maxgrow - field->dcols, Growth);
+ }
+ field->dcols += Growth;
+ if (field->dcols == field->maxgrow) {
+EOF
+cat <<EOF >expected6
+1
+3
+4
+5
+6
+Final
+7
+8
+10
+11
+12
+Final
+13
+14
+EOF
+
+# Both lines match identically so position must be used to tie-break.
+title7="Same line twice"
+cat <<EOF >a7
+abc
+abc
+EOF
+cat <<EOF >b7
+abcd
+abcd
+EOF
+cat <<EOF >expected7
+1
+2
+EOF
+
+title8="Enforce line order"
+cat <<EOF >a8
+abcdef
+ghijkl
+ab
+EOF
+cat <<EOF >b8
+ghijk
+abcd
+EOF
+cat <<EOF >expected8
+2
+3
+EOF
+
+title9="Expand lines and rename variables"
+cat <<EOF >a9
+int myFunction(int ArgumentOne, Thing *ArgTwo, Blah XuglyBug) {
+ Squiggle FabulousResult = squargle(ArgumentOne, *ArgTwo,
+ XuglyBug) + EwwwGlobalWithAReallyLongNameYepTooLong;
+ return FabulousResult * 42;
+}
+EOF
+cat <<EOF >b9
+int myFunction(int argument_one, Thing *arg_asdfgh,
+ Blah xugly_bug) {
+ Squiggle fabulous_result = squargle(argument_one,
+ *arg_asdfgh, xugly_bug)
+ + g_ewww_global_with_a_really_long_name_yep_too_long;
+ return fabulous_result * 42;
+}
+EOF
+cat <<EOF >expected9
+1
+1
+2
+3
+3
+4
+5
+EOF
+
+title10="Two close matches versus one less close match"
+cat <<EOF >a10
+abcdef
+abcdef
+ghijkl
+EOF
+cat <<EOF >b10
+gh
+abcdefx
+EOF
+cat <<EOF >expected10
+Final
+2
+EOF
+
+# The first line of b matches best with the last line of a, but the overall
+# match is better if we match it with the first line of a.
+title11="Piggy in the middle"
+cat <<EOF >a11
+abcdefg
+ijklmn
+abcdefgh
+EOF
+cat <<EOF >b11
+abcdefghx
+ijklm
+EOF
+cat <<EOF >expected11
+1
+2
+EOF
+
+title12="No trailing newline"
+printf "abc\ndef" >a12
+printf "abx\nstu" >b12
+cat <<EOF >expected12
+1
+Final
+EOF
+
+title13="Reorder includes"
+cat <<EOF >a13
+#include "c.h"
+#include "b.h"
+#include "a.h"
+#include "e.h"
+#include "d.h"
+EOF
+cat <<EOF >b13
+#include "a.h"
+#include "b.h"
+#include "c.h"
+#include "d.h"
+#include "e.h"
+EOF
+cat <<EOF >expected13
+3
+2
+1
+5
+4
+EOF
+
+last_test=13
+
+test_expect_success setup '
+ for i in $(test_seq 2 $last_test)
+ do
+ # Append each line in a separate commit to make it easy to
+ # check which original line the blame output relates to.
+
+ line_count=0 &&
+ while IFS= read line
+ do
+ line_count=$((line_count+1)) &&
+ echo "$line" >>"$i" &&
+ git add "$i" &&
+ test_tick &&
+ GIT_AUTHOR_NAME="$line_count" git commit -m "$line_count"
+ done <"a$i"
+ done &&
+
+ for i in $(test_seq 2 $last_test)
+ do
+ # Overwrite the files with the final content.
+ cp b$i $i &&
+ git add $i
+ done &&
+ test_tick &&
+
+ # Commit the final content all at once so it can all be
+ # referred to with the same commit ID.
+ GIT_AUTHOR_NAME=Final git commit -m Final &&
+
+ IGNOREME=$(git rev-parse HEAD)
+'
+
+for i in $(test_seq 2 $last_test); do
+ eval title="\$title$i"
+ test_expect_success "$title" \
+ "git blame -M9 --ignore-rev $IGNOREME $i >output &&
+ sed -e \"$pick_author\" output >actual &&
+ test_cmp expected$i actual"
+done
+
+# This invoked a null pointer dereference when the chunk callback was called
+# with a zero length parent chunk and there were no more suspects.
+test_expect_success 'Diff chunks with no suspects' '
+ test_write_lines xy1 A B C xy1 >file &&
+ git add file &&
+ test_tick &&
+ GIT_AUTHOR_NAME=1 git commit -m 1 &&
+
+ test_write_lines xy2 A B xy2 C xy2 >file &&
+ git add file &&
+ test_tick &&
+ GIT_AUTHOR_NAME=2 git commit -m 2 &&
+ REV_2=$(git rev-parse HEAD) &&
+
+ test_write_lines xy3 A >file &&
+ git add file &&
+ test_tick &&
+ GIT_AUTHOR_NAME=3 git commit -m 3 &&
+ REV_3=$(git rev-parse HEAD) &&
+
+ test_write_lines 1 1 >expected &&
+
+ git blame --ignore-rev $REV_2 --ignore-rev $REV_3 file >output &&
+ sed -e "$pick_author" output >actual &&
+
+ test_cmp expected actual
+ '
+
+test_expect_success 'position matching' '
+ test_write_lines abc def >file2 &&
+ git add file2 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=1 git commit -m 1 &&
+
+ test_write_lines abc def abc def >file2 &&
+ git add file2 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=2 git commit -m 2 &&
+
+ test_write_lines abcx defx abcx defx >file2 &&
+ git add file2 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=3 git commit -m 3 &&
+ REV_3=$(git rev-parse HEAD) &&
+
+ test_write_lines abcy defy abcx defx >file2 &&
+ git add file2 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=4 git commit -m 4 &&
+ REV_4=$(git rev-parse HEAD) &&
+
+ test_write_lines 1 1 2 2 >expected &&
+
+ git blame --ignore-rev $REV_3 --ignore-rev $REV_4 file2 >output &&
+ sed -e "$pick_author" output >actual &&
+
+ test_cmp expected actual
+ '
+
+# This fails if each blame entry is processed independently instead of
+# processing each diff change in full.
+test_expect_success 'preserve order' '
+ test_write_lines bcde >file3 &&
+ git add file3 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=1 git commit -m 1 &&
+
+ test_write_lines bcde fghij >file3 &&
+ git add file3 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=2 git commit -m 2 &&
+
+ test_write_lines bcde fghij abcd >file3 &&
+ git add file3 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=3 git commit -m 3 &&
+
+ test_write_lines abcdx fghijx bcdex >file3 &&
+ git add file3 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=4 git commit -m 4 &&
+ REV_4=$(git rev-parse HEAD) &&
+
+ test_write_lines abcdx fghijy bcdex >file3 &&
+ git add file3 &&
+ test_tick &&
+ GIT_AUTHOR_NAME=5 git commit -m 5 &&
+ REV_5=$(git rev-parse HEAD) &&
+
+ test_write_lines 1 2 3 >expected &&
+
+ git blame --ignore-rev $REV_4 --ignore-rev $REV_5 file3 >output &&
+ sed -e "$pick_author" output >actual &&
+
+ test_cmp expected actual
+ '
+
+test_done
--from="Example <nobody@example.com>" \
--to=nobody@example.com \
--no-thread \
- $patches $patches >stdout &&
+ $patches >stdout &&
! grep "In-Reply-To: " stdout
'
git send-email \
--dry-run \
--from="Example <nobody@example.com>" \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "To: Somebody <somebody@ex.com>" stdout
'
+test_expect_success $PREREQ 'setup sendemail.identity' '
+ git config --replace-all sendemail.to "default@example.com" &&
+ git config --replace-all sendemail.isp.to "isp@example.com" &&
+ git config --replace-all sendemail.cloud.to "cloud@example.com"
+'
+
+test_expect_success $PREREQ 'sendemail.identity: reads the correct identity config' '
+ git -c sendemail.identity=cloud send-email \
+ --dry-run \
+ --from="nobody@example.com" \
+ $patches >stdout &&
+ grep "To: cloud@example.com" stdout
+'
+
+test_expect_success $PREREQ 'sendemail.identity: --identity overrides sendemail.identity' '
+ git -c sendemail.identity=cloud send-email \
+ --identity=isp \
+ --dry-run \
+ --from="nobody@example.com" \
+ $patches >stdout &&
+ grep "To: isp@example.com" stdout
+'
+
+test_expect_success $PREREQ 'sendemail.identity: --no-identity clears previous identity' '
+ git -c sendemail.identity=cloud send-email \
+ --no-identity \
+ --dry-run \
+ --from="nobody@example.com" \
+ $patches >stdout &&
+ grep "To: default@example.com" stdout
+'
+
+test_expect_success $PREREQ 'sendemail.identity: bool identity variable existence overrides' '
+ git -c sendemail.identity=cloud \
+ -c sendemail.xmailer=true \
+ -c sendemail.cloud.xmailer=false \
+ send-email \
+ --dry-run \
+ --from="nobody@example.com" \
+ $patches >stdout &&
+ grep "To: cloud@example.com" stdout &&
+ ! grep "X-Mailer" stdout
+'
+
+test_expect_success $PREREQ 'sendemail.identity: bool variable fallback' '
+ git -c sendemail.identity=cloud \
+ -c sendemail.xmailer=false \
+ send-email \
+ --dry-run \
+ --from="nobody@example.com" \
+ $patches >stdout &&
+ grep "To: cloud@example.com" stdout &&
+ ! grep "X-Mailer" stdout
+'
+
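The identity tests above boil down to plain configuration lookup. A rough sketch of the equivalent user-facing setup, with hypothetical identity names and a hypothetical 0001-example.patch:

----
git config sendemail.to default@example.com
git config sendemail.cloud.to cloud@example.com
git config sendemail.identity cloud

# With the "cloud" identity active this goes to cloud@example.com;
# --identity=isp would switch to sendemail.isp.*, and --no-identity
# falls back to the bare sendemail.* keys.
git send-email --dry-run 0001-example.patch
----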
test_expect_success $PREREQ '--no-to overrides sendemail.to' '
git send-email \
--dry-run \
--from="Example <nobody@example.com>" \
--no-to \
--to=nobody@example.com \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "To: nobody@example.com" stdout &&
! grep "To: Somebody <somebody@ex.com>" stdout
'
--dry-run \
--from="Example <nobody@example.com>" \
--to=nobody@example.com \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "Cc: Somebody <somebody@ex.com>" stdout
'
--no-cc \
--cc=bodies@example.com \
--to=nobody@example.com \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "Cc: bodies@example.com" stdout &&
! grep "Cc: Somebody <somebody@ex.com>" stdout
'
--from="Example <nobody@example.com>" \
--to=nobody@example.com \
--smtp-server relay.example.com \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "RCPT TO:<other@ex.com>" stdout
'
--bcc=bodies@example.com \
--to=nobody@example.com \
--smtp-server relay.example.com \
- $patches $patches >stdout &&
+ $patches >stdout &&
grep "RCPT TO:<bodies@example.com>" stdout &&
! grep "RCPT TO:<other@ex.com>" stdout
'
EOF
'
-test_expect_success $PREREQ 'sendemail.transferencoding=7bit fails on 8bit data' '
+test_expect_success $PREREQ '--transfer-encoding overrides sendemail.transferEncoding' '
clean_fake_sendmail &&
- git config sendemail.transferEncoding 7bit &&
- test_must_fail git send-email \
+ test_must_fail git -c sendemail.transferEncoding=8bit \
+ send-email \
--transfer-encoding=7bit \
--smtp-server="$(pwd)/fake.sendmail" \
email-using-8bit \
test -z "$(ls msgtxt*)"
'
-test_expect_success $PREREQ '--transfer-encoding overrides sendemail.transferEncoding' '
+test_expect_success $PREREQ 'sendemail.transferEncoding via config' '
clean_fake_sendmail &&
- git config sendemail.transferEncoding 8bit &&
- test_must_fail git send-email \
- --transfer-encoding=7bit \
+ test_must_fail git -c sendemail.transferEncoding=7bit \
+ send-email \
--smtp-server="$(pwd)/fake.sendmail" \
email-using-8bit \
2>errors >out &&
test -z "$(ls msgtxt*)"
'
-test_expect_success $PREREQ 'sendemail.transferencoding=8bit' '
+test_expect_success $PREREQ 'sendemail.transferEncoding via cli' '
clean_fake_sendmail &&
- git send-email \
- --transfer-encoding=8bit \
+ test_must_fail git send-email \
+ --transfer-encoding=7bit \
--smtp-server="$(pwd)/fake.sendmail" \
email-using-8bit \
2>errors >out &&
- sed '1,/^$/d' msgtxt1 >actual &&
- sed '1,/^$/d' email-using-8bit >expected &&
- test_cmp expected actual
+ grep "cannot send message as 7bit" errors &&
+ test -z "$(ls msgtxt*)"
'
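The two renamed tests above pin down the precedence being relied on: a `--transfer-encoding` given on the command line wins over `sendemail.transferEncoding`. A small sketch, again with a hypothetical patch file (7bit, 8bit, quoted-printable and base64 are the accepted values):

----
# Repository-wide default...
git config sendemail.transferEncoding 8bit

# ...overridden for a single invocation.
git send-email --dry-run --transfer-encoding=quoted-printable 0001-example.patch
----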
test_expect_success $PREREQ 'setup expect' '
test_must_fail git send-email --dump-aliases --to=janice@example.com -1 refs/heads/accounting
'
+test_expect_success $PREREQ 'aliases and sendemail.identity' '
+ test_must_fail git \
+ -c sendemail.identity=cloud \
+ -c sendemail.aliasesfile=default-aliases \
+ -c sendemail.cloud.aliasesfile=cloud-aliases \
+ send-email -1 2>stderr &&
+ test_i18ngrep "cloud-aliases" stderr
+'
+
test_sendmail_aliases () {
msg="$1" && shift &&
expect="$@" &&
sed -e s/LFs/LLL/ W-input | tr L "\n" | test_must_fail git fast-import
'
+###
+### series X (other new features)
+###
+
+test_expect_success 'X: handling encoding' '
+ test_tick &&
+ cat >input <<-INPUT_END &&
+ commit refs/heads/encoding
+ committer $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL> $GIT_COMMITTER_DATE
+ encoding iso-8859-7
+ data <<COMMIT
+ INPUT_END
+
+ printf "Pi: \360\nCOMMIT\n" >>input &&
+
+ git fast-import <input &&
+ git cat-file -p encoding | grep $(printf "\360") &&
+ git log -1 --format=%B encoding | grep $(printf "\317\200")
+'
+
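The `encoding` command exercised above slots into the commit command group of a fast-import stream, after the committer line and before the message data. A self-contained sketch (repository name and date are arbitrary) that feeds one ISO-8859-7 message and reads it back re-encoded:

----
git init encoded-repo
cd encoded-repo
git fast-import <<STREAM
commit refs/heads/encoding
committer C O Mitter <committer@example.com> 1234567890 +0000
encoding iso-8859-7
data <<MSG
Pi: $(printf '\360')
MSG
STREAM
# git log converts to UTF-8 using the recorded encoding header.
git log -1 --format=%B encoding
----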
test_done
test $MUSS = $(git rev-parse --verify refs/tags/muss)
'
-test_expect_success 'iso-8859-1' '
+test_expect_success 'reencoding iso-8859-7' '
- git config i18n.commitencoding ISO8859-1 &&
- # use author and committer name in ISO-8859-1 to match it.
- . "$TEST_DIRECTORY"/t3901/8859-1.txt &&
+ test_when_finished "git reset --hard HEAD~1" &&
+ test_config i18n.commitencoding iso-8859-7 &&
test_tick &&
echo rosten >file &&
- git commit -s -m den file &&
- git fast-export wer^..wer >iso8859-1.fi &&
- sed "s/wer/i18n/" iso8859-1.fi |
+ git commit -s -F "$TEST_DIRECTORY/t9350/simple-iso-8859-7-commit-message.txt" file &&
+ git fast-export --reencode=yes wer^..wer >iso-8859-7.fi &&
+ sed "s/wer/i18n/" iso-8859-7.fi |
(cd new &&
git fast-import &&
+ # The commit object, if not re-encoded, would be 240 bytes.
+ # Removing the "encoding iso-8859-7\n" header drops 20 bytes.
+ # Re-encoding the Pi character from \xF0 (\360) in iso-8859-7
+ # to \xCF\x80 (\317\200) in UTF-8 adds a byte. Check for
+ # the expected size.
+ test 221 -eq "$(git cat-file -s i18n)" &&
+ # ...and for the expected translation of bytes.
git cat-file commit i18n >actual &&
- grep "Áéí óú" actual)
+ grep $(printf "\317\200") actual &&
+ # Also make sure the commit does not have the "encoding" header
+ ! grep ^encoding actual)
+'
+
+test_expect_success 'aborting on iso-8859-7' '
+ test_when_finished "git reset --hard HEAD~1" &&
+ test_config i18n.commitencoding iso-8859-7 &&
+ echo rosten >file &&
+ git commit -s -F "$TEST_DIRECTORY/t9350/simple-iso-8859-7-commit-message.txt" file &&
+ test_must_fail git fast-export --reencode=abort wer^..wer >iso-8859-7.fi
'
+
+test_expect_success 'preserving iso-8859-7' '
+
+ test_when_finished "git reset --hard HEAD~1" &&
+ test_config i18n.commitencoding iso-8859-7 &&
+ echo rosten >file &&
+ git commit -s -F "$TEST_DIRECTORY/t9350/simple-iso-8859-7-commit-message.txt" file &&
+ git fast-export --reencode=no wer^..wer >iso-8859-7.fi &&
+ sed "s/wer/i18n-no-recoding/" iso-8859-7.fi |
+ (cd new &&
+ git fast-import &&
+ # The commit object, if not re-encoded, is 240 bytes.
+	# Removing the "encoding iso-8859-7\n" header would drop 20
+ # bytes. Re-encoding the Pi character from \xF0 (\360) in
+ # iso-8859-7 to \xCF\x80 (\317\200) in UTF-8 adds a byte.
+ # Check for the expected size...
+ test 240 -eq "$(git cat-file -s i18n-no-recoding)" &&
+ # ...as well as the expected byte.
+ git cat-file commit i18n-no-recoding >actual &&
+ grep $(printf "\360") actual &&
+ # Also make sure the commit has the "encoding" header
+ grep ^encoding actual)
+'
+
+test_expect_success 'encoding preserved if reencoding fails' '
+
+ test_when_finished "git reset --hard HEAD~1" &&
+ test_config i18n.commitencoding iso-8859-7 &&
+ echo rosten >file &&
+ git commit -s -F "$TEST_DIRECTORY/t9350/broken-iso-8859-7-commit-message.txt" file &&
+ git fast-export --reencode=yes wer^..wer >iso-8859-7.fi &&
+ sed "s/wer/i18n-invalid/" iso-8859-7.fi |
+ (cd new &&
+ git fast-import &&
+ git cat-file commit i18n-invalid >actual &&
+ # Make sure the commit still has the encoding header
+ grep ^encoding actual &&
+ # Verify that the commit has the expected size; i.e.
+ # that no bytes were re-encoded to a different encoding.
+ test 252 -eq "$(git cat-file -s i18n-invalid)" &&
+ # ...and check for the original special bytes
+ grep $(printf "\360") actual &&
+ grep $(printf "\377") actual)
+'
+
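For reference, the three behaviours covered by these tests map onto the new `--reencode` switch of fast-export; a rough sketch (output file names are arbitrary):

----
# Re-encode commit messages to UTF-8 while exporting:
git fast-export --reencode=yes --all >utf8.fi

# Keep the original bytes and the "encoding" header untouched:
git fast-export --reencode=no --all >verbatim.fi

# Refuse to export anything that would need re-encoding:
git fast-export --reencode=abort --all >checked.fi
----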
test_expect_success 'import/export-marks' '
git checkout -b marks master &&
test_expect_success 'setup copies' '
- git config --unset i18n.commitencoding &&
git checkout -b copy rein &&
git mv file file3 &&
git commit -m move1 &&
--- /dev/null
+Pi: ð; Invalid: ÿ
\ No newline at end of file
--- /dev/null
+Pi: ð
\ No newline at end of file
)
'
+# Check that excluded files are omitted during import
+test_expect_success 'git p4 clone complex branches with excluded files' '
+ test_when_finished cleanup_git &&
+ test_create_repo "$git" &&
+ (
+ cd "$git" &&
+ git config git-p4.branchList branch1:branch2 &&
+ git config --add git-p4.branchList branch1:branch3 &&
+ git config --add git-p4.branchList branch1:branch4 &&
+ git config --add git-p4.branchList branch1:branch5 &&
+ git config --add git-p4.branchList branch1:branch6 &&
+ git p4 clone --dest=. --detect-branches -//depot/branch1/file2 -//depot/branch2/file2 -//depot/branch3/file2 -//depot/branch4/file2 -//depot/branch5/file2 -//depot/branch6/file2 //depot@all &&
+ git log --all --graph --decorate --stat &&
+ git reset --hard p4/depot/branch1 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_file file3 &&
+ git reset --hard p4/depot/branch2 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_missing file3 &&
+ git reset --hard p4/depot/branch3 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_missing file3 &&
+ git reset --hard p4/depot/branch4 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_file file3 &&
+ git reset --hard p4/depot/branch5 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_file file3 &&
+ git reset --hard p4/depot/branch6 &&
+ test_path_is_file file1 &&
+ test_path_is_missing file2 &&
+ test_path_is_missing file3
+ )
+'
+
# From a report in http://stackoverflow.com/questions/11893688
# where --use-client-spec caused branch prefixes not to be removed;
# every file in git appeared into a subdirectory of the branch name.
)
'
+test_expect_success 'restart p4d (case folding enabled)' '
+ stop_and_cleanup_p4d &&
+ start_p4d -C1
+'
+
+#
+# 1: //depot/main/mf1
+# 2: integrate //depot/main/... -> //depot/branch1/...
+# 3: //depot/main/mf2
+# 4: //depot/BRANCH1/B1f3
+# 5: //depot/branch1/b1f4
+#
+test_expect_success !CASE_INSENSITIVE_FS 'basic p4 branches for case folding' '
+ (
+ cd "$cli" &&
+ mkdir -p main &&
+
+ echo mf1 >main/mf1 &&
+ p4 add main/mf1 &&
+ p4 submit -d "main/mf1" &&
+
+ p4 integrate //depot/main/... //depot/branch1/... &&
+ p4 submit -d "integrate main to branch1" &&
+
+ echo mf2 >main/mf2 &&
+ p4 add main/mf2 &&
+ p4 submit -d "main/mf2" &&
+
+ mkdir BRANCH1 &&
+ echo B1f3 >BRANCH1/B1f3 &&
+ p4 add BRANCH1/B1f3 &&
+ p4 submit -d "BRANCH1/B1f3" &&
+
+ echo b1f4 >branch1/b1f4 &&
+ p4 add branch1/b1f4 &&
+ p4 submit -d "branch1/b1f4"
+ )
+'
+
+# Check that files are properly split across branches when ignorecase is set
+test_expect_success !CASE_INSENSITIVE_FS 'git p4 clone, branchList branch definition, ignorecase' '
+ test_when_finished cleanup_git &&
+ test_create_repo "$git" &&
+ (
+ cd "$git" &&
+ git config git-p4.branchList main:branch1 &&
+ git config --type=bool core.ignoreCase true &&
+ git p4 clone --dest=. --detect-branches //depot@all &&
+
+ git log --all --graph --decorate --stat &&
+
+ git reset --hard p4/master &&
+ test_path_is_file mf1 &&
+ test_path_is_file mf2 &&
+ test_path_is_missing B1f3 &&
+ test_path_is_missing b1f4 &&
+
+ git reset --hard p4/depot/branch1 &&
+ test_path_is_file mf1 &&
+ test_path_is_missing mf2 &&
+ test_path_is_file B1f3 &&
+ test_path_is_file b1f4
+ )
+'
+
+# Check that files are properly split across branches when ignorecase is set, use-client-spec case
+test_expect_success !CASE_INSENSITIVE_FS 'git p4 clone with client-spec, branchList branch definition, ignorecase' '
+ client_view "//depot/... //client/..." &&
+ test_when_finished cleanup_git &&
+ test_create_repo "$git" &&
+ (
+ cd "$git" &&
+ git config git-p4.branchList main:branch1 &&
+ git config --type=bool core.ignoreCase true &&
+ git p4 clone --dest=. --use-client-spec --detect-branches //depot@all &&
+
+ git log --all --graph --decorate --stat &&
+
+ git reset --hard p4/master &&
+ test_path_is_file mf1 &&
+ test_path_is_file mf2 &&
+ test_path_is_missing B1f3 &&
+ test_path_is_missing b1f4 &&
+
+ git reset --hard p4/depot/branch1 &&
+ test_path_is_file mf1 &&
+ test_path_is_missing mf2 &&
+ test_path_is_file B1f3 &&
+ test_path_is_file b1f4
+ )
+'
+
test_done
mkdir -p wanted discard &&
echo wanted >wanted/foo &&
echo discard >discard/foo &&
- p4 add wanted/foo discard/foo &&
+ echo discard_file >discard_file &&
+ echo discard_file_not >discard_file_not &&
+ p4 add wanted/foo discard/foo discard_file discard_file_not &&
p4 submit -d "initial revision"
)
'
(
cd "$git" &&
test_path_is_file wanted/foo &&
- test_path_is_file discard/foo
+ test_path_is_file discard/foo &&
+ test_path_is_file discard_file &&
+ test_path_is_file discard_file_not
)
'
(
cd "$git" &&
test_path_is_file wanted/foo &&
- test_path_is_missing discard/foo
+ test_path_is_missing discard/foo &&
+ test_path_is_file discard_file &&
+ test_path_is_file discard_file_not
+ )
+'
+
+test_expect_success 'clone, excluding single file, no trailing /' '
+ test_when_finished cleanup_git &&
+ git p4 clone -//depot/discard_file --dest="$git" //depot/...@all &&
+ (
+ cd "$git" &&
+ test_path_is_file wanted/foo &&
+ test_path_is_file discard/foo &&
+ test_path_is_missing discard_file &&
+ test_path_is_file discard_file_not
)
'
git p4 clone -//depot/discard/... --dest="$git" //depot/...@all &&
(
cd "$cli" &&
- p4 edit wanted/foo discard/foo &&
+ p4 edit wanted/foo discard/foo discard_file_not &&
date >>wanted/foo &&
date >>discard/foo &&
+ date >>discard_file_not &&
p4 submit -d "updating" &&
cd "$git" &&
git p4 sync -//depot/discard/... &&
test_path_is_file wanted/foo &&
- test_path_is_missing discard/foo
+ test_path_is_missing discard/foo &&
+ test_path_is_file discard_file &&
+ test_path_is_file discard_file_not
+ )
+'
+
+test_expect_success 'clone, then sync with exclude, no trailing /' '
+ test_when_finished cleanup_git &&
+ git p4 clone -//depot/discard/... -//depot/discard_file --dest="$git" //depot/...@all &&
+ (
+ cd "$cli" &&
+ p4 edit wanted/foo discard/foo discard_file_not &&
+ date >>wanted/foo &&
+ date >>discard/foo &&
+ date >>discard_file_not &&
+ p4 submit -d "updating" &&
+
+ cd "$git" &&
+ git p4 sync -//depot/discard/... -//depot/discard_file &&
+ test_path_is_file wanted/foo &&
+ test_path_is_missing discard/foo &&
+ test_path_is_missing discard_file &&
+ test_path_is_file discard_file_not
)
'
'
test_expect_success 'sourcing the completion script clears cached merge strategies' '
- GIT_TEST_GETTEXT_POISON= &&
+ GIT_TEST_GETTEXT_POISON=false &&
__git_compute_merge_strategies &&
verbose test -n "$__git_merge_strategies" &&
. "$GIT_BUILD_DIR/contrib/completion/git-completion.bash" &&
test_expect_success 'prompt - cherry-pick' '
printf " (master|CHERRY-PICKING)" >expected &&
- test_must_fail git cherry-pick b1 &&
- test_when_finished "git reset --hard" &&
+ test_must_fail git cherry-pick b1 b1^ &&
+ test_when_finished "git cherry-pick --abort" &&
+ __git_ps1 >"$actual" &&
+ test_cmp expected "$actual" &&
+ git reset --merge &&
+ test_must_fail git rev-parse CHERRY_PICK_HEAD &&
+ __git_ps1 >"$actual" &&
+ test_cmp expected "$actual"
+'
+
+test_expect_success 'prompt - revert' '
+ printf " (master|REVERTING)" >expected &&
+ test_must_fail git revert b1^ b1 &&
+ test_when_finished "git revert --abort" &&
+ __git_ps1 >"$actual" &&
+ test_cmp expected "$actual" &&
+ git reset --merge &&
+ test_must_fail git rev-parse REVERT_HEAD &&
__git_ps1 >"$actual" &&
test_cmp expected "$actual"
'
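These prompt states come from contrib/completion/git-prompt.sh. A minimal interactive setup, assuming `$GIT_BUILD_DIR` points at a git.git checkout as it does in the tests above:

----
. "$GIT_BUILD_DIR/contrib/completion/git-prompt.sh"
PS1='\w$(__git_ps1 " (%s)")\$ '
----

The `%s` placeholder is where the branch name and state suffixes such as `|REVERTING` or `|CHERRY-PICKING` appear.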
git tag "$1"
}
+# Efficiently create <nr> commits, each with a unique number (from 1 to <nr>
+# by default) in the commit message.
+#
+# Usage: test_commit_bulk [options] <nr>
+# -C <dir>:
+# Run all git commands in directory <dir>
+#   --ref=<ref>:
+# ref on which to create commits (default: HEAD)
+# --start=<n>:
+# number commit messages from <n> (default: 1)
+# --message=<msg>:
+#	use <msg> as the commit message (default: "commit %s")
+# --filename=<fn>:
+# modify <fn> in each commit (default: %s.t)
+# --contents=<string>:
+# place <string> in each file (default: "content %s")
+# --id=<string>:
+# shorthand to use <string> and %s in message, filename, and contents
+#
+# The message, filename, and contents strings are evaluated by printf, with the
+# first "%s" replaced by the current commit number. So you can do:
+#
+# test_commit_bulk --filename=file --contents="modification %s"
+#
+# to have every commit touch the same file, but with unique content.
+#
+test_commit_bulk () {
+ tmpfile=.bulk-commit.input
+ indir=.
+ ref=HEAD
+ n=1
+ message='commit %s'
+ filename='%s.t'
+ contents='content %s'
+ while test $# -gt 0
+ do
+ case "$1" in
+ -C)
+ indir=$2
+ shift
+ ;;
+ --ref=*)
+ ref=${1#--*=}
+ ;;
+ --start=*)
+ n=${1#--*=}
+ ;;
+ --message=*)
+ message=${1#--*=}
+ ;;
+ --filename=*)
+ filename=${1#--*=}
+ ;;
+ --contents=*)
+ contents=${1#--*=}
+ ;;
+ --id=*)
+ message="${1#--*=} %s"
+ filename="${1#--*=}-%s.t"
+ contents="${1#--*=} %s"
+ ;;
+ -*)
+ BUG "invalid test_commit_bulk option: $1"
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+ done
+ total=$1
+
+ add_from=
+ if git -C "$indir" rev-parse --verify "$ref"
+ then
+ add_from=t
+ fi
+
+ while test "$total" -gt 0
+ do
+ test_tick &&
+ echo "commit $ref"
+ printf 'author %s <%s> %s\n' \
+ "$GIT_AUTHOR_NAME" \
+ "$GIT_AUTHOR_EMAIL" \
+ "$GIT_AUTHOR_DATE"
+ printf 'committer %s <%s> %s\n' \
+ "$GIT_COMMITTER_NAME" \
+ "$GIT_COMMITTER_EMAIL" \
+ "$GIT_COMMITTER_DATE"
+ echo "data <<EOF"
+ printf "$message\n" $n
+ echo "EOF"
+ if test -n "$add_from"
+ then
+ echo "from $ref^0"
+ add_from=
+ fi
+ printf "M 644 inline $filename\n" $n
+ echo "data <<EOF"
+ printf "$contents\n" $n
+ echo "EOF"
+ echo
+ n=$((n + 1))
+ total=$((total - 1))
+ done >"$tmpfile"
+
+ git -C "$indir" \
+ -c fastimport.unpacklimit=0 \
+ fast-import <"$tmpfile" || return 1
+
+ # This will be left in place on failure, which may aid debugging.
+ rm -f "$tmpfile"
+
+ # If we updated HEAD, then be nice and update the index and working
+ # tree, too.
+ if test "$ref" = "HEAD"
+ then
+ git -C "$indir" checkout -f HEAD || return 1
+ fi
+}
+
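A sketch of how a test script might use the helper once test-lib.sh is sourced; the test title, `--id` string, and ref name are made up for illustration:

----
test_expect_success 'set up a long history' '
	test_commit_bulk 512 &&
	test_commit_bulk --id=widget --ref=refs/heads/widgets 16
'
----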
# This function helps systems where core.filemode=false is set.
# Use it instead of plain 'chmod +x' to set or unset the executable bit
# of a file in the working directory and add it to the index.
}
test_set_prereq () {
+ if test -n "$GIT_TEST_FAIL_PREREQS_INTERNAL"
+ then
+ case "$1" in
+ # The "!" case is handled below with
+ # test_unset_prereq()
+ !*)
+ ;;
+ # (Temporary?) whitelist of things we can't easily
+ # pretend not to support
+ SYMLINKS)
+ ;;
+ # Inspecting whether GIT_TEST_FAIL_PREREQS is on
+ # should be unaffected.
+ FAIL_PREREQS)
+ ;;
+ *)
+ return
+ esac
+ fi
+
case "$1" in
!*)
test_unset_prereq "${1#!}"
command "$PERL_PATH" "$@" 2>&7
} 7>&2 2>&4
-# Is the value one of the various ways to spell a boolean true/false?
-test_normalize_bool () {
- git -c magic.variable="$1" config --bool magic.variable 2>/dev/null
-}
-
-# Given a variable $1, normalize the value of it to one of "true",
-# "false", or "auto" and store the result to it.
-#
-# test_tristate GIT_TEST_HTTPD
-#
-# A variable set to an empty string is set to 'false'.
-# A variable set to 'false' or 'auto' keeps its value.
-# Anything else is set to 'true'.
-# An unset variable defaults to 'auto'.
-#
-# The last rule is to allow people to set the variable to an empty
-# string and export it to decline testing the particular feature
-# for versions both before and after this change. We used to treat
-# both unset and empty variable as a signal for "do not test" and
-# took any non-empty string as "please test".
-
-test_tristate () {
- if eval "test x\"\${$1+isset}\" = xisset"
- then
- # explicitly set
- eval "
- case \"\$$1\" in
- '') $1=false ;;
- auto) ;;
- *) $1=\$(test_normalize_bool \$$1 || echo true) ;;
- esac
- "
- else
- eval "$1=auto"
- fi
-}
-
# Exit the test suite, either by skipping all remaining tests or by
-# exiting with an error. If "$1" is "auto", we then we assume we were
-# opportunistically trying to set up some tests and we skip. If it is
-# "true", then we report a failure.
+# exiting with an error. If our prerequisite variable $1 falls back
+# on a default, assume we were opportunistically trying to set up some
+# tests and skip. If it is explicitly "true", report a failure.
#
# The error/skip message should be given by $2.
#
test_skip_or_die () {
- case "$1" in
- auto)
+ if ! git env--helper --type=bool --default=false --exit-code $1
+ then
skip_all=$2
test_done
- ;;
- true)
- error "$2"
- ;;
- *)
- error "BUG: test tristate is '$1' (real error: $2)"
- esac
+ fi
+ error "$2"
}
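A sketch of the intended call site after this change: helper libraries now pass the name of the controlling variable rather than a normalized value (the message text is illustrative):

----
# In a lib-*.sh style helper, after optional setup has failed:
test_skip_or_die GIT_TEST_HTTPD "web server setup failed"
----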
# The following mingw_* functions obey POSIX shell syntax, but are actually
eval "printf '%s' \"\${$var}\""
}
+# Insert a slash into an object ID so it can be used to reference a location
+# under ".git/objects". For example, "deadbeef..." becomes "de/adbeef...".
+test_oid_to_path () {
+ echo "${1%${1#??}}/${1#??}"
+}
+
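A sketch of the helper in use, assuming a test repository whose HEAD commit object is still loose rather than packed:

----
oid=$(git rev-parse HEAD) &&
test_path_is_file ".git/objects/$(test_oid_to_path "$oid")"
----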
# Choose a port number based on the test script's number and store it in
# the given variable name, unless that variable already contains a number.
test_set_port () {
my @env = keys %ENV;
my $ok = join("|", qw(
TRACE
- TR2_
DEBUG
TEST
.*_TEST
done
}
+# The GIT_TEST_FAIL_PREREQS code hooks into test_set_prereq(), and
+# thus needs to be set up really early. It sets an internal variable
+# for convenience, so the hot test_set_prereq() codepath doesn't need
+# to call "git env--helper". Only do that work if
+# GIT_TEST_FAIL_PREREQS is set at all.
+GIT_TEST_FAIL_PREREQS_INTERNAL=
+if test -n "$GIT_TEST_FAIL_PREREQS"
+then
+ if git env--helper --type=bool --default=0 --exit-code GIT_TEST_FAIL_PREREQS
+ then
+ GIT_TEST_FAIL_PREREQS_INTERNAL=true
+ test_set_prereq FAIL_PREREQS
+ fi
+else
+ test_lazy_prereq FAIL_PREREQS '
+ git env--helper --type=bool --default=0 --exit-code GIT_TEST_FAIL_PREREQS
+ '
+fi
+
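The knob is meant to be set in the environment when running the suite, to flush out tests that silently skip when a prerequisite is missing. A sketch, using an arbitrary test script:

----
cd t
GIT_TEST_FAIL_PREREQS=true ./t0000-basic.sh -i -v
----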
# Fix some commands on Windows
uname_s=$(uname -s)
case $uname_s in
unset GIT_TEST_GETTEXT_POISON_ORIG
fi
-# Can we rely on git's output in the C locale?
-if test -z "$GIT_TEST_GETTEXT_POISON"
-then
- test_set_prereq C_LOCALE_OUTPUT
-fi
+test_lazy_prereq C_LOCALE_OUTPUT '
+ ! git env--helper --type=bool --default=0 --exit-code GIT_TEST_GETTEXT_POISON
+'
if test -z "$GIT_TEST_CHECK_CACHE_TREE"
then
struct tag *lookup_tag(struct repository *r, const struct object_id *oid)
{
- struct object *obj = lookup_object(r, oid->hash);
+ struct object *obj = lookup_object(r, oid);
if (!obj)
- return create_object(r, oid->hash,
- alloc_tag_node(r));
+ return create_object(r, oid, alloc_tag_node(r));
return object_as_type(r, obj, OBJ_TAG, 0);
}
literal_copy:
return xstrdup(url);
}
-
-static void fill_alternate_refs_command(struct child_process *cmd,
- const char *repo_path)
-{
- const char *value;
-
- if (!git_config_get_value("core.alternateRefsCommand", &value)) {
- cmd->use_shell = 1;
-
- argv_array_push(&cmd->args, value);
- argv_array_push(&cmd->args, repo_path);
- } else {
- cmd->git_cmd = 1;
-
- argv_array_pushf(&cmd->args, "--git-dir=%s", repo_path);
- argv_array_push(&cmd->args, "for-each-ref");
- argv_array_push(&cmd->args, "--format=%(objectname)");
-
- if (!git_config_get_value("core.alternateRefsPrefixes", &value)) {
- argv_array_push(&cmd->args, "--");
- argv_array_split(&cmd->args, value);
- }
- }
-
- cmd->env = local_repo_env;
- cmd->out = -1;
-}
-
-static void read_alternate_refs(const char *path,
- alternate_ref_fn *cb,
- void *data)
-{
- struct child_process cmd = CHILD_PROCESS_INIT;
- struct strbuf line = STRBUF_INIT;
- FILE *fh;
-
- fill_alternate_refs_command(&cmd, path);
-
- if (start_command(&cmd))
- return;
-
- fh = xfdopen(cmd.out, "r");
- while (strbuf_getline_lf(&line, fh) != EOF) {
- struct object_id oid;
- const char *p;
-
- if (parse_oid_hex(line.buf, &oid, &p) || *p) {
- warning(_("invalid line while parsing alternate refs: %s"),
- line.buf);
- break;
- }
-
- cb(&oid, data);
- }
-
- fclose(fh);
- finish_command(&cmd);
-}
-
-struct alternate_refs_data {
- alternate_ref_fn *fn;
- void *data;
-};
-
-static int refs_from_alternate_cb(struct object_directory *e,
- void *data)
-{
- struct strbuf path = STRBUF_INIT;
- size_t base_len;
- struct alternate_refs_data *cb = data;
-
- if (!strbuf_realpath(&path, e->path, 0))
- goto out;
- if (!strbuf_strip_suffix(&path, "/objects"))
- goto out;
- base_len = path.len;
-
- /* Is this a git repository with refs? */
- strbuf_addstr(&path, "/refs");
- if (!is_directory(path.buf))
- goto out;
- strbuf_setlen(&path, base_len);
-
- read_alternate_refs(path.buf, cb->fn, cb->data);
-
-out:
- strbuf_release(&path);
- return 0;
-}
-
-void for_each_alternate_ref(alternate_ref_fn fn, void *data)
-{
- struct alternate_refs_data cb;
- cb.fn = fn;
- cb.data = data;
- foreach_alt_odb(refs_from_alternate_cb, &cb);
-}
void transport_print_push_status(const char *dest, struct ref *refs,
int verbose, int porcelain, unsigned int *reject_reasons);
-typedef void alternate_ref_fn(const struct object_id *oid, void *);
-void for_each_alternate_ref(alternate_ref_fn, void *);
#endif
* diff_tree_oid(parent, commit) )
*/
for (i = 0; i < nparent; ++i)
- tptree[i] = fill_tree_descriptor(&tp[i], parents_oid[i]);
- ttree = fill_tree_descriptor(&t, oid);
+ tptree[i] = fill_tree_descriptor(opt->repo, &tp[i], parents_oid[i]);
+ ttree = fill_tree_descriptor(opt->repo, &t, oid);
/* Enable recursion indefinitely */
opt->pathspec.recursive = opt->flags.recursive;
return result;
}
-void *fill_tree_descriptor(struct tree_desc *desc, const struct object_id *oid)
+void *fill_tree_descriptor(struct repository *r,
+ struct tree_desc *desc,
+ const struct object_id *oid)
{
unsigned long size = 0;
void *buf = NULL;
if (oid) {
- buf = read_object_with_reference(oid, tree_type, &size, NULL);
+ buf = read_object_with_reference(r, oid, tree_type, &size, NULL);
if (!buf)
die("unable to read tree %s", oid_to_hex(oid));
}
struct object_id oid;
};
-static int find_tree_entry(struct tree_desc *t, const char *name, struct object_id *result, unsigned short *mode)
+static int find_tree_entry(struct repository *r, struct tree_desc *t,
+ const char *name, struct object_id *result,
+ unsigned short *mode)
{
int namelen = strlen(name);
while (t->size) {
oidcpy(result, &oid);
return 0;
}
- return get_tree_entry(&oid, name + entrylen, result, mode);
+ return get_tree_entry(r, &oid, name + entrylen, result, mode);
}
return -1;
}
-int get_tree_entry(const struct object_id *tree_oid, const char *name, struct object_id *oid, unsigned short *mode)
+int get_tree_entry(struct repository *r,
+ const struct object_id *tree_oid,
+ const char *name,
+ struct object_id *oid,
+ unsigned short *mode)
{
int retval;
void *tree;
unsigned long size;
struct object_id root;
- tree = read_object_with_reference(tree_oid, tree_type, &size, &root);
+ tree = read_object_with_reference(r, tree_oid, tree_type, &size, &root);
if (!tree)
return -1;
} else {
struct tree_desc t;
init_tree_desc(&t, tree, size);
- retval = find_tree_entry(&t, name, oid, mode);
+ retval = find_tree_entry(r, &t, name, oid, mode);
}
free(tree);
return retval;
* See the code for enum get_oid_result for a description of
* the return values.
*/
-enum get_oid_result get_tree_entry_follow_symlinks(struct object_id *tree_oid, const char *name, struct object_id *result, struct strbuf *result_path, unsigned short *mode)
+enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r,
+ struct object_id *tree_oid, const char *name,
+ struct object_id *result, struct strbuf *result_path,
+ unsigned short *mode)
{
int retval = MISSING_OBJECT;
struct dir_state *parents = NULL;
void *tree;
struct object_id root;
unsigned long size;
-	tree = read_object_with_reference(&current_tree_oid,
+	tree = read_object_with_reference(r,
+					  &current_tree_oid,
tree_type, &size,
&root);
if (!tree)
}
/* Look up the first (or only) path component in the tree. */
- find_result = find_tree_entry(&t, namebuf.buf,
+ find_result = find_tree_entry(r, &t, namebuf.buf,
					      &current_tree_oid, mode);
if (find_result) {
goto done;
*/
retval = DANGLING_SYMLINK;
-		contents = read_object_file(&current_tree_oid, &type,
+		contents = repo_read_object_file(r,
+						 &current_tree_oid, &type,
&link_len);
if (!contents)
int tree_entry(struct tree_desc *, struct name_entry *);
int tree_entry_gently(struct tree_desc *, struct name_entry *);
-void *fill_tree_descriptor(struct tree_desc *desc, const struct object_id *oid);
+void *fill_tree_descriptor(struct repository *r,
+ struct tree_desc *desc,
+ const struct object_id *oid);
struct traverse_info;
typedef int (*traverse_callback_t)(int n, unsigned long mask, unsigned long dirmask, struct name_entry *entry, struct traverse_info *);
int traverse_trees(struct index_state *istate, int n, struct tree_desc *t, struct traverse_info *info);
-enum get_oid_result get_tree_entry_follow_symlinks(struct object_id *tree_oid, const char *name, struct object_id *result, struct strbuf *result_path, unsigned short *mode);
+enum get_oid_result get_tree_entry_follow_symlinks(struct repository *r, struct object_id *tree_oid, const char *name, struct object_id *result, struct strbuf *result_path, unsigned short *mode);
struct traverse_info {
const char *traverse_path;
int show_all_errors;
};
-int get_tree_entry(const struct object_id *, const char *, struct object_id *, unsigned short *);
+int get_tree_entry(struct repository *, const struct object_id *, const char *, struct object_id *, unsigned short *);
char *make_traverse_path(char *path, const struct traverse_info *info, const struct name_entry *n);
void setup_traverse_info(struct traverse_info *info, const char *base);
struct tree *lookup_tree(struct repository *r, const struct object_id *oid)
{
- struct object *obj = lookup_object(r, oid->hash);
+ struct object *obj = lookup_object(r, oid);
if (!obj)
- return create_object(r, oid->hash,
- alloc_tree_node(r));
+ return create_object(r, oid, alloc_tree_node(r));
return object_as_type(r, obj, OBJ_TREE, 0);
}
total++;
}
- return start_delayed_progress(_("Checking out files"), total);
+ return start_delayed_progress(_("Updating files"), total);
}
static void setup_collided_checkout_detection(struct checkout *state,
const struct object_id *oid = NULL;
if (dirmask & 1)
oid = &names[i].oid;
- buf[nr_buf++] = fill_tree_descriptor(t + i, oid);
+ buf[nr_buf++] = fill_tree_descriptor(the_repository, t + i, oid);
}
}
return -1;
while ((i = read_in_full(cmd.out, namebuf, hexsz + 1)) == hexsz + 1) {
- struct object_id sha1;
+ struct object_id oid;
const char *p;
- if (parse_oid_hex(namebuf, &sha1, &p) || *p != '\n')
+ if (parse_oid_hex(namebuf, &oid, &p) || *p != '\n')
break;
- o = lookup_object(the_repository, sha1.hash);
+ o = lookup_object(the_repository, &oid);
if (o && o->type == OBJ_COMMIT) {
o->flags &= ~TMP_MARK;
}
static int mark_our_ref(const char *refname, const char *refname_full,
const struct object_id *oid)
{
- struct object *o = lookup_unknown_object(oid->hash);
+ struct object *o = lookup_unknown_object(oid);
if (ref_is_hidden(refname, refname_full)) {
o->flags |= HIDDEN_REF;
"|[-+*/<>%&^|=!]="
"|--|\\+\\+|<<=?|>>>?=?|&&|\\|\\|"),
PATTERNS("matlab",
- "^[[:space:]]*((classdef|function)[[:space:]].*)$|^%%[[:space:]].*$",
+ /*
+ * Octave pattern is mostly the same as matlab, except that '%%%' and
+ * '##' can also be used to begin code sections, in addition to '%%'
+ * that is understood by both.
+ */
+ "^[[:space:]]*((classdef|function)[[:space:]].*)$|^(%%%?|##)[[:space:]].*$",
"[a-zA-Z_][a-zA-Z0-9_]*|[-+0-9.e]+|[=~<>]=|\\.[*/\\^']|\\|\\||&&"),
PATTERNS("objc",
/* Negate C statements that can look like functions */
"(@|@@|\\$)?[a-zA-Z_][a-zA-Z0-9_]*"
"|[-+0-9.e]+|0[xXbB]?[0-9a-fA-F]+|\\?(\\\\C-)?(\\\\M-)?."
"|//=?|[-+*/<>%&^|=!]=|<<=?|>>=?|===|\\.{1,3}|::|[!=]~"),
+PATTERNS("rust",
+ "^[\t ]*((pub(\\([^\\)]+\\))?[\t ]+)?((async|const|unsafe|extern([\t ]+\"[^\"]+\"))[\t ]+)?(struct|enum|union|mod|trait|fn|impl)[< \t]+[^;]*)$",
+ /* -- */
+ "[a-zA-Z_][a-zA-Z0-9_]*"
+ "|[0-9][0-9_a-fA-Fiosuxz]*(\\.([0-9]*[eE][+-]?)?[0-9_fF]*)?"
+ "|[-+*\\/<>%&^|=!:]=|<<=?|>>=?|&&|\\|\\||->|=>|\\.{2}=|\\.{3}|::"),
PATTERNS("bibtex", "(@[a-zA-Z]{1,}[ \t]*\\{{0,1}[ \t]*[^ \t\"@',\\#}{~%]*).*$",
"[={}\"]|[^={}\" \t]+"),
PATTERNS("tex", "^(\\\\((sub)*section|chapter|part)\\*{0,1}\\{.*)$",
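Built-in drivers such as the new rust patterns (and the matlab/Octave ones touched above) are opted into through the `diff` attribute; a rough sketch for a repository that wants them:

----
echo "*.rs diff=rust"   >>.gitattributes
echo "*.m  diff=matlab" >>.gitattributes
git diff        # hunk headers now quote the enclosing fn/function line
----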
error("Could not interpret response from server '%s' as something to pull", target[i]);
goto done;
}
- if (process(walker, lookup_unknown_object(oids[i].hash)))
+ if (process(walker, lookup_unknown_object(&oids[i])))
goto done;
}
#include "lockfile.h"
#include "sequencer.h"
+#define AB_DELAY_WARNING_IN_MS (2 * 1000)
+
static const char cut_line[] =
"------------------------ >8 ------------------------\n";
return;
if (s->whence != FROM_COMMIT)
;
- else if (!s->is_initial)
- status_printf_ln(s, c, _(" (use \"git reset %s <file>...\" to unstage)"), s->reference);
- else
+ else if (!s->is_initial) {
+ if (!strcmp(s->reference, "HEAD"))
+ status_printf_ln(s, c,
+ _(" (use \"git restore --staged <file>...\" to unstage)"));
+ else
+ status_printf_ln(s, c,
+ _(" (use \"git restore --source=%s --staged <file>...\" to unstage)"),
+ s->reference);
+ } else
status_printf_ln(s, c, _(" (use \"git rm --cached <file>...\" to unstage)"));
if (!both_deleted) {
} else {
status_printf_ln(s, c, _(" (use \"git add/rm <file>...\" as appropriate to mark resolution)"));
}
- status_printf_ln(s, c, "%s", "");
}
static void wt_longstatus_print_cached_header(struct wt_status *s)
return;
if (s->whence != FROM_COMMIT)
; /* NEEDSWORK: use "git reset --unresolve"??? */
- else if (!s->is_initial)
- status_printf_ln(s, c, _(" (use \"git reset %s <file>...\" to unstage)"), s->reference);
- else
+ else if (!s->is_initial) {
+ if (!strcmp(s->reference, "HEAD"))
+			status_printf_ln(s, c,
+				_(" (use \"git restore --staged <file>...\" to unstage)"));
+ else
+ status_printf_ln(s, c,
+ _(" (use \"git restore --source=%s --staged <file>...\" to unstage)"),
+ s->reference);
+ } else
status_printf_ln(s, c, _(" (use \"git rm --cached <file>...\" to unstage)"));
- status_printf_ln(s, c, "%s", "");
}
static void wt_longstatus_print_dirty_header(struct wt_status *s,
status_printf_ln(s, c, _(" (use \"git add <file>...\" to update what will be committed)"));
else
status_printf_ln(s, c, _(" (use \"git add/rm <file>...\" to update what will be committed)"));
- status_printf_ln(s, c, _(" (use \"git checkout -- <file>...\" to discard changes in working directory)"));
+ status_printf_ln(s, c, _(" (use \"git restore <file>...\" to discard changes in working directory)"));
if (has_dirty_submodules)
status_printf_ln(s, c, _(" (commit or discard the untracked or modified content in submodules)"));
- status_printf_ln(s, c, "%s", "");
}
static void wt_longstatus_print_other_header(struct wt_status *s,
if (!s->hints)
return;
status_printf_ln(s, c, _(" (use \"git %s <file>...\" to include in what will be committed)"), how);
- status_printf_ln(s, c, "%s", "");
}
static void wt_longstatus_print_trailer(struct wt_status *s)
struct branch *branch;
char comment_line_string[3];
int i;
+ uint64_t t_begin = 0;
assert(s->branch && !s->is_initial);
if (!skip_prefix(s->branch, "refs/heads/", &branch_name))
return;
branch = branch_get(branch_name);
+
+ t_begin = getnanotime();
+
if (!format_tracking_info(branch, &sb, s->ahead_behind_flags))
return;
+ if (advice_status_ahead_behind_warning &&
+ s->ahead_behind_flags == AHEAD_BEHIND_FULL) {
+ uint64_t t_delta_in_ms = (getnanotime() - t_begin) / 1000000;
+ if (t_delta_in_ms > AB_DELAY_WARNING_IN_MS) {
+ strbuf_addf(&sb, _("\n"
+ "It took %.2f seconds to compute the branch ahead/behind values.\n"
+ "You can use '--no-ahead-behind' to avoid this.\n"),
+ t_delta_in_ms / 1000.0);
+ }
+ }
+
i = 0;
if (s->display_comment_prefix) {
comment_line_string[i++] = comment_line_char;
} else if (s->state.detached_from) {
branch_name = s->state.detached_from;
if (s->state.detached_at)
- on_what = _("HEAD detached at ");
+ on_what = HEAD_DETACHED_AT;
else
- on_what = _("HEAD detached from ");
+ on_what = HEAD_DETACHED_FROM;
} else {
branch_name = "";
on_what = _("Not currently on any branch.");
/*
* Fix-up changed entries before we print them.
*/
-static void wt_porcelain_v2_fix_up_changed(
- struct string_list_item *it,
- struct wt_status *s)
+static void wt_porcelain_v2_fix_up_changed(struct string_list_item *it)
{
struct wt_status_change_data *d = it->util;
char submodule_token[5];
char sep_char, eol_char;
- wt_porcelain_v2_fix_up_changed(it, s);
+ wt_porcelain_v2_fix_up_changed(it);
wt_porcelain_v2_submodule_state(d, submodule_token);
key[0] = d->index_status ? d->index_status : '.';
#ifndef STATUS_H
#define STATUS_H
-#include <stdio.h>
#include "string-list.h"
#include "color.h"
#include "pathspec.h"
STATUS_FORMAT_UNSPECIFIED
};
+#define HEAD_DETACHED_AT _("HEAD detached at ")
+#define HEAD_DETACHED_FROM _("HEAD detached from ")
+
struct wt_status_state {
int merge_in_progress;
int am_in_progress;
if (fs1 < 0)
fs1 = 0;
if (fs1 < s1) {
- s2 -= s1 - fs1;
+ s2 = XDL_MAX(s2 - (s1 - fs1), 0);
s1 = fs1;
}
}
if (fe1 < 0)
fe1 = xe->xdf1.nrec;
if (fe1 > e1) {
- e2 += fe1 - e1;
+ e2 = XDL_MIN(e2 + (fe1 - e1), xe->xdf2.nrec);
e1 = fe1;
}
*/
#include "xinclude.h"
-#include "xtypes.h"
-#include "xdiff.h"
#define MAX_PTR UINT_MAX
#define MAX_CNT UINT_MAX
*
*/
#include "xinclude.h"
-#include "xtypes.h"
-#include "xdiff.h"
/*
* The basic idea of patience diff is to find lines that are unique in
*
*/
-#include <limits.h>
-#include <assert.h>
#include "xinclude.h"
-
-
long xdl_bogosqrt(long n) {
long i;