Merge branch 'os/fetch-check-not-current-branch'
author     Junio C Hamano <gitster@pobox.com>
           Wed, 25 May 2022 23:42:48 +0000 (16:42 -0700)
committer  Junio C Hamano <gitster@pobox.com>
           Wed, 25 May 2022 23:42:48 +0000 (16:42 -0700)
The way "git fetch" without "--update-head-ok" ensures that HEAD in
no worktree points at any ref being updated was too wasteful, which
has been optimized a bit.

* os/fetch-check-not-current-branch:
  fetch: limit shared symref check only for local branches

181 files changed:
Documentation/Makefile
Documentation/MyFirstContribution.txt
Documentation/RelNotes/2.37.0.txt [new file with mode: 0644]
Documentation/SubmittingPatches
Documentation/config/mergetool.txt
Documentation/config/safe.txt
Documentation/git-mergetool.txt
Documentation/git-p4.txt
Documentation/git-rebase.txt
Documentation/git.txt
Documentation/mergetools/vimdiff.txt [new file with mode: 0644]
Documentation/rev-list-options.txt
Documentation/technical/api-trace2.txt
GIT-VERSION-GEN
Makefile
RelNotes
alloc.c
alloc.h
apply.c
archive.c
bisect.h
blame.c
branch.c
builtin/apply.c
builtin/bisect--helper.c
builtin/blame.c
builtin/checkout.c
builtin/clone.c
builtin/commit.c
builtin/diff.c
builtin/gc.c
builtin/index-pack.c
builtin/log.c
builtin/ls-remote.c
builtin/mailsplit.c
builtin/multi-pack-index.c
builtin/pack-redundant.c
builtin/pull.c
builtin/rebase.c
builtin/receive-pack.c
builtin/replace.c
builtin/rev-parse.c
builtin/shortlog.c
builtin/show-branch.c
builtin/stash.c
builtin/tag.c
cache.h
ci/install-dependencies.sh
combine-diff.c
commit-graph.c
compat/fsmonitor/fsm-listen-darwin.c
compat/mingw.c
compat/mkdir.c
compat/mmap.c
config.c
configure.ac
contrib/coccinelle/equals-null.cocci [new file with mode: 0644]
contrib/coccinelle/the_repository.pending.cocci
contrib/vscode/README.md
contrib/vscode/init.sh
convert.c
daemon.c
detect-compiler
diff.c
dir.c
environment.c
ewah/bitmap.c
ewah/ewah_bitmap.c
ewah/ewok.h
fetch-pack.c
git-mergetool--lib.sh
git-p4.py
git.c
gitk-git/gitk
http-fetch.c
http-push.c
http-walker.c
http.c
kwset.c
ll-merge.c
log-tree.c
mailinfo.c
mailmap.c
merge-ort.c
merge-recursive.c
mergetools/araxis
mergetools/bc
mergetools/codecompare
mergetools/deltawalker
mergetools/diffmerge
mergetools/diffuse
mergetools/ecmerge
mergetools/emerge
mergetools/examdiff
mergetools/guiffy
mergetools/kdiff3
mergetools/kompare
mergetools/meld
mergetools/opendiff
mergetools/p4merge
mergetools/smerge
mergetools/tkdiff
mergetools/tortoisemerge
mergetools/vimdiff
mergetools/winmerge
mergetools/xxdiff
midx.c
object-file.c
object-name.c
object-store.h
pack-bitmap.c
packfile.c
path.c
prio-queue.c
promisor-remote.c
read-cache.c
ref-filter.c
refs.c
refs/ref-cache.c
reftable/blocksource.c
reftable/stack_test.c
reftable/tree.c
reftable/writer.c
rerere.c
revision.c
revision.h
run-command.h
sequencer.c
serve.c
setup.c
sh-i18n--envsubst.c
shallow.c
sparse-index.c
sparse-index.h
submodule-config.h
submodule.h
t/annotate-tests.sh
t/lib-git-p4.sh
t/perf/p2000-sparse-operations.sh
t/t0012-help.sh
t/t0027-auto-crlf.sh
t/t0033-safe-directory.sh
t/t0060-path-utils.sh
t/t1006-cat-file.sh
t/t1011-read-tree-sparse-checkout.sh
t/t1092-sparse-checkout-compatibility.sh
t/t1450-fsck.sh
t/t2203-add-intent.sh
t/t3202-show-branch.sh
t/t3416-rebase-onto-threedots.sh
t/t3501-revert-cherry-pick.sh
t/t4020-diff-external.sh
t/t4202-log.sh
t/t4217-log-limit.sh [new file with mode: 0755]
t/t5572-pull-submodule.sh
t/t5605-clone-local.sh
t/t6030-bisect-porcelain.sh
t/t7011-skip-worktree-reading.sh
t/t7063-status-untracked-cache.sh
t/t7524-commit-summary.sh [new file with mode: 0755]
t/t7609-mergetool--lib.sh [new file with mode: 0755]
t/t7812-grep-icase-non-ascii.sh
t/t9800-git-p4-basic.sh
t/t9801-git-p4-branch.sh
t/t9802-git-p4-filetype.sh
t/t9835-git-p4-metadata-encoding-python2.sh [new file with mode: 0755]
t/t9836-git-p4-metadata-encoding-python3.sh [new file with mode: 0755]
t/test-lib.sh
tempfile.c
tempfile.h
trailer.c
transport.c
unpack-trees.c
wildmatch.c
worktree.c
wrapper.c
xdiff-interface.c
xdiff/xemit.c
xdiff/xmacros.h
xdiff/xprepare.c
xdiff/xutils.c

index 44c080e3e5bb0a895ab8834822490f94f758a96d..adb2f1b50abb8b2f3f4a648033150884f6455759 100644 (file)
@@ -302,12 +302,12 @@ $(mergetools_txt): mergetools-list.made
 
 mergetools-list.made: ../git-mergetool--lib.sh $(wildcard ../mergetools/*)
        $(QUIET_GEN) \
-       $(SHELL_PATH) -c 'MERGE_TOOLS_DIR=../mergetools && \
+       $(SHELL_PATH) -c 'MERGE_TOOLS_DIR=../mergetools && TOOL_MODE=diff && \
                . ../git-mergetool--lib.sh && \
-               show_tool_names can_diff "* " || :' >mergetools-diff.txt && \
-       $(SHELL_PATH) -c 'MERGE_TOOLS_DIR=../mergetools && \
+               show_tool_names can_diff' | sed -e "s/\([a-z0-9]*\)/\`\1\`;;/" >mergetools-diff.txt && \
+       $(SHELL_PATH) -c 'MERGE_TOOLS_DIR=../mergetools && TOOL_MODE=merge && \
                . ../git-mergetool--lib.sh && \
-               show_tool_names can_merge "* " || :' >mergetools-merge.txt && \
+               show_tool_names can_merge' | sed -e "s/\([a-z0-9]*\)/\`\1\`;;/" >mergetools-merge.txt && \
        date >$@
 
 TRACK_ASCIIDOCFLAGS = $(subst ','\'',$(ASCIIDOC_COMMON):$(ASCIIDOC_HTML):$(ASCIIDOC_DOCBOOK))
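
For reference, the sed expression above wraps each tool name emitted by
`show_tool_names` into an AsciiDoc definition-list term. A minimal sketch of
just that transformation (the tool names here are only examples):

----
$ printf 'araxis\nvimdiff\n' |
        sed -e "s/\([a-z0-9]*\)/\`\1\`;;/"
`araxis`;;
`vimdiff`;;
----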
index 63a2ef544939a3ce3ebb468629e7d2a538d081d1..1da15d9ad4461578b671c00329805c4ea9b7312a 100644 (file)
@@ -710,13 +710,104 @@ dependencies. `prove` also makes the output nicer.
 Go ahead and commit this change, as well.
 
 [[ready-to-share]]
-== Getting Ready to Share
+== Getting Ready to Share: Anatomy of a Patch Series
 
 You may have noticed already that the Git project performs its code reviews via
 emailed patches, which are then applied by the maintainer when they are ready
-and approved by the community. The Git project does not accept patches from
+and approved by the community. The Git project does not accept contributions from
 pull requests, and the patches emailed for review need to be formatted a
-specific way. At this point the tutorial diverges, in order to demonstrate two
+specific way.
+
+:patch-series: https://lore.kernel.org/git/pull.1218.git.git.1645209647.gitgitgadget@gmail.com/
+:lore: https://lore.kernel.org/git/
+
+Before taking a look at how to convert your commits into emailed patches,
+let's analyze what the end result, a "patch series", looks like. Here is an
+{patch-series}[example] of the summary view for a patch series on the web interface of
+the {lore}[Git mailing list archive]:
+
+----
+2022-02-18 18:40 [PATCH 0/3] libify reflog John Cai via GitGitGadget
+2022-02-18 18:40 ` [PATCH 1/3] reflog: libify delete reflog function and helpers John Cai via GitGitGadget
+2022-02-18 19:10   ` Ævar Arnfjörð Bjarmason [this message]
+2022-02-18 19:39     ` Taylor Blau
+2022-02-18 19:48       ` Ævar Arnfjörð Bjarmason
+2022-02-18 19:35   ` Taylor Blau
+2022-02-21  1:43     ` John Cai
+2022-02-21  1:50       ` Taylor Blau
+2022-02-23 19:50         ` John Cai
+2022-02-18 20:00   ` // other replies elided
+2022-02-18 18:40 ` [PATCH 2/3] reflog: call reflog_delete from reflog.c John Cai via GitGitGadget
+2022-02-18 19:15   ` Ævar Arnfjörð Bjarmason
+2022-02-18 20:26     ` Junio C Hamano
+2022-02-18 18:40 ` [PATCH 3/3] stash: call reflog_delete from reflog.c John Cai via GitGitGadget
+2022-02-18 19:20   ` Ævar Arnfjörð Bjarmason
+2022-02-19  0:21     ` Taylor Blau
+2022-02-22  2:36     ` John Cai
+2022-02-22 10:51       ` Ævar Arnfjörð Bjarmason
+2022-02-18 19:29 ` [PATCH 0/3] libify reflog Ævar Arnfjörð Bjarmason
+2022-02-22 18:30 ` [PATCH v2 0/3] libify reflog John Cai via GitGitGadget
+2022-02-22 18:30   ` [PATCH v2 1/3] stash: add test to ensure reflog --rewrite --updatref behavior John Cai via GitGitGadget
+2022-02-23  8:54     ` Ævar Arnfjörð Bjarmason
+2022-02-23 21:27       ` Junio C Hamano
+// continued
+----
+
+We can note a few things:
+
+- Each commit is sent as a separate email, with the commit message title as
+  subject, prefixed with "[PATCH _i_/_n_]" for the _i_-th commit of an
+  _n_-commit series.
+- Each patch is sent as a reply to an introductory email called the _cover
+  letter_ of the series, prefixed "[PATCH 0/_n_]".
+- Subsequent iterations of the patch series are labelled "PATCH v2", "PATCH
+  v3", etc. in place of "PATCH". For example, "[PATCH v2 1/3]" would be the first of
+  three patches in the second iteration. Each iteration is sent with a new cover
+  letter (like "[PATCH v2 0/3]" above), itself a reply to the cover letter of the
+  previous iteration (more on that below).
+
+NOTE: A single-patch topic is sent with "[PATCH]", "[PATCH v2]", etc. without
+_i_/_n_ numbering (in the above thread overview, no single-patch topic appears,
+though).
+
+[[cover-letter]]
+=== The cover letter
+
+In addition to an email per patch, the Git community also expects your patches
+to come with a cover letter. This is an important component of change
+submission as it explains to the community from a high level what you're trying
+to do, and why, in a way that's more apparent than just looking at your
+patches.
+
+The title of your cover letter should be something which succinctly covers the
+purpose of your entire topic branch. It's often in the imperative mood, just
+like our commit message titles. Here is how we'll title our series:
+
+----
+Add the 'psuh' command
+----
+
+The body of the cover letter is used to give additional context to reviewers.
+Be sure to explain anything your patches don't make clear on their own, but
+remember that since the cover letter is not recorded in the commit history,
+anything that might be useful to future readers of the repository's history
+should also be in your commit messages.
+
+Here's an example body for `psuh`:
+
+----
+Our internal metrics indicate widespread interest in the command
+git-psuh - that is, many users are trying to use it, but finding it is
+unavailable, using some unknown workaround instead.
+
+The following handful of patches add the psuh command and implement some
+handy features on top of it.
+
+This patchset is part of the MyFirstContribution tutorial and should not
+be merged.
+----
+
+At this point the tutorial diverges, in order to demonstrate two
 different methods of formatting your patchset and getting it reviewed.
 
 The first method to be covered is GitGitGadget, which is useful for those
@@ -808,8 +899,22 @@ https://github.com/gitgitgadget/git and open a PR either with the "New pull
 request" button or the convenient "Compare & pull request" button that may
 appear with the name of your newly pushed branch.
 
-Review the PR's title and description, as it's used by GitGitGadget as the cover
-letter for your change. When you're happy, submit your pull request.
+Review the PR's title and description, as they're used by GitGitGadget
+respectively as the subject and body of the cover letter for your change. Refer
+to <<cover-letter,"The cover letter">> above for advice on how to title your
+submission and what content to include in the description.
+
+NOTE: For single-patch contributions, your commit message should already be
+meaningful and explain at a high level the purpose (what is happening and why)
+of your patch, so you usually do not need any additional context. In that case,
+remove the PR description that GitHub automatically generates from your commit
+message (your PR description should be empty). If you do need to supply even
+more context, you can do so in that space and it will be appended to the email
+that GitGitGadget will send, between the three-dash line and the diffstat
+(see <<single-patch,Bonus Chapter: One-Patch Changes>> for how this looks once
+submitted).
+
+When you're happy, submit your pull request.
 
 [[run-ci-ggg]]
 === Running CI and Getting Ready to Send
@@ -952,49 +1057,29 @@ but want reviewers to look at what they have so far. You can add this flag with
 Check and make sure that your patches and cover letter template exist in the
 directory you specified - you're nearly ready to send out your review!
 
-[[cover-letter]]
+[[preparing-cover-letter]]
 === Preparing Email
 
-In addition to an email per patch, the Git community also expects your patches
-to come with a cover letter, typically with a subject line [PATCH 0/x] (where
-x is the number of patches you're sending). Since you invoked `format-patch`
-with `--cover-letter`, you've already got a template ready. Open it up in your
-favorite editor.
+Since you invoked `format-patch` with `--cover-letter`, you've already got a
+cover letter template ready. Open it up in your favorite editor.
 
 You should see a number of headers present already. Check that your `From:`
-header is correct. Then modify your `Subject:` to something which succinctly
-covers the purpose of your entire topic branch, for example:
+header is correct. Then modify your `Subject:` (see <<cover-letter,above>> for
+how to choose a good title for your patch series):
 
 ----
-Subject: [PATCH 0/7] adding the 'psuh' command
+Subject: [PATCH 0/7] Add the 'psuh' command
 ----
 
 Make sure you retain the ``[PATCH 0/X]'' part; that's what indicates to the Git
-community that this email is the beginning of a review, and many reviewers
-filter their email for this type of flag.
+community that this email is the beginning of a patch series, and many
+reviewers filter their email for this type of flag.
 
 You'll need to add some extra parameters when you invoke `git send-email` to add
 the cover letter.
 
-Next you'll have to fill out the body of your cover letter. This is an important
-component of change submission as it explains to the community from a high level
-what you're trying to do, and why, in a way that's more apparent than just
-looking at your diff. Be sure to explain anything your diff doesn't make clear
-on its own.
-
-Here's an example body for `psuh`:
-
-----
-Our internal metrics indicate widespread interest in the command
-git-psuh - that is, many users are trying to use it, but finding it is
-unavailable, using some unknown workaround instead.
-
-The following handful of patches add the psuh command and implement some
-handy features on top of it.
-
-This patchset is part of the MyFirstContribution tutorial and should not
-be merged.
-----
+Next you'll have to fill out the body of your cover letter. Again, see
+<<cover-letter,above>> for what content to include.
 
 The template created by `git format-patch --cover-letter` includes a diffstat.
 This gives reviewers a summary of what they're in for when reviewing your topic.
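
As a rough end-to-end sketch of the flow described in this section (the branch
name and the generated file names are only placeholders), producing a series
with a cover letter and sending it might look like:

----
$ git format-patch --cover-letter -o outgoing/ origin/master..psuh
outgoing/0000-cover-letter.patch
outgoing/0001-psuh-add-a-built-in-by-popular-demand.patch
...
$ $EDITOR outgoing/0000-cover-letter.patch   # fill in the subject and body
$ git send-email outgoing/*.patch
----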
diff --git a/Documentation/RelNotes/2.37.0.txt b/Documentation/RelNotes/2.37.0.txt
new file mode 100644 (file)
index 0000000..aab788a
--- /dev/null
@@ -0,0 +1,156 @@
+Git v2.37 Release Notes
+=======================
+
+UI, Workflows & Features
+
+ * "vimdiff[123]" mergetool drivers have been reimplemented with a
+   more generic layout mechanism.
+
+ * "git -v" and "git -h" are now understood as "git --version" and
+   "git --help".
+
+ * The temporary files fed to the external diff command are now generated
+   inside a new temporary directory under the same basename.
+
+ * "git log --since=X" will stop traversal upon seeing a commit that
+   is older than X, but there may be commits behind it that are younger
+   than X when the commit was created with a faulty clock.  A new
+   option is added to keep digging without stopping, and instead
+   filter out commits with timestamp older than X.
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ * The performance of the "untracked cache" feature has been improved
+   when "--untracked-files=<mode>" and "status.showUntrackedFiles"
+   are combined.
+
+ * "git stash" works better with sparse index entries.
+
+ * "git show :<path>" learned to work better with the sparse-index
+   feature.
+
+ * Introduce and apply a coccinelle rule to discourage an explicit
+   comparison between a pointer and NULL, and apply the clean-up to
+   the maintenance track.
+
+Fixes since v2.36
+-----------------
+
+ * "git submodule update" without pathspec should silently skip an
+   uninitialized submodule, but it started to become noisy by mistake.
+   (merge 4f1ccef87c gc/submodule-update-part2 later to maint).
+
+ * "diff-tree --stdin" has been broken for about a year, but 2.36
+   release broke it even worse by breaking running the command with
+   <pathspec>, which in turn broke "gitk" and got noticed.  This has
+   been corrected by aligning its behaviour to that of "log".
+   (merge f8781bfda3 jc/diff-tree-stdin-fix later to maint).
+
+ * Regression fix for 2.36 where "git name-rev" started to sometimes
+   reference strings after they are freed.
+   (merge 45a14f578e rs/name-rev-fix-free-after-use later to maint).
+
+ * "git show <commit1> <commit2>... -- <pathspec>" lost the pathspec
+   when showing the second and subsequent commits, which has been
+   corrected.
+   (merge 5cdb38458e jc/show-pathspec-fix later to maint).
+
+ * "git fast-export -- <pathspec>" lost the pathspec when showing the
+   second and subsequent commits, which has been corrected.
+   (merge d1c25272f5 rs/fast-export-pathspec-fix later to maint).
+
+ * "git format-patch <args> -- <pathspec>" lost the pathspec when
+   showing the second and subsequent commits, which has been
+   corrected.
+   (merge 91f8f7e46f rs/format-patch-pathspec-fix later to maint).
+
+ * "git clone --origin X" leaked piece of memory that held value read
+   from the clone.defaultRemoteName configuration variable, which has
+   been plugged.
+   (merge 6dfadc8981 jc/clone-remote-name-leak-fix later to maint).
+
+ * Get rid of a bogus and over-eager coccinelle rule.
+   (merge 08bdd3a185 jc/cocci-xstrdup-or-null-fix later to maint).
+
+ * The path taken by the "git multi-pack-index" command from the end user
+   was compared with the path internally prepared by the tool without
+   first normalizing, which led to duplicated paths not being noticed,
+   which has been corrected.
+   (merge 11f9e8de3d ds/midx-normalize-pathname-before-comparison later to maint).
+
+ * Correct choices of C compilers used in various CI jobs.
+   (merge 3506cae04f ab/cc-package-fixes later to maint).
+
+ * Various cleanups to "git p4".
+   (merge 4ff0108d9e jh/p4-various-fixups later to maint).
+
+ * The progress meter of "git blame" was showing incorrect numbers
+   when processing only parts of the file.
+   (merge e5f5d7d42e ea/progress-partial-blame later to maint).
+
+ * "git rebase --keep-base <upstream> <branch-to-rebase>" computed the
+   commit to rebase onto incorrectly, which has been corrected.
+   (merge 9e5ebe9668 ah/rebase-keep-base-fix later to maint).
+
+ * Fix a leak of FILE * in an error codepath.
+   (merge c0befa0c03 kt/commit-graph-plug-fp-leak-on-error later to maint).
+
+ * Avoid problems from interaction between malloc_check and address
+   sanitizer.
+   (merge 067109a5e7 pw/test-malloc-with-sanitize-address later to maint).
+
+ * The commit summary shown after making a commit now matches what is
+   given in "git status" by not using the break-rewrite heuristics.
+   (merge 84792322ed rs/commit-summary-wo-break-rewrite later to maint).
+
+ * Update a few end-user facing messages around eol conversion.
+   (merge c970d30c2c ah/convert-warning-message later to maint).
+
+ * Trace2 documentation updates.
+   (merge a6c80c313c js/trace2-doc-fixes later to maint).
+
+ * Build procedure fixup.
+   (merge 1fbfd96f50 mg/detect-compiler-in-c-locale later to maint).
+
+ * "git pull" without "--recurse-submodules=<arg>" made
+   submodule.recurse take precedence over fetch.recurseSubmodules by
+   mistake, which has been corrected.
+   (merge 5819417365 gc/pull-recurse-submodules later to maint).
+
+ * "git bisect" was too silent before it is ready to start computing
+   the actual bisection, which has been corrected.
+   (merge f11046e6de cd/bisect-messages-from-pre-flight-states later to maint).
+
+ * macOS CI jobs have been occasionally flaky due to tentative version
+   skew between perforce and the homebrew packager.  Instead of
+   failing the whole CI job, just let it skip the p4 tests when this
+   happens.
+   (merge f15e00b463 cb/ci-make-p4-optional later to maint).
+
+ * A bit of test framework fixes, together with a few fixes to issues
+   found by valgrind.
+   (merge 7c898554d7 ab/valgrind-fixes later to maint).
+
+ * "git archive --add-file=<path>" picked up the raw permission bits
+   from the path and propagated them to zip output in some cases, without
+   normalization, which has been corrected (tar output did not have
+   this issue).
+   (merge 6a61661967 jc/archive-add-file-normalize-mode later to maint).
+
+ * Other code cleanup, docfix, build fix, etc.
+   (merge e6b2582da3 cm/reftable-0-length-memset later to maint).
+   (merge 0b75e5bf22 ab/misc-cleanup later to maint).
+   (merge 52e1ab8a76 ea/rebase-code-simplify later to maint).
+   (merge 756d15923b sg/safe-directory-tests-and-docs later to maint).
+   (merge d097a23bfa ds/do-not-call-bug-on-bad-refs later to maint).
+   (merge c36c27e75c rs/t7812-pcre2-ws-bug-test later to maint).
+   (merge 1da312742d gf/unused-includes later to maint).
+   (merge 465b30a92d pb/submodule-recurse-mode-enum later to maint).
+   (merge 82b28c4ed8 km/t3501-use-test-helpers later to maint).
+   (merge 72315e431b sa/t1011-use-helpers later to maint).
+   (merge 95b3002201 cg/vscode-with-gdb later to maint).
+   (merge fbe5f6b804 tk/p4-utf8-bom later to maint).
+   (merge 17f273ffba tk/p4-with-explicity-sync later to maint).
+   (merge 944db25c60 kf/p4-multiple-remotes later to maint).
+   (merge b014cee8de jc/update-ozlabs-url later to maint).
index a6121d1d4280248ae8d0dd44f8ad5952c6e0ced9..5bd795e5dbf90daff92fcf33a10f7c318e9d0689 100644 (file)
@@ -452,7 +452,10 @@ repositories.
 
 - `gitk-git/` comes from Paul Mackerras's gitk project:
 
-       git://ozlabs.org/~paulus/gitk
+       git://git.ozlabs.org/~paulus/gitk
+
+   Those who are interested in improving gitk can volunteer to help Paul
+   in maintaining it, cf. <YntxL/fTplFm8lr6@cleo>.
 
 - `po/` comes from the localization coordinator, Jiang Xin:
 
index cafbbef46ae9c102e9e175e67282badb67e09255..90b380970023a76799206a30789d4f8af7793991 100644 (file)
@@ -45,6 +45,15 @@ mergetool.meld.useAutoMerge::
        value of `false` avoids using `--auto-merge` altogether, and is the
        default value.
 
+mergetool.vimdiff.layout::
+       The vimdiff backend uses this variable to control how its split
+       windows look. Applies even if you are using Neovim (`nvim`) or
+       gVim (`gvim`) as the merge tool. See BACKEND SPECIFIC HINTS section
+ifndef::git-mergetool[]
+       in linkgit:git-mergetool[1].
+endif::[]
+       for details.
+
 mergetool.hideResolved::
        During a merge Git will automatically resolve as many conflicts as
        possible and write the 'MERGED' file containing conflict markers around
index 6d764fe0ccf3a87dbfeb34da03b7e1dcb380ac99..ae0e2e3bdb479b42bf40f52396ea82b6c6f0bd38 100644 (file)
@@ -13,8 +13,8 @@ override any such directories specified in the system config), add a
 `safe.directory` entry with an empty value.
 +
 This config setting is only respected when specified in a system or global
-config, not when it is specified in a repository config or via the command
-line option `-c safe.directory=<path>`.
+config, not when it is specified in a repository config, via the command
+line option `-c safe.directory=<path>`, or in environment variables.
 +
 The value of this setting is interpolated, i.e. `~/<path>` expands to a
 path relative to the home directory and `%(prefix)/<path>` expands to a
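
A quick sketch of how this is typically set (the repository path is just an
example); note that, per the change above, setting it through `-c` or an
environment variable is ignored:

----
$ git config --global --add safe.directory /srv/shared/project.git
$ git config --global --add safe.directory '*'   # opt out of the check entirely
----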
index e587c7763a7c21664c7586e273f348f70dd061d7..f784027bc13724712f902b115abd98bc1c04b43f 100644 (file)
@@ -101,6 +101,7 @@ success of the resolution after the custom tool has exited.
 
 CONFIGURATION
 -------------
+:git-mergetool: 1
 include::config/mergetool.txt[]
 
 TEMPORARY FILES
@@ -113,6 +114,13 @@ Setting the `mergetool.keepBackup` configuration variable to `false`
 causes `git mergetool` to automatically remove the backup as files
 are successfully merged.
 
+BACKEND SPECIFIC HINTS
+----------------------
+
+vimdiff
+~~~~~~~
+include::mergetools/vimdiff.txt[]
+
 GIT
 ---
 Part of the linkgit:git[1] suite
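
A minimal sketch of wiring up the new layout knob documented above (the layout
string is one of the examples from the vimdiff backend hints):

----
$ git config --global merge.tool vimdiff
$ git config --global mergetool.vimdiff.layout "LOCAL,BASE,REMOTE / MERGED"
$ git mergetool        # resolve conflicts, then :wq in the MERGED buffer
----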
index e21fcd8f7127a3a8e9e6fcb5a268ab66a097b3a2..de5ee6748e35886fd38cf3a4a73b28f270e06e2c 100644 (file)
@@ -636,7 +636,42 @@ git-p4.pathEncoding::
        Git expects paths encoded as UTF-8. Use this config to tell git-p4
        what encoding Perforce had used for the paths. This encoding is used
        to transcode the paths to UTF-8. As an example, Perforce on Windows
-       often uses "cp1252" to encode path names.
+       often uses "cp1252" to encode path names. If this option is passed
+       into a p4 clone request, it is persisted in the resulting new git
+       repo.
+
+git-p4.metadataDecodingStrategy::
+       Perforce keeps the encoding of changelist descriptions and user
+       full names as stored by the client on a given OS. The p4v client
+       uses the OS-local encoding, and so different users can end up storing
+       different changelist descriptions or user full names in different
+       encodings, in the same depot.
+       Git tolerates inconsistent/incorrect encodings in commit messages
+       and author names, but expects them to be specified in utf-8.
+       git-p4 can use three different decoding strategies in handling the
+       encoding uncertainty in Perforce: 'passthrough' simply passes the
+       original bytes through from Perforce to git, creating usable but
+       incorrectly-encoded data when the Perforce data is encoded as
+       anything other than utf-8. 'strict' expects the Perforce data to be
+       encoded as utf-8, and fails to import when this is not true.
+       'fallback' attempts to interpret the data as utf-8, and otherwise
+       falls back to using a secondary encoding - by default the common
+       windows encoding 'cp-1252' - with upper-range bytes escaped if
+       decoding with the fallback encoding also fails.
+       Under python2 the default strategy is 'passthrough' for historical
+       reasons, and under python3 the default is 'fallback'.
+       When 'strict' is selected and decoding fails, the error message will
+       propose changing this config parameter as a workaround. If this
+       option is passed into a p4 clone request, it is persisted into the
+       resulting new git repo.
+
+git-p4.metadataFallbackEncoding::
+       Specify the fallback encoding to use when decoding Perforce author
+       names and changelist descriptions using the 'fallback' strategy
+       (see git-p4.metadataDecodingStrategy). The fallback encoding will
+       only be used when decoding as utf-8 fails. This option defaults to
+       cp1252, a common windows encoding. If this option is passed into a
+       p4 clone request, it is persisted into the resulting new git repo.
 
 git-p4.largeFileSystem::
        Specify the system that is used for large (binary) files. Please note
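
A hedged sketch of how these decoding knobs could be used for a fresh import
(the depot path is hypothetical); as noted above, options given at clone time
are persisted into the new repository:

----
$ git config --global git-p4.metadataDecodingStrategy fallback
$ git config --global git-p4.metadataFallbackEncoding cp1252
$ git p4 clone //depot/project@all
----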
index 9da4647061cdc7e85df685169c81f2f08b9d6707..262fb01aec004c880e16d2630494a194f720a713 100644 (file)
@@ -215,9 +215,10 @@ leave out at most one of A and B, in which case it defaults to HEAD.
 
 --keep-base::
        Set the starting point at which to create the new commits to the
-       merge base of <upstream> <branch>. Running
+       merge base of <upstream> and <branch>. Running
        'git rebase --keep-base <upstream> <branch>' is equivalent to
-       running 'git rebase --onto <upstream>... <upstream>'.
+       running
+       'git rebase --onto <upstream>...<branch> <upstream> <branch>'.
 +
 This option is useful in the case where one is developing a feature on
 top of an upstream branch. While the feature is being worked on, the
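
Restating the corrected equivalence above as commands (branch names are
placeholders):

----
$ git rebase --keep-base origin/main topic
# ... is documented as equivalent to:
$ git rebase --onto origin/main...topic origin/main topic
----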
index 13f83a2a3a12209791de1032839764a493ec4457..302607a49676d59aef4f1f8d39d3a302b7b66cc2 100644 (file)
@@ -9,7 +9,7 @@ git - the stupid content tracker
 SYNOPSIS
 --------
 [verse]
-'git' [--version] [--help] [-C <path>] [-c <name>=<value>]
+'git' [-v | --version] [-h | --help] [-C <path>] [-c <name>=<value>]
     [--exec-path[=<path>]] [--html-path] [--man-path] [--info-path]
     [-p|--paginate|-P|--no-pager] [--no-replace-objects] [--bare]
     [--git-dir=<path>] [--work-tree=<path>] [--namespace=<name>]
@@ -39,6 +39,7 @@ or https://git-scm.com/docs.
 
 OPTIONS
 -------
+-v::
 --version::
        Prints the Git suite version that the 'git' program came from.
 +
@@ -46,6 +47,7 @@ This option is internally converted to `git version ...` and accepts
 the same options as the linkgit:git-version[1] command. If `--help` is
 also given, it takes precedence over `--version`.
 
+-h::
 --help::
        Prints the synopsis and a list of the most commonly used
        commands. If the option `--all` or `-a` is given then all
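
A quick sketch of the new shorthands (the version string shown is
illustrative):

----
$ git -v          # same as `git --version`
git version 2.37.0
$ git -h          # same as `git --help`; prints the synopsis and common commands
----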
diff --git a/Documentation/mergetools/vimdiff.txt b/Documentation/mergetools/vimdiff.txt
new file mode 100644 (file)
index 0000000..2d631e9
--- /dev/null
@@ -0,0 +1,194 @@
+Description
+^^^^^^^^^^^
+
+When specifying `--tool=vimdiff` in `git mergetool`, Git will open Vim with a
+4-window layout distributed in the following way:
+....
+------------------------------------------
+|             |           |              |
+|   LOCAL     |   BASE    |   REMOTE     |
+|             |           |              |
+------------------------------------------
+|                                        |
+|                MERGED                  |
+|                                        |
+------------------------------------------
+....
+`LOCAL`, `BASE` and `REMOTE` are read-only buffers showing the contents of the
+conflicting file in specific commits ("commit you are merging into", "common
+ancestor commit" and "commit you are merging from" respectively)
+
+`MERGED` is a writable buffer where you have to resolve the conflicts (using the
+other read-only buffers as a reference). Once you are done, save and exit Vim as
+usual (`:wq`) or, if you want to abort, exit using `:cq`.
+
+Layout configuration
+^^^^^^^^^^^^^^^^^^^^
+
+You can change the window layout used by Vim by setting the configuration
+variable `mergetool.vimdiff.layout`, which accepts a string where the following
+separators have special meaning:
+
+  - `+` is used to "open a new tab"
+  - `,` is used to "open a new vertical split"
+  - `/` is used to "open a new horizontal split"
+  - `@` is used to indicate which is the file containing the final version after
+    solving the conflicts. If not present, `MERGED` will be used by default.
+
+The precedence of the operators is as follows (you can use parentheses to
+change it):
+
+    `@` > `+` > `/` > `,`
+
+Let's see some examples to understand how it works:
+
+* `layout = "(LOCAL,BASE,REMOTE)/MERGED"`
++
+--
+This is exactly the same as the default layout we have already seen.
+
+Note that `/` has precedence over `,` and thus the parentheses are not
+needed in this case. The next layout definition is equivalent:
+
+    layout = "LOCAL,BASE,REMOTE / MERGED"
+--
+* `layout = "LOCAL,MERGED,REMOTE"`
++
+--
+If, for some reason, we are not interested in the `BASE` buffer.
+....
+------------------------------------------
+|             |           |              |
+|             |           |              |
+|   LOCAL     |   MERGED  |   REMOTE     |
+|             |           |              |
+|             |           |              |
+------------------------------------------
+....
+--
+* `layout = "MERGED"`
++
+--
+Only the `MERGED` buffer will be shown. Note, however, that all the other
+ones are still loaded in vim, and you can access them with the "buffers"
+command.
+....
+------------------------------------------
+|                                        |
+|                                        |
+|                 MERGED                 |
+|                                        |
+|                                        |
+------------------------------------------
+....
+--
+* `layout = "@LOCAL,REMOTE"`
++
+--
+When `MERGED` is not present in the layout, you must "mark" one of the
+buffers with `@`. That buffer becomes the one you need to edit and
+save after resolving the conflicts.
+....
+------------------------------------------
+|                   |                    |
+|                   |                    |
+|                   |                    |
+|     LOCAL         |    REMOTE          |
+|                   |                    |
+|                   |                    |
+|                   |                    |
+------------------------------------------
+....
+--
+* `layout = "LOCAL,BASE,REMOTE / MERGED + BASE,LOCAL + BASE,REMOTE"`
++
+--
+Three tabs will open: the first one is a copy of the default layout, while
+the other two only show the differences between (`BASE` and `LOCAL`) and
+(`BASE` and `REMOTE`) respectively.
+....
+------------------------------------------
+| <TAB #1> |  TAB #2  |  TAB #3  |       |
+------------------------------------------
+|             |           |              |
+|   LOCAL     |   BASE    |   REMOTE     |
+|             |           |              |
+------------------------------------------
+|                                        |
+|                MERGED                  |
+|                                        |
+------------------------------------------
+....
+....
+------------------------------------------
+|  TAB #1  | <TAB #2> |  TAB #3  |       |
+------------------------------------------
+|                   |                    |
+|                   |                    |
+|                   |                    |
+|     BASE          |    LOCAL           |
+|                   |                    |
+|                   |                    |
+|                   |                    |
+------------------------------------------
+....
+....
+------------------------------------------
+|  TAB #1  |  TAB #2  | <TAB #3> |       |
+------------------------------------------
+|                   |                    |
+|                   |                    |
+|                   |                    |
+|     BASE          |    REMOTE          |
+|                   |                    |
+|                   |                    |
+|                   |                    |
+------------------------------------------
+....
+--
+* `layout = "LOCAL,BASE,REMOTE / MERGED + BASE,LOCAL + BASE,REMOTE + (LOCAL/BASE/REMOTE),MERGED"`
++
+--
+Same as the previous example, but adds a fourth tab with the same
+information as the first tab, with a different layout.
+....
+---------------------------------------------
+|  TAB #1  |  TAB #2  |  TAB #3  | <TAB #4> |
+---------------------------------------------
+|       LOCAL         |                     |
+|---------------------|                     |
+|       BASE          |        MERGED       |
+|---------------------|                     |
+|       REMOTE        |                     |
+---------------------------------------------
+....
+Note how in the last tab definition we need to use parentheses to make `,`
+have precedence over `/`.
+--
+
+Variants
+^^^^^^^^
+
+Instead of `--tool=vimdiff`, you can also use one of these other variants:
+
+  * `--tool=gvimdiff`, to open gVim instead of Vim.
+
+  * `--tool=nvimdiff`, to open Neovim instead of Vim.
+
+When using these variants, in order to specify a custom layout you will have to
+set the configuration variables `mergetool.gvimdiff.layout` and
+`mergetool.nvimdiff.layout` instead of `mergetool.vimdiff.layout`.
+
+In addition, for backwards compatibility with previous Git versions, you can
+also append `1`, `2` or `3` to either `vimdiff` or any of the variants (e.g.
+`vimdiff3`, `nvimdiff1`, etc.) to use a predefined layout.
+In other words, using `--tool=[g,n,]vimdiffx` is the same as using
+`--tool=[g,n,]vimdiff` and setting the configuration variable
+`mergetool.[g,n,]vimdiff.layout` to...
+
+  * `x=1`: `"@LOCAL, REMOTE"`
+  * `x=2`: `"LOCAL, MERGED, REMOTE"`
+  * `x=3`: `"MERGED"`
+
+Example: using `--tool=gvimdiff2` will open `gvim` with three columns (LOCAL,
+MERGED and REMOTE).
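
For example, the mapping above means the following two invocations should
behave the same (a sketch, using the gvimdiff variant):

----
$ git mergetool --tool=gvimdiff2
# ... behaves like:
$ git config mergetool.gvimdiff.layout "LOCAL, MERGED, REMOTE"
$ git mergetool --tool=gvimdiff
----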
index fd4f4e26c90face314632ac6c2b81bed648abd17..195e74eec633ea913c0934d2b690b674360376d7 100644 (file)
@@ -25,6 +25,11 @@ ordering and formatting options, such as `--reverse`.
 --after=<date>::
        Show commits more recent than a specific date.
 
+--since-as-filter=<date>::
+       Show all commits more recent than a specific date. This visits
+       all commits in the range, rather than stopping at the first commit which
+       is older than a specific date.
+
 --until=<date>::
 --before=<date>::
        Show commits older than a specific date.
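
A short sketch contrasting the two options described above (the date is
arbitrary):

----
$ git log --since=2022-01-01             # stops at the first commit older than the date
$ git log --since-as-filter=2022-01-01   # walks the whole range, filtering by date
----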
index bb13ca3db8b5f72e5754e8d925cf26a85b8304db..f4a8a69087864fad4faaee74b8edc34f3e1e86a8 100644 (file)
@@ -5,7 +5,7 @@ information to stderr or a file.  The Trace2 feature is inactive unless
 explicitly enabled by enabling one or more Trace2 Targets.
 
 The Trace2 API is intended to replace the existing (Trace1)
-printf-style tracing provided by the existing `GIT_TRACE` and
+`printf()`-style tracing provided by the existing `GIT_TRACE` and
 `GIT_TRACE_PERFORMANCE` facilities.  During initial implementation,
 Trace2 and Trace1 may operate in parallel.
 
@@ -24,8 +24,8 @@ for example.
 
 Trace2 is controlled using `trace2.*` config values in the system and
 global config files and `GIT_TRACE2*` environment variables.  Trace2 does
-not read from repo local or worktree config files or respect `-c`
-command line config settings.
+not read from repo local or worktree config files, nor does it respect
+`-c` command line config settings.
 
 == Trace2 Targets
 
@@ -34,8 +34,8 @@ Format details are given in a later section.
 
 === The Normal Format Target
 
-The normal format target is a tradition printf format and similar
-to GIT_TRACE format.  This format is enabled with the `GIT_TRACE2`
+The normal format target is a traditional `printf()` format and similar
+to the `GIT_TRACE` format.  This format is enabled with the `GIT_TRACE2`
 environment variable or the `trace2.normalTarget` system or global
 config setting.
 
@@ -69,8 +69,8 @@ $ cat ~/log.normal
 === The Performance Format Target
 
 The performance format target (PERF) is a column-based format to
-replace GIT_TRACE_PERFORMANCE and is suitable for development and
-testing, possibly to complement tools like gprof.  This format is
+replace `GIT_TRACE_PERFORMANCE` and is suitable for development and
+testing, possibly to complement tools like `gprof`.  This format is
 enabled with the `GIT_TRACE2_PERF` environment variable or the
 `trace2.perfTarget` system or global config setting.
 
@@ -128,7 +128,7 @@ yields
 
 ------------
 $ cat ~/log.event
-{"event":"version","sid":"sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.620713Z","file":"common-main.c","line":38,"evt":"3","exe":"2.20.1.155.g426c96fcdb"}
+{"event":"version","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.620713Z","file":"common-main.c","line":38,"evt":"3","exe":"2.20.1.155.g426c96fcdb"}
 {"event":"start","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621027Z","file":"common-main.c","line":39,"t_abs":0.001173,"argv":["git","version"]}
 {"event":"cmd_name","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621122Z","file":"git.c","line":432,"name":"version","hierarchy":"version"}
 {"event":"exit","sid":"20190408T191610.507018Z-H9b68c35f-P000059a8","thread":"main","time":"2019-01-16T17:28:42.621236Z","file":"git.c","line":662,"t_abs":0.001227,"code":0}
@@ -170,9 +170,9 @@ Some functions have a `_va_fl()` suffix to indicate that they also
 take a `va_list` argument.
 
 Some functions have a `_printf_fl()` suffix to indicate that they also
-take a varargs argument.
+take a `printf()` style format with a variable number of arguments.
 
-There are CPP wrapper macros and ifdefs to hide most of these details.
+There are CPP wrapper macros and `#ifdef`s to hide most of these details.
 See `trace2.h` for more details.  The following discussion will only
 describe the simplified forms.
 
@@ -234,7 +234,7 @@ Events are written as lines of the form:
        is the event name.
 
 `<event-message>`::
-       is a free-form printf message intended for human consumption.
+       is a free-form `printf()` message intended for human consumption.
 +
 Note that this may contain embedded LF or CRLF characters that are
 not escaped, so the event may spill across multiple lines.
@@ -300,7 +300,7 @@ This field is in anticipation of in-proc submodules in the future.
        indicate a broad category, such as "index" or "status".
 
 `<perf-event-message>`::
-       is a free-form printf message intended for human consumption.
+       is a free-form `printf()` message intended for human consumption.
 
 ------------
 15:33:33.532712 wt-status.c:2310                  | d0 | main                     | region_enter | r1  |  0.126064 |           | status     | label:print
@@ -533,7 +533,7 @@ these special values are used:
 ------------
 
 `"cmd_mode"`::
-       This event, when present, describes the command variant This
+       This event, when present, describes the command variant. This
        event may be emitted more than once.
 +
 ------------
@@ -588,7 +588,7 @@ with "?".
 
 `"child_exit"`::
        This event is generated after the current process has returned
-       from the waitpid() and collected the exit information from the
+       from the `waitpid()` and collected the exit information from the
        child.
 +
 ------------
@@ -609,7 +609,7 @@ process may be a shell script which doesn't have a session-id.)
 +
 Note that the `t_rel` field contains the observed run time in seconds
 for the child process (starting before the fork/exec/spawn and
-stopping after the waitpid() and includes OS process creation overhead).
+stopping after the `waitpid()` and includes OS process creation overhead).
 So this time will be slightly larger than the atexit time reported by
 the child process itself.
 
@@ -635,7 +635,7 @@ process may be a shell script which doesn't have a session-id.)
 +
 This event is generated after the child is started in the background
 and given a little time to boot up and start working.  If the child
-startups normally and while the parent is still waiting, the "ready"
+starts up normally while the parent is still waiting, the "ready"
 field will have the value "ready".
 If the child is too slow to start and the parent times out, the field
 will have the value "timeout".
@@ -949,7 +949,7 @@ atexit elapsed:3.868970 code:0
 
 Regions::
 
-       Regions can be use to time an interesting section of code.
+       Regions can be used to time an interesting section of code.
 +
 ----------------
 void wt_status_collect(struct wt_status *s)
@@ -1103,9 +1103,9 @@ Thread Events::
 
        Thread messages added to a thread-proc.
 +
-For example, the multithreaded preload-index code can be
+For example, the multi-threaded preload-index code can be
 instrumented with a region around the thread pool and then
-per-thread start and exit events within the threadproc.
+per-thread start and exit events within the thread-proc.
 +
 ----------------
 static void *preload_thread(void *_data)
@@ -1214,11 +1214,11 @@ as each thread starts and allocates TLS storage.
 There are a few issues to resolve before we can completely
 switch to Trace2.
 
-* Updating existing tests that assume GIT_TRACE format messages.
+* Updating existing tests that assume `GIT_TRACE` format messages.
 
-* How to best handle custom GIT_TRACE_<key> messages?
+* How to best handle custom `GIT_TRACE_<key>` messages?
 
-** The GIT_TRACE_<key> mechanism allows each <key> to write to a
+** The `GIT_TRACE_<key>` mechanism allows each <key> to write to a
 different file (in addition to just stderr).
 
 ** Do we want to maintain that ability or simply write to the existing
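
A minimal sketch of enabling the targets discussed in this document (the file
paths are arbitrary):

----
$ export GIT_TRACE2=~/log.normal          # normal format target
$ export GIT_TRACE2_PERF=~/log.perf       # performance format target
$ export GIT_TRACE2_EVENT=~/log.event     # event (JSON) format target
$ git version
$ cat ~/log.event
----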
index c1e8552d27e84c07a65de289f55f11df323701a0..1d667699e1926de980806790e50dfc0610743aad 100755 (executable)
@@ -1,7 +1,7 @@
 #!/bin/sh
 
 GVF=GIT-VERSION-FILE
-DEF_VER=v2.36.1
+DEF_VER=v2.36.GIT
 
 LF='
 '
index f8bccfab5e9c46d39be6fe28b091330d5a3dd895..18ca6744a500863086751cc2d39c64a1d83867c8 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -1267,8 +1267,9 @@ PTHREAD_CFLAGS =
 SPARSE_FLAGS ?= -std=gnu99
 SP_EXTRA_FLAGS = -Wno-universal-initializer
 
-# For informing GIT-BUILD-OPTIONS of the SANITIZE=leak target
+# For informing GIT-BUILD-OPTIONS of the SANITIZE=leak,address targets
 SANITIZE_LEAK =
+SANITIZE_ADDRESS =
 
 # For the 'coccicheck' target; setting SPATCH_BATCH_SIZE higher will
 # usually result in less CPU usage at the cost of higher peak memory.
@@ -1314,6 +1315,7 @@ SANITIZE_LEAK = YesCompiledWithIt
 endif
 ifneq ($(filter address,$(SANITIZERS)),)
 NO_REGEX = NeededForASAN
+SANITIZE_ADDRESS = YesCompiledWithIt
 endif
 endif
 
@@ -2862,6 +2864,7 @@ GIT-BUILD-OPTIONS: FORCE
        @echo PAGER_ENV=\''$(subst ','\'',$(subst ','\'',$(PAGER_ENV)))'\' >>$@+
        @echo DC_SHA1=\''$(subst ','\'',$(subst ','\'',$(DC_SHA1)))'\' >>$@+
        @echo SANITIZE_LEAK=\''$(subst ','\'',$(subst ','\'',$(SANITIZE_LEAK)))'\' >>$@+
+       @echo SANITIZE_ADDRESS=\''$(subst ','\'',$(subst ','\'',$(SANITIZE_ADDRESS)))'\' >>$@+
        @echo X=\'$(X)\' >>$@+
 ifdef FSMONITOR_DAEMON_BACKEND
        @echo FSMONITOR_DAEMON_BACKEND=\''$(subst ','\'',$(subst ','\'',$(FSMONITOR_DAEMON_BACKEND)))'\' >>$@+
@@ -3407,6 +3410,7 @@ coverage-clean-results:
        $(RM) coverage-untested-functions
        $(RM) -r cover_db/
        $(RM) -r cover_db_html/
+       $(RM) coverage-test.made
 
 coverage-clean: coverage-clean-results
        $(RM) $(addsuffix *.gcno,$(object_dirs))
@@ -3421,13 +3425,17 @@ coverage-compile:
 coverage-test: coverage-clean-results coverage-compile
        $(MAKE) CFLAGS="$(COVERAGE_CFLAGS)" LDFLAGS="$(COVERAGE_LDFLAGS)" \
                DEFAULT_TEST_TARGET=test -j1 test
+       touch coverage-test.made
+
+coverage-test.made:
+       $(MAKE) coverage-test
 
 coverage-prove: coverage-clean-results coverage-compile
        $(MAKE) CFLAGS="$(COVERAGE_CFLAGS)" LDFLAGS="$(COVERAGE_LDFLAGS)" \
                DEFAULT_TEST_TARGET=prove GIT_PROVE_OPTS="$(GIT_PROVE_OPTS) -j1" \
                -j1 test
 
-coverage-report:
+coverage-report: coverage-test.made
        $(QUIET_GCOV)for dir in $(object_dirs); do \
                $(GCOV) $(GCOVFLAGS) --object-directory=$$dir $$dir*.c || exit; \
        done
index 4600187f85ed014f1b2ded58da0722461436c7ca..51144b6e83418a44108511632565ef053f8c7712 120000 (symlink)
--- a/RelNotes
+++ b/RelNotes
@@ -1 +1 @@
-Documentation/RelNotes/2.36.1.txt
\ No newline at end of file
+Documentation/RelNotes/2.37.0.txt
\ No newline at end of file
diff --git a/alloc.c b/alloc.c
index 957a0af3626432b0c9c2dd1301f20c4ff339a0a4..27f697e4c87a05ef7cc847a17e83e14e9cfd2a4d 100644 (file)
--- a/alloc.c
+++ b/alloc.c
@@ -27,7 +27,6 @@ union any_object {
 };
 
 struct alloc_state {
-       int count; /* total number of nodes allocated */
        int nr;    /* number of nodes left in current allocation */
        void *p;   /* first free node in current allocation */
 
@@ -63,7 +62,6 @@ static inline void *alloc_node(struct alloc_state *s, size_t node_size)
                s->slabs[s->slab_nr++] = s->p;
        }
        s->nr--;
-       s->count++;
        ret = s->p;
        s->p = (char *)s->p + node_size;
        memset(ret, 0, node_size);
@@ -122,22 +120,3 @@ void *alloc_commit_node(struct repository *r)
        init_commit_node(c);
        return c;
 }
-
-static void report(const char *name, unsigned int count, size_t size)
-{
-       fprintf(stderr, "%10s: %8u (%"PRIuMAX" kB)\n",
-                       name, count, (uintmax_t) size);
-}
-
-#define REPORT(name, type)     \
-    report(#name, r->parsed_objects->name##_state->count, \
-                 r->parsed_objects->name##_state->count * sizeof(type) >> 10)
-
-void alloc_report(struct repository *r)
-{
-       REPORT(blob, struct blob);
-       REPORT(tree, struct tree);
-       REPORT(commit, struct commit);
-       REPORT(tag, struct tag);
-       REPORT(object, union any_object);
-}
diff --git a/alloc.h b/alloc.h
index 371d388b552fb01824b4ec208abe7fdf11d95eec..3f4a0ad310a94bd026f48f48491985e3e2053ee2 100644 (file)
--- a/alloc.h
+++ b/alloc.h
@@ -13,7 +13,6 @@ void init_commit_node(struct commit *c);
 void *alloc_commit_node(struct repository *r);
 void *alloc_tag_node(struct repository *r);
 void *alloc_object_node(struct repository *r);
-void alloc_report(struct repository *r);
 
 struct alloc_state *allocate_alloc_state(void);
 void clear_alloc_state(struct alloc_state *s);
diff --git a/apply.c b/apply.c
index d19c26d332e7908b364be4e2eddea4c2f1bde0b5..2b7cd930efa3bd31fec9ac72b94bac31272c4c09 100644 (file)
--- a/apply.c
+++ b/apply.c
@@ -3274,11 +3274,11 @@ static struct patch *in_fn_table(struct apply_state *state, const char *name)
 {
        struct string_list_item *item;
 
-       if (name == NULL)
+       if (!name)
                return NULL;
 
        item = string_list_lookup(&state->fn_table, name);
-       if (item != NULL)
+       if (item)
                return (struct patch *)item->util;
 
        return NULL;
@@ -3318,7 +3318,7 @@ static void add_to_fn_table(struct apply_state *state, struct patch *patch)
         * This should cover the cases for normal diffs,
         * file creations and copies
         */
-       if (patch->new_name != NULL) {
+       if (patch->new_name) {
                item = string_list_insert(&state->fn_table, patch->new_name);
                item->util = patch;
        }
index e29d0e00f6cc4ecb7883b3661807dfc4b705d0b9..e2121ebefb0451deeaacd65687eba46149ffe844 100644 (file)
--- a/archive.c
+++ b/archive.c
@@ -342,7 +342,7 @@ int write_archive_entries(struct archiver_args *args,
                else
                        err = write_entry(args, &fake_oid, path_in_archive.buf,
                                          path_in_archive.len,
-                                         info->stat.st_mode,
+                                         canon_mode(info->stat.st_mode),
                                          content.buf, content.len);
                if (err)
                        break;
@@ -465,7 +465,7 @@ static void parse_treeish_arg(const char **argv,
        }
 
        tree = parse_tree_indirect(&oid);
-       if (tree == NULL)
+       if (!tree)
                die(_("not a tree object: %s"), oid_to_hex(&oid));
 
        if (prefix) {
index 1015aeb8eaeaee7f539a4631b6dbf1c27175c7fb..ee3fd65f3bdb89611d7575239ee79e8c50bcd703 100644 (file)
--- a/bisect.h
+++ b/bisect.h
@@ -62,6 +62,15 @@ enum bisect_error {
        BISECT_INTERNAL_SUCCESS_MERGE_BASE = -11
 };
 
+/*
+ * Stores how many good/bad commits we have stored for a bisect. nr_bad can
+ * only be 0 or 1.
+ */
+struct bisect_state {
+       unsigned int nr_good;
+       unsigned int nr_bad;
+};
+
 enum bisect_error bisect_next_all(struct repository *r, const char *prefix);
 
 int estimate_bisect_steps(int all);
diff --git a/blame.c b/blame.c
index 186ad96120983107d10417e52829e60df12c2bc4..da1052ac94bb47282fb6191e457dda6597c3e8fc 100644 (file)
--- a/blame.c
+++ b/blame.c
@@ -1072,7 +1072,7 @@ static struct blame_entry *blame_merge(struct blame_entry *list1,
        if (p1->s_lno <= p2->s_lno) {
                do {
                        tail = &p1->next;
-                       if ((p1 = *tail) == NULL) {
+                       if (!(p1 = *tail)) {
                                *tail = p2;
                                return list1;
                        }
@@ -1082,7 +1082,7 @@ static struct blame_entry *blame_merge(struct blame_entry *list1,
                *tail = p2;
                do {
                        tail = &p2->next;
-                       if ((p2 = *tail) == NULL)  {
+                       if (!(p2 = *tail))  {
                                *tail = p1;
                                return list1;
                        }
@@ -1090,7 +1090,7 @@ static struct blame_entry *blame_merge(struct blame_entry *list1,
                *tail = p1;
                do {
                        tail = &p1->next;
-                       if ((p1 = *tail) == NULL) {
+                       if (!(p1 = *tail)) {
                                *tail = p2;
                                return list1;
                        }
index 01ecb816d5c4181786e7affab5aabd6a270a1db8..bde705b092b3c6f1b929ce504fb5599b1211705b 100644 (file)
--- a/branch.c
+++ b/branch.c
@@ -466,7 +466,7 @@ static void dwim_branch_start(struct repository *r, const char *start_name,
                break;
        }
 
-       if ((commit = lookup_commit_reference(r, &oid)) == NULL)
+       if (!(commit = lookup_commit_reference(r, &oid)))
                die(_("not a valid branch point: '%s'"), start_name);
        if (out_real_ref) {
                *out_real_ref = real_ref;
@@ -653,7 +653,7 @@ void create_branches_recursively(struct repository *r, const char *name,
         * be created in every submodule.
         */
        for (i = 0; i < submodule_entry_list.entry_nr; i++) {
-               if (submodule_entry_list.entries[i].repo == NULL) {
+               if (!submodule_entry_list.entries[i].repo) {
                        int code = die_message(
                                _("submodule '%s': unable to find submodule"),
                                submodule_entry_list.entries[i].submodule->name);
index 3f099b960565ff2944209ba514ea7274dad852f5..555219de40fa7e3097612a60eb953f81580a8de9 100644 (file)
@@ -1,7 +1,6 @@
 #include "cache.h"
 #include "builtin.h"
 #include "parse-options.h"
-#include "lockfile.h"
 #include "apply.h"
 
 static const char * const apply_usage[] = {
index 8b2b259ff0d11ebf6d28765b2a926a5efb1b644e..d4eaaa345ee790764f3d6948f48b6e04435340af 100644 (file)
@@ -329,12 +329,12 @@ static int check_and_set_terms(struct bisect_terms *terms, const char *cmd)
        return 0;
 }
 
-static int mark_good(const char *refname, const struct object_id *oid,
-                    int flag, void *cb_data)
+static int inc_nr(const char *refname, const struct object_id *oid,
+                 int flag, void *cb_data)
 {
-       int *m_good = (int *)cb_data;
-       *m_good = 0;
-       return 1;
+       unsigned int *nr = (unsigned int *)cb_data;
+       (*nr)++;
+       return 0;
 }
 
 static const char need_bad_and_good_revision_warning[] =
@@ -384,23 +384,64 @@ static int decide_next(const struct bisect_terms *terms,
                             vocab_good, vocab_bad, vocab_good, vocab_bad);
 }
 
-static int bisect_next_check(const struct bisect_terms *terms,
-                            const char *current_term)
+static void bisect_status(struct bisect_state *state,
+                         const struct bisect_terms *terms)
 {
-       int missing_good = 1, missing_bad = 1;
        char *bad_ref = xstrfmt("refs/bisect/%s", terms->term_bad);
        char *good_glob = xstrfmt("%s-*", terms->term_good);
 
        if (ref_exists(bad_ref))
-               missing_bad = 0;
+               state->nr_bad = 1;
 
-       for_each_glob_ref_in(mark_good, good_glob, "refs/bisect/",
-                            (void *) &missing_good);
+       for_each_glob_ref_in(inc_nr, good_glob, "refs/bisect/",
+                            (void *) &state->nr_good);
 
        free(good_glob);
        free(bad_ref);
+}
 
-       return decide_next(terms, current_term, missing_good, missing_bad);
+__attribute__((format (printf, 1, 2)))
+static void bisect_log_printf(const char *fmt, ...)
+{
+       struct strbuf buf = STRBUF_INIT;
+       va_list ap;
+
+       va_start(ap, fmt);
+       strbuf_vaddf(&buf, fmt, ap);
+       va_end(ap);
+
+       printf("%s", buf.buf);
+       append_to_file(git_path_bisect_log(), "# %s", buf.buf);
+
+       strbuf_release(&buf);
+}
+
+static void bisect_print_status(const struct bisect_terms *terms)
+{
+       struct bisect_state state = { 0 };
+
+       bisect_status(&state, terms);
+
+       /* If we had both, we'd already be started, and shouldn't get here. */
+       if (state.nr_good && state.nr_bad)
+               return;
+
+       if (!state.nr_good && !state.nr_bad)
+               bisect_log_printf(_("status: waiting for both good and bad commits\n"));
+       else if (state.nr_good)
+               bisect_log_printf(Q_("status: waiting for bad commit, %d good commit known\n",
+                                    "status: waiting for bad commit, %d good commits known\n",
+                                    state.nr_good), state.nr_good);
+       else
+               bisect_log_printf(_("status: waiting for good commit(s), bad commit known\n"));
+}
+
+static int bisect_next_check(const struct bisect_terms *terms,
+                            const char *current_term)
+{
+       struct bisect_state state = { 0 };
+       bisect_status(&state, terms);
+       return decide_next(terms, current_term, !state.nr_good, !state.nr_bad);
 }
 
 static int get_terms(struct bisect_terms *terms)
@@ -433,7 +474,7 @@ static int bisect_terms(struct bisect_terms *terms, const char *option)
        if (get_terms(terms))
                return error(_("no terms defined"));
 
-       if (option == NULL) {
+       if (!option) {
                printf(_("Your current terms are %s for the old state\n"
                         "and %s for the new state.\n"),
                       terms->term_good, terms->term_bad);
@@ -606,8 +647,10 @@ static enum bisect_error bisect_next(struct bisect_terms *terms, const char *pre
 
 static enum bisect_error bisect_auto_next(struct bisect_terms *terms, const char *prefix)
 {
-       if (bisect_next_check(terms, NULL))
+       if (bisect_next_check(terms, NULL)) {
+               bisect_print_status(terms);
                return BISECT_OK;
+       }
 
        return bisect_next(terms, prefix);
 }
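
Editor's note: the rewritten helper replaces mark_good()'s "found one, stop" boolean with a counter (inc_nr()), so the same ref walk can feed both bisect_next_check() and the new status messages. A minimal standalone sketch of that counting-callback pattern, using a hypothetical for_each_item() iterator rather than git's for_each_glob_ref_in():

#include <stdio.h>

/* hypothetical iterator: calls fn once per item, stops early on non-zero */
typedef int (*each_item_fn)(const char *name, void *cb_data);

static int for_each_item(const char **items, each_item_fn fn, void *cb_data)
{
	for (; *items; items++) {
		int ret = fn(*items, cb_data);
		if (ret)
			return ret;
	}
	return 0;
}

/* counting callback: always returns 0 so the iteration visits every item */
static int inc_nr(const char *name, void *cb_data)
{
	unsigned int *nr = cb_data;
	(void)name;	/* the name itself is not needed for counting */
	(*nr)++;
	return 0;
}

int main(void)
{
	const char *good_refs[] = { "good-a", "good-b", NULL };
	unsigned int nr_good = 0;

	for_each_item(good_refs, inc_nr, &nr_good);
	printf("%u good refs\n", nr_good);	/* prints "2 good refs" */
	return 0;
}

Returning 0 from the callback keeps the iteration going, which is what turns the old "did we see any?" flag into a count.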
index 8d15b68afc9da3ffe0e1bce952e1e0fe72afd428..e33372c56b03a56337da026f0b5b367638216f25 100644 (file)
@@ -898,6 +898,7 @@ int cmd_blame(int argc, const char **argv, const char *prefix)
        unsigned int range_i;
        long anchor;
        const int hexsz = the_hash_algo->hexsz;
+       long num_lines = 0;
 
        setup_default_color_by_age();
        git_config(git_blame_config, &output_option);
@@ -1129,7 +1130,10 @@ parse_done:
        for (range_i = ranges.nr; range_i > 0; --range_i) {
                const struct range *r = &ranges.ranges[range_i - 1];
                ent = blame_entry_prepend(ent, r->start, r->end, o);
+               num_lines += (r->end - r->start);
        }
+       if (!num_lines)
+               num_lines = sb.num_lines;
 
        o->suspects = ent;
        prio_queue_put(&sb.commits, o->commit);
@@ -1158,7 +1162,7 @@ parse_done:
        sb.found_guilty_entry = &found_guilty_entry;
        sb.found_guilty_entry_data = &pi;
        if (show_progress)
-               pi.progress = start_delayed_progress(_("Blaming lines"), sb.num_lines);
+               pi.progress = start_delayed_progress(_("Blaming lines"), num_lines);
 
        assign_blame(&sb, opt);
 
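
Editor's note: with this change the blame progress total counts only the lines actually requested with -L, falling back to the whole file when no ranges were given. A rough sketch of that accounting, with a hypothetical half-open range struct standing in for git's:

#include <stdio.h>

struct range { long start, end; };	/* half-open: [start, end) */

int main(void)
{
	struct range ranges[] = { { 10, 20 }, { 40, 45 } };
	long file_lines = 1000, num_lines = 0;
	size_t i;

	for (i = 0; i < sizeof(ranges) / sizeof(ranges[0]); i++)
		num_lines += ranges[i].end - ranges[i].start;
	if (!num_lines)			/* no -L ranges given: blame everything */
		num_lines = file_lines;

	printf("progress total: %ld lines\n", num_lines);	/* 15, not 1000 */
	return 0;
}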
index 797681481d10b552e645237c99a8d9bff7df766a..f988936ddf02c70750e2e0bb408e81ed8c330c55 100644 (file)
@@ -834,7 +834,7 @@ static int merge_working_tree(const struct checkout_opts *opts,
                        if (ret)
                                return ret;
                        o.ancestor = old_branch_info->name;
-                       if (old_branch_info->name == NULL) {
+                       if (!old_branch_info->name) {
                                strbuf_add_unique_abbrev(&old_commit_shortname,
                                                         &old_branch_info->commit->object.oid,
                                                         DEFAULT_ABBREV);
index 52316563795fe34638b3ae22263ebf4e5f456ca5..89a91b0017725990345b6f91e61069c2f35819a6 100644 (file)
@@ -1106,10 +1106,12 @@ int cmd_clone(int argc, const char **argv, const char *prefix)
         * apply the remote name provided by --origin only after this second
         * call to git_config, to ensure it overrides all config-based values.
         */
-       if (option_origin != NULL)
+       if (option_origin) {
+               free(remote_name);
                remote_name = xstrdup(option_origin);
+       }
 
-       if (remote_name == NULL)
+       if (!remote_name)
                remote_name = xstrdup("origin");
 
        if (!valid_remote_name(remote_name))
index 009a1de0a3d3f69e05a802d7d2356935018b0f81..9bbc7f807552ebc637d4621bd99d9d27a9007b82 100644 (file)
@@ -861,7 +861,7 @@ static int prepare_to_commit(const char *index_file, const char *prefix,
        }
 
        s->fp = fopen_for_writing(git_path_commit_editmsg());
-       if (s->fp == NULL)
+       if (!s->fp)
                die_errno(_("could not open '%s'"), git_path_commit_editmsg());
 
        /* Ignore status.displayCommentPrefix: we do need comments in COMMIT_EDITMSG. */
@@ -1687,6 +1687,7 @@ int cmd_commit(int argc, const char **argv, const char *prefix)
        struct commit *current_head = NULL;
        struct commit_extra_header *extra = NULL;
        struct strbuf err = STRBUF_INIT;
+       int ret = 0;
 
        if (argc == 2 && !strcmp(argv[1], "-h"))
                usage_with_options(builtin_commit_usage, builtin_commit_options);
@@ -1721,8 +1722,9 @@ int cmd_commit(int argc, const char **argv, const char *prefix)
           running hooks, writing the trees, and interacting with the user.  */
        if (!prepare_to_commit(index_file, prefix,
                               current_head, &s, &author_ident)) {
+               ret = 1;
                rollback_index_files();
-               return 1;
+               goto cleanup;
        }
 
        /* Determine parents */
@@ -1820,7 +1822,6 @@ int cmd_commit(int argc, const char **argv, const char *prefix)
                rollback_index_files();
                die(_("failed to write commit object"));
        }
-       strbuf_release(&author_ident);
        free_commit_extra_headers(extra);
 
        if (update_head_with_reflog(current_head, &oid, reflog_msg, &sb,
@@ -1862,7 +1863,9 @@ int cmd_commit(int argc, const char **argv, const char *prefix)
 
        apply_autostash(git_path_merge_autostash(the_repository));
 
+cleanup:
+       UNLEAK(author_ident);
        UNLEAK(err);
        UNLEAK(sb);
-       return 0;
+       return ret;
 }
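
Editor's note: cmd_commit() now funnels the prepare_to_commit() failure through a single cleanup label so author_ident is released on every exit path (UNLEAK in the real code). A generic sketch of that goto-cleanup pattern, with strdup()/free() standing in for strbuf/UNLEAK:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int do_commit(int fail_early)
{
	int ret = 0;
	char *author_ident = strdup("A U Thor <author@example.com>");

	if (!author_ident)
		return 1;

	if (fail_early) {
		ret = 1;		/* remember the failure ... */
		goto cleanup;		/* ... but still run the cleanup below */
	}

	printf("committing as %s\n", author_ident);

cleanup:
	free(author_ident);		/* stands in for UNLEAK(author_ident) */
	return ret;
}

int main(void)
{
	printf("failed run returned %d\n", do_commit(1));
	printf("normal run returned %d\n", do_commit(0));
	return 0;
}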
index bb7fafca61815460ba085859322314b4c41c8b6b..3397f44d5aa97e76867bc7fcb72b709f61123af1 100644 (file)
@@ -352,7 +352,7 @@ static void symdiff_prepare(struct rev_info *rev, struct symdiff *sym)
                        othercount++;
                        continue;
                }
-               if (map == NULL)
+               if (!map)
                        map = bitmap_new();
                bitmap_set(map, i);
        }
index b335cffa33561fa80f268260a7edbc7af9ae4136..daa4535f1c3964023d16497d2b1478f3f2e08a93 100644 (file)
@@ -446,7 +446,7 @@ static const char *lock_repo_for_gc(int force, pid_t* ret_pid)
                        fscanf(fp, scan_fmt, &pid, locking_host) == 2 &&
                        /* be gentle to concurrent "gc" on remote hosts */
                        (strcmp(locking_host, my_host) || !kill(pid, 0) || errno == EPERM);
-               if (fp != NULL)
+               if (fp)
                        fclose(fp);
                if (should_exit) {
                        if (fd >= 0)
@@ -2238,7 +2238,7 @@ static int systemd_timer_write_unit_templates(const char *exec_path)
                goto error;
        }
        file = fopen_or_warn(filename, "w");
-       if (file == NULL)
+       if (!file)
                goto error;
 
        unit = "# This file was created and is maintained by Git.\n"
@@ -2267,7 +2267,7 @@ static int systemd_timer_write_unit_templates(const char *exec_path)
 
        filename = xdg_config_home_systemd("git-maintenance@.service");
        file = fopen_or_warn(filename, "w");
-       if (file == NULL)
+       if (!file)
                goto error;
 
        unit = "# This file was created and is maintained by Git.\n"
index 680b66b0636522be7bf831e69be3adf2115ff94a..3e385b4800258d5c2e0d964d34b4238980f8b06c 100644 (file)
@@ -1942,11 +1942,11 @@ int cmd_index_pack(int argc, const char **argv, const char *prefix)
        free(objects);
        strbuf_release(&index_name_buf);
        strbuf_release(&rev_index_name_buf);
-       if (pack_name == NULL)
+       if (!pack_name)
                free((void *) curr_pack);
-       if (index_name == NULL)
+       if (!index_name)
                free((void *) curr_index);
-       if (rev_index_name == NULL)
+       if (!rev_index_name)
                free((void *) curr_rev_index);
 
        /*
index 3ac479bec3c17aa7734195bc31dbacfb22a1523e..2eb0063cc167ee84d14a25cb45678cb0ac617a41 100644 (file)
@@ -669,6 +669,11 @@ int cmd_show(int argc, const char **argv, const char *prefix)
        init_log_defaults();
        git_config(git_log_config, NULL);
 
+       if (the_repository->gitdir) {
+               prepare_repo_settings(the_repository);
+               the_repository->settings.command_requires_full_index = 0;
+       }
+
        memset(&match_all, 0, sizeof(match_all));
        repo_init_revisions(the_repository, &rev, prefix);
        git_config(grep_config, &rev.grep_filter);
@@ -1025,7 +1030,7 @@ static int open_next_file(struct commit *commit, const char *subject,
        if (!quiet)
                printf("%s\n", filename.buf + outdir_offset);
 
-       if ((rev->diffopt.file = fopen(filename.buf, "w")) == NULL) {
+       if (!(rev->diffopt.file = fopen(filename.buf, "w"))) {
                error_errno(_("cannot open patch file %s"), filename.buf);
                strbuf_release(&filename);
                return -1;
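
Editor's note: this hunk (and the matching ones in rev-parse and stash further down) opts the command in to the sparse index by clearing command_requires_full_index after prepare_repo_settings(). A hypothetical, stripped-down sketch of that opt-in flag, not git's actual struct layout:

#include <stdio.h>

/* hypothetical stand-in for git's repo_settings */
struct repo_settings {
	int command_requires_full_index;
};

struct repository {
	int settings_initialized;
	struct repo_settings settings;
};

static void prepare_repo_settings(struct repository *r)
{
	if (r->settings_initialized)
		return;
	r->settings.command_requires_full_index = 1;	/* safe default */
	r->settings_initialized = 1;
}

int main(void)
{
	struct repository repo = { 0 };

	prepare_repo_settings(&repo);
	/* this command has been audited to work with a sparse index */
	repo.settings.command_requires_full_index = 0;

	printf("full index required: %d\n",
	       repo.settings.command_requires_full_index);
	return 0;
}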
index d856085e9414a2bea070cbe9c84268c66158c33a..df44e5cc0d1171334a846817ef19e6f98feca48f 100644 (file)
@@ -114,7 +114,7 @@ int cmd_ls_remote(int argc, const char **argv, const char *prefix)
        }
 
        transport = transport_get(remote, NULL);
-       if (uploadpack != NULL)
+       if (uploadpack)
                transport_set_option(transport, TRANS_OPT_UPLOADPACK, uploadpack);
        if (server_options.nr)
                transport->server_options = &server_options;
index 30952353a370a3343f614ee5a14a3f5ca9560444..73509f651bda4805e8399d75eda80ec8976bda07 100644 (file)
@@ -120,7 +120,7 @@ static int populate_maildir_list(struct string_list *list, const char *path)
        for (sub = subs; *sub; ++sub) {
                free(name);
                name = xstrfmt("%s/%s", path, *sub);
-               if ((dir = opendir(name)) == NULL) {
+               if (!(dir = opendir(name))) {
                        if (errno == ENOENT)
                                continue;
                        error_errno("cannot opendir %s", name);
index 4480ba398277d4edc67e60eff47686d579de53c8..5edbb7fe86e81fb09ca2245ba920bc8bccd18726 100644 (file)
@@ -44,7 +44,7 @@ static char const * const builtin_multi_pack_index_usage[] = {
 };
 
 static struct opts_multi_pack_index {
-       const char *object_dir;
+       char *object_dir;
        const char *preferred_pack;
        const char *refs_snapshot;
        unsigned long batch_size;
@@ -52,9 +52,23 @@ static struct opts_multi_pack_index {
        int stdin_packs;
 } opts;
 
+
+static int parse_object_dir(const struct option *opt, const char *arg,
+                           int unset)
+{
+       free(opts.object_dir);
+       if (unset)
+               opts.object_dir = xstrdup(get_object_directory());
+       else
+               opts.object_dir = real_pathdup(arg, 1);
+       return 0;
+}
+
 static struct option common_opts[] = {
-       OPT_FILENAME(0, "object-dir", &opts.object_dir,
-         N_("object directory containing set of packfile and pack-index pairs")),
+       OPT_CALLBACK(0, "object-dir", &opts.object_dir,
+         N_("directory"),
+         N_("object directory containing set of packfile and pack-index pairs"),
+         parse_object_dir),
        OPT_END(),
 };
 
@@ -232,31 +246,40 @@ static int cmd_multi_pack_index_repack(int argc, const char **argv)
 int cmd_multi_pack_index(int argc, const char **argv,
                         const char *prefix)
 {
+       int res;
        struct option *builtin_multi_pack_index_options = common_opts;
 
        git_config(git_default_config, NULL);
 
+       if (the_repository &&
+           the_repository->objects &&
+           the_repository->objects->odb)
+               opts.object_dir = xstrdup(the_repository->objects->odb->path);
+
        argc = parse_options(argc, argv, prefix,
                             builtin_multi_pack_index_options,
                             builtin_multi_pack_index_usage,
                             PARSE_OPT_STOP_AT_NON_OPTION);
 
-       if (!opts.object_dir)
-               opts.object_dir = get_object_directory();
-
        if (!argc)
                goto usage;
 
        if (!strcmp(argv[0], "repack"))
-               return cmd_multi_pack_index_repack(argc, argv);
+               res = cmd_multi_pack_index_repack(argc, argv);
        else if (!strcmp(argv[0], "write"))
-               return cmd_multi_pack_index_write(argc, argv);
+               res =  cmd_multi_pack_index_write(argc, argv);
        else if (!strcmp(argv[0], "verify"))
-               return cmd_multi_pack_index_verify(argc, argv);
+               res =  cmd_multi_pack_index_verify(argc, argv);
        else if (!strcmp(argv[0], "expire"))
-               return cmd_multi_pack_index_expire(argc, argv);
+               res =  cmd_multi_pack_index_expire(argc, argv);
+       else {
+               error(_("unrecognized subcommand: %s"), argv[0]);
+               goto usage;
+       }
+
+       free(opts.object_dir);
+       return res;
 
-       error(_("unrecognized subcommand: %s"), argv[0]);
 usage:
        usage_with_options(builtin_multi_pack_index_usage,
                           builtin_multi_pack_index_options);
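
Editor's note: switching --object-dir to a callback lets the option own its string: the callback frees whatever was there (the heap-allocated default set before parse_options()) before storing the new value, and a single free() at the end then covers every subcommand. A standalone sketch of that ownership idea, with hypothetical option plumbing instead of git's parse-options API:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *object_dir;	/* always heap-allocated, so it can be freed */

/* hypothetical option callback: takes ownership of the new value */
static void set_object_dir(const char *arg)
{
	free(object_dir);			/* drop the default or an earlier value */
	object_dir = strdup(arg ? arg : ".git/objects");
}

int main(void)
{
	object_dir = strdup(".git/objects");	/* default, set up front */

	set_object_dir("/tmp/alt-objects");	/* e.g. --object-dir=/tmp/alt-objects */
	printf("using %s\n", object_dir);

	free(object_dir);			/* single free on every exit path */
	return 0;
}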
index 8bf5c0acad2023967abf498832006f37924d9ca1..ed9b9013a5fea10c0e99616ac928661130b4c21e 100644 (file)
@@ -101,7 +101,7 @@ static inline struct llist_item *llist_insert(struct llist *list,
        oidread(&new_item->oid, oid);
        new_item->next = NULL;
 
-       if (after != NULL) {
+       if (after) {
                new_item->next = after->next;
                after->next = new_item;
                if (after == list->back)
@@ -157,7 +157,7 @@ redo_from_start:
                if (cmp > 0) /* not in list, since sorted */
                        return prev;
                if (!cmp) { /* found */
-                       if (prev == NULL) {
+                       if (!prev) {
                                if (hint != NULL && hint != list->front) {
                                        /* we don't know the previous element */
                                        hint = NULL;
@@ -219,7 +219,7 @@ static struct pack_list * pack_list_difference(const struct pack_list *A,
        struct pack_list *ret;
        const struct pack_list *pl;
 
-       if (A == NULL)
+       if (!A)
                return NULL;
 
        pl = B;
@@ -317,7 +317,7 @@ static size_t get_pack_redundancy(struct pack_list *pl)
        struct pack_list *subset;
        size_t ret = 0;
 
-       if (pl == NULL)
+       if (!pl)
                return 0;
 
        while ((subset = pl->next)) {
@@ -611,7 +611,7 @@ int cmd_pack_redundant(int argc, const char **argv, const char *prefix)
                while (*(argv + i) != NULL)
                        add_pack_file(*(argv + i++));
 
-       if (local_packs == NULL)
+       if (!local_packs)
                die("Zero packs found!");
 
        load_all_objects();
index 4d667abc19d91c0c83ef9faf6b1944f73d2ff48c..01155ba67b20d6593e09b239d8a5bbb3eb195535 100644 (file)
@@ -72,6 +72,7 @@ static const char * const pull_usage[] = {
 static int opt_verbosity;
 static char *opt_progress;
 static int recurse_submodules = RECURSE_SUBMODULES_DEFAULT;
+static int recurse_submodules_cli = RECURSE_SUBMODULES_DEFAULT;
 
 /* Options passed to git-merge or git-rebase */
 static enum rebase_type opt_rebase = -1;
@@ -120,7 +121,7 @@ static struct option pull_options[] = {
                N_("force progress reporting"),
                PARSE_OPT_NOARG),
        OPT_CALLBACK_F(0, "recurse-submodules",
-                  &recurse_submodules, N_("on-demand"),
+                  &recurse_submodules_cli, N_("on-demand"),
                   N_("control for recursive fetching of submodules"),
                   PARSE_OPT_OPTARG, option_fetch_parse_recurse_submodules),
 
@@ -536,8 +537,8 @@ static int run_fetch(const char *repo, const char **refspecs)
                strvec_push(&args, opt_tags);
        if (opt_prune)
                strvec_push(&args, opt_prune);
-       if (recurse_submodules != RECURSE_SUBMODULES_DEFAULT)
-               switch (recurse_submodules) {
+       if (recurse_submodules_cli != RECURSE_SUBMODULES_DEFAULT)
+               switch (recurse_submodules_cli) {
                case RECURSE_SUBMODULES_ON:
                        strvec_push(&args, "--recurse-submodules=on");
                        break;
@@ -1001,6 +1002,9 @@ int cmd_pull(int argc, const char **argv, const char *prefix)
 
        argc = parse_options(argc, argv, prefix, pull_options, pull_usage, 0);
 
+       if (recurse_submodules_cli != RECURSE_SUBMODULES_DEFAULT)
+               recurse_submodules = recurse_submodules_cli;
+
        if (cleanup_arg)
                /*
                 * this only checks the validity of cleanup_arg; we don't need
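
Editor's note: the option now parses into its own recurse_submodules_cli variable and is copied into the effective setting only after parse_options(), so "was it given on the command line?" can still be answered when deciding what to forward to git fetch. A small sketch of keeping the CLI value separate from the effective one:

#include <stdio.h>

enum recurse { RECURSE_DEFAULT = -1, RECURSE_OFF = 0, RECURSE_ON = 1 };

int main(void)
{
	enum recurse recurse_submodules = RECURSE_DEFAULT;	/* effective value */
	enum recurse recurse_submodules_cli = RECURSE_DEFAULT;

	/* pretend --recurse-submodules was given on the command line */
	recurse_submodules_cli = RECURSE_ON;

	/* after option parsing: an explicit CLI value wins over config/default */
	if (recurse_submodules_cli != RECURSE_DEFAULT)
		recurse_submodules = recurse_submodules_cli;

	/* forward the flag only if the user asked for it explicitly */
	if (recurse_submodules_cli != RECURSE_DEFAULT)
		printf("fetch --recurse-submodules=%s\n",
		       recurse_submodules == RECURSE_ON ? "on" : "no");
	return 0;
}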
index 27fde7bf2815d3f7af22f87d14c098edce5c552d..7ab50cda2ad09f127e5840dd3cc4e0aad6ef36c3 100644 (file)
@@ -1187,11 +1187,9 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
                } else {
                        strbuf_reset(&buf);
                        strbuf_addf(&buf, "%s/interactive", merge_dir());
-                       if(file_exists(buf.buf)) {
-                               options.type = REBASE_MERGE;
+                       options.type = REBASE_MERGE;
+                       if (file_exists(buf.buf))
                                options.flags |= REBASE_INTERACTIVE_EXPLICIT;
-                       } else
-                               options.type = REBASE_MERGE;
                }
                options.state_dir = merge_dir();
        }
@@ -1583,33 +1581,6 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
                options.upstream_arg = "--root";
        }
 
-       /* Make sure the branch to rebase onto is valid. */
-       if (keep_base) {
-               strbuf_reset(&buf);
-               strbuf_addstr(&buf, options.upstream_name);
-               strbuf_addstr(&buf, "...");
-               options.onto_name = xstrdup(buf.buf);
-       } else if (!options.onto_name)
-               options.onto_name = options.upstream_name;
-       if (strstr(options.onto_name, "...")) {
-               if (get_oid_mb(options.onto_name, &merge_base) < 0) {
-                       if (keep_base)
-                               die(_("'%s': need exactly one merge base with branch"),
-                                   options.upstream_name);
-                       else
-                               die(_("'%s': need exactly one merge base"),
-                                   options.onto_name);
-               }
-               options.onto = lookup_commit_or_die(&merge_base,
-                                                   options.onto_name);
-       } else {
-               options.onto =
-                       lookup_commit_reference_by_name(options.onto_name);
-               if (!options.onto)
-                       die(_("Does not point to a valid commit '%s'"),
-                               options.onto_name);
-       }
-
        /*
         * If the branch to rebase is given, that is the branch we will rebase
         * branch_name -- branch/commit being rebased, or
@@ -1659,6 +1630,34 @@ int cmd_rebase(int argc, const char **argv, const char *prefix)
        } else
                BUG("unexpected number of arguments left to parse");
 
+       /* Make sure the branch to rebase onto is valid. */
+       if (keep_base) {
+               strbuf_reset(&buf);
+               strbuf_addstr(&buf, options.upstream_name);
+               strbuf_addstr(&buf, "...");
+               strbuf_addstr(&buf, branch_name);
+               options.onto_name = xstrdup(buf.buf);
+       } else if (!options.onto_name)
+               options.onto_name = options.upstream_name;
+       if (strstr(options.onto_name, "...")) {
+               if (get_oid_mb(options.onto_name, &merge_base) < 0) {
+                       if (keep_base)
+                               die(_("'%s': need exactly one merge base with branch"),
+                                   options.upstream_name);
+                       else
+                               die(_("'%s': need exactly one merge base"),
+                                   options.onto_name);
+               }
+               options.onto = lookup_commit_or_die(&merge_base,
+                                                   options.onto_name);
+       } else {
+               options.onto =
+                       lookup_commit_reference_by_name(options.onto_name);
+               if (!options.onto)
+                       die(_("Does not point to a valid commit '%s'"),
+                               options.onto_name);
+       }
+
        if (options.fork_point > 0) {
                struct commit *head =
                        lookup_commit_reference(the_repository,
index 9aabffa1afb646f0513068de3d6366a7bc58809a..ad20b41e3c8e24fc7c45a0dbdcf7b7a387c1354c 100644 (file)
@@ -1664,7 +1664,7 @@ static void check_aliased_update_internal(struct command *cmd,
        }
        dst_name = strip_namespace(dst_name);
 
-       if ((item = string_list_lookup(list, dst_name)) == NULL)
+       if (!(item = string_list_lookup(list, dst_name)))
                return;
 
        cmd->skip_update = 1;
@@ -2538,7 +2538,7 @@ int cmd_receive_pack(int argc, const char **argv, const char *prefix)
                           PACKET_READ_CHOMP_NEWLINE |
                           PACKET_READ_DIE_ON_ERR_PACKET);
 
-       if ((commands = read_head_info(&reader, &shallow)) != NULL) {
+       if ((commands = read_head_info(&reader, &shallow))) {
                const char *unpack_status = NULL;
                struct string_list push_options = STRING_LIST_INIT_DUP;
 
index 5068f4f0b2acf8e4b0f342319999cedffccc4223..583702a0980e8ced3f91e1b88a9a1698be16fc6e 100644 (file)
@@ -72,7 +72,7 @@ static int list_replace_refs(const char *pattern, const char *format)
 {
        struct show_data data;
 
-       if (pattern == NULL)
+       if (!pattern)
                pattern = "*";
        data.pattern = pattern;
 
index 8480a59f573e28d7777ee7e0dd21a2a640657641..b259d8990a681d1804d434cea2f1e28193e6693f 100644 (file)
@@ -476,7 +476,7 @@ static int cmd_parseopt(int argc, const char **argv, const char *prefix)
 
                /* name(s) */
                s = strpbrk(sb.buf, flag_chars);
-               if (s == NULL)
+               if (!s)
                        s = help;
 
                if (s - sb.buf == 1) /* short option only */
@@ -723,6 +723,9 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
                        prefix = setup_git_directory();
                        git_config(git_default_config, NULL);
                        did_repo_setup = 1;
+
+                       prepare_repo_settings(the_repository);
+                       the_repository->settings.command_requires_full_index = 0;
                }
 
                if (!strcmp(arg, "--")) {
index 26c5c0cf93545777301c27f02279450347425fde..62c4a4eaba4cff77efc6332d13cfdd24cd71d719 100644 (file)
@@ -81,7 +81,7 @@ static void insert_one_record(struct shortlog *log,
                format_subject(&subject, oneline, " ");
                buffer = strbuf_detach(&subject, NULL);
 
-               if (item->util == NULL)
+               if (!item->util)
                        item->util = xcalloc(1, sizeof(struct string_list));
                string_list_append(item->util, buffer);
        }
index 330b0553b9d72d9befb0ecf1f3c58540b6f26877..64c649c6a238605cb847bbf515cbdca58cde990c 100644 (file)
@@ -712,6 +712,10 @@ int cmd_show_branch(int ac, const char **av, const char *prefix)
                                "--all/--remotes/--independent/--merge-base");
        }
 
+       if (with_current_branch && reflog)
+               die(_("options '%s' and '%s' cannot be used together"),
+                   "--reflog", "--current");
+
        /* If nothing is specified, show all branches by default */
        if (ac <= topics && all_heads + all_remotes == 0)
                all_heads = 1;
index 0c7b6a95882d9405e3cdf4a8d3d2d251dc0588db..3fe549f7d3cb1f7bff9e0a8e9dcb6abf8309bfc5 100644 (file)
@@ -7,6 +7,7 @@
 #include "cache-tree.h"
 #include "unpack-trees.h"
 #include "merge-recursive.h"
+#include "merge-ort-wrappers.h"
 #include "strvec.h"
 #include "run-command.h"
 #include "dir.h"
@@ -492,13 +493,13 @@ static void unstage_changes_unless_new(struct object_id *orig_tree)
 static int do_apply_stash(const char *prefix, struct stash_info *info,
                          int index, int quiet)
 {
-       int ret;
+       int clean, ret;
        int has_index = index;
        struct merge_options o;
        struct object_id c_tree;
        struct object_id index_tree;
-       struct commit *result;
-       const struct object_id *bases[1];
+       struct tree *head, *merge, *merge_base;
+       struct lock_file lock = LOCK_INIT;
 
        read_cache_preload(NULL);
        if (refresh_and_write_cache(REFRESH_QUIET, 0, 0))
@@ -541,6 +542,7 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
 
        o.branch1 = "Updated upstream";
        o.branch2 = "Stashed changes";
+       o.ancestor = "Stash base";
 
        if (oideq(&info->b_tree, &c_tree))
                o.branch1 = "Version stash was based on";
@@ -551,10 +553,26 @@ static int do_apply_stash(const char *prefix, struct stash_info *info,
        if (o.verbosity >= 3)
                printf_ln(_("Merging %s with %s"), o.branch1, o.branch2);
 
-       bases[0] = &info->b_tree;
+       head = lookup_tree(o.repo, &c_tree);
+       merge = lookup_tree(o.repo, &info->w_tree);
+       merge_base = lookup_tree(o.repo, &info->b_tree);
+
+       repo_hold_locked_index(o.repo, &lock, LOCK_DIE_ON_ERROR);
+       clean = merge_ort_nonrecursive(&o, head, merge, merge_base);
+
+       /*
+        * If 'clean' >= 0, reverse the value for 'ret' so 'ret' is 0 when the
+        * merge was clean, and nonzero if the merge was unclean or encountered
+        * an error.
+        */
+       ret = clean >= 0 ? !clean : clean;
+
+       if (ret < 0)
+               rollback_lock_file(&lock);
+       else if (write_locked_index(o.repo->index, &lock,
+                                     COMMIT_LOCK | SKIP_IF_UNCHANGED))
+               ret = error(_("could not write index"));
 
-       ret = merge_recursive_generic(&o, &c_tree, &info->w_tree, 1, bases,
-                                     &result);
        if (ret) {
                rerere(0);
 
@@ -1770,6 +1788,9 @@ int cmd_stash(int argc, const char **argv, const char *prefix)
        argc = parse_options(argc, argv, prefix, options, git_stash_usage,
                             PARSE_OPT_KEEP_UNKNOWN | PARSE_OPT_KEEP_DASHDASH);
 
+       prepare_repo_settings(the_repository);
+       the_repository->settings.command_requires_full_index = 0;
+
        index_file = get_index_file();
        strbuf_addf(&stash_index_path, "%s.stash.%" PRIuMAX, index_file,
                    (uintmax_t)pid);
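
Editor's note: do_apply_stash() now hands the stash's base, working-tree and current trees to merge_ort_nonrecursive() and maps its result (negative = error, zero = conflicts, positive = clean) onto the usual 0/non-zero return, exactly as the new in-code comment describes. A tiny sketch of just that mapping:

#include <stdio.h>

/* map merge result: <0 error, 0 unclean, >0 clean  ->  0 only on success */
static int merge_result_to_ret(int clean)
{
	return clean >= 0 ? !clean : clean;
}

int main(void)
{
	printf("clean merge -> ret %d\n", merge_result_to_ret(1));	/* 0 */
	printf("conflicts   -> ret %d\n", merge_result_to_ret(0));	/* 1 */
	printf("merge error -> ret %d\n", merge_result_to_ret(-1));	/* -1 */
	return 0;
}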
index e5a8f856936a1e494d91aa8ebfa2ff9f64f375d8..75dece0e4f1c90c592f86a4881a01e6553d87613 100644 (file)
@@ -364,7 +364,7 @@ static void create_reflog_msg(const struct object_id *oid, struct strbuf *sb)
                strbuf_addstr(sb, "object of unknown type");
                break;
        case OBJ_COMMIT:
-               if ((buf = read_object_file(oid, &type, &size)) != NULL) {
+               if ((buf = read_object_file(oid, &type, &size))) {
                        subject_len = find_commit_subject(buf, &subject_start);
                        strbuf_insert(sb, sb->len, subject_start, subject_len);
                } else {
@@ -372,7 +372,7 @@ static void create_reflog_msg(const struct object_id *oid, struct strbuf *sb)
                }
                free(buf);
 
-               if ((c = lookup_commit_reference(the_repository, oid)) != NULL)
+               if ((c = lookup_commit_reference(the_repository, oid)))
                        strbuf_addf(sb, ", %s", show_date(c->date, 0, DATE_MODE(SHORT)));
                break;
        case OBJ_TREE:
diff --git a/cache.h b/cache.h
index 6226f6a8a53f65083e67ed747d56f4ca84c43302..595582becc83a5ffc1dfcc19eb59123d772b44c4 100644 (file)
--- a/cache.h
+++ b/cache.h
@@ -566,7 +566,7 @@ extern char *git_work_tree_cfg;
 int is_inside_work_tree(void);
 const char *get_git_dir(void);
 const char *get_git_common_dir(void);
-char *get_object_directory(void);
+const char *get_object_directory(void);
 char *get_index_file(void);
 char *get_graft_file(struct repository *r);
 void set_git_dir(const char *path, int make_realpath);
index dbcebad2fb293303f9271850094a29c3a249bda6..107757a1fea4c2839de8ebe871b648e91c5b97a6 100755 (executable)
@@ -5,7 +5,7 @@
 
 . ${0%/*}/lib.sh
 
-P4WHENCE=http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION
+P4WHENCE=https://cdist2.perforce.com/perforce/r$LINUX_P4_VERSION
 LFSWHENCE=https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION
 UBUNTU_COMMON_PKGS="make libssl-dev libcurl4-openssl-dev libexpat-dev
  tcl tk gettext zlib1g-dev perl-modules liberror-perl libauthen-sasl-perl
@@ -37,13 +37,15 @@ macos-latest)
        test -z "$BREW_INSTALL_PACKAGES" ||
        brew install $BREW_INSTALL_PACKAGES
        brew link --force gettext
-       brew install --cask --no-quarantine perforce || {
-               # Update the definitions and try again
-               cask_repo="$(brew --repository)"/Library/Taps/homebrew/homebrew-cask &&
-               git -C "$cask_repo" pull --no-stat --ff-only &&
-               brew install --cask --no-quarantine perforce
-       } ||
-       brew install homebrew/cask/perforce
+       mkdir -p $HOME/bin
+       (
+               cd $HOME/bin
+               wget -q "https://cdist2.perforce.com/perforce/r21.2/bin.macosx1015x86_64/helix-core-server.tgz" &&
+               tar -xf helix-core-server.tgz &&
+               sudo xattr -d com.apple.quarantine p4 p4d 2>/dev/null || true
+       )
+       PATH="$PATH:${HOME}/bin"
+       export PATH
 
        if test -n "$CC_PACKAGE"
        then
@@ -78,15 +80,19 @@ linux-gcc-default)
        ;;
 esac
 
-if type p4d >/dev/null && type p4 >/dev/null
+if type p4d >/dev/null 2>&1 && type p4 >/dev/null 2>&1
 then
        echo "$(tput setaf 6)Perforce Server Version$(tput sgr0)"
        p4d -V | grep Rev.
        echo "$(tput setaf 6)Perforce Client Version$(tput sgr0)"
        p4 -V | grep Rev.
+else
+       echo >&2 "WARNING: perforce wasn't installed, see above for clues why"
 fi
-if type git-lfs >/dev/null
+if type git-lfs >/dev/null 2>&1
 then
        echo "$(tput setaf 6)Git-LFS Version$(tput sgr0)"
        git-lfs version
+else
+       echo >&2 "WARNING: git-lfs wasn't installed, see above for clues why"
 fi
index d93782daeb37fee21f5898600ff15628b32d4779..b724f02123d838547fa87fb785d94a2a3141c3e8 100644 (file)
@@ -195,10 +195,10 @@ static struct lline *coalesce_lines(struct lline *base, int *lenbase,
        struct lline *baseend, *newend = NULL;
        int i, j, origbaselen = *lenbase;
 
-       if (newline == NULL)
+       if (!newline)
                return base;
 
-       if (base == NULL) {
+       if (!base) {
                *lenbase = lennew;
                return newline;
        }
index 441b36016ba796c13665603b4c523c9baa52c12d..7943da384817b7724773bbb96823b0848d96a95b 100644 (file)
@@ -523,10 +523,13 @@ static struct commit_graph *load_commit_graph_chain(struct repository *r,
        stat_res = stat(chain_name, &st);
        free(chain_name);
 
-       if (!fp ||
-           stat_res ||
-           st.st_size <= the_hash_algo->hexsz)
+       if (!fp)
                return NULL;
+       if (stat_res ||
+           st.st_size <= the_hash_algo->hexsz) {
+               fclose(fp);
+               return NULL;
+       }
 
        count = st.st_size / (the_hash_algo->hexsz + 1);
        CALLOC_ARRAY(oids, count);
@@ -2206,7 +2209,8 @@ static void mark_commit_graphs(struct write_commit_graph_context *ctx)
                struct stat st;
                struct utimbuf updated_time;
 
-               stat(ctx->commit_graph_filenames_before[i], &st);
+               if (stat(ctx->commit_graph_filenames_before[i], &st) < 0)
+                       continue;
 
                updated_time.actime = st.st_atime;
                updated_time.modtime = now;
@@ -2247,7 +2251,8 @@ static void expire_commit_graphs(struct write_commit_graph_context *ctx)
                strbuf_setlen(&path, dirnamelen);
                strbuf_addstr(&path, de->d_name);
 
-               stat(path.buf, &st);
+               if (stat(path.buf, &st) < 0)
+                       continue;
 
                if (st.st_mtime > expire_time)
                        continue;
@@ -2567,7 +2572,7 @@ int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags)
                odb_parents = odb_commit->parents;
 
                while (graph_parents) {
-                       if (odb_parents == NULL) {
+                       if (!odb_parents) {
                                graph_report(_("commit-graph parent list for commit %s is too long"),
                                             oid_to_hex(&cur_oid));
                                break;
@@ -2590,7 +2595,7 @@ int verify_commit_graph(struct repository *r, struct commit_graph *g, int flags)
                        odb_parents = odb_parents->next;
                }
 
-               if (odb_parents != NULL)
+               if (odb_parents)
                        graph_report(_("commit-graph parent list for commit %s terminates early"),
                                     oid_to_hex(&cur_oid));
 
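
Editor's note: the chain loader now checks the return value of stat() and closes the already-opened file before bailing out, instead of acting on a possibly uninitialized struct stat. A generic sketch of that error path (hypothetical helper and placeholder path, not git's commit-graph code):

#include <stdio.h>
#include <sys/stat.h>

/* open a file only if it exists and is larger than min_size */
static FILE *open_if_large_enough(const char *path, long min_size)
{
	struct stat st;
	FILE *fp = fopen(path, "r");

	if (!fp)
		return NULL;
	if (stat(path, &st) < 0 || st.st_size <= min_size) {
		fclose(fp);	/* don't leak the handle on the error path */
		return NULL;
	}
	return fp;
}

int main(void)
{
	FILE *fp = open_if_large_enough("/etc/passwd", 0);	/* placeholder path */

	if (fp) {
		puts("usable file");
		fclose(fp);
	}
	return 0;
}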
index 0741fe834c3168a0c88e4a7c67275554fe5cc5c0..dc8a33130a5ea0584686e903d8628f2dc3145e91 100644 (file)
@@ -342,7 +342,7 @@ int fsm_listen__ctor(struct fsmonitor_daemon_state *state)
                                           data->cfar_paths_to_watch,
                                           kFSEventStreamEventIdSinceNow,
                                           0.001, flags);
-       if (data->stream == NULL)
+       if (!data->stream)
                goto failed;
 
        /*
index 6fe80fdf014021ee1620e941a5214aba92ba4ca5..5772692a0ac497030abd9b1bc3eb656ab623b295 100644 (file)
@@ -1060,7 +1060,7 @@ char *mingw_mktemp(char *template)
 int mkstemp(char *template)
 {
        char *filename = mktemp(template);
-       if (filename == NULL)
+       if (!filename)
                return -1;
        return open(filename, O_RDWR | O_CREAT, 0600);
 }
@@ -2332,7 +2332,7 @@ int setitimer(int type, struct itimerval *in, struct itimerval *out)
        static const struct timeval zero;
        static int atexit_done;
 
-       if (out != NULL)
+       if (out)
                return errno = EINVAL,
                        error("setitimer param 3 != NULL not implemented");
        if (!is_timeval_eq(&in->it_interval, &zero) &&
@@ -2361,7 +2361,7 @@ int sigaction(int sig, struct sigaction *in, struct sigaction *out)
        if (sig != SIGALRM)
                return errno = EINVAL,
                        error("sigaction only implemented for SIGALRM");
-       if (out != NULL)
+       if (out)
                return errno = EINVAL,
                        error("sigaction: param 3 != NULL not implemented");
 
index 9e253fb72f2df92a09b562dde59f347c4c207a57..02aea3b32ef1e3c8944985edca52f3b0413e1739 100644 (file)
@@ -9,7 +9,7 @@ int compat_mkdir_wo_trailing_slash(const char *dir, mode_t mode)
        size_t len = strlen(dir);
 
        if (len && dir[len-1] == '/') {
-               if ((tmp_dir = strdup(dir)) == NULL)
+               if (!(tmp_dir = strdup(dir)))
                        return -1;
                tmp_dir[len-1] = '\0';
        }
index 8d6c02d4bccc0160e69d3c0892946bfc79f10d00..2fe1c7732eea9491e4bb26902f60430b6c0bb828 100644 (file)
@@ -13,7 +13,7 @@ void *git_mmap(void *start, size_t length, int prot, int flags, int fd, off_t of
        }
 
        start = malloc(length);
-       if (start == NULL) {
+       if (!start) {
                errno = ENOMEM;
                return MAP_FAILED;
        }
index a5e11aad7fe3b7f684b18b58a1cd1a11da5516d8..fa471dbdb8906356c0fd2a2bd0d4487ffc482d27 100644 (file)
--- a/config.c
+++ b/config.c
@@ -3190,7 +3190,7 @@ int git_config_set_multivar_in_file_gently(const char *config_filename,
                        goto out_free;
                }
                /* if nothing to unset, error out */
-               if (value == NULL) {
+               if (!value) {
                        ret = CONFIG_NOTHING_SET;
                        goto out_free;
                }
@@ -3206,7 +3206,7 @@ int git_config_set_multivar_in_file_gently(const char *config_filename,
                int i, new_line = 0;
                struct config_options opts;
 
-               if (value_pattern == NULL)
+               if (!value_pattern)
                        store.value_pattern = NULL;
                else if (value_pattern == CONFIG_REGEX_NONE)
                        store.value_pattern = CONFIG_REGEX_NONE;
@@ -3346,7 +3346,7 @@ int git_config_set_multivar_in_file_gently(const char *config_filename,
                }
 
                /* write the pair (value == NULL means unset) */
-               if (value != NULL) {
+               if (value) {
                        if (!store.section_seen) {
                                if (write_section(fd, key, &store) < 0)
                                        goto write_err_out;
@@ -3567,7 +3567,7 @@ static int git_config_copy_or_rename_section_in_file(const char *config_filename
                        offset = section_name_match(&buf[i], old_name);
                        if (offset > 0) {
                                ret++;
-                               if (new_name == NULL) {
+                               if (!new_name) {
                                        remove = 1;
                                        continue;
                                }
index 316a31d2313380dfe62385da237ef7d4418d11a5..7dcd04820429481adf3d504ca187c7d5263f3cee 100644 (file)
@@ -1189,9 +1189,6 @@ AC_COMPILE_IFELSE([BSD_SYSCTL_SRC],
 GIT_CONF_SUBST([HAVE_BSD_SYSCTL])
 
 ## Other checks.
-# Define USE_PIC if you need the main git objects to be built with -fPIC
-# in order to build and link perl/Git.so.  x86-64 seems to need this.
-#
 # Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link.
 # Enable it on Windows.  By default, symrefs are still used.
 #
diff --git a/contrib/coccinelle/equals-null.cocci b/contrib/coccinelle/equals-null.cocci
new file mode 100644 (file)
index 0000000..92c7054
--- /dev/null
+++ b/contrib/coccinelle/equals-null.cocci
@@ -0,0 +1,30 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+@@
+expression e;
+statement s;
+@@
+if (
+(
+!e
+|
+- e == NULL
++ !e
+)
+   )
+   {...}
+else s
+
+@@
+expression e;
+statement s;
+@@
+if (
+(
+e
+|
+- e != NULL
++ e
+)
+   )
+   {...}
+else s
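
Editor's note: this new semantic patch is what drives the mechanical "x == NULL" -> "!x" and "x != NULL" -> "x" conversions seen throughout the rest of this diff. In plain C terms, the transformation it performs looks like this (the pre-rule form is shown in the comments):

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	char *p = getenv("HOME");

	/* before the rule:  if (p == NULL) */
	if (!p)
		return 1;

	/* before the rule:  if (p != NULL) */
	if (p)
		printf("HOME=%s\n", p);
	return 0;
}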
index 2ee702ecf7fe9d45c2c28faa7eccc79cba38e184..072ea0d92287a780f80a4d8e0ecb1cd46bdcd064 100644 (file)
@@ -11,21 +11,6 @@ expression G;
 + repo_read_object_file(the_repository,
   E, F, G)
 
-@@
-expression E;
-@@
-- has_sha1_file(
-+ repo_has_sha1_file(the_repository,
-  E)
-
-@@
-expression E;
-expression F;
-@@
-- has_sha1_file_with_flags(
-+ repo_has_sha1_file_with_flags(the_repository,
-  E)
-
 @@
 expression E;
 @@
index 8202d62035f98e38d3326dabf6d7f94364f3d842..f383c95e1f96258bc2b7ce4dcd089523f86cba95 100644 (file)
@@ -6,7 +6,11 @@ code editor which runs on your desktop and is available for
 [Windows](https://code.visualstudio.com/docs/setup/windows),
 [macOS](https://code.visualstudio.com/docs/setup/mac) and
 [Linux](https://code.visualstudio.com/docs/setup/linux). Among other languages,
-it has [support for C/C++ via an extension](https://github.com/Microsoft/vscode-cpptools).
+it has [support for C/C++ via an extension](https://github.com/Microsoft/vscode-cpptools) with
+[debugging support](https://code.visualstudio.com/docs/editor/debugging)
+
+To get help about "how to personalize your settings" read:
+[How to set up your settings](https://code.visualstudio.com/docs/getstarted/settings)
 
 To start developing Git with VS Code, simply run the Unix shell script called
 `init.sh` in this directory, which creates the configuration files in
index 27de94994b5dc5fef855ef983d0eb53a512d70c9..f139fd864446262da45cc5ed9bb4180beae2b5a0 100755 (executable)
@@ -271,7 +271,6 @@ cat >.vscode/launch.json.new <<EOF ||
             "stopAtEntry": false,
             "cwd": "\${workspaceFolder}",
             "environment": [],
-            "externalConsole": true,
             "MIMode": "gdb",
             "miDebuggerPath": "$GDBPATH",
             "setupCommands": [
index 8e39731efb0bd26e9aa6c6341f049f2965e659f2..4d153729da0f185207b85d103905b30e3183796b 100644 (file)
--- a/convert.c
+++ b/convert.c
@@ -195,9 +195,9 @@ static void check_global_conv_flags_eol(const char *path,
                if (conv_flags & CONV_EOL_RNDTRP_DIE)
                        die(_("CRLF would be replaced by LF in %s"), path);
                else if (conv_flags & CONV_EOL_RNDTRP_WARN)
-                       warning(_("CRLF will be replaced by LF in %s.\n"
-                                 "The file will have its original line"
-                                 " endings in your working directory"), path);
+                       warning(_("in the working copy of '%s', CRLF will be"
+                                 " replaced by LF the next time Git touches"
+                                 " it"), path);
        } else if (old_stats->lonelf && !new_stats->lonelf ) {
                /*
                 * CRLFs would be added by checkout
@@ -205,9 +205,9 @@ static void check_global_conv_flags_eol(const char *path,
                if (conv_flags & CONV_EOL_RNDTRP_DIE)
                        die(_("LF would be replaced by CRLF in %s"), path);
                else if (conv_flags & CONV_EOL_RNDTRP_WARN)
-                       warning(_("LF will be replaced by CRLF in %s.\n"
-                                 "The file will have its original line"
-                                 " endings in your working directory"), path);
+                       warning(_("in the working copy of '%s', LF will be"
+                                 " replaced by CRLF the next time Git touches"
+                                 " it"), path);
        }
 }
 
index 94a5b8a36453f67d6d7de945c0d2427acd2f8188..aeda6966f05e928f453ee231a8d2f41da37ff0a6 100644 (file)
--- a/daemon.c
+++ b/daemon.c
@@ -447,7 +447,7 @@ static void copy_to_log(int fd)
        FILE *fp;
 
        fp = fdopen(fd, "r");
-       if (fp == NULL) {
+       if (!fp) {
                logerror("fdopen of error channel failed");
                close(fd);
                return;
index 11d60da5b72512332185c703658cee6d14f551bc..50087f567062c4ff858bd6cae83fb3c59a0e4038 100755 (executable)
@@ -9,7 +9,7 @@ CC="$*"
 #
 # FreeBSD clang version 3.4.1 (tags/RELEASE...)
 get_version_line() {
-       $CC -v 2>&1 | grep ' version '
+       LANG=C LC_ALL=C $CC -v 2>&1 | grep ' version '
 }
 
 get_family() {
diff --git a/diff.c b/diff.c
index ef7159968b68c43c361a4cacc4558e14fe82fd4f..e71cf758861bd7596ce122611a4c92fe6b27d8c5 100644 (file)
--- a/diff.c
+++ b/diff.c
@@ -4136,18 +4136,13 @@ static void prep_temp_blob(struct index_state *istate,
                           int mode)
 {
        struct strbuf buf = STRBUF_INIT;
-       struct strbuf tempfile = STRBUF_INIT;
        char *path_dup = xstrdup(path);
        const char *base = basename(path_dup);
        struct checkout_metadata meta;
 
        init_checkout_metadata(&meta, NULL, NULL, oid);
 
-       /* Generate "XXXXXX_basename.ext" */
-       strbuf_addstr(&tempfile, "XXXXXX_");
-       strbuf_addstr(&tempfile, base);
-
-       temp->tempfile = mks_tempfile_ts(tempfile.buf, strlen(base) + 1);
+       temp->tempfile = mks_tempfile_dt("git-blob-XXXXXX", base);
        if (!temp->tempfile)
                die_errno("unable to create temp-file");
        if (convert_to_working_tree(istate, path,
@@ -4162,7 +4157,6 @@ static void prep_temp_blob(struct index_state *istate,
        oid_to_hex_r(temp->hex, oid);
        xsnprintf(temp->mode, sizeof(temp->mode), "%06o", mode);
        strbuf_release(&buf);
-       strbuf_release(&tempfile);
        free(path_dup);
 }
 
diff --git a/dir.c b/dir.c
index f2b0f242101b290d4fc4615dda7b0e1a892947d3..6b030be2ec5742a989845997b5aeb46cb4df5191 100644 (file)
--- a/dir.c
+++ b/dir.c
@@ -2747,13 +2747,33 @@ static void set_untracked_ident(struct untracked_cache *uc)
        strbuf_addch(&uc->ident, 0);
 }
 
-static void new_untracked_cache(struct index_state *istate)
+static unsigned new_untracked_cache_flags(struct index_state *istate)
+{
+       struct repository *repo = istate->repo;
+       char *val;
+
+       /*
+        * This logic is coordinated with the setting of these flags in
+        * wt-status.c#wt_status_collect_untracked(), and the evaluation
+        * of the config setting in commit.c#git_status_config()
+        */
+       if (!repo_config_get_string(repo, "status.showuntrackedfiles", &val) &&
+           !strcmp(val, "all"))
+               return 0;
+
+       /*
+        * The default, if "all" is not set, is "normal" - leading us here.
+        * If the value is "none" then it really doesn't matter.
+        */
+       return DIR_SHOW_OTHER_DIRECTORIES | DIR_HIDE_EMPTY_DIRECTORIES;
+}
+
+static void new_untracked_cache(struct index_state *istate, int flags)
 {
        struct untracked_cache *uc = xcalloc(1, sizeof(*uc));
        strbuf_init(&uc->ident, 100);
        uc->exclude_per_dir = ".gitignore";
-       /* should be the same flags used by git-status */
-       uc->dir_flags = DIR_SHOW_OTHER_DIRECTORIES | DIR_HIDE_EMPTY_DIRECTORIES;
+       uc->dir_flags = flags >= 0 ? flags : new_untracked_cache_flags(istate);
        set_untracked_ident(uc);
        istate->untracked = uc;
        istate->cache_changed |= UNTRACKED_CHANGED;
@@ -2762,11 +2782,11 @@ static void new_untracked_cache(struct index_state *istate)
 void add_untracked_cache(struct index_state *istate)
 {
        if (!istate->untracked) {
-               new_untracked_cache(istate);
+               new_untracked_cache(istate, -1);
        } else {
                if (!ident_in_untracked(istate->untracked)) {
                        free_untracked_cache(istate->untracked);
-                       new_untracked_cache(istate);
+                       new_untracked_cache(istate, -1);
                }
        }
 }
@@ -2814,17 +2834,9 @@ static struct untracked_cache_dir *validate_untracked_cache(struct dir_struct *d
        if (base_len || (pathspec && pathspec->nr))
                return NULL;
 
-       /* Different set of flags may produce different results */
-       if (dir->flags != dir->untracked->dir_flags ||
-           /*
-            * See treat_directory(), case index_nonexistent. Without
-            * this flag, we may need to also cache .git file content
-            * for the resolve_gitlink_ref() call, which we don't.
-            */
-           !(dir->flags & DIR_SHOW_OTHER_DIRECTORIES) ||
-           /* We don't support collecting ignore files */
-           (dir->flags & (DIR_SHOW_IGNORED | DIR_SHOW_IGNORED_TOO |
-                          DIR_COLLECT_IGNORED)))
+       /* We don't support collecting ignore files */
+       if (dir->flags & (DIR_SHOW_IGNORED | DIR_SHOW_IGNORED_TOO |
+                       DIR_COLLECT_IGNORED))
                return NULL;
 
        /*
@@ -2847,6 +2859,50 @@ static struct untracked_cache_dir *validate_untracked_cache(struct dir_struct *d
                return NULL;
        }
 
+       /*
+        * If the untracked structure we received does not have the same flags
+        * as requested in this run, we're going to need to either discard the
+        * existing structure (and potentially later recreate), or bypass the
+        * untracked cache mechanism for this run.
+        */
+       if (dir->flags != dir->untracked->dir_flags) {
+               /*
+                * If the untracked structure we received does not have the same flags
+                * as configured, then we need to reset / create a new "untracked"
+                * structure to match the new config.
+                *
+                * Keeping the saved and used untracked cache consistent with the
+                * configuration provides an opportunity for frequent users of
+                * "git status -uall" to leverage the untracked cache by aligning their
+                * configuration - setting "status.showuntrackedfiles" to "all" or
+                * "normal" as appropriate.
+                *
+                * Previously using -uall (or setting "status.showuntrackedfiles" to
+                * "all") was incompatible with untracked cache and *consistently*
+                * caused surprisingly bad performance (with fscache and fsmonitor
+                * enabled) on Windows.
+                *
+                * IMPROVEMENT OPPORTUNITY: If we reworked the untracked cache storage
+                * to not be as bound up with the desired output in a given run,
+                * and instead iterated through and stored enough information to
+                * correctly serve both "modes", then users could get peak performance
+                * with or without '-uall' regardless of their
+                * "status.showuntrackedfiles" config.
+                */
+               if (dir->untracked->dir_flags != new_untracked_cache_flags(istate)) {
+                       free_untracked_cache(istate->untracked);
+                       new_untracked_cache(istate, dir->flags);
+                       dir->untracked = istate->untracked;
+               }
+               else {
+                       /*
+                        * Current untracked cache data is consistent with config, but not
+                        * usable in this request/run; just bypass untracked cache.
+                        */
+                       return NULL;
+               }
+       }
+
        if (!dir->untracked->root) {
                /* Untracked cache existed but is not initialized; fix that */
                FLEX_ALLOC_STR(dir->untracked->root, name, "");
@@ -3054,7 +3110,7 @@ char *git_url_basename(const char *repo, int is_bundle, int is_bare)
         * Skip scheme.
         */
        start = strstr(repo, "://");
-       if (start == NULL)
+       if (!start)
                start = repo;
        else
                start += 3;
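
Editor's note: new_untracked_cache_flags() derives the cache's dir flags from status.showuntrackedfiles, so a cache built for -uall (no directory folding) is no longer silently reused for "normal" and vice versa. A rough sketch of that config-to-flags mapping, with made-up flag values rather than git's DIR_* bits:

#include <stdio.h>
#include <string.h>

/* hypothetical stand-ins for DIR_SHOW_OTHER_DIRECTORIES etc. */
#define SHOW_OTHER_DIRECTORIES (1 << 0)
#define HIDE_EMPTY_DIRECTORIES (1 << 1)

static unsigned untracked_cache_flags(const char *showuntrackedfiles)
{
	/* "-uall" wants every file listed individually: no directory folding */
	if (showuntrackedfiles && !strcmp(showuntrackedfiles, "all"))
		return 0;
	/* "normal" (the default) folds untracked directories */
	return SHOW_OTHER_DIRECTORIES | HIDE_EMPTY_DIRECTORIES;
}

int main(void)
{
	printf("all:    flags %u\n", untracked_cache_flags("all"));	/* 0 */
	printf("normal: flags %u\n", untracked_cache_flags("normal"));	/* 3 */
	return 0;
}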
index 5bff1b386fd8823b0205b7e473b91b7aa44d7f73..b3296ce7d15140bff12299b25d1450f69f8508ee 100644 (file)
@@ -273,7 +273,7 @@ const char *get_git_work_tree(void)
        return the_repository->worktree;
 }
 
-char *get_object_directory(void)
+const char *get_object_directory(void)
 {
        if (!the_repository->objects->odb)
                BUG("git environment hasn't been setup");
index 38a47c44db4c6cd43bd384890ed414d4665bf456..ac618641632f8c347b2a2900fb15b0de9ed8dd8a 100644 (file)
@@ -216,14 +216,9 @@ int bitmap_is_subset(struct bitmap *self, struct bitmap *other)
        return 0;
 }
 
-void bitmap_reset(struct bitmap *bitmap)
-{
-       memset(bitmap->words, 0x0, bitmap->word_alloc * sizeof(eword_t));
-}
-
 void bitmap_free(struct bitmap *bitmap)
 {
-       if (bitmap == NULL)
+       if (!bitmap)
                return;
 
        free(bitmap->words);
index 2a8c7c5c33ab864788baaa7b6a52a5c59f84f7d0..6fe48d3ae0449a0298deb109b967ef3479ef0c47 100644 (file)
@@ -451,7 +451,7 @@ struct ewah_bitmap *ewah_pool_new(void)
 
 void ewah_pool_free(struct ewah_bitmap *self)
 {
-       if (self == NULL)
+       if (!self)
                return;
 
        if (bitmap_pool_size == BITMAP_POOL_MAX ||
index 66920965da19ab4a38db18d1b18e0f5fbaa4776f..7eb8b9b63013daa70c91687108307d38a9eb1e09 100644 (file)
@@ -177,7 +177,6 @@ struct bitmap *bitmap_dup(const struct bitmap *src);
 void bitmap_set(struct bitmap *self, size_t pos);
 void bitmap_unset(struct bitmap *self, size_t pos);
 int bitmap_get(struct bitmap *self, size_t pos);
-void bitmap_reset(struct bitmap *self);
 void bitmap_free(struct bitmap *self);
 int bitmap_equals(struct bitmap *self, struct bitmap *other);
 int bitmap_is_subset(struct bitmap *self, struct bitmap *other);
index 4e1e88eea097dde85dbbe200ed28d2886a5f2dca..6d0d2712595588eb2c812467528a10fe98775f1f 100644 (file)
@@ -1370,17 +1370,20 @@ static int send_fetch_request(struct fetch_negotiator *negotiator, int fd_out,
 static int process_section_header(struct packet_reader *reader,
                                  const char *section, int peek)
 {
-       int ret;
-
-       if (packet_reader_peek(reader) != PACKET_READ_NORMAL)
-               die(_("error reading section header '%s'"), section);
+       int ret = 0;
 
-       ret = !strcmp(reader->line, section);
+       if (packet_reader_peek(reader) == PACKET_READ_NORMAL &&
+           !strcmp(reader->line, section))
+               ret = 1;
 
        if (!peek) {
-               if (!ret)
-                       die(_("expected '%s', received '%s'"),
-                           section, reader->line);
+               if (!ret) {
+                       if (reader->line)
+                               die(_("expected '%s', received '%s'"),
+                                   section, reader->line);
+                       else
+                               die(_("expected '%s'"), section);
+               }
                packet_reader_read(reader);
        }
 
index 542a6a75eb3c4b93d8b4a394551d5d38d2a88f6e..9f99201bcca1eada84c3f19d1ca44cb41b3f3c59 100644 (file)
@@ -63,7 +63,7 @@ $(list_tool_variants)"
                                        preamble=
                                fi
                                shown_any=yes
-                               printf "%s%s\n" "$per_line_prefix" "$toolname"
+                               printf "%s%-15s  %s\n" "$per_line_prefix" "$toolname" $(diff_mode && diff_cmd_help "$toolname" || merge_cmd_help "$toolname")
                        fi
                done
 
@@ -162,10 +162,18 @@ setup_tool () {
                return 1
        }
 
+       diff_cmd_help () {
+               return 0
+       }
+
        merge_cmd () {
                return 1
        }
 
+       merge_cmd_help () {
+               return 0
+       }
+
        hide_resolved_enabled () {
                return 0
        }
index a9b1f9044108e4dce94b865f1f777039d8755613..8fbf6eb1fe385090489f55c87913d0867ad8c349 100755 (executable)
--- a/git-p4.py
+++ b/git-p4.py
@@ -7,34 +7,52 @@
 #            2007 Trolltech ASA
 # License: MIT <http://www.opensource.org/licenses/mit-license.php>
 #
-# pylint: disable=invalid-name,missing-docstring,too-many-arguments,broad-except
-# pylint: disable=no-self-use,wrong-import-position,consider-iterating-dictionary
-# pylint: disable=wrong-import-order,unused-import,too-few-public-methods
-# pylint: disable=too-many-lines,ungrouped-imports,fixme,too-many-locals
-# pylint: disable=line-too-long,bad-whitespace,superfluous-parens
-# pylint: disable=too-many-statements,too-many-instance-attributes
-# pylint: disable=too-many-branches,too-many-nested-blocks
+# pylint: disable=bad-whitespace
+# pylint: disable=broad-except
+# pylint: disable=consider-iterating-dictionary
+# pylint: disable=disable
+# pylint: disable=fixme
+# pylint: disable=invalid-name
+# pylint: disable=line-too-long
+# pylint: disable=missing-docstring
+# pylint: disable=no-self-use
+# pylint: disable=superfluous-parens
+# pylint: disable=too-few-public-methods
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-branches
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-locals
+# pylint: disable=too-many-nested-blocks
+# pylint: disable=too-many-statements
+# pylint: disable=ungrouped-imports
+# pylint: disable=unused-import
+# pylint: disable=wrong-import-order
+# pylint: disable=wrong-import-position
 #
+
+import struct
 import sys
 if sys.version_info.major < 3 and sys.version_info.minor < 7:
     sys.stderr.write("git-p4: requires Python 2.7 or later.\n")
     sys.exit(1)
-import os
-import optparse
+
+import ctypes
+import errno
 import functools
+import glob
 import marshal
-import subprocess
-import tempfile
-import time
+import optparse
+import os
 import platform
 import re
 import shutil
 import stat
+import subprocess
+import tempfile
+import time
 import zipfile
 import zlib
-import ctypes
-import errno
-import glob
 
 # On python2.7 where raw_input() and input() are both availble,
 # we want raw_input's semantics, but aliased to input for python3
@@ -52,17 +70,21 @@ verbose = False
 defaultLabelRegexp = r'[a-zA-Z0-9_\-.]+$'
 
 # The block size is reduced automatically if required
-defaultBlockSize = 1<<20
+defaultBlockSize = 1 << 20
+
+defaultMetadataDecodingStrategy = 'passthrough' if sys.version_info.major == 2 else 'fallback'
+defaultFallbackMetadataEncoding = 'cp1252'
 
 p4_access_checked = False
 
 re_ko_keywords = re.compile(br'\$(Id|Header)(:[^$\n]+)?\$')
 re_k_keywords = re.compile(br'\$(Id|Header|Author|Date|DateTime|Change|File|Revision)(:[^$\n]+)?\$')
 
+
 def format_size_human_readable(num):
-    """ Returns a number of units (typically bytes) formatted as a human-readable
-        string.
-    """
+    """Returns a number of units (typically bytes) formatted as a
+       human-readable string.
+       """
     if num < 1024:
         return '{:d} B'.format(num)
     for unit in ["Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
@@ -71,18 +93,19 @@ def format_size_human_readable(num):
             return "{:3.1f} {}B".format(num, unit)
     return "{:.1f} YiB".format(num)
 
+
 def p4_build_cmd(cmd):
     """Build a suitable p4 command line.
 
-    This consolidates building and returning a p4 command line into one
-    location. It means that hooking into the environment, or other configuration
-    can be done more easily.
-    """
+       This consolidates building and returning a p4 command line into one
+       location. It means that hooking into the environment, or other
+       configuration can be done more easily.
+       """
     real_cmd = ["p4"]
 
     user = gitConfig("git-p4.user")
     if len(user) > 0:
-        real_cmd += ["-u",user]
+        real_cmd += ["-u", user]
 
     password = gitConfig("git-p4.password")
     if len(password) > 0:
@@ -118,31 +141,31 @@ def p4_build_cmd(cmd):
 
     return real_cmd
 
+
 def git_dir(path):
-    """ Return TRUE if the given path is a git directory (/path/to/dir/.git).
-        This won't automatically add ".git" to a directory.
-    """
+    """Return TRUE if the given path is a git directory (/path/to/dir/.git).
+       This won't automatically add ".git" to a directory.
+       """
     d = read_pipe(["git", "--git-dir", path, "rev-parse", "--git-dir"], True).strip()
     if not d or len(d) == 0:
         return None
     else:
         return d
 
+
 def chdir(path, is_client_path=False):
-    """Do chdir to the given path, and set the PWD environment
-       variable for use by P4.  It does not look at getcwd() output.
-       Since we're not using the shell, it is necessary to set the
-       PWD environment variable explicitly.
-
-       Normally, expand the path to force it to be absolute.  This
-       addresses the use of relative path names inside P4 settings,
-       e.g. P4CONFIG=.p4config.  P4 does not simply open the filename
-       as given; it looks for .p4config using PWD.
-
-       If is_client_path, the path was handed to us directly by p4,
-       and may be a symbolic link.  Do not call os.getcwd() in this
-       case, because it will cause p4 to think that PWD is not inside
-       the client path.
+    """Do chdir to the given path, and set the PWD environment variable for use
+       by P4.  It does not look at getcwd() output.  Since we're not using the
+       shell, it is necessary to set the PWD environment variable explicitly.
+
+       Normally, expand the path to force it to be absolute.  This addresses
+       the use of relative path names inside P4 settings, e.g.
+       P4CONFIG=.p4config.  P4 does not simply open the filename as given; it
+       looks for .p4config using PWD.
+
+       If is_client_path, the path was handed to us directly by p4, and may be
+       a symbolic link.  Do not call os.getcwd() in this case, because it will
+       cause p4 to think that PWD is not inside the client path.
        """
 
     os.chdir(path)
@@ -150,6 +173,7 @@ def chdir(path, is_client_path=False):
         path = os.getcwd()
     os.environ['PWD'] = path
 
+
 def calcDiskFree():
     """Return free space in bytes on the disk of the given dirname."""
     if platform.system() == 'Windows':
@@ -160,52 +184,124 @@ def calcDiskFree():
         st = os.statvfs(os.getcwd())
         return st.f_bavail * st.f_frsize
 
+
 def die(msg):
-    """ Terminate execution. Make sure that any running child processes have been wait()ed for before
-        calling this.
-    """
+    """Terminate execution. Make sure that any running child processes have
+       been wait()ed for before calling this.
+       """
     if verbose:
         raise Exception(msg)
     else:
         sys.stderr.write(msg + "\n")
         sys.exit(1)
 
+
 def prompt(prompt_text):
-    """ Prompt the user to choose one of the choices
+    """Prompt the user to choose one of the choices.
 
-    Choices are identified in the prompt_text by square brackets around
-    a single letter option.
-    """
+       Choices are identified in the prompt_text by square brackets around a
+       single letter option.
+       """
     choices = set(m.group(1) for m in re.finditer(r"\[(.)\]", prompt_text))
     while True:
         sys.stderr.flush()
         sys.stdout.write(prompt_text)
         sys.stdout.flush()
-        response=sys.stdin.readline().strip().lower()
+        response = sys.stdin.readline().strip().lower()
         if not response:
             continue
         response = response[0]
         if response in choices:
             return response
 
+
 # We need different encoding/decoding strategies for text data being passed
 # around in pipes depending on python version
 if bytes is not str:
     # For python3, always encode and decode as appropriate
     def decode_text_stream(s):
         return s.decode() if isinstance(s, bytes) else s
+
     def encode_text_stream(s):
         return s.encode() if isinstance(s, str) else s
 else:
     # For python2.7, pass read strings as-is, but also allow writing unicode
     def decode_text_stream(s):
         return s
+
     def encode_text_stream(s):
         return s.encode('utf_8') if isinstance(s, unicode) else s
 
+
+class MetadataDecodingException(Exception):
+    def __init__(self, input_string):
+        self.input_string = input_string
+
+    def __str__(self):
+        return """Decoding perforce metadata failed!
+The failing string was:
+---
+{}
+---
+Consider setting the git-p4.metadataDecodingStrategy config option to
+'fallback', to allow metadata to be decoded using a fallback encoding,
+defaulting to cp1252.""".format(self.input_string)
+
+
+encoding_fallback_warning_issued = False
+encoding_escape_warning_issued = False
+def metadata_stream_to_writable_bytes(s):
+    encodingStrategy = gitConfig('git-p4.metadataDecodingStrategy') or defaultMetadataDecodingStrategy
+    fallbackEncoding = gitConfig('git-p4.metadataFallbackEncoding') or defaultFallbackMetadataEncoding
+    if not isinstance(s, bytes):
+        return s.encode('utf_8')
+    if encodingStrategy == 'passthrough':
+        return s
+    try:
+        s.decode('utf_8')
+        return s
+    except UnicodeDecodeError:
+        if encodingStrategy == 'fallback' and fallbackEncoding:
+            global encoding_fallback_warning_issued
+            global encoding_escape_warning_issued
+            try:
+                if not encoding_fallback_warning_issued:
+                    print("\nCould not decode value as utf-8; using configured fallback encoding %s: %s" % (fallbackEncoding, s))
+                    print("\n(this warning is only displayed once during an import)")
+                    encoding_fallback_warning_issued = True
+                return s.decode(fallbackEncoding).encode('utf_8')
+            except Exception as exc:
+                if not encoding_escape_warning_issued:
+                    print("\nCould not decode value with configured fallback encoding %s; escaping bytes over 127: %s" % (fallbackEncoding, s))
+                    print("\n(this warning is only displayed once during an import)")
+                    encoding_escape_warning_issued = True
+                escaped_bytes = b''
+                # bytes and strings work very differently in python2 vs python3...
+                if str is bytes:
+                    for byte in s:
+                        byte_number = struct.unpack('>B', byte)[0]
+                        if byte_number > 127:
+                            escaped_bytes += b'%'
+                            escaped_bytes += hex(byte_number)[2:].upper()
+                        else:
+                            escaped_bytes += byte
+                else:
+                    for byte_number in s:
+                        if byte_number > 127:
+                            escaped_bytes += b'%'
+                            escaped_bytes += hex(byte_number).upper().encode()[2:]
+                        else:
+                            escaped_bytes += bytes([byte_number])
+                return escaped_bytes
+
+        raise MetadataDecodingException(s)
+
+
 def decode_path(path):
-    """Decode a given string (bytes or otherwise) using configured path encoding options
-    """
+    """Decode a given string (bytes or otherwise) using configured path
+       encoding options.
+       """
+
     encoding = gitConfig('git-p4.pathEncoding') or 'utf_8'
     if bytes is not str:
         return path.decode(encoding, errors='replace') if isinstance(path, bytes) else path
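
The hunk above introduces git-p4.metadataDecodingStrategy ('passthrough' by default on Python 2, 'fallback' on Python 3) and git-p4.metadataFallbackEncoding (default cp1252): Perforce metadata is kept as UTF-8 bytes when it already decodes cleanly, re-decoded with the fallback encoding when it does not, and percent-escaped as a last resort. A minimal standalone sketch of that decision flow, with the git-config lookups replaced by plain parameters (illustration only, Python 3, not the exact code git-p4 runs):

    # Simplified restatement of metadata_stream_to_writable_bytes() above;
    # strategy/fallback_encoding stand in for the git-p4.* config values.
    def to_writable_bytes(s, strategy='fallback', fallback_encoding='cp1252'):
        if not isinstance(s, bytes):
            return s.encode('utf_8')
        if strategy == 'passthrough':
            return s                      # python2-era behaviour: hand bytes through untouched
        try:
            s.decode('utf_8')
            return s                      # already valid UTF-8
        except UnicodeDecodeError:
            if strategy == 'fallback' and fallback_encoding:
                try:
                    # reinterpret with the fallback encoding, then re-encode as UTF-8
                    return s.decode(fallback_encoding).encode('utf_8')
                except UnicodeDecodeError:
                    # last resort: percent-escape every byte above 127
                    return b''.join(b'%%%02X' % b if b > 127 else bytes([b]) for b in s)
            raise ValueError('cannot decode %r' % s)   # stand-in for MetadataDecodingException

    # cp1252 0xE9 is "é"; re-encoded as UTF-8 it becomes the two bytes C3 A9.
    assert to_writable_bytes(b'Caf\xe9') == b'Caf\xc3\xa9'
    assert to_writable_bytes(b'plain ascii') == b'plain ascii'
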
@@ -218,6 +314,7 @@ def decode_path(path):
                 print('Path with non-ASCII characters detected. Used {} to decode: {}'.format(encoding, path))
         return path
 
+
 def run_git_hook(cmd, param=[]):
     """Execute a hook if the hook exists."""
     args = ['git', 'hook', 'run', '--ignore-missing', cmd]
@@ -227,6 +324,7 @@ def run_git_hook(cmd, param=[]):
             args.append(p)
     return subprocess.call(args) == 0
 
+
 def write_pipe(c, stdin, *k, **kw):
     if verbose:
         sys.stderr.write('Writing pipe: {}\n'.format(' '.join(c)))
@@ -240,33 +338,35 @@ def write_pipe(c, stdin, *k, **kw):
 
     return val
 
+
 def p4_write_pipe(c, stdin, *k, **kw):
     real_cmd = p4_build_cmd(c)
     if bytes is not str and isinstance(stdin, str):
         stdin = encode_text_stream(stdin)
     return write_pipe(real_cmd, stdin, *k, **kw)
 
+
 def read_pipe_full(c, *k, **kw):
-    """ Read output from  command. Returns a tuple
-        of the return status, stdout text and stderr
-        text.
-    """
+    """Read output from command. Returns a tuple of the return status, stdout
+       text and stderr text.
+       """
     if verbose:
         sys.stderr.write('Reading pipe: {}\n'.format(' '.join(c)))
 
     p = subprocess.Popen(
         c, stdout=subprocess.PIPE, stderr=subprocess.PIPE, *k, **kw)
-    (out, err) = p.communicate()
+    out, err = p.communicate()
     return (p.returncode, out, decode_text_stream(err))
 
+
 def read_pipe(c, ignore_error=False, raw=False, *k, **kw):
-    """ Read output from  command. Returns the output text on
-        success. On failure, terminates execution, unless
-        ignore_error is True, when it returns an empty string.
+    """Read output from a command. Returns the output text on success. On
+       failure, terminates execution, unless ignore_error is True, when it
+       returns an empty string.
 
-        If raw is True, do not attempt to decode output text.
-    """
-    (retcode, out, err) = read_pipe_full(c, *k, **kw)
+       If raw is True, do not attempt to decode output text.
+       """
+    retcode, out, err = read_pipe_full(c, *k, **kw)
     if retcode != 0:
         if ignore_error:
             out = ""
@@ -276,20 +376,23 @@ def read_pipe(c, ignore_error=False, raw=False, *k, **kw):
         out = decode_text_stream(out)
     return out
 
+
 def read_pipe_text(c, *k, **kw):
-    """ Read output from a command with trailing whitespace stripped.
-        On error, returns None.
-    """
-    (retcode, out, err) = read_pipe_full(c, *k, **kw)
+    """Read output from a command with trailing whitespace stripped. On error,
+       returns None.
+       """
+    retcode, out, err = read_pipe_full(c, *k, **kw)
     if retcode != 0:
         return None
     else:
         return decode_text_stream(out).rstrip()
 
+
 def p4_read_pipe(c, ignore_error=False, raw=False, *k, **kw):
     real_cmd = p4_build_cmd(c)
     return read_pipe(real_cmd, ignore_error, raw=raw, *k, **kw)
 
+
 def read_pipe_lines(c, raw=False, *k, **kw):
     if verbose:
         sys.stderr.write('Reading pipe: {}\n'.format(' '.join(c)))
@@ -303,31 +406,36 @@ def read_pipe_lines(c, raw=False, *k, **kw):
         die('Command failed: {}'.format(' '.join(c)))
     return lines
 
+
 def p4_read_pipe_lines(c, *k, **kw):
-    """Specifically invoke p4 on the command supplied. """
+    """Specifically invoke p4 on the command supplied."""
     real_cmd = p4_build_cmd(c)
     return read_pipe_lines(real_cmd, *k, **kw)
 
+
 def p4_has_command(cmd):
-    """Ask p4 for help on this command.  If it returns an error, the
-       command does not exist in this version of p4."""
+    """Ask p4 for help on this command.  If it returns an error, the command
+       does not exist in this version of p4.
+       """
     real_cmd = p4_build_cmd(["help", cmd])
     p = subprocess.Popen(real_cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
     p.communicate()
     return p.returncode == 0
 
+
 def p4_has_move_command():
-    """See if the move command exists, that it supports -k, and that
-       it has not been administratively disabled.  The arguments
-       must be correct, but the filenames do not have to exist.  Use
-       ones with wildcards so even if they exist, it will fail."""
+    """See if the move command exists, that it supports -k, and that it has not
+       been administratively disabled.  The arguments must be correct, but the
+       filenames do not have to exist.  Use ones with wildcards so even if they
+       exist, it will fail.
+       """
 
     if not p4_has_command("move"):
         return False
     cmd = p4_build_cmd(["move", "-k", "@from", "@to"])
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    (out, err) = p.communicate()
+    out, err = p.communicate()
     err = decode_text_stream(err)
     # return code will be 1 in either case
     if err.find("Invalid option") >= 0:
@@ -337,6 +445,7 @@ def p4_has_move_command():
     # assume it failed because @... was invalid changelist
     return True
 
+
 def system(cmd, ignore_error=False, *k, **kw):
     if verbose:
         sys.stderr.write("executing {}\n".format(
@@ -347,19 +456,22 @@ def system(cmd, ignore_error=False, *k, **kw):
 
     return retcode
 
+
 def p4_system(cmd, *k, **kw):
-    """Specifically invoke p4 as the system command. """
+    """Specifically invoke p4 as the system command."""
     real_cmd = p4_build_cmd(cmd)
     retcode = subprocess.call(real_cmd, *k, **kw)
     if retcode:
         raise subprocess.CalledProcessError(retcode, real_cmd)
 
+
 def die_bad_access(s):
     die("failure accessing depot: {0}".format(s.rstrip()))
 
+
 def p4_check_access(min_expiration=1):
-    """ Check if we can access Perforce - account still logged in
-    """
+    """Check if we can access Perforce - account still logged in."""
+
     results = p4CmdList(["login", "-s"])
 
     if len(results) == 0:
@@ -402,62 +514,78 @@ def p4_check_access(min_expiration=1):
     else:
         die_bad_access("unknown error code {0}".format(code))
 
+
 _p4_version_string = None
+
+
 def p4_version_string():
-    """Read the version string, showing just the last line, which
-       hopefully is the interesting version bit.
+    """Read the version string, showing just the last line, which hopefully is
+       the interesting version bit.
 
        $ p4 -V
        Perforce - The Fast Software Configuration Management System.
        Copyright 1995-2011 Perforce Software.  All rights reserved.
        Rev. P4/NTX86/2011.1/393975 (2011/12/16).
-    """
+       """
     global _p4_version_string
     if not _p4_version_string:
         a = p4_read_pipe_lines(["-V"])
         _p4_version_string = a[-1].rstrip()
     return _p4_version_string
 
+
 def p4_integrate(src, dest):
     p4_system(["integrate", "-Dt", wildcard_encode(src), wildcard_encode(dest)])
 
+
 def p4_sync(f, *options):
     p4_system(["sync"] + list(options) + [wildcard_encode(f)])
 
+
 def p4_add(f):
-    # forcibly add file names with wildcards
+    """Forcibly add file names with wildcards."""
     if wildcard_present(f):
         p4_system(["add", "-f", f])
     else:
         p4_system(["add", f])
 
+
 def p4_delete(f):
     p4_system(["delete", wildcard_encode(f)])
 
+
 def p4_edit(f, *options):
     p4_system(["edit"] + list(options) + [wildcard_encode(f)])
 
+
 def p4_revert(f):
     p4_system(["revert", wildcard_encode(f)])
 
+
 def p4_reopen(type, f):
     p4_system(["reopen", "-t", type, wildcard_encode(f)])
 
+
 def p4_reopen_in_change(changelist, files):
     cmd = ["reopen", "-c", str(changelist)] + files
     p4_system(cmd)
 
+
 def p4_move(src, dest):
     p4_system(["move", "-k", wildcard_encode(src), wildcard_encode(dest)])
 
+
 def p4_last_change():
     results = p4CmdList(["changes", "-m", "1"], skip_info=True)
     return int(results[0]['change'])
 
+
 def p4_describe(change, shelved=False):
-    """Make sure it returns a valid result by checking for
-       the presence of field "time".  Return a dict of the
-       results."""
+    """Make sure it returns a valid result by checking for the presence of
+       field "time".
+
+       Return a dict of the results.
+       """
 
     cmd = ["describe", "-s"]
     if shelved:
@@ -482,12 +610,11 @@ def p4_describe(change, shelved=False):
 
     return d
 
-#
-# Canonicalize the p4 type and return a tuple of the
-# base type, plus any modifiers.  See "p4 help filetypes"
-# for a list and explanation.
-#
+
 def split_p4_type(p4type):
+    """Canonicalize the p4 type and return a tuple of the base type, plus any
+       modifiers.  See "p4 help filetypes" for a list and explanation.
+       """
 
     p4_filetypes_historical = {
         "ctempobj": "binary+Sw",
@@ -517,18 +644,19 @@ def split_p4_type(p4type):
         mods = s[1]
     return (base, mods)
 
-#
-# return the raw p4 type of a file (text, text+ko, etc)
-#
+
 def p4_type(f):
+    """Return the raw p4 type of a file (text, text+ko, etc)."""
+
     results = p4CmdList(["fstat", "-T", "headType", wildcard_encode(f)])
     return results[0]['headType']
 
-#
-# Given a type base and modifier, return a regexp matching
-# the keywords that can be expanded in the file
-#
+
 def p4_keywords_regexp_for_type(base, type_mods):
+    """Given a type base and modifier, return a regexp matching the keywords
+       that can be expanded in the file.
+       """
+
     if base in ("text", "unicode", "binary"):
         if "ko" in type_mods:
             return re_ko_keywords
@@ -539,21 +667,23 @@ def p4_keywords_regexp_for_type(base, type_mods):
     else:
         return None
 
-#
-# Given a file, return a regexp matching the possible
-# RCS keywords that will be expanded, or None for files
-# with kw expansion turned off.
-#
+
 def p4_keywords_regexp_for_file(file):
+    """Given a file, return a regexp matching the possible RCS keywords that
+       will be expanded, or None for files with kw expansion turned off.
+       """
+
     if not os.path.exists(file):
         return None
     else:
-        (type_base, type_mods) = split_p4_type(p4_type(file))
+        type_base, type_mods = split_p4_type(p4_type(file))
         return p4_keywords_regexp_for_type(type_base, type_mods)
 
+
 def setP4ExecBit(file, mode):
-    # Reopens an already open file and changes the execute bit to match
-    # the execute bit setting in the passed in mode.
+    """Reopens an already open file and changes the execute bit to match the
+       execute bit setting in the passed in mode.
+       """
 
     p4Type = "+x"
 
@@ -566,8 +696,9 @@ def setP4ExecBit(file, mode):
 
     p4_reopen(p4Type, file)
 
+
 def getP4OpenedType(file):
-    # Returns the perforce file type for the given file.
+    """Returns the perforce file type for the given file."""
 
     result = p4_read_pipe(["opened", wildcard_encode(file)])
     match = re.match(".*\((.+)\)( \*exclusive\*)?\r?$", result)
@@ -576,8 +707,10 @@ def getP4OpenedType(file):
     else:
         die("Could not determine file type for %s (result: '%s')" % (file, result))
 
-# Return the set of all p4 labels
+
 def getP4Labels(depotPaths):
+    """Return the set of all p4 labels."""
+
     labels = set()
     if not isinstance(depotPaths, list):
         depotPaths = [depotPaths]
@@ -588,34 +721,39 @@ def getP4Labels(depotPaths):
 
     return labels
 
-# Return the set of all git tags
+
 def getGitTags():
+    """Return the set of all git tags."""
+
     gitTags = set()
     for line in read_pipe_lines(["git", "tag"]):
         tag = line.strip()
         gitTags.add(tag)
     return gitTags
 
+
 _diff_tree_pattern = None
 
+
 def parseDiffTreeEntry(entry):
     """Parses a single diff tree entry into its component elements.
 
-    See git-diff-tree(1) manpage for details about the format of the diff
-    output. This method returns a dictionary with the following elements:
-
-    src_mode - The mode of the source file
-    dst_mode - The mode of the destination file
-    src_sha1 - The sha1 for the source file
-    dst_sha1 - The sha1 fr the destination file
-    status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
-    status_score - The score for the status (applicable for 'C' and 'R'
-                   statuses). This is None if there is no score.
-    src - The path for the source file.
-    dst - The path for the destination file. This is only present for
-          copy or renames. If it is not present, this is None.
-
-    If the pattern is not matched, None is returned."""
+       See git-diff-tree(1) manpage for details about the format of the diff
+       output. This method returns a dictionary with the following elements:
+
+       src_mode - The mode of the source file
+       dst_mode - The mode of the destination file
+       src_sha1 - The sha1 for the source file
+       dst_sha1 - The sha1 for the destination file
+       status - The one letter status of the diff (i.e. 'A', 'M', 'D', etc)
+       status_score - The score for the status (applicable for 'C' and 'R'
+                      statuses). This is None if there is no score.
+       src - The path for the source file.
+       dst - The path for the destination file. This is only present for
+             copy or renames. If it is not present, this is None.
+
+       If the pattern is not matched, None is returned.
+       """
 
     global _diff_tree_pattern
     if not _diff_tree_pattern:
@@ -635,41 +773,55 @@ def parseDiffTreeEntry(entry):
         }
     return None
 
+
 def isModeExec(mode):
-    # Returns True if the given git mode represents an executable file,
-    # otherwise False.
+    """Returns True if the given git mode represents an executable file,
+       otherwise False.
+       """
     return mode[-3:] == "755"
 
+
 class P4Exception(Exception):
-    """ Base class for exceptions from the p4 client """
+    """Base class for exceptions from the p4 client."""
+
     def __init__(self, exit_code):
         self.p4ExitCode = exit_code
 
+
 class P4ServerException(P4Exception):
-    """ Base class for exceptions where we get some kind of marshalled up result from the server """
+    """Base class for exceptions where we get some kind of marshalled up result
+       from the server.
+       """
+
     def __init__(self, exit_code, p4_result):
         super(P4ServerException, self).__init__(exit_code)
         self.p4_result = p4_result
         self.code = p4_result[0]['code']
         self.data = p4_result[0]['data']
 
+
 class P4RequestSizeException(P4ServerException):
-    """ One of the maxresults or maxscanrows errors """
+    """One of the maxresults or maxscanrows errors."""
+
     def __init__(self, exit_code, p4_result, limit):
         super(P4RequestSizeException, self).__init__(exit_code, p4_result)
         self.limit = limit
 
+
 class P4CommandException(P4Exception):
-    """ Something went wrong calling p4 which means we have to give up """
+    """Something went wrong calling p4 which means we have to give up."""
+
     def __init__(self, msg):
         self.msg = msg
 
     def __str__(self):
         return self.msg
 
+
 def isModeExecChanged(src_mode, dst_mode):
     return isModeExec(src_mode) != isModeExec(dst_mode)
 
+
 def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None, skip_info=False,
         errors_as_exceptions=False, *k, **kw):
 
@@ -702,11 +854,12 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None, skip_info=False,
             if bytes is not str:
                 # Decode unmarshalled dict to use str keys and values, except for:
                 #   - `data` which may contain arbitrary binary data
-                #   - `depotFile[0-9]*`, `path`, or `clientFile` which may contain non-UTF8 encoded text
+                #   - `desc` or `FullName` which may contain non-UTF8 encoded text handled below, eagerly converted to bytes
+                #   - `depotFile[0-9]*`, `path`, or `clientFile` which may contain non-UTF8 encoded text, handled by decode_path()
                 decoded_entry = {}
                 for key, value in entry.items():
                     key = key.decode()
-                    if isinstance(value, bytes) and not (key in ('data', 'path', 'clientFile') or key.startswith('depotFile')):
+                    if isinstance(value, bytes) and not (key in ('data', 'desc', 'FullName', 'path', 'clientFile') or key.startswith('depotFile')):
                         value = value.decode()
                     decoded_entry[key] = value
                 # Parse out data if it's an error response
@@ -716,6 +869,10 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None, skip_info=False,
             if skip_info:
                 if 'code' in entry and entry['code'] == 'info':
                     continue
+            if 'desc' in entry:
+                entry['desc'] = metadata_stream_to_writable_bytes(entry['desc'])
+            if 'FullName' in entry:
+                entry['FullName'] = metadata_stream_to_writable_bytes(entry['FullName'])
             if cb is not None:
                 cb(entry)
             else:
@@ -746,12 +903,14 @@ def p4CmdList(cmd, stdin=None, stdin_mode='w+b', cb=None, skip_info=False,
 
     return result
 
+
 def p4Cmd(cmd, *k, **kw):
     list = p4CmdList(cmd, *k, **kw)
     result = {}
     for entry in list:
         result.update(entry)
-    return result;
+    return result
+
 
 def p4Where(depotPath):
     if not depotPath.endswith("/"):
@@ -773,7 +932,7 @@ def p4Where(depotPath):
             if data[:space] == depotPath:
                 output = entry
                 break
-    if output == None:
+    if output is None:
         return ""
     if output["code"] == "error":
         return ""
@@ -789,48 +948,54 @@ def p4Where(depotPath):
         clientPath = clientPath[:-3]
     return clientPath
 
+
 def currentGitBranch():
     return read_pipe_text(["git", "symbolic-ref", "--short", "-q", "HEAD"])
 
+
 def isValidGitDir(path):
-    return git_dir(path) != None
+    return git_dir(path) is not None
+
 
 def parseRevision(ref):
     return read_pipe(["git", "rev-parse", ref]).strip()
 
+
 def branchExists(ref):
     rev = read_pipe(["git", "rev-parse", "-q", "--verify", ref],
                      ignore_error=True)
     return len(rev) > 0
 
+
 def extractLogMessageFromGitCommit(commit):
     logMessage = ""
 
-    ## fixme: title is first line of commit, not 1st paragraph.
+    # fixme: title is first line of commit, not 1st paragraph.
     foundTitle = False
     for log in read_pipe_lines(["git", "cat-file", "commit", commit]):
-       if not foundTitle:
-           if len(log) == 1:
-               foundTitle = True
-           continue
+        if not foundTitle:
+            if len(log) == 1:
+                foundTitle = True
+            continue
 
-       logMessage += log
+        logMessage += log
     return logMessage
 
+
 def extractSettingsGitLog(log):
     values = {}
     for line in log.split("\n"):
         line = line.strip()
-        m = re.search (r"^ *\[git-p4: (.*)\]$", line)
+        m = re.search(r"^ *\[git-p4: (.*)\]$", line)
         if not m:
             continue
 
-        assignments = m.group(1).split (':')
+        assignments = m.group(1).split(':')
         for a in assignments:
-            vals = a.split ('=')
+            vals = a.split('=')
             key = vals[0].strip()
-            val = ('='.join (vals[1:])).strip()
-            if val.endswith ('\"') and val.startswith('"'):
+            val = ('='.join(vals[1:])).strip()
+            if val.endswith('\"') and val.startswith('"'):
                 val = val[1:-1]
 
             values[key] = val
@@ -842,41 +1007,49 @@ def extractSettingsGitLog(log):
         values['depot-paths'] = paths.split(',')
     return values
 
+
 def gitBranchExists(branch):
     proc = subprocess.Popen(["git", "rev-parse", branch],
-                            stderr=subprocess.PIPE, stdout=subprocess.PIPE);
-    return proc.wait() == 0;
+                            stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+    return proc.wait() == 0
+
 
 def gitUpdateRef(ref, newvalue):
     subprocess.check_call(["git", "update-ref", ref, newvalue])
 
+
 def gitDeleteRef(ref):
     subprocess.check_call(["git", "update-ref", "-d", ref])
 
+
 _gitConfig = {}
 
+
 def gitConfig(key, typeSpecifier=None):
     if key not in _gitConfig:
-        cmd = [ "git", "config" ]
+        cmd = ["git", "config"]
         if typeSpecifier:
-            cmd += [ typeSpecifier ]
-        cmd += [ key ]
+            cmd += [typeSpecifier]
+        cmd += [key]
         s = read_pipe(cmd, ignore_error=True)
         _gitConfig[key] = s.strip()
     return _gitConfig[key]
 
+
 def gitConfigBool(key):
     """Return a bool, using git config --bool.  It is True only if the
        variable is set to true, and False if set to false or not present
-       in the config."""
+       in the config.
+       """
 
     if key not in _gitConfig:
         _gitConfig[key] = gitConfig(key, '--bool') == "true"
     return _gitConfig[key]
 
+
 def gitConfigInt(key):
     if key not in _gitConfig:
-        cmd = [ "git", "config", "--int", key ]
+        cmd = ["git", "config", "--int", key]
         s = read_pipe(cmd, ignore_error=True)
         v = s.strip()
         try:
@@ -885,6 +1058,7 @@ def gitConfigInt(key):
             _gitConfig[key] = None
     return _gitConfig[key]
 
+
 def gitConfigList(key):
     if key not in _gitConfig:
         s = read_pipe(["git", "config", "--get-all", key], ignore_error=True)
@@ -893,12 +1067,43 @@ def gitConfigList(key):
             _gitConfig[key] = []
     return _gitConfig[key]
 
+def fullP4Ref(incomingRef, importIntoRemotes=True):
+    """Standardize a given provided p4 ref value to a full git ref:
+         refs/foo/bar/branch -> use it exactly
+         p4/branch -> prepend refs/remotes/ or refs/heads/
+         branch -> prepend refs/remotes/p4/ or refs/heads/p4/"""
+    if incomingRef.startswith("refs/"):
+        return incomingRef
+    if importIntoRemotes:
+        prepend = "refs/remotes/"
+    else:
+        prepend = "refs/heads/"
+    if not incomingRef.startswith("p4/"):
+        prepend += "p4/"
+    return prepend + incomingRef
+
+def shortP4Ref(incomingRef, importIntoRemotes=True):
+    """Standardize to a "short ref" if possible:
+         refs/foo/bar/branch -> ignore
+         refs/remotes/p4/branch or refs/heads/p4/branch -> shorten
+         p4/branch -> shorten"""
+    if importIntoRemotes:
+        longprefix = "refs/remotes/p4/"
+    else:
+        longprefix = "refs/heads/p4/"
+    if incomingRef.startswith(longprefix):
+        return incomingRef[len(longprefix):]
+    if incomingRef.startswith("p4/"):
+        return incomingRef[3:]
+    return incomingRef
+
 def p4BranchesInGit(branchesAreInRemotes=True):
     """Find all the branches whose names start with "p4/", looking
        in remotes or heads as specified by the argument.  Return
        a dictionary of { branch: revision } for each one found.
        The branch names are the short names, without any
-       "p4/" prefix."""
+       "p4/" prefix.
+       """
 
     branches = {}
 
@@ -925,10 +1130,11 @@ def p4BranchesInGit(branchesAreInRemotes=True):
 
     return branches
 
+
 def branch_exists(branch):
     """Make sure that the given ref name really exists."""
 
-    cmd = [ "git", "rev-parse", "--symbolic", "--verify", branch ]
+    cmd = ["git", "rev-parse", "--symbolic", "--verify", branch]
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, _ = p.communicate()
     out = decode_text_stream(out)
@@ -937,7 +1143,8 @@ def branch_exists(branch):
     # expect exactly one line of output: the branch name
     return out.rstrip() == branch
 
-def findUpstreamBranchPoint(head = "HEAD"):
+
+def findUpstreamBranchPoint(head="HEAD"):
     branches = p4BranchesInGit()
     # map from depot-path to branch name
     branchByDepotPath = {}
@@ -946,8 +1153,12 @@ def findUpstreamBranchPoint(head = "HEAD"):
         log = extractLogMessageFromGitCommit(tip)
         settings = extractSettingsGitLog(log)
         if "depot-paths" in settings:
+            git_branch = "remotes/p4/" + branch
             paths = ",".join(settings["depot-paths"])
-            branchByDepotPath[paths] = "remotes/p4/" + branch
+            branchByDepotPath[paths] = git_branch
+            if "change" in settings:
+                paths = paths + ";" + settings["change"]
+                branchByDepotPath[paths] = git_branch
 
     settings = None
     parent = 0
@@ -957,6 +1168,10 @@ def findUpstreamBranchPoint(head = "HEAD"):
         settings = extractSettingsGitLog(log)
         if "depot-paths" in settings:
             paths = ",".join(settings["depot-paths"])
+            if "change" in settings:
+                expaths = paths + ";" + settings["change"]
+                if expaths in branchByDepotPath:
+                    return [branchByDepotPath[expaths], settings]
             if paths in branchByDepotPath:
                 return [branchByDepotPath[paths], settings]
 
@@ -964,7 +1179,8 @@ def findUpstreamBranchPoint(head = "HEAD"):
 
     return ["", settings]
 
-def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent=True):
+
+def createOrUpdateBranchesFromOrigin(localRefPrefix="refs/remotes/p4/", silent=True):
     if not silent:
         print("Creating/updating branch(es) in %s based on origin branch(es)"
                % localRefPrefix)
@@ -981,8 +1197,7 @@ def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent
         originHead = line
 
         original = extractSettingsGitLog(extractLogMessageFromGitCommit(originHead))
-        if ('depot-paths' not in original
-            or 'change' not in original):
+        if 'depot-paths' not in original or 'change' not in original:
             continue
 
         update = False
@@ -1011,8 +1226,9 @@ def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent
         if update:
             system(["git", "update-ref", remoteHead, originHead])
 
+
 def originP4BranchesExist():
-        return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
+    return gitBranchExists("origin") or gitBranchExists("origin/p4") or gitBranchExists("origin/p4/master")
 
 
 def p4ParseNumericChangeRange(parts):
@@ -1024,12 +1240,14 @@ def p4ParseNumericChangeRange(parts):
 
     return (changeStart, changeEnd)
 
+
 def chooseBlockSize(blockSize):
     if blockSize:
         return blockSize
     else:
         return defaultBlockSize
 
+
 def p4ChangesForPaths(depotPaths, changeRange, requestedBlockSize):
     assert depotPaths
 
@@ -1047,7 +1265,7 @@ def p4ChangesForPaths(depotPaths, changeRange, requestedBlockSize):
         parts = changeRange.split(',')
         assert len(parts) == 2
         try:
-            (changeStart, changeEnd) = p4ParseNumericChangeRange(parts)
+            changeStart, changeEnd = p4ParseNumericChangeRange(parts)
             block_size = chooseBlockSize(requestedBlockSize)
         except ValueError:
             changeStart = parts[0][1:]
@@ -1085,7 +1303,8 @@ def p4ChangesForPaths(depotPaths, changeRange, requestedBlockSize):
             else:
                 block_size = max(2, block_size // 2)
 
-            if verbose: print("block size error, retrying with block size {0}".format(block_size))
+            if verbose:
+                print("block size error, retrying with block size {0}".format(block_size))
             continue
         except P4Exception as e:
             die('Error retrieving changes description ({0})'.format(e.p4ExitCode))
@@ -1107,21 +1326,25 @@ def p4ChangesForPaths(depotPaths, changeRange, requestedBlockSize):
     changes = sorted(changes)
     return changes
 
+
 def p4PathStartsWith(path, prefix):
-    # This method tries to remedy a potential mixed-case issue:
-    #
-    # If UserA adds  //depot/DirA/file1
-    # and UserB adds //depot/dira/file2
-    #
-    # we may or may not have a problem. If you have core.ignorecase=true,
-    # we treat DirA and dira as the same directory
+    """This method tries to remedy a potential mixed-case issue:
+
+       If UserA adds  //depot/DirA/file1
+       and UserB adds //depot/dira/file2
+
+       we may or may not have a problem. If you have core.ignorecase=true,
+       we treat DirA and dira as the same directory.
+       """
     if gitConfigBool("core.ignorecase"):
         return path.lower().startswith(prefix.lower())
     return path.startswith(prefix)
 
+
 def getClientSpec():
     """Look at the p4 client spec, create a View() object that contains
-       all the mappings, and return it."""
+       all the mappings, and return it.
+       """
 
     specList = p4CmdList(["client", "-o"])
     if len(specList) != 1:
@@ -1135,7 +1358,7 @@ def getClientSpec():
     client_name = entry["Client"]
 
     # just the keys that start with "View"
-    view_keys = [ k for k in entry.keys() if k.startswith("View") ]
+    view_keys = [k for k in entry.keys() if k.startswith("View")]
 
     # hold this new View
     view = View(client_name)
@@ -1149,6 +1372,7 @@ def getClientSpec():
 
     return view
 
+
 def getClientRoot():
     """Grab the client directory."""
 
@@ -1162,12 +1386,15 @@ def getClientRoot():
 
     return entry["Root"]
 
-#
-# P4 wildcards are not allowed in filenames.  P4 complains
-# if you simply add them, but you can force it with "-f", in
-# which case it translates them into %xx encoding internally.
-#
+
 def wildcard_decode(path):
+    """Decode P4 wildcards into %xx encoding
+
+       P4 wildcards are not allowed in filenames.  P4 complains if you simply
+       add them, but you can force it with "-f", in which case it translates
+       them into %xx encoding internally.
+       """
+
     # Search for and fix just these four characters.  Do % last so
     # that fixing it does not inadvertently create new %-escapes.
     # Cannot have * in a filename in windows; untested as to
@@ -1179,7 +1406,10 @@ def wildcard_decode(path):
                .replace("%25", "%")
     return path
 
+
 def wildcard_encode(path):
+    """Encode %xx coded wildcards into P4 coding."""
+
     # do % first to avoid double-encoding the %s introduced here
     path = path.replace("%", "%25") \
                .replace("*", "%2A") \
@@ -1187,10 +1417,12 @@ def wildcard_encode(path):
                .replace("@", "%40")
     return path
 
+
 def wildcard_present(path):
     m = re.search("[*#@%]", path)
     return m is not None
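
wildcard_encode()/wildcard_decode() above translate the four characters Perforce treats as wildcards (%, *, # and @) to and from their %xx escapes, encoding % first and decoding it last so escapes are never processed twice. A self-contained round-trip check, condensed from the helpers above (the '#' substitution sits in unshown context and is assumed here):

    # Condensed from the helpers above: p4 stores %, *, # and @ as %xx escapes.
    def wildcard_encode(path):
        # % first, so the escapes introduced below are not double-encoded
        return (path.replace("%", "%25").replace("*", "%2A")
                    .replace("#", "%23").replace("@", "%40"))

    def wildcard_decode(path):
        # % last, so fixing it does not create new %-escapes
        return (path.replace("%2A", "*").replace("%23", "#")
                    .replace("%40", "@").replace("%25", "%"))

    p = "release@2022#notes*draft%final.txt"
    assert wildcard_encode(p) == "release%402022%23notes%2Adraft%25final.txt"
    assert wildcard_decode(wildcard_encode(p)) == p
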
 
+
 class LargeFileSystem(object):
     """Base class for large file system support."""
 
@@ -1199,13 +1431,15 @@ class LargeFileSystem(object):
         self.writeToGitStream = writeToGitStream
 
     def generatePointer(self, cloneDestination, contentFile):
-        """Return the content of a pointer file that is stored in Git instead of
-           the actual content."""
+        """Return the content of a pointer file that is stored in Git instead
+           of the actual content.
+           """
         assert False, "Method 'generatePointer' required in " + self.__class__.__name__
 
     def pushFile(self, localLargeFile):
         """Push the actual content which is not stored in the Git repository to
-           a server."""
+           a server.
+           """
         assert False, "Method 'pushFile' required in " + self.__class__.__name__
 
     def hasLargeFileExtension(self, relPath):
@@ -1253,10 +1487,11 @@ class LargeFileSystem(object):
     def processContent(self, git_mode, relPath, contents):
         """Processes the content of git fast import. This method decides if a
            file is stored in the large file system and handles all necessary
-           steps."""
+           steps.
+           """
         if self.exceedsLargeFileThreshold(relPath, contents) or self.hasLargeFileExtension(relPath):
             contentTempFile = self.generateTempFile(contents)
-            (pointer_git_mode, contents, localLargeFile) = self.generatePointer(contentTempFile)
+            pointer_git_mode, contents, localLargeFile = self.generatePointer(contentTempFile)
             if pointer_git_mode:
                 git_mode = pointer_git_mode
             if localLargeFile:
@@ -1272,12 +1507,14 @@ class LargeFileSystem(object):
                     sys.stderr.write("%s moved to large file system (%s)\n" % (relPath, localLargeFile))
         return (git_mode, contents)
 
+
 class MockLFS(LargeFileSystem):
     """Mock large file system for testing."""
 
     def generatePointer(self, contentFile):
         """The pointer content is the original content prefixed with "pointer-".
-           The local filename of the large file storage is derived from the file content.
+           The local filename of the large file storage is derived from the
+           file content.
            """
         with open(contentFile, 'r') as f:
             content = next(f)
@@ -1287,17 +1524,19 @@ class MockLFS(LargeFileSystem):
             return (gitMode, pointerContents, localLargeFile)
 
     def pushFile(self, localLargeFile):
-        """The remote filename of the large file storage is the same as the local
-           one but in a different directory.
+        """The remote filename of the large file storage is the same as the
+           local one but in a different directory.
            """
         remotePath = os.path.join(os.path.dirname(localLargeFile), '..', 'remote')
         if not os.path.exists(remotePath):
             os.makedirs(remotePath)
         shutil.copyfile(localLargeFile, os.path.join(remotePath, os.path.basename(localLargeFile)))
 
+
 class GitLFS(LargeFileSystem):
     """Git LFS as backend for the git-p4 large file system.
-       See https://git-lfs.github.com/ for details."""
+       See https://git-lfs.github.com/ for details.
+       """
 
     def __init__(self, *args):
         LargeFileSystem.__init__(self, *args)
@@ -1383,9 +1622,10 @@ class GitLFS(LargeFileSystem):
         else:
             return LargeFileSystem.processContent(self, git_mode, relPath, contents)
 
+
 class Command:
-    delete_actions = ( "delete", "move/delete", "purge" )
-    add_actions = ( "add", "branch", "move/add" )
+    delete_actions = ("delete", "move/delete", "purge")
+    add_actions = ("add", "branch", "move/add")
 
     def __init__(self):
         self.usage = "usage: %prog [options]"
@@ -1398,6 +1638,7 @@ class Command:
             setattr(self, attr, value)
         return getattr(self, attr)
 
+
 class P4UserMap:
     def __init__(self):
         self.userMapFromPerforceServer = False
@@ -1415,7 +1656,7 @@ class P4UserMap:
         die("Could not find your p4 user id")
 
     def p4UserIsMe(self, p4User):
-        # return True if the given p4 user is actually me
+        """Return True if the given p4 user is actually me."""
         me = self.p4UserId()
         if not p4User or p4User != me:
             return False
@@ -1435,7 +1676,13 @@ class P4UserMap:
         for output in p4CmdList(["users"]):
             if "User" not in output:
                 continue
-            self.users[output["User"]] = output["FullName"] + " <" + output["Email"] + ">"
+            # "FullName" is bytes. "Email" on the other hand might be bytes
+            # or unicode string depending on whether we are running under
+            # python2 or python3. To support
+            # git-p4.metadataDecodingStrategy=fallback, self.users dict values
+            # are always bytes, ready to be written to git.
+            emailbytes = metadata_stream_to_writable_bytes(output["Email"])
+            self.users[output["User"]] = output["FullName"] + b" <" + emailbytes + b">"
             self.emails[output["Email"]] = output["User"]
 
         mapUserConfigRegex = re.compile(r"^\s*(\S+)\s*=\s*(.+)\s*<(\S+)>\s*$", re.VERBOSE)
@@ -1445,29 +1692,32 @@ class P4UserMap:
                 user = mapUser[0][0]
                 fullname = mapUser[0][1]
                 email = mapUser[0][2]
-                self.users[user] = fullname + " <" + email + ">"
+                fulluser = fullname + " <" + email + ">"
+                self.users[user] = metadata_stream_to_writable_bytes(fulluser)
                 self.emails[email] = user
 
-        s = ''
+        s = b''
         for (key, val) in self.users.items():
-            s += "%s\t%s\n" % (key.expandtabs(1), val.expandtabs(1))
+            keybytes = metadata_stream_to_writable_bytes(key)
+            s += b"%s\t%s\n" % (keybytes.expandtabs(1), val.expandtabs(1))
 
-        open(self.getUserCacheFilename(), 'w').write(s)
+        open(self.getUserCacheFilename(), 'wb').write(s)
         self.userMapFromPerforceServer = True
 
     def loadUserMapFromCache(self):
         self.users = {}
         self.userMapFromPerforceServer = False
         try:
-            cache = open(self.getUserCacheFilename(), 'r')
+            cache = open(self.getUserCacheFilename(), 'rb')
             lines = cache.readlines()
             cache.close()
             for line in lines:
-                entry = line.strip().split("\t")
-                self.users[entry[0]] = entry[1]
+                entry = line.strip().split(b"\t")
+                self.users[entry[0].decode('utf_8')] = entry[1]
         except IOError:
             self.getUserMapFromPerforceServer()
 
+
 class P4Submit(Command, P4UserMap):
 
     conflict_behavior_choices = ("ask", "skip", "quit")
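
With git-p4.metadataDecodingStrategy=fallback in play, the user map values above are now bytes end to end, so the cache file is written and read in binary: one "user<TAB>Full Name <email>" record per line. A small in-memory round trip of that format, with io.BytesIO standing in for the on-disk cache (illustrative sketch only):

    import io

    # One "user<TAB>FullName <email>" record per line, everything kept as bytes.
    users = {"alice": b"Alice Example <alice@example.com>"}

    cache = io.BytesIO()                 # stands in for the user cache file
    for key, val in users.items():
        keybytes = key.encode('utf_8')
        cache.write(b"%s\t%s\n" % (keybytes.expandtabs(1), val.expandtabs(1)))

    cache.seek(0)
    loaded = {}
    for line in cache.readlines():
        user, fullname = line.strip().split(b"\t")
        loaded[user.decode('utf_8')] = fullname
    assert loaded == users
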
@@ -1560,20 +1810,20 @@ class P4Submit(Command, P4UserMap):
             die("You have files opened with perforce! Close them before starting the sync.")
 
     def separate_jobs_from_description(self, message):
-        """Extract and return a possible Jobs field in the commit
-           message.  It goes into a separate section in the p4 change
-           specification.
+        """Extract and return a possible Jobs field in the commit message.  It
+           goes into a separate section in the p4 change specification.
 
-           A jobs line starts with "Jobs:" and looks like a new field
-           in a form.  Values are white-space separated on the same
-           line or on following lines that start with a tab.
+           A jobs line starts with "Jobs:" and looks like a new field in a
+           form.  Values are white-space separated on the same line or on
+           following lines that start with a tab.
 
-           This does not parse and extract the full git commit message
-           like a p4 form.  It just sees the Jobs: line as a marker
-           to pass everything from then on directly into the p4 form,
-           but outside the description section.
+           This does not parse and extract the full git commit message like a
+           p4 form.  It just sees the Jobs: line as a marker to pass everything
+           from then on directly into the p4 form, but outside the description
+           section.
 
-           Return a tuple (stripped log message, jobs string)."""
+           Return a tuple (stripped log message, jobs string).
+           """
 
         m = re.search(r'^Jobs:', message, re.MULTILINE)
         if m is None:
@@ -1584,9 +1834,10 @@ class P4Submit(Command, P4UserMap):
         return (stripped_message, jobtext)
 
     def prepareLogMessage(self, template, message, jobs):
-        """Edits the template returned from "p4 change -o" to insert
-           the message in the Description field, and the jobs text in
-           the Jobs field."""
+        """Edits the template returned from "p4 change -o" to insert the
+           message in the Description field, and the jobs text in the Jobs
+           field.
+           """
         result = ""
 
         inDescriptionSection = False
@@ -1616,8 +1867,10 @@ class P4Submit(Command, P4UserMap):
         return result
 
     def patchRCSKeywords(self, file, regexp):
-        # Attempt to zap the RCS keywords in a p4 controlled file matching the given regex
-        (handle, outFileName) = tempfile.mkstemp(dir='.')
+        """Attempt to zap the RCS keywords in a p4 controlled file matching the
+           given regex.
+           """
+        handle, outFileName = tempfile.mkstemp(dir='.')
         try:
             with os.fdopen(handle, "wb") as outFile, open(file, "rb") as inFile:
                 for line in inFile.readlines():
@@ -1633,21 +1886,23 @@ class P4Submit(Command, P4UserMap):
 
         print("Patched up RCS keywords in %s" % file)
 
-    def p4UserForCommit(self,id):
-        # Return the tuple (perforce user,git email) for a given git commit id
+    def p4UserForCommit(self, id):
+        """Return the tuple (perforce user,git email) for a given git commit
+           id.
+           """
         self.getUserMapFromPerforceServer()
         gitEmail = read_pipe(["git", "log", "--max-count=1",
                               "--format=%ae", id])
         gitEmail = gitEmail.strip()
         if gitEmail not in self.emails:
-            return (None,gitEmail)
+            return (None, gitEmail)
         else:
-            return (self.emails[gitEmail],gitEmail)
+            return (self.emails[gitEmail], gitEmail)
 
-    def checkValidP4Users(self,commits):
-        # check if any git authors cannot be mapped to p4 users
+    def checkValidP4Users(self, commits):
+        """Check if any git authors cannot be mapped to p4 users."""
         for id in commits:
-            (user,email) = self.p4UserForCommit(id)
+            user, email = self.p4UserForCommit(id)
             if not user:
                 msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
                 if gitConfigBool("git-p4.allowMissingP4Users"):
@@ -1656,10 +1911,12 @@ class P4Submit(Command, P4UserMap):
                     die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
 
     def lastP4Changelist(self):
-        # Get back the last changelist number submitted in this client spec. This
-        # then gets used to patch up the username in the change. If the same
-        # client spec is being used by multiple processes then this might go
-        # wrong.
+        """Get back the last changelist number submitted in this client spec.
+
+           This then gets used to patch up the username in the change. If the
+           same client spec is being used by multiple processes then this might
+           go wrong.
+           """
         results = p4CmdList(["client", "-o"])        # find the current client
         client = None
         for r in results:
@@ -1675,14 +1932,16 @@ class P4Submit(Command, P4UserMap):
         die("Could not get changelist number for last submit - cannot patch up user details")
 
     def modifyChangelistUser(self, changelist, newUser):
-        # fixup the user field of a changelist after it has been submitted.
+        """Fixup the user field of a changelist after it has been submitted."""
         changes = p4CmdList(["change", "-o", changelist])
         if len(changes) != 1:
             die("Bad output from p4 change modifying %s to user %s" %
                 (changelist, newUser))
 
         c = changes[0]
-        if c['User'] == newUser: return   # nothing to do
+        if c['User'] == newUser:
+            # Nothing to do
+            return
         c['User'] = newUser
         # p4 does not understand format version 3 and above
         input = marshal.dumps(c, 2)
@@ -1698,8 +1957,9 @@ class P4Submit(Command, P4UserMap):
         die("Could not modify user field of changelist %s to %s" % (changelist, newUser))
 
     def canChangeChangelists(self):
-        # check to see if we have p4 admin or super-user permissions, either of
-        # which are required to modify changelists.
+        """Check to see if we have p4 admin or super-user permissions, either
+           of which are required to modify changelists.
+           """
         results = p4CmdList(["protects", self.depotPath])
         for r in results:
             if 'perm' in r:
@@ -1711,13 +1971,15 @@ class P4Submit(Command, P4UserMap):
 
     def prepareSubmitTemplate(self, changelist=None):
         """Run "p4 change -o" to grab a change specification template.
+
            This does not use "p4 -G", as it is nice to keep the submission
            template in original order, since a human might edit it.
 
            Remove lines in the Files section that show changes to files
-           outside the depot path we're committing into."""
+           outside the depot path we're committing into.
+           """
 
-        [upstream, settings] = findUpstreamBranchPoint()
+        upstream, settings = findUpstreamBranchPoint()
 
         template = """\
 # A Perforce Change Specification.
@@ -1778,8 +2040,10 @@ class P4Submit(Command, P4UserMap):
         return template
 
     def edit_template(self, template_file):
-        """Invoke the editor to let the user change the submission
-           message.  Return true if okay to continue with the submit."""
+        """Invoke the editor to let the user change the submission message.
+
+           Return true if okay to continue with the submit.
+           """
 
         # if configured to skip the editing part, just submit
         if gitConfigBool("git-p4.skipSubmitEdit"):
@@ -1838,7 +2102,9 @@ class P4Submit(Command, P4UserMap):
                     for line in f.readlines():
                         newdiff += "+" + line
                 except UnicodeDecodeError:
-                    pass # Found non-text data and skip, since diff description should only include text
+                    # Found non-text data and skip, since diff description
+                    # should only include text
+                    pass
                 f.close()
 
         return (diff + newdiff).replace('\r\n', '\n')
@@ -1849,7 +2115,7 @@ class P4Submit(Command, P4UserMap):
         print("Applying", read_pipe(["git", "show", "-s",
                                      "--format=format:%h %s", id]))
 
-        (p4User, gitEmail) = self.p4UserForCommit(id)
+        p4User, gitEmail = self.p4UserForCommit(id)
 
         diff = read_pipe_lines(
             ["git", "diff-tree", "-r"] + self.diffOpts + ["{}^".format(id), id])
@@ -1956,8 +2222,8 @@ class P4Submit(Command, P4UserMap):
                     if regexp:
                         # this file is a possibility...look for RCS keywords.
                         for line in read_pipe_lines(
-                            ["git", "diff", "%s^..%s" % (id, id), file],
-                            raw=True):
+                                ["git", "diff", "%s^..%s" % (id, id), file],
+                                raw=True):
                             if regexp.search(line):
                                 if verbose:
                                     print("got keyword match on %s in %s in %s" % (regex.pattern, line, file))
@@ -2014,13 +2280,13 @@ class P4Submit(Command, P4UserMap):
         #
         logMessage = extractLogMessageFromGitCommit(id)
         logMessage = logMessage.strip()
-        (logMessage, jobs) = self.separate_jobs_from_description(logMessage)
+        logMessage, jobs = self.separate_jobs_from_description(logMessage)
 
         template = self.prepareSubmitTemplate(update_shelve)
         submitTemplate = self.prepareLogMessage(template, logMessage, jobs)
 
         if self.preserveUser:
-           submitTemplate += "\n######## Actual user %s, modified after commit\n" % p4User
+            submitTemplate += "\n######## Actual user %s, modified after commit\n" % p4User
 
         if self.checkAuthorship and not self.p4UserIsMe(p4User):
             submitTemplate += "######## git author %s does not match your p4 account.\n" % gitEmail
@@ -2032,7 +2298,7 @@ class P4Submit(Command, P4UserMap):
             submitTemplate += separatorLine
             submitTemplate += self.get_diff_description(editedFiles, filesToAdd, symlinks)
 
-        (handle, fileName) = tempfile.mkstemp()
+        handle, fileName = tempfile.mkstemp()
         tmpFile = os.fdopen(handle, "w+b")
         if self.isWindows:
             submitTemplate = submitTemplate.replace("\n", "\r\n")
@@ -2059,13 +2325,13 @@ class P4Submit(Command, P4UserMap):
                 print("  " + self.clientPath)
                 print("")
                 print("To submit, use \"p4 submit\" to write a new description,")
-                print("or \"p4 submit -i <%s\" to use the one prepared by" \
+                print("or \"p4 submit -i <%s\" to use the one prepared by"
                       " \"git p4\"." % fileName)
                 print("You can delete the file \"%s\" when finished." % fileName)
 
                 if self.preserveUser and p4User and not self.p4UserIsMe(p4User):
-                    print("To preserve change ownership by user %s, you must\n" \
-                          "do \"p4 change -f <change>\" after submitting and\n" \
+                    print("To preserve change ownership by user %s, you must\n"
+                          "do \"p4 change -f <change>\" after submitting and\n"
                           "edit the User field.")
                 if pureRenameCopy:
                     print("After submitting, renamed files must be re-synced.")
@@ -2133,9 +2399,9 @@ class P4Submit(Command, P4UserMap):
             # Revert changes if we skip this patch
             if not submitted or self.shelve:
                 if self.shelve:
-                    print ("Reverting shelved files.")
+                    print("Reverting shelved files.")
                 else:
-                    print ("Submission cancelled, undoing p4 changes.")
+                    print("Submission cancelled, undoing p4 changes.")
                 sys.stdout.flush()
                 for f in editedFiles | filesToDelete:
                     p4_revert(f)
@@ -2147,9 +2413,11 @@ class P4Submit(Command, P4UserMap):
                 os.remove(fileName)
         return submitted
 
-    # Export git tags as p4 labels. Create a p4 label and then tag
-    # with that.
     def exportGitTags(self, gitTags):
+        """Export git tags as p4 labels. Create a p4 label and then tag with
+           that.
+           """
+
         validLabelRegexp = gitConfig("git-p4.labelExportRegexp")
         if len(validLabelRegexp) == 0:
             validLabelRegexp = defaultLabelRegexp
@@ -2195,7 +2463,7 @@ class P4Submit(Command, P4UserMap):
             # Create the label - use the same view as the client spec we are using
             clientSpec = getClientSpec()
 
-            labelTemplate  = "Label: %s\n" % name
+            labelTemplate = "Label: %s\n" % name
             labelTemplate += "Description:\n"
             for b in body:
                 labelTemplate += "\t" + b + "\n"
@@ -2206,7 +2474,7 @@ class P4Submit(Command, P4UserMap):
             if self.dry_run:
                 print("Would create p4 label %s for tag" % name)
             elif self.prepare_p4_only:
-                print("Not creating p4 label %s for tag due to option" \
+                print("Not creating p4 label %s for tag due to option"
                       " --prepare-p4-only" % name)
             else:
                 p4_write_pipe(["label", "-i"], labelTemplate)
@@ -2237,7 +2505,7 @@ class P4Submit(Command, P4UserMap):
             if len(allowSubmit) > 0 and not self.master in allowSubmit.split(","):
                 die("%s is not in git-p4.allowSubmit" % self.master)
 
-        [upstream, settings] = findUpstreamBranchPoint()
+        upstream, settings = findUpstreamBranchPoint()
         self.depotPath = settings['depot-paths'][0]
         if len(self.origin) == 0:
             self.origin = upstream
@@ -2371,13 +2639,13 @@ class P4Submit(Command, P4UserMap):
         if not self.no_verify:
             try:
                 if not run_git_hook("p4-pre-submit"):
-                    print("\nThe p4-pre-submit hook failed, aborting the submit.\n\nYou can skip " \
-                        "this pre-submission check by adding\nthe command line option '--no-verify', " \
+                    print("\nThe p4-pre-submit hook failed, aborting the submit.\n\nYou can skip "
+                        "this pre-submission check by adding\nthe command line option '--no-verify', "
                         "however,\nthis will also skip the p4-changelist hook as well.")
                     sys.exit(1)
             except Exception as e:
-                print("\nThe p4-pre-submit hook failed, aborting the submit.\n\nThe hook failed "\
-                    "with the error '{0}'".format(e.message) )
+                print("\nThe p4-pre-submit hook failed, aborting the submit.\n\nThe hook failed "
+                    "with the error '{0}'".format(e.message))
                 sys.exit(1)
 
         #
@@ -2399,7 +2667,7 @@ class P4Submit(Command, P4UserMap):
                 applied.append(commit)
                 if self.prepare_p4_only:
                     if i < last:
-                        print("Processing only the first commit due to option" \
+                        print("Processing only the first commit due to option"
                                 " --prepare-p4-only")
                     break
             else:
@@ -2469,13 +2737,15 @@ class P4Submit(Command, P4UserMap):
 
         # exit with error unless everything applied perfectly
         if len(commits) != len(applied):
-                sys.exit(1)
+            sys.exit(1)
 
         return True
 
+
 class View(object):
-    """Represent a p4 view ("p4 help views"), and map files in a
-       repo according to the view."""
+    """Represent a p4 view ("p4 help views"), and map files in a repo according
+       to the view.
+       """
 
     def __init__(self, client_name):
         self.mappings = []
@@ -2484,9 +2754,10 @@ class View(object):
         self.client_spec_path_cache = {}
 
     def append(self, view_line):
-        """Parse a view line, splitting it into depot and client
-           sides.  Append to self.mappings, preserving order.  This
-           is only needed for tag creation."""
+        """Parse a view line, splitting it into depot and client sides.  Append
+           to self.mappings, preserving order.  This is only needed for tag
+           creation.
+           """
 
         # Split the view line into exactly two words.  P4 enforces
         # structure on these lines that simplifies this quite a bit.
@@ -2535,7 +2806,7 @@ class View(object):
         return clientFile[len(self.client_prefix):]
 
     def update_client_spec_path_cache(self, files):
-        """ Caching file paths by "p4 where" batch query """
+        """Caching file paths by "p4 where" batch query."""
 
         # List depot file paths exclude that already cached
         fileArgs = [f['path'] for f in files if decode_path(f['path']) not in self.client_spec_path_cache]
@@ -2567,9 +2838,11 @@ class View(object):
                 self.client_spec_path_cache[depotFile] = b''
 
     def map_in_client(self, depot_path):
-        """Return the relative location in the client where this
-           depot file should live.  Returns "" if the file should
-           not be mapped in the client."""
+        """Return the relative location in the client where this depot file
+           should live.
+
+           Returns "" if the file should not be mapped in the client.
+           """
 
         if gitConfigBool("core.ignorecase"):
             depot_path = depot_path.lower()
@@ -2577,14 +2850,16 @@ class View(object):
         if depot_path in self.client_spec_path_cache:
             return self.client_spec_path_cache[depot_path]
 
-        die( "Error: %s is not found in client spec path" % depot_path )
+        die("Error: %s is not found in client spec path" % depot_path)
         return ""
 
+
 def cloneExcludeCallback(option, opt_str, value, parser):
     # prepend "/" because the first "/" was consumed as part of the option itself.
     # ("-//depot/A/..." becomes "/depot/A/..." after option parsing)
     parser.values.cloneExclude += ["/" + re.sub(r"\.\.\.$", "", value)]
 
+
 class P4Sync(Command, P4UserMap):
 
     def __init__(self):
@@ -2665,8 +2940,8 @@ class P4Sync(Command, P4UserMap):
         self.tz = "%+03d%02d" % (- time.timezone / 3600, ((- time.timezone % 3600) / 60))
         self.labels = {}
 
-    # Force a checkpoint in fast-import and wait for it to finish
     def checkpoint(self):
+        """Force a checkpoint in fast-import and wait for it to finish."""
         self.gitStream.write("checkpoint\n\n")
         self.gitStream.write("progress checkpoint\n\n")
         self.gitStream.flush()
@@ -2687,11 +2962,11 @@ class P4Sync(Command, P4UserMap):
                 return True
         return False
 
-    def extractFilesFromCommit(self, commit, shelved=False, shelved_cl = 0):
+    def extractFilesFromCommit(self, commit, shelved=False, shelved_cl=0):
         files = []
         fnum = 0
         while "depotFile%s" % fnum in commit:
-            path =  commit["depotFile%s" % fnum]
+            path = commit["depotFile%s" % fnum]
             found = self.isPathWanted(decode_path(path))
             if not found:
                 fnum = fnum + 1
@@ -2718,10 +2993,10 @@ class P4Sync(Command, P4UserMap):
         return jobs
 
     def stripRepoPath(self, path, prefixes):
-        """When streaming files, this is called to map a p4 depot path
-           to where it should go in git.  The prefixes are either
-           self.depotPaths, or self.branchPrefixes in the case of
-           branch detection."""
+        """When streaming files, this is called to map a p4 depot path to where
+           it should go in git.  The prefixes are either self.depotPaths, or
+           self.branchPrefixes in the case of branch detection.
+           """
 
         if self.useClientSpec:
             # branch detection moves files up a level (the branch name)
@@ -2750,8 +3025,9 @@ class P4Sync(Command, P4UserMap):
         return path
 
     def splitFilesIntoBranches(self, commit):
-        """Look at each depotFile in the commit to figure out to what
-           branch it belongs."""
+        """Look at each depotFile in the commit to figure out to what branch it
+           belongs.
+           """
 
         if self.clientSpecDirs:
             files = self.extractFilesFromCommit(commit)
@@ -2811,10 +3087,12 @@ class P4Sync(Command, P4UserMap):
                 print('Path with non-ASCII characters detected. Used %s to encode: %s ' % (encoding, path))
         return path
 
-    # output one file from the P4 stream
-    # - helper for streamP4Files
-
     def streamOneP4File(self, file, contents):
+        """Output one file from the P4 stream.
+
+           This is a helper for streamP4Files().
+           """
+
         file_path = file['depotFile']
         relPath = self.stripRepoPath(decode_path(file_path), self.branchPrefixes)
 
@@ -2822,12 +3100,13 @@ class P4Sync(Command, P4UserMap):
             if 'fileSize' in self.stream_file:
                 size = int(self.stream_file['fileSize'])
             else:
-                size = 0 # deleted files don't get a fileSize apparently
+                # Deleted files don't get a fileSize apparently
+                size = 0
             sys.stdout.write('\r%s --> %s (%s)\n' % (
                 file_path, relPath, format_size_human_readable(size)))
             sys.stdout.flush()
 
-        (type_base, type_mods) = split_p4_type(file["type"])
+        type_base, type_mods = split_p4_type(file["type"])
 
         git_mode = "100644"
         if "x" in type_mods:
@@ -2870,7 +3149,7 @@ class P4Sync(Command, P4UserMap):
             else:
                 if p4_version_string().find('/NT') >= 0:
                     text = text.replace(b'\r\n', b'\n')
-                contents = [ text ]
+                contents = [text]
 
         if type_base == "apple":
             # Apple filetype files will be streamed as a concatenation of
@@ -2885,6 +3164,16 @@ class P4Sync(Command, P4UserMap):
             print("\nIgnoring apple filetype file %s" % file['depotFile'])
             return
 
+        if type_base == "utf8":
+            # The type utf8 explicitly means utf8 *with BOM*. These are
+            # streamed just like regular text files, however, without
+            # the BOM in the stream.
+            # Therefore, to accurately import these files into git, we
+            # need to explicitly re-add the BOM before writing.
+            # 'contents' is a set of bytes in this case, so create the
+            # BOM prefix as a b'' literal.
+            contents = [b'\xef\xbb\xbf' + contents[0]] + contents[1:]
+
         # Note that we do not try to de-mangle keywords on utf16 files,
         # even though in theory somebody may want that.
         regexp = p4_keywords_regexp_for_type(type_base, type_mods)
@@ -2892,7 +3181,7 @@ class P4Sync(Command, P4UserMap):
             contents = [regexp.sub(br'$\1$', c) for c in contents]
 
         if self.largeFileSystem:
-            (git_mode, contents) = self.largeFileSystem.processContent(git_mode, relPath, contents)
+            git_mode, contents = self.largeFileSystem.processContent(git_mode, relPath, contents)
 
         self.writeToGitStream(git_mode, relPath, contents)
 
@@ -2906,8 +3195,8 @@ class P4Sync(Command, P4UserMap):
         if self.largeFileSystem and self.largeFileSystem.isLargeFile(relPath):
             self.largeFileSystem.removeLargeFile(relPath)
 
-    # handle another chunk of streaming data
     def streamP4FilesCb(self, marshalled):
+        """Handle another chunk of streaming data."""
 
         # catch p4 errors and complain
         err = None
@@ -2958,9 +3247,9 @@ class P4Sync(Command, P4UserMap):
                 self.stream_file[k] = marshalled[k]
 
         if (verbose and
-            'streamContentSize' in self.stream_file and
-            'fileSize' in self.stream_file and
-            'depotFile' in self.stream_file):
+                'streamContentSize' in self.stream_file and
+                'fileSize' in self.stream_file and
+                'depotFile' in self.stream_file):
             size = int(self.stream_file["fileSize"])
             if size > 0:
                 progress = 100*self.stream_file['streamContentSize']/size
@@ -2971,8 +3260,9 @@ class P4Sync(Command, P4UserMap):
 
         self.stream_have_file_info = True
 
-    # Stream directly from "p4 files" into "git fast-import"
     def streamP4Files(self, files):
+        """Stream directly from "p4 files" into "git fast-import."""
+
         filesForCommit = []
         filesToRead = []
         filesToDelete = []
@@ -3020,12 +3310,14 @@ class P4Sync(Command, P4UserMap):
         if userid in self.users:
             return self.users[userid]
         else:
-            return "%s <a@b>" % userid
+            userid_bytes = metadata_stream_to_writable_bytes(userid)
+            return b"%s <a@b>" % userid_bytes
 
     def streamTag(self, gitStream, labelName, labelDetails, commit, epoch):
-        """ Stream a p4 tag.
-        commit is either a git commit, or a fast-import mark, ":<p4commit>"
-        """
+        """Stream a p4 tag.
+
+           Commit is either a git commit, or a fast-import mark, ":<p4commit>".
+           """
 
         if verbose:
             print("writing tag %s for commit %s" % (labelName, commit))
@@ -3043,11 +3335,12 @@ class P4Sync(Command, P4UserMap):
             email = self.make_email(owner)
         else:
             email = self.make_email(self.p4UserId())
-        tagger = "%s %s %s" % (email, epoch, self.tz)
 
-        gitStream.write("tagger %s\n" % tagger)
+        gitStream.write("tagger ")
+        gitStream.write(email)
+        gitStream.write(" %s %s\n" % (epoch, self.tz))
 
-        print("labelDetails=",labelDetails)
+        print("labelDetails=", labelDetails)
         if 'Description' in labelDetails:
             description = labelDetails['Description']
         else:
@@ -3075,15 +3368,18 @@ class P4Sync(Command, P4UserMap):
         return hasPrefix
 
     def findShadowedFiles(self, files, change):
-        # Perforce allows you commit files and directories with the same name,
-        # so you could have files //depot/foo and //depot/foo/bar both checked
-        # in.  A p4 sync of a repository in this state fails.  Deleting one of
-        # the files recovers the repository.
-        #
-        # Git will not allow the broken state to exist and only the most recent
-        # of the conflicting names is left in the repository.  When one of the
-        # conflicting files is deleted we need to re-add the other one to make
-        # sure the git repository recovers in the same way as perforce.
+        """Perforce allows you commit files and directories with the same name,
+           so you could have files //depot/foo and //depot/foo/bar both checked
+           in.  A p4 sync of a repository in this state fails.  Deleting one of
+           the files recovers the repository.
+
+           Git will not allow the broken state to exist and only the most
+           recent of the conflicting names is left in the repository.  When one
+           of the conflicting files is deleted we need to re-add the other one
+           to make sure the git repository recovers in the same way as
+           perforce.
+           """
+
         deleted = [f for f in files if f['action'] in self.delete_actions]
         to_check = set()
         for f in deleted:
@@ -3110,7 +3406,7 @@ class P4Sync(Command, P4UserMap):
                     'rev': record['headRev'],
                     'type': record['headType']})
 
-    def commit(self, details, files, branch, parent = "", allow_empty=False):
+    def commit(self, details, files, branch, parent="", allow_empty=False):
         epoch = details["time"]
         author = details["user"]
         jobs = self.extractJobsFromCommit(details)
@@ -3138,12 +3434,12 @@ class P4Sync(Command, P4UserMap):
         self.gitStream.write("commit %s\n" % branch)
         self.gitStream.write("mark :%s\n" % details["change"])
         self.committedChanges.add(int(details["change"]))
-        committer = ""
         if author not in self.users:
             self.getUserMapFromPerforceServer()
-        committer = "%s %s %s" % (self.make_email(author), epoch, self.tz)
 
-        self.gitStream.write("committer %s\n" % committer)
+        self.gitStream.write("committer ")
+        self.gitStream.write(self.make_email(author))
+        self.gitStream.write(" %s %s\n" % (epoch, self.tz))
 
         self.gitStream.write("data <<EOT\n")
         self.gitStream.write(details["desc"])
@@ -3200,8 +3496,11 @@ class P4Sync(Command, P4UserMap):
                     print("Tag %s does not match with change %s: file count is different."
                            % (labelDetails["label"], change))
 
-    # Build a dictionary of changelists and labels, for "detect-labels" option.
     def getLabels(self):
+        """Build a dictionary of changelists and labels, for "detect-labels"
+           option.
+           """
+
         self.labels = {}
 
         l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
@@ -3227,11 +3526,12 @@ class P4Sync(Command, P4UserMap):
         if self.verbose:
             print("Label changes: %s" % self.labels.keys())
 
-    # Import p4 labels as git tags. A direct mapping does not
-    # exist, so assume that if all the files are at the same revision
-    # then we can use that, or it's something more complicated we should
-    # just ignore.
     def importP4Labels(self, stream, p4Labels):
+        """Import p4 labels as git tags. A direct mapping does not exist, so
+           assume that if all the files are at the same revision then we can
+           use that, or it's something more complicated we should just ignore.
+           """
+
         if verbose:
             print("import p4 labels: " + ' '.join(p4Labels))
 
@@ -3246,7 +3546,7 @@ class P4Sync(Command, P4UserMap):
 
             if not m.match(name):
                 if verbose:
-                    print("label %s does not match regexp %s" % (name,validLabelRegexp))
+                    print("label %s does not match regexp %s" % (name, validLabelRegexp))
                 continue
 
             if name in ignoredP4Labels:
@@ -3302,7 +3602,7 @@ class P4Sync(Command, P4UserMap):
                 p = p[:-1]
             p = p[p.strip().rfind("/") + 1:]
             if not p.endswith("/"):
-               p += "/"
+                p += "/"
             return p
 
     def getBranchMapping(self):
@@ -3322,7 +3622,7 @@ class P4Sync(Command, P4UserMap):
                     continue
                 source = paths[0]
                 destination = paths[1]
-                ## HACK
+                # HACK
                 if p4PathStartsWith(source, self.depotPaths[0]) and p4PathStartsWith(destination, self.depotPaths[0]):
                     source = source[len(self.depotPaths[0]):-4]
                     destination = destination[len(self.depotPaths[0]):-4]
@@ -3351,7 +3651,7 @@ class P4Sync(Command, P4UserMap):
         configBranches = gitConfigList("git-p4.branchList")
         for branch in configBranches:
             if branch:
-                (source, destination) = branch.split(":")
+                source, destination = branch.split(":")
                 self.knownBranches[destination] = source
 
                 lostAndFoundBranches.discard(destination)
@@ -3359,7 +3659,6 @@ class P4Sync(Command, P4UserMap):
                 if source not in self.knownBranches:
                     lostAndFoundBranches.add(source)
 
-
         for branch in lostAndFoundBranches:
             self.knownBranches[branch] = branch
 
@@ -3431,27 +3730,22 @@ class P4Sync(Command, P4UserMap):
     def importNewBranch(self, branch, maxChange):
         # make fast-import flush all changes to disk and update the refs using the checkpoint
         # command so that we can try to find the branch parent in the git history
-        self.gitStream.write("checkpoint\n\n");
-        self.gitStream.flush();
+        self.gitStream.write("checkpoint\n\n")
+        self.gitStream.flush()
         branchPrefix = self.depotPaths[0] + branch + "/"
         range = "@1,%s" % maxChange
-        #print "prefix" + branchPrefix
         changes = p4ChangesForPaths([branchPrefix], range, self.changes_block_size)
         if len(changes) <= 0:
             return False
         firstChange = changes[0]
-        #print "first change in branch: %s" % firstChange
         sourceBranch = self.knownBranches[branch]
         sourceDepotPath = self.depotPaths[0] + sourceBranch
         sourceRef = self.gitRefForBranch(sourceBranch)
-        #print "source " + sourceBranch
 
         branchParentChange = int(p4Cmd(["changes", "-m", "1", "%s...@1,%s" % (sourceDepotPath, firstChange)])["change"])
-        #print "branch parent: %s" % branchParentChange
         gitParent = self.gitCommitByP4Change(sourceRef, branchParentChange)
         if len(gitParent) > 0:
             self.initialParents[self.gitRefForBranch(branch)] = gitParent
-            #print "parent git commit: %s" % gitParent
 
         self.importChanges(changes)
         return True
@@ -3486,9 +3780,9 @@ class P4Sync(Command, P4UserMap):
                 if self.detectBranches:
                     branches = self.splitFilesIntoBranches(description)
                     for branch in branches.keys():
-                        ## HACK  --hwn
+                        # HACK  --hwn
                         branchPrefix = self.depotPaths[0] + branch + "/"
-                        self.branchPrefixes = [ branchPrefix ]
+                        self.branchPrefixes = [branchPrefix]
 
                         parent = ""
 
@@ -3508,12 +3802,12 @@ class P4Sync(Command, P4UserMap):
                                 fullBranch = self.projectName + branch
                                 if fullBranch not in self.p4BranchesInGit:
                                     if not self.silent:
-                                        print("\n    Importing new branch %s" % fullBranch);
+                                        print("\n    Importing new branch %s" % fullBranch)
                                     if self.importNewBranch(branch, change - 1):
                                         parent = ""
                                         self.p4BranchesInGit.append(fullBranch)
                                     if not self.silent:
-                                        print("\n    Resuming with change %s" % change);
+                                        print("\n    Resuming with change %s" % change)
 
                                 if self.verbose:
                                     print("parent determined through known branches: %s" % parent)
@@ -3572,7 +3866,7 @@ class P4Sync(Command, P4UserMap):
         newestRevision = 0
 
         fileCnt = 0
-        fileArgs = ["%s...%s" % (p,revision) for p in self.depotPaths]
+        fileArgs = ["%s...%s" % (p, revision) for p in self.depotPaths]
 
         for info in p4CmdList(["files"] + fileArgs):
 
@@ -3581,24 +3875,21 @@ class P4Sync(Command, P4UserMap):
                                  % info['data'])
                 if info['data'].find("must refer to client") >= 0:
                     sys.stderr.write("This particular p4 error is misleading.\n")
-                    sys.stderr.write("Perhaps the depot path was misspelled.\n");
+                    sys.stderr.write("Perhaps the depot path was misspelled.\n")
                     sys.stderr.write("Depot path:  %s\n" % " ".join(self.depotPaths))
                 sys.exit(1)
             if 'p4ExitCode' in info:
                 sys.stderr.write("p4 exitcode: %s\n" % info['p4ExitCode'])
                 sys.exit(1)
 
-
             change = int(info["change"])
             if change > newestRevision:
                 newestRevision = change
 
             if info["action"] in self.delete_actions:
-                # don't increase the file cnt, otherwise details["depotFile123"] will have gaps!
-                #fileCnt = fileCnt + 1
                 continue
 
-            for prop in ["depotFile", "rev", "action", "type" ]:
+            for prop in ["depotFile", "rev", "action", "type"]:
                 details["%s%s" % (prop, fileCnt)] = info[prop]
 
             fileCnt = fileCnt + 1
@@ -3618,7 +3909,6 @@ class P4Sync(Command, P4UserMap):
             print("IO error details: {}".format(err))
             print(self.gitError.read())
 
-
     def importRevisions(self, args, branch_arg_given):
         changes = []
 
@@ -3690,7 +3980,7 @@ class P4Sync(Command, P4UserMap):
         self.importProcess = subprocess.Popen(["git", "fast-import"],
                                               stdin=subprocess.PIPE,
                                               stdout=subprocess.PIPE,
-                                              stderr=subprocess.PIPE);
+                                              stderr=subprocess.PIPE)
         self.gitOutput = self.importProcess.stdout
         self.gitStream = self.importProcess.stdin
         self.gitError = self.importProcess.stderr
@@ -3750,9 +4040,13 @@ class P4Sync(Command, P4UserMap):
 
             # restrict to just this one, disabling detect-branches
             if branch_arg_given:
-                short = self.branch.split("/")[-1]
+                short = shortP4Ref(self.branch, self.importIntoRemotes)
                 if short in branches:
-                    self.p4BranchesInGit = [ short ]
+                    self.p4BranchesInGit = [short]
+                elif self.branch.startswith('refs/') and \
+                        branchExists(self.branch) and \
+                        '[git-p4:' in extractLogMessageFromGitCommit(self.branch):
+                    self.p4BranchesInGit = [self.branch]
             else:
                 self.p4BranchesInGit = branches.keys()
 
@@ -3769,13 +4063,13 @@ class P4Sync(Command, P4UserMap):
 
             p4Change = 0
             for branch in self.p4BranchesInGit:
-                logMsg =  extractLogMessageFromGitCommit(self.refPrefix + branch)
+                logMsg = extractLogMessageFromGitCommit(fullP4Ref(branch,
+                                                        self.importIntoRemotes))
 
                 settings = extractSettingsGitLog(logMsg)
 
                 self.readOptions(settings)
-                if ('depot-paths' in settings
-                    and 'change' in settings):
+                if 'depot-paths' in settings and 'change' in settings:
                     change = int(settings['change']) + 1
                     p4Change = max(p4Change, change)
 
@@ -3792,7 +4086,7 @@ class P4Sync(Command, P4UserMap):
                                     i = i - 1
                                     break
 
-                            paths.append ("/".join(cur_list[:i + 1]))
+                            paths.append("/".join(cur_list[:i + 1]))
 
                         self.previousDepotPaths = paths
 
@@ -3802,18 +4096,7 @@ class P4Sync(Command, P4UserMap):
                 if not self.silent and not self.detectBranches:
                     print("Performing incremental import into %s git branch" % self.branch)
 
-        # accept multiple ref name abbreviations:
-        #    refs/foo/bar/branch -> use it exactly
-        #    p4/branch -> prepend refs/remotes/ or refs/heads/
-        #    branch -> prepend refs/remotes/p4/ or refs/heads/p4/
-        if not self.branch.startswith("refs/"):
-            if self.importIntoRemotes:
-                prepend = "refs/remotes/"
-            else:
-                prepend = "refs/heads/"
-            if not self.branch.startswith("p4/"):
-                prepend += "p4/"
-            self.branch = prepend + self.branch
+        self.branch = fullP4Ref(self.branch, self.importIntoRemotes)
 
         if len(args) == 0 and self.depotPaths:
             if not self.silent:
@@ -3821,8 +4104,8 @@ class P4Sync(Command, P4UserMap):
         else:
             if self.depotPaths and self.depotPaths != args:
                 print("previous import used depot path %s and now %s was specified. "
-                       "This doesn't work!" % (' '.join (self.depotPaths),
-                                               ' '.join (args)))
+                       "This doesn't work!" % (' '.join(self.depotPaths),
+                                               ' '.join(args)))
                 sys.exit(1)
 
             self.depotPaths = sorted(args)
@@ -3862,7 +4145,7 @@ class P4Sync(Command, P4UserMap):
                 if len(self.changesFile) == 0:
                     revision = "#head"
 
-            p = re.sub ("\.\.\.$", "", p)
+            p = re.sub("\.\.\.$", "", p)
             if not p.endswith("/"):
                 p += "/"
 
@@ -3876,10 +4159,10 @@ class P4Sync(Command, P4UserMap):
         self.loadUserMapFromCache()
         self.labels = {}
         if self.detectLabels:
-            self.getLabels();
+            self.getLabels()
 
         if self.detectBranches:
-            ## FIXME - what's a P4 projectName ?
+            # FIXME - what's a P4 projectName ?
             self.projectName = self.guessProjectName()
 
             if self.hasOrigin:
@@ -3892,7 +4175,7 @@ class P4Sync(Command, P4UserMap):
             for b in self.p4BranchesInGit:
                 if b != "master":
 
-                    ## FIXME
+                    # FIXME
                     b = b[len(self.projectName):]
                 self.createdBranches.add(b)
 
@@ -3942,6 +4225,7 @@ class P4Sync(Command, P4UserMap):
 
         return True
 
+
 class P4Rebase(Command):
     def __init__(self):
         Command.__init__(self)
@@ -3961,11 +4245,11 @@ class P4Rebase(Command):
 
     def rebase(self):
         if os.system("git update-index --refresh") != 0:
-            die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up to date or stash away all your changes with git stash.");
+            die("Some files in your working directory are modified and different than what is in your index. You can use git update-index <filename> to bring the index up to date or stash away all your changes with git stash.")
         if len(read_pipe(["git", "diff-index", "HEAD", "--"])) > 0:
-            die("You have uncommitted changes. Please commit them before rebasing or stash them away with git stash.");
+            die("You have uncommitted changes. Please commit them before rebasing or stash them away with git stash.")
 
-        [upstream, settings] = findUpstreamBranchPoint()
+        upstream, settings = findUpstreamBranchPoint()
         if len(upstream) == 0:
             die("Cannot find upstream branchpoint for rebase")
 
@@ -3979,6 +4263,7 @@ class P4Rebase(Command):
             "HEAD", "--"])
         return True
 
+
 class P4Clone(P4Sync):
     def __init__(self):
         P4Sync.__init__(self)
@@ -3996,7 +4281,7 @@ class P4Clone(P4Sync):
         self.cloneBare = False
 
     def defaultDestination(self, args):
-        ## TODO: use common prefix of args?
+        # TODO: use common prefix of args?
         depotPath = args[0]
         depotDir = re.sub("(@[^@]*)$", "", depotPath)
         depotDir = re.sub("(#[^#]*)$", "", depotDir)
@@ -4032,7 +4317,7 @@ class P4Clone(P4Sync):
             os.makedirs(self.cloneDestination)
         chdir(self.cloneDestination)
 
-        init_cmd = [ "git", "init" ]
+        init_cmd = ["git", "init"]
         if self.cloneBare:
             init_cmd.append("--bare")
         retcode = subprocess.call(init_cmd)
@@ -4044,19 +4329,28 @@ class P4Clone(P4Sync):
 
         # create a master branch and check out a work tree
         if gitBranchExists(self.branch):
-            system([ "git", "branch", currentGitBranch(), self.branch ])
+            system(["git", "branch", currentGitBranch(), self.branch])
             if not self.cloneBare:
-                system([ "git", "checkout", "-f" ])
+                system(["git", "checkout", "-f"])
         else:
-            print('Not checking out any branch, use ' \
+            print('Not checking out any branch, use '
                   '"git checkout -q -b master <branch>"')
 
         # auto-set this variable if invoked with --use-client-spec
         if self.useClientSpec_from_options:
             system(["git", "config", "--bool", "git-p4.useclientspec", "true"])
 
+        # persist any git-p4 encoding-handling config options passed in for clone:
+        if gitConfig('git-p4.metadataDecodingStrategy'):
+            system(["git", "config", "git-p4.metadataDecodingStrategy", gitConfig('git-p4.metadataDecodingStrategy')])
+        if gitConfig('git-p4.metadataFallbackEncoding'):
+            system(["git", "config", "git-p4.metadataFallbackEncoding", gitConfig('git-p4.metadataFallbackEncoding')])
+        if gitConfig('git-p4.pathEncoding'):
+            system(["git", "config", "git-p4.pathEncoding", gitConfig('git-p4.pathEncoding')])
+
         return True
 
+
 class P4Unshelve(Command):
     def __init__(self):
         Command.__init__(self)
@@ -4073,14 +4367,14 @@ class P4Unshelve(Command):
         self.destbranch = "refs/remotes/p4-unshelved"
 
     def renameBranch(self, branch_name):
-        """ Rename the existing branch to branch_name.N
-        """
+        """Rename the existing branch to branch_name.N ."""
 
         found = True
-        for i in range(0,1000):
+        for i in range(0, 1000):
             backup_branch_name = "{0}.{1}".format(branch_name, i)
             if not gitBranchExists(backup_branch_name):
-                gitUpdateRef(backup_branch_name, branch_name) # copy ref to backup
+                # Copy ref to backup
+                gitUpdateRef(backup_branch_name, branch_name)
                 gitDeleteRef(branch_name)
                 found = True
                 print("renamed old unshelve branch to {0}".format(backup_branch_name))
@@ -4090,9 +4384,9 @@ class P4Unshelve(Command):
             sys.exit("gave up trying to rename existing branch {0}".format(sync.branch))
 
     def findLastP4Revision(self, starting_point):
-        """ Look back from starting_point for the first commit created by git-p4
-            to find the P4 commit we are based on, and the depot-paths.
-        """
+        """Look back from starting_point for the first commit created by git-p4
+           to find the P4 commit we are based on, and the depot-paths.
+           """
 
         for parent in (range(65535)):
             log = extractLogMessageFromGitCommit("{0}~{1}".format(starting_point, parent))
@@ -4103,8 +4397,9 @@ class P4Unshelve(Command):
         sys.exit("could not find git-p4 commits in {0}".format(self.origin))
 
     def createShelveParent(self, change, branch_name, sync, origin):
-        """ Create a commit matching the parent of the shelved changelist 'change'
-        """
+        """Create a commit matching the parent of the shelved changelist
+           'change'.
+           """
         parent_description = p4_describe(change, shelved=True)
         parent_description['desc'] = 'parent for shelved changelist {}\n'.format(change)
         files = sync.extractFilesFromCommit(parent_description, shelved=False, shelved_cl=change)
@@ -4172,10 +4467,11 @@ class P4Unshelve(Command):
 
         return True
 
+
 class P4Branches(Command):
     def __init__(self):
         Command.__init__(self)
-        self.options = [ ]
+        self.options = []
         self.description = ("Shows the git branches that hold imports and their "
                             + "corresponding perforce depot paths")
         self.verbose = False
@@ -4197,6 +4493,7 @@ class P4Branches(Command):
             print("%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"]))
         return True
 
+
 class HelpFormatter(optparse.IndentedHelpFormatter):
     def __init__(self):
         optparse.IndentedHelpFormatter.__init__(self)
@@ -4207,6 +4504,7 @@ class HelpFormatter(optparse.IndentedHelpFormatter):
         else:
             return ""
 
+
 def printUsage(commands):
     print("usage: %s <command> [options]" % sys.argv[0])
     print("")
@@ -4215,16 +4513,18 @@ def printUsage(commands):
     print("Try %s <command> --help for command specific help." % sys.argv[0])
     print("")
 
+
 commands = {
-    "submit" : P4Submit,
-    "commit" : P4Submit,
-    "sync" : P4Sync,
-    "rebase" : P4Rebase,
-    "clone" : P4Clone,
-    "branches" : P4Branches,
-    "unshelve" : P4Unshelve,
+    "submit": P4Submit,
+    "commit": P4Submit,
+    "sync": P4Sync,
+    "rebase": P4Rebase,
+    "clone": P4Clone,
+    "branches": P4Branches,
+    "unshelve": P4Unshelve,
 }
 
+
 def main():
     if len(sys.argv[1:]) == 0:
         printUsage(commands.keys())
@@ -4251,11 +4551,11 @@ def main():
 
     parser = optparse.OptionParser(cmd.usage.replace("%prog", "%prog " + cmdName),
                                    options,
-                                   description = cmd.description,
-                                   formatter = HelpFormatter())
+                                   description=cmd.description,
+                                   formatter=HelpFormatter())
 
     try:
-        (cmd, args) = parser.parse_args(sys.argv[2:], cmd);
+        cmd, args = parser.parse_args(sys.argv[2:], cmd)
     except:
         parser.print_help()
         raise
@@ -4263,7 +4563,7 @@ def main():
     global verbose
     verbose = cmd.verbose
     if cmd.needsGit:
-        if cmd.gitdir == None:
+        if cmd.gitdir is None:
             cmd.gitdir = os.path.abspath(".git")
             if not isValidGitDir(cmd.gitdir):
                 # "rev-parse --git-dir" without arguments will try $PWD/.git
@@ -4271,7 +4571,7 @@ def main():
                 if os.path.exists(cmd.gitdir):
                     cdup = read_pipe(["git", "rev-parse", "--show-cdup"]).strip()
                     if len(cdup) > 0:
-                        chdir(cdup);
+                        chdir(cdup)
 
         if not isValidGitDir(cmd.gitdir):
             if isValidGitDir(cmd.gitdir + "/.git"):
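
The most substantive functional change in the git-p4.py portion above is the new handling of Perforce files stored with base type utf8: their byte order mark is stripped when the content is streamed, so it has to be put back before the bytes reach git fast-import. The standalone Python sketch below illustrates that idea outside of git-p4; the helper name readd_utf8_bom and the small self-test are invented for this illustration and are not part of the patch.

UTF8_BOM = b'\xef\xbb\xbf'

def readd_utf8_bom(type_base, contents):
    """Return contents with a UTF-8 BOM prepended to the first chunk when
    the p4 base type is "utf8"; other types pass through unchanged."""
    if type_base == "utf8" and contents:
        return [UTF8_BOM + contents[0]] + contents[1:]
    return contents

if __name__ == "__main__":
    chunks = [b'first line\n', b'second line\n']
    fixed = readd_utf8_bom("utf8", chunks)
    assert b"".join(fixed).startswith(UTF8_BOM)
    # A "text" typed file is left alone.
    assert readd_utf8_bom("text", chunks) == chunks
    print("BOM restored for utf8 content, text content untouched")

Prepending the BOM only to the first chunk is enough because the chunks are concatenated in order when the blob is written.
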
diff --git a/git.c b/git.c
index 3d8e48cf555a5d1d56abd4d093145e6a9f77db94..5ff4f3e25b73eab27a40ea988b1e826da1462e62 100644
--- a/git.c
+++ b/git.c
@@ -25,7 +25,7 @@ struct cmd_struct {
 };
 
 const char git_usage_string[] =
-       N_("git [--version] [--help] [-C <path>] [-c <name>=<value>]\n"
+       N_("git [-v | --version] [-h | --help] [-C <path>] [-c <name>=<value>]\n"
           "           [--exec-path[=<path>]] [--html-path] [--man-path] [--info-path]\n"
           "           [-p | --paginate | -P | --no-pager] [--no-replace-objects] [--bare]\n"
           "           [--git-dir=<path>] [--work-tree=<path>] [--namespace=<name>]\n"
@@ -146,7 +146,8 @@ static int handle_options(const char ***argv, int *argc, int *envchanged)
                 * commands can be written with "--" prepended
                 * to make them look like flags.
                 */
-               if (!strcmp(cmd, "--help") || !strcmp(cmd, "--version"))
+               if (!strcmp(cmd, "--help") || !strcmp(cmd, "-h") ||
+                   !strcmp(cmd, "--version") || !strcmp(cmd, "-v"))
                        break;
 
                /*
@@ -893,8 +894,10 @@ int cmd_main(int argc, const char **argv)
        argc--;
        handle_options(&argv, &argc, NULL);
        if (argc > 0) {
-               /* translate --help and --version into commands */
-               skip_prefix(argv[0], "--", &argv[0]);
+               if (!strcmp("--version", argv[0]) || !strcmp("-v", argv[0]))
+                       argv[0] = "version";
+               else if (!strcmp("--help", argv[0]) || !strcmp("-h", argv[0]))
+                       argv[0] = "help";
        } else {
                /* The user didn't specify a command; give them help */
                commit_pager_choice();
diff --git a/gitk-git/gitk b/gitk-git/gitk
index 23d9dd1fe0d024c4af344c35faf01365ec8a6c58..0ae7d685904b85f97a69b243e30f010671d050e3 100755
@@ -2955,9 +2955,9 @@ proc savestuff {w} {
 proc resizeclistpanes {win w} {
     global oldwidth oldsash use_ttk
     if {[info exists oldwidth($win)]} {
-       if {[info exists oldsash($win)]} {
-           set s0 [lindex $oldsash($win) 0]
-           set s1 [lindex $oldsash($win) 1]
+        if {[info exists oldsash($win)]} {
+            set s0 [lindex $oldsash($win) 0]
+            set s1 [lindex $oldsash($win) 1]
         } elseif {$use_ttk} {
             set s0 [$win sashpos 0]
             set s1 [$win sashpos 1]
@@ -2991,8 +2991,10 @@ proc resizeclistpanes {win w} {
         } else {
             $win sash place 0 $sash0 [lindex $s0 1]
             $win sash place 1 $sash1 [lindex $s1 1]
+            set sash0 [list $sash0 [lindex $s0 1]]
+            set sash1 [list $sash1 [lindex $s1 1]]
         }
-       set oldsash($win) [list $sash0 $sash1]
+        set oldsash($win) [list $sash0 $sash1]
     }
     set oldwidth($win) $w
 }
@@ -3000,8 +3002,8 @@ proc resizeclistpanes {win w} {
 proc resizecdetpanes {win w} {
     global oldwidth oldsash use_ttk
     if {[info exists oldwidth($win)]} {
-       if {[info exists oldsash($win)]} {
-           set s0 $oldsash($win)
+        if {[info exists oldsash($win)]} {
+            set s0 $oldsash($win)
         } elseif {$use_ttk} {
             set s0 [$win sashpos 0]
         } else {
@@ -3023,8 +3025,9 @@ proc resizecdetpanes {win w} {
             $win sashpos 0 $sash0
         } else {
             $win sash place 0 $sash0 [lindex $s0 1]
+            set sash0 [list $sash0 [lindex $s0 1]]
         }
-       set oldsash($win) $sash0
+        set oldsash($win) $sash0
     }
     set oldwidth($win) $w
 }
diff --git a/http-fetch.c b/http-fetch.c
index 58b394cd47f3f068177c622e927532497126e14c..31bc5c7767ce865bb3fb1a89f311f7d2b03c7e6d 100644
@@ -55,7 +55,7 @@ static void fetch_single_packfile(struct object_id *packfile_hash,
        http_init(NULL, url, 0);
 
        preq = new_direct_http_pack_request(packfile_hash->hash, xstrdup(url));
-       if (preq == NULL)
+       if (!preq)
                die("couldn't create http pack request");
        preq->slot->results = &results;
        preq->index_pack_args = index_pack_args;
diff --git a/http-push.c b/http-push.c
index f0c044dcf7661a37b2a03f59e11f9e0bcefba0ea..7dafb1331a65eb14f5ac1b2bdb595c6ae1dc7606 100644
@@ -253,7 +253,7 @@ static void start_fetch_loose(struct transfer_request *request)
        struct http_object_request *obj_req;
 
        obj_req = new_http_object_request(repo->url, &request->obj->oid);
-       if (obj_req == NULL) {
+       if (!obj_req) {
                request->state = ABORTED;
                return;
        }
@@ -318,7 +318,7 @@ static void start_fetch_packed(struct transfer_request *request)
        fprintf(stderr, " which contains %s\n", oid_to_hex(&request->obj->oid));
 
        preq = new_http_pack_request(target->hash, repo->url);
-       if (preq == NULL) {
+       if (!preq) {
                repo->can_update_info_refs = 0;
                return;
        }
@@ -520,7 +520,7 @@ static void finish_request(struct transfer_request *request)
        /* Keep locks active */
        check_locks();
 
-       if (request->headers != NULL)
+       if (request->headers)
                curl_slist_free_all(request->headers);
 
        /* URL is reused for MOVE after PUT and used during FETCH */
@@ -783,7 +783,7 @@ xml_start_tag(void *userData, const char *name, const char **atts)
        const char *c = strchr(name, ':');
        int old_namelen, new_len;
 
-       if (c == NULL)
+       if (!c)
                c = name;
        else
                c++;
@@ -811,7 +811,7 @@ xml_end_tag(void *userData, const char *name)
 
        ctx->userFunc(ctx, 1);
 
-       if (c == NULL)
+       if (!c)
                c = name;
        else
                c++;
@@ -1893,7 +1893,7 @@ int cmd_main(int argc, const char **argv)
 
                /* Lock remote branch ref */
                ref_lock = lock_remote(ref->name, LOCK_TIME);
-               if (ref_lock == NULL) {
+               if (!ref_lock) {
                        fprintf(stderr, "Unable to lock remote branch %s\n",
                                ref->name);
                        if (helper_status)
diff --git a/http-walker.c b/http-walker.c
index 910fae539b89e6aea2af299b61dd64ff645b3578..b8f0f98ae146999adf3770cc7338cbacbbbf8cb3 100644
@@ -59,7 +59,7 @@ static void start_object_request(struct walker *walker,
        struct http_object_request *req;
 
        req = new_http_object_request(obj_req->repo->base, &obj_req->oid);
-       if (req == NULL) {
+       if (!req) {
                obj_req->state = ABORTED;
                return;
        }
@@ -106,7 +106,7 @@ static void process_object_response(void *callback_data)
        /* Use alternates if necessary */
        if (missing_target(obj_req->req)) {
                fetch_alternates(walker, alt->base);
-               if (obj_req->repo->next != NULL) {
+               if (obj_req->repo->next) {
                        obj_req->repo =
                                obj_req->repo->next;
                        release_http_object_request(obj_req->req);
@@ -225,12 +225,12 @@ static void process_alternates_response(void *callback_data)
                                         alt_req->url->buf);
                        active_requests++;
                        slot->in_use = 1;
-                       if (slot->finished != NULL)
+                       if (slot->finished)
                                (*slot->finished) = 0;
                        if (!start_active_slot(slot)) {
                                cdata->got_alternates = -1;
                                slot->in_use = 0;
-                               if (slot->finished != NULL)
+                               if (slot->finished)
                                        (*slot->finished) = 1;
                        }
                        return;
@@ -443,7 +443,7 @@ static int http_fetch_pack(struct walker *walker, struct alt_base *repo, unsigne
        }
 
        preq = new_http_pack_request(target->hash, repo->base);
-       if (preq == NULL)
+       if (!preq)
                goto abort;
        preq->slot->results = &results;
 
@@ -489,11 +489,11 @@ static int fetch_object(struct walker *walker, unsigned char *hash)
                if (hasheq(obj_req->oid.hash, hash))
                        break;
        }
-       if (obj_req == NULL)
+       if (!obj_req)
                return error("Couldn't find request for %s in the queue", hex);
 
        if (has_object_file(&obj_req->oid)) {
-               if (obj_req->req != NULL)
+               if (obj_req->req)
                        abort_http_object_request(obj_req->req);
                abort_object_request(obj_req);
                return 0;
diff --git a/http.c b/http.c
index 229da4d14882d9c9855ab418ad64f30fe62e485a..b148468b267e8b80f40d75c2a0088d9362f427c9 100644
--- a/http.c
+++ b/http.c
@@ -197,11 +197,11 @@ static void finish_active_slot(struct active_request_slot *slot)
        closedown_active_slot(slot);
        curl_easy_getinfo(slot->curl, CURLINFO_HTTP_CODE, &slot->http_code);
 
-       if (slot->finished != NULL)
+       if (slot->finished)
                (*slot->finished) = 1;
 
        /* Store slot results so they can be read after the slot is reused */
-       if (slot->results != NULL) {
+       if (slot->results) {
                slot->results->curl_result = slot->curl_result;
                slot->results->http_code = slot->http_code;
                curl_easy_getinfo(slot->curl, CURLINFO_HTTPAUTH_AVAIL,
@@ -212,7 +212,7 @@ static void finish_active_slot(struct active_request_slot *slot)
        }
 
        /* Run callback if appropriate */
-       if (slot->callback_func != NULL)
+       if (slot->callback_func)
                slot->callback_func(slot->callback_data);
 }
 
@@ -234,7 +234,7 @@ static void process_curl_messages(void)
                        while (slot != NULL &&
                               slot->curl != curl_message->easy_handle)
                                slot = slot->next;
-                       if (slot != NULL) {
+                       if (slot) {
                                xmulti_remove_handle(slot);
                                slot->curl_result = curl_result;
                                finish_active_slot(slot);
@@ -838,16 +838,16 @@ static CURL *get_curl_handle(void)
                curl_easy_setopt(result, CURLOPT_SSL_CIPHER_LIST,
                                ssl_cipherlist);
 
-       if (ssl_cert != NULL)
+       if (ssl_cert)
                curl_easy_setopt(result, CURLOPT_SSLCERT, ssl_cert);
        if (has_cert_password())
                curl_easy_setopt(result, CURLOPT_KEYPASSWD, cert_auth.password);
-       if (ssl_key != NULL)
+       if (ssl_key)
                curl_easy_setopt(result, CURLOPT_SSLKEY, ssl_key);
-       if (ssl_capath != NULL)
+       if (ssl_capath)
                curl_easy_setopt(result, CURLOPT_CAPATH, ssl_capath);
 #ifdef GIT_CURL_HAVE_CURLOPT_PINNEDPUBLICKEY
-       if (ssl_pinnedkey != NULL)
+       if (ssl_pinnedkey)
                curl_easy_setopt(result, CURLOPT_PINNEDPUBLICKEY, ssl_pinnedkey);
 #endif
        if (http_ssl_backend && !strcmp("schannel", http_ssl_backend) &&
@@ -857,10 +857,10 @@ static CURL *get_curl_handle(void)
                curl_easy_setopt(result, CURLOPT_PROXY_CAINFO, NULL);
 #endif
        } else if (ssl_cainfo != NULL || http_proxy_ssl_ca_info != NULL) {
-               if (ssl_cainfo != NULL)
+               if (ssl_cainfo)
                        curl_easy_setopt(result, CURLOPT_CAINFO, ssl_cainfo);
 #ifdef GIT_CURL_HAVE_CURLOPT_PROXY_CAINFO
-               if (http_proxy_ssl_ca_info != NULL)
+               if (http_proxy_ssl_ca_info)
                        curl_easy_setopt(result, CURLOPT_PROXY_CAINFO, http_proxy_ssl_ca_info);
 #endif
        }
@@ -1050,7 +1050,7 @@ void http_init(struct remote *remote, const char *url, int proactive_auth)
 
        {
                char *http_max_requests = getenv("GIT_HTTP_MAX_REQUESTS");
-               if (http_max_requests != NULL)
+               if (http_max_requests)
                        max_requests = atoi(http_max_requests);
        }
 
@@ -1069,10 +1069,10 @@ void http_init(struct remote *remote, const char *url, int proactive_auth)
        set_from_env(&user_agent, "GIT_HTTP_USER_AGENT");
 
        low_speed_limit = getenv("GIT_HTTP_LOW_SPEED_LIMIT");
-       if (low_speed_limit != NULL)
+       if (low_speed_limit)
                curl_low_speed_limit = strtol(low_speed_limit, NULL, 10);
        low_speed_time = getenv("GIT_HTTP_LOW_SPEED_TIME");
-       if (low_speed_time != NULL)
+       if (low_speed_time)
                curl_low_speed_time = strtol(low_speed_time, NULL, 10);
 
        if (curl_ssl_verify == -1)
@@ -1109,7 +1109,7 @@ void http_cleanup(void)
 
        while (slot != NULL) {
                struct active_request_slot *next = slot->next;
-               if (slot->curl != NULL) {
+               if (slot->curl) {
                        xmulti_remove_handle(slot);
                        curl_easy_cleanup(slot->curl);
                }
@@ -1147,13 +1147,13 @@ void http_cleanup(void)
        free((void *)http_proxy_authmethod);
        http_proxy_authmethod = NULL;
 
-       if (cert_auth.password != NULL) {
+       if (cert_auth.password) {
                memset(cert_auth.password, 0, strlen(cert_auth.password));
                FREE_AND_NULL(cert_auth.password);
        }
        ssl_cert_password_required = 0;
 
-       if (proxy_cert_auth.password != NULL) {
+       if (proxy_cert_auth.password) {
                memset(proxy_cert_auth.password, 0, strlen(proxy_cert_auth.password));
                FREE_AND_NULL(proxy_cert_auth.password);
        }
@@ -1179,14 +1179,14 @@ struct active_request_slot *get_active_slot(void)
        while (slot != NULL && slot->in_use)
                slot = slot->next;
 
-       if (slot == NULL) {
+       if (!slot) {
                newslot = xmalloc(sizeof(*newslot));
                newslot->curl = NULL;
                newslot->in_use = 0;
                newslot->next = NULL;
 
                slot = active_queue_head;
-               if (slot == NULL) {
+               if (!slot) {
                        active_queue_head = newslot;
                } else {
                        while (slot->next != NULL)
@@ -1196,7 +1196,7 @@ struct active_request_slot *get_active_slot(void)
                slot = newslot;
        }
 
-       if (slot->curl == NULL) {
+       if (!slot->curl) {
                slot->curl = curl_easy_duphandle(curl_default);
                curl_session_count++;
        }
@@ -1768,7 +1768,7 @@ static int http_request(const char *url,
        slot = get_active_slot();
        curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
 
-       if (result == NULL) {
+       if (!result) {
                curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 1);
        } else {
                curl_easy_setopt(slot->curl, CURLOPT_NOBODY, 0);
@@ -2100,7 +2100,7 @@ cleanup:
 
 void release_http_pack_request(struct http_pack_request *preq)
 {
-       if (preq->packfile != NULL) {
+       if (preq->packfile) {
                fclose(preq->packfile);
                preq->packfile = NULL;
        }
@@ -2391,7 +2391,7 @@ abort:
 
 void process_http_object_request(struct http_object_request *freq)
 {
-       if (freq->slot == NULL)
+       if (!freq->slot)
                return;
        freq->curl_result = freq->slot->curl_result;
        freq->http_code = freq->slot->http_code;
@@ -2448,7 +2448,7 @@ void release_http_object_request(struct http_object_request *freq)
                freq->localfile = -1;
        }
        FREE_AND_NULL(freq->url);
-       if (freq->slot != NULL) {
+       if (freq->slot) {
                freq->slot->callback_func = NULL;
                freq->slot->callback_data = NULL;
                release_active_slot(freq->slot);
diff --git a/kwset.c b/kwset.c
index fc439e0667f137f3449635a37a32f8418d5041f0..08aadf03117c5069116bb7e7fa4aa5ed7a9b1fb0 100644
--- a/kwset.c
+++ b/kwset.c
@@ -477,7 +477,7 @@ kwsprep (kwset_t kws)
        next[i] = NULL;
       treenext(kwset->trie->links, next);
 
-      if ((trans = kwset->trans) != NULL)
+      if ((trans = kwset->trans))
        for (i = 0; i < NCHAR; ++i)
          kwset->next[i] = next[U(trans[i])];
       else
@@ -485,7 +485,7 @@ kwsprep (kwset_t kws)
     }
 
   /* Fix things up for any translation table. */
-  if ((trans = kwset->trans) != NULL)
+  if ((trans = kwset->trans))
     for (i = 0; i < NCHAR; ++i)
       kwset->delta[i] = delta[U(trans[i])];
   else
index a937cec59a6e8cd80f2272940ed0960a08d35125..14b83620191019e465291d13be023df794ffb645 100644 (file)
@@ -207,7 +207,7 @@ static enum ll_merge_result ll_ext_merge(const struct ll_merge_driver *fn,
        dict[4].placeholder = "P"; dict[4].value = path_sq.buf;
        dict[5].placeholder = NULL; dict[5].value = NULL;
 
-       if (fn->cmdline == NULL)
+       if (!fn->cmdline)
                die("custom merge driver %s lacks command line.", fn->name);
 
        result->ptr = NULL;
index 3a03e34c305c07533574177a7b5483d038f5290c..d0ac0a6327a18f5eeee6fc43f036c9a2618fd672 100644 (file)
@@ -88,7 +88,7 @@ static int match_ref_pattern(const char *refname,
                             const struct string_list_item *item)
 {
        int matched = 0;
-       if (item->util == NULL) {
+       if (!item->util) {
                if (!wildmatch(item->string, refname, 0))
                        matched = 1;
        } else {
index 02f6f9535783a486b09e2f22481b39dbc039c89b..9621ba62a394348a95f6e5e181aea286c6c37b56 100644 (file)
@@ -698,7 +698,7 @@ static int is_scissors_line(const char *line)
                        continue;
                }
                last_nonblank = c;
-               if (first_nonblank == NULL)
+               if (!first_nonblank)
                        first_nonblank = c;
                if (*c == '-') {
                        in_perforation = 1;
@@ -1094,7 +1094,7 @@ static void handle_body(struct mailinfo *mi, struct strbuf *line)
                         */
                        lines = strbuf_split(line, '\n');
                        for (it = lines; (sb = *it); it++) {
-                               if (*(it + 1) == NULL) /* The last line */
+                               if (!*(it + 1)) /* The last line */
                                        if (sb->buf[sb->len - 1] != '\n') {
                                                /* Partial line, save it for later. */
                                                strbuf_addbuf(&prev, sb);
index 7befdc5e4835d533b994f75ee42709937a5d046c..da2589b08229813963347115a001d8ce6f45ae32 100644 (file)
--- a/mailmap.c
+++ b/mailmap.c
@@ -77,7 +77,7 @@ static void add_mapping(struct string_list *map,
        struct mailmap_entry *me;
        struct string_list_item *item;
 
-       if (old_email == NULL) {
+       if (!old_email) {
                old_email = new_email;
                new_email = NULL;
        }
@@ -92,7 +92,7 @@ static void add_mapping(struct string_list *map,
                item->util = me;
        }
 
-       if (old_name == NULL) {
+       if (!old_name) {
                debug_mm("mailmap: adding (simple) entry for '%s'\n", old_email);
 
                /* Replace current name and new email for simple entry */
@@ -123,9 +123,9 @@ static char *parse_name_and_email(char *buffer, char **name,
        char *left, *right, *nstart, *nend;
        *name = *email = NULL;
 
-       if ((left = strchr(buffer, '<')) == NULL)
+       if (!(left = strchr(buffer, '<')))
                return NULL;
-       if ((right = strchr(left+1, '>')) == NULL)
+       if (!(right = strchr(left + 1, '>')))
                return NULL;
        if (!allow_empty_email && (left+1 == right))
                return NULL;
@@ -153,7 +153,7 @@ static void read_mailmap_line(struct string_list *map, char *buffer)
        if (buffer[0] == '#')
                return;
 
-       if ((name2 = parse_name_and_email(buffer, &name1, &email1, 0)) != NULL)
+       if ((name2 = parse_name_and_email(buffer, &name1, &email1, 0)))
                parse_name_and_email(name2, &name2, &email2, 1);
 
        if (email1)
@@ -320,7 +320,7 @@ int map_user(struct string_list *map,
                 (int)*emaillen, debug_str(*email));
 
        item = lookup_prefix(map, *email, *emaillen);
-       if (item != NULL) {
+       if (item) {
                me = (struct mailmap_entry *)item->util;
                if (me->namemap.nr) {
                        /*
@@ -334,7 +334,7 @@ int map_user(struct string_list *map,
                                item = subitem;
                }
        }
-       if (item != NULL) {
+       if (item) {
                struct mailmap_info *mi = (struct mailmap_info *)item->util;
                if (mi->name == NULL && mi->email == NULL) {
                        debug_mm("map_user:  -- (no simple mapping)\n");
index 8545354dafd0600e02eb1bb509d8f9bedd23d1c9..0d3f42592fb208739840a0cb37e0e205b22d1ab8 100644 (file)
@@ -2068,7 +2068,7 @@ static char *handle_path_level_conflicts(struct merge_options *opt,
         * to ensure that's the case.
         */
        c_info = strmap_get(collisions, new_path);
-       if (c_info == NULL)
+       if (!c_info)
                BUG("c_info is NULL");
 
        /*
@@ -4640,7 +4640,7 @@ static void merge_ort_internal(struct merge_options *opt,
        }
 
        merged_merge_bases = pop_commit(&merge_bases);
-       if (merged_merge_bases == NULL) {
+       if (!merged_merge_bases) {
                /* if there is no common ancestor, use an empty tree */
                struct tree *tree;
 
index 1ee6364e8b16b17295258667e70b77ea1f1fffa4..fd1bbde061fa4cf54b2d0c65c61d65e44c7cd8c1 100644 (file)
@@ -82,7 +82,7 @@ static struct dir_rename_entry *dir_rename_find_entry(struct hashmap *hashmap,
 {
        struct dir_rename_entry key;
 
-       if (dir == NULL)
+       if (!dir)
                return NULL;
        hashmap_entry_init(&key.ent, strhash(dir));
        key.dir = dir;
@@ -1990,14 +1990,14 @@ static void get_renamed_dir_portion(const char *old_path, const char *new_path,
         * renamed means the root directory can never be renamed -- because
         * the root directory always exists).
         */
-       if (end_of_old == NULL)
+       if (!end_of_old)
                return; /* Note: *old_dir and *new_dir are still NULL */
 
        /*
         * If new_path contains no directory (end_of_new is NULL), then we
         * have a rename of old_path's directory to the root directory.
         */
-       if (end_of_new == NULL) {
+       if (!end_of_new) {
                *old_dir = xstrndup(old_path, end_of_old - old_path);
                *new_dir = xstrdup("");
                return;
@@ -2116,7 +2116,7 @@ static char *handle_path_level_conflicts(struct merge_options *opt,
         * to ensure that's the case.
         */
        collision_ent = collision_find_entry(collisions, new_path);
-       if (collision_ent == NULL)
+       if (!collision_ent)
                BUG("collision_ent is NULL");
 
        /*
@@ -2996,7 +2996,7 @@ static void final_cleanup_rename(struct string_list *rename)
        const struct rename *re;
        int i;
 
-       if (rename == NULL)
+       if (!rename)
                return;
 
        for (i = 0; i < rename->nr; i++) {
@@ -3605,7 +3605,7 @@ static int merge_recursive_internal(struct merge_options *opt,
        }
 
        merged_merge_bases = pop_commit(&merge_bases);
-       if (merged_merge_bases == NULL) {
+       if (!merged_merge_bases) {
                /* if there is no common ancestor, use an empty tree */
                struct tree *tree;
 
index e2407b65b70d1e622979cdb2fe0e425ba6403ee1..eb32a7da956dc35ded8e8c129ccb7b23ab13d6a4 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" -wait -2 "$LOCAL" "$REMOTE" >/dev/null 2>&1
 }
 
+diff_cmd_help () {
+       echo "Use Araxis Merge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,6 +17,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Araxis Merge (requires a graphical session)"
+}
+
 translate_merge_tool_path() {
        echo compare
 }
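
The diff_cmd_help and merge_cmd_help hooks added to this scriptlet (and to the other backends below) provide one-line descriptions of each backend; a plausible place to see their effect is the tool listings, assuming a git built with this series:

    # List merge backends together with their one-line descriptions.
    git mergetool --tool-help

    # The diff-mode descriptions come from diff_cmd_help.
    git difftool --tool-help
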
index 26c19d46a5bdee5739b9a9425cc48fac3efd6bfb..2922667dddaa4ec6aa5407f7e98fd2cab23df053 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use Beyond Compare (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,6 +17,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Beyond Compare (requires a graphical session)"
+}
+
 translate_merge_tool_path() {
        if type bcomp >/dev/null 2>/dev/null
        then
index 9f60e8da6527cf28dcc5ad4a3e5f7f0ca9442cb4..610963d377d66cd402cbe39e1f12a05b61fc0a02 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use Code Compare (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,6 +17,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Code Compare (requires a graphical session)"
+}
+
 translate_merge_tool_path() {
        if merge_mode
        then
index ee6f374bceb8e14af5b424e64fbe9787006091e8..efae4c285ca9554d4c58ff6feb7e5ca9d2a3ff2b 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE" >/dev/null 2>&1
 }
 
+diff_cmd_help () {
+       echo "Use DeltaWalker (requires a graphical session)"
+}
+
 merge_cmd () {
        # Adding $(pwd)/ in front of $MERGED should not be necessary.
        # However without it, DeltaWalker (at least v1.9.8 on Windows)
@@ -16,6 +20,10 @@ merge_cmd () {
        fi >/dev/null 2>&1
 }
 
+merge_cmd_help () {
+       echo "Use DeltaWalker (requires a graphical session)"
+}
+
 translate_merge_tool_path () {
        echo DeltaWalker
 }
index 9b6355b98a71da2627b68c6543cea4f2515b8d54..9b5b62d1cae024e7bd6e944562ca0817c90a13b9 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE" >/dev/null 2>&1
 }
 
+diff_cmd_help () {
+       echo "Use DiffMerge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,6 +17,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use DiffMerge (requires a graphical session)"
+}
+
 exit_code_trustable () {
        true
 }
index 5a3ae8b5695d3141ff0310a706495ea52037b464..ebfaba517216d85429a539177128f20719f538c7 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE" | cat
 }
 
+diff_cmd_help () {
+       echo "Use Diffuse (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,3 +17,7 @@ merge_cmd () {
                        "$LOCAL" "$MERGED" "$REMOTE" | cat
        fi
 }
+
+merge_cmd_help () {
+       echo "Use Diffuse (requires a graphical session)"
+}
index 6c5101c4f729d49c544436e9262ca75e4ce6cddd..0d4d6098745e677f432a8c1affd793e9148f43e0 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" --default --mode=diff2 "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use ECMerge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -12,3 +16,7 @@ merge_cmd () {
                        --default --mode=merge2 --to="$MERGED"
        fi
 }
+
+merge_cmd_help () {
+       echo "Use ECMerge (requires a graphical session)"
+}
index d1ce513ff5d3b3db14dd1270d2181099e1134c2c..fc6892cc95ed1ec480e06959bf7248f45d23371e 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" -f emerge-files-command "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use Emacs' Emerge"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -17,6 +21,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Emacs' Emerge"
+}
+
 translate_merge_tool_path() {
        echo emacs
 }
index e72b06fc4d8ff76e06a0b972ae28df73efd4180d..6f53ca91613dc898d6247228eac98e8c23dd96fe 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE" -nh
 }
 
+diff_cmd_help () {
+       echo "Use ExamDiff Pro (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -11,6 +15,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use ExamDiff Pro (requires a graphical session)"
+}
+
 translate_merge_tool_path() {
        mergetool_find_win32_cmd "ExamDiff.com" "ExamDiff Pro"
 }
index 8b23a13c4111bdecc700a4a1a4547e15760eba58..3ed07efd16d2937939385d5777325eacccd65e2f 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use Guiffy's Diff Tool (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -13,6 +17,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Guiffy's Diff Tool (requires a graphical session)"
+}
+
 exit_code_trustable () {
        true
 }
index 520cb914a18bd24421fe2c64daf6bac571e73975..ee8b3a0570eff25511c2d7be0cbf291faf145ee0 100644 (file)
@@ -4,6 +4,10 @@ diff_cmd () {
                "$LOCAL" "$REMOTE" >/dev/null 2>&1
 }
 
+diff_cmd_help () {
+       echo "Use KDiff3 (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -22,6 +26,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use KDiff3 (requires a graphical session)"
+}
+
 exit_code_trustable () {
        true
 }
index e8c0bfa678547258ff0f330ae756f3621e6aadc7..4ce23dbe8bbf72adf3cc06bd8928f61aa94ce020 100644 (file)
@@ -2,10 +2,18 @@ can_merge () {
        return 1
 }
 
+diff_cmd_help () {
+       echo "Use Kompare (requires a graphical session)"
+}
+
 diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+merge_cmd_help () {
+       echo "Use Kompare (requires a graphical session)"
+}
+
 exit_code_trustable () {
        true
 }
index aab4ebb9355ae946f1c7b289569f1892ea29e583..8ec0867e032bf7ed4036d5da4f2bfae7fd2b2ae4 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use Meld (requires a graphical session)"
+}
+
 merge_cmd () {
        check_meld_for_features
 
@@ -20,6 +24,10 @@ merge_cmd () {
        fi
 }
 
+merge_cmd_help () {
+       echo "Use Meld (requires a graphical session) with optional \`auto merge\` (see \`git help mergetool\`'s \`CONFIGURATION\` section)"
+}
+
 # Get meld help message
 init_meld_help_msg () {
        if test -z "$meld_help_msg"
index b608dd6de30aaab9b30729145029761330f9fb55..44adf8f95155cb4adb9a29de3ea28ca67ad579e8 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE" | cat
 }
 
+diff_cmd_help () {
+       echo "Use FileMerge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -12,3 +16,7 @@ merge_cmd () {
                        -merge "$MERGED" | cat
        fi
 }
+
+merge_cmd_help () {
+       echo "Use FileMerge (requires a graphical session)"
+}
index 7a5b291dd28ad5b60ca0492480b800aaa9cc929e..f3cb197e5876e1a667e97189fff71fd355ccab30 100644 (file)
@@ -19,6 +19,10 @@ diff_cmd () {
        fi
 }
 
+diff_cmd_help () {
+       echo "Use HelixCore P4Merge (requires a graphical session)"
+}
+
 merge_cmd () {
        if ! $base_present
        then
@@ -34,3 +38,7 @@ create_empty_file () {
 
        printf "%s" "$empty_file"
 }
+
+merge_cmd_help () {
+       echo "Use HelixCore P4Merge (requires a graphical session)"
+}
index 9c2e6f6fd7d0a72c200b9f68f1f0f926318a27ba..5410835a6b5f6ed3d8e1220f1d98fc73d2457e64 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" mergetool "$LOCAL" "$REMOTE" -o "$MERGED"
 }
 
+diff_cmd_help () {
+       echo "Use Sublime Merge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -10,3 +14,7 @@ merge_cmd () {
                "$merge_tool_path" mergetool "$LOCAL" "$REMOTE" -o "$MERGED"
        fi
 }
+
+merge_cmd_help () {
+       echo "Use Sublime Merge (requires a graphical session)"
+}
index eee5cb57e3ccf7f1ad7ad150c37a8b6d7ab6d436..66906a720d6079acce918b97083c0f64b95049b6 100644 (file)
@@ -2,6 +2,10 @@ diff_cmd () {
        "$merge_tool_path" "$LOCAL" "$REMOTE"
 }
 
+diff_cmd_help () {
+       echo "Use TkDiff (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -14,3 +18,7 @@ merge_cmd () {
 exit_code_trustable () {
        true
 }
+
+merge_cmd_help () {
+       echo "Use TkDiff (requires a graphical session)"
+}
index d7ab666a59a2c8690861146bb6ae4874ed48e0de..507edcd444d12f4ebbc9f75b77ed89776904e401 100644 (file)
@@ -2,6 +2,10 @@ can_diff () {
        return 1
 }
 
+diff_cmd_help () {
+       echo "Use TortoiseMerge (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -30,3 +34,7 @@ translate_merge_tool_path() {
                echo tortoisemerge
        fi
 }
+
+merge_cmd_help () {
+       echo "Use TortoiseMerge (requires a graphical session)"
+}
index 96f6209a04106fbe8b66e30d5fefd0c8e92fb685..461a89b6f987972c6447a1e6d8df074aedddf814 100644 (file)
+# This script can be run in two different contexts:
+#
+#   - From git, when the user invokes the "vimdiff" merge tool. In this context
+#     this script expects the following environment variables (among others) to
+#     be defined (which is something "git" takes care of):
+#
+#       - $BASE
+#       - $LOCAL
+#       - $REMOTE
+#       - $MERGED
+#
+#     In this mode, all this script does is run the following command:
+#
+#         vim -f -c ... $LOCAL $BASE $REMOTE $MERGED
+#
+#     ...where the "..." string depends on the value of the
+#     "mergetool.vimdiff.layout" configuration variable and is used to open vim
+#     with a certain layout of buffers, windows and tabs.
+#
+#   - From a script inside the unit tests framework folder ("t" folder) by
+#     sourcing this script and then manually calling "run_unit_tests", which
+#     will run a battery of unit tests to make sure nothing breaks.
+#     In this context this script does not expect any particular environment
+#     variable to be set.
+
+
+################################################################################
+## Internal functions (not meant to be used outside this script)
+################################################################################
+
+debug_print () {
+       # Send message to stderr if global variable GIT_MERGETOOL_VIMDIFF_DEBUG
+       # is set to a non-empty value
+
+       if test -n "$GIT_MERGETOOL_VIMDIFF_DEBUG"
+       then
+               >&2 echo "$@"
+       fi
+}
+
+substring () {
+       # Return a substring of $1 containing $3 characters starting at
+       # zero-based offset $2.
+       #
+       # Examples:
+       #
+       #   substring "Hello world" 0 4  --> "Hell"
+       #   substring "Hello world" 3 4  --> "lo w"
+       #   substring "Hello world" 3 10 --> "lo world"
+
+       STRING=$1
+       START=$2
+       LEN=$3
+
+       echo "$STRING" | cut -c$(( START + 1 ))-$(( START + $LEN ))
+}
+
+gen_cmd_aux () {
+       # Auxiliary function used from "gen_cmd()".
+       # Read that other function documentation for more details.
+
+       LAYOUT=$1
+       CMD=$2  # This is a second (hidden) argument used for recursion
+
+       debug_print
+       debug_print "LAYOUT    : $LAYOUT"
+       debug_print "CMD       : $CMD"
+
+       if test -z "$CMD"
+       then
+               CMD="echo" # vim "nop" operator
+       fi
+
+       start=0
+       end=${#LAYOUT}
+
+       nested=0
+       nested_min=100
+
+
+       # Step 1:
+       #
+       # Increase/decrease "start"/"end" indices respectively to get rid of
+       # outer parentheses.
+       #
+       # Example:
+       #
+       #   - BEFORE: (( LOCAL , BASE ) / MERGED )
+       #   - AFTER :  ( LOCAL , BASE ) / MERGED
+
+       oldIFS=$IFS
+       IFS=#
+       for c in $(echo "$LAYOUT" | sed 's:.:&#:g')
+       do
+               if test "$c" = " "
+               then
+                       continue
+               fi
+
+               if test "$c" = "("
+               then
+                       nested=$(( nested + 1 ))
+                       continue
+               fi
+
+               if test "$c" = ")"
+               then
+                       nested=$(( nested - 1 ))
+                       continue
+               fi
+
+               if test "$nested" -lt "$nested_min"
+               then
+                       nested_min=$nested
+               fi
+       done
+       IFS=$oldIFS
+
+       debug_print "NESTED MIN: $nested_min"
+
+       while test "$nested_min" -gt "0"
+       do
+               start=$(( start + 1 ))
+               end=$(( end - 1 ))
+
+               start_minus_one=$(( start - 1 ))
+
+               while ! test "$(substring "$LAYOUT" "$start_minus_one" 1)" = "("
+               do
+                       start=$(( start + 1 ))
+                       start_minus_one=$(( start_minus_one + 1 ))
+               done
+
+               while ! test "$(substring "$LAYOUT" "$end" 1)" = ")"
+               do
+                       end=$(( end - 1 ))
+               done
+
+               nested_min=$(( nested_min - 1 ))
+       done
+
+       debug_print "CLEAN     : $(substring "$LAYOUT" "$start" "$(( end - start ))")"
+
+
+       # Step 2:
+       #
+       # Search for all valid separators ("+", "/" or ",") which are *not*
+       # inside parentheses. Save the index at which each of them first
+       # appears.
+
+       index_new_tab=""
+       index_horizontal_split=""
+       index_vertical_split=""
+
+       nested=0
+       i=$(( start - 1 ))
+
+       oldIFS=$IFS
+       IFS=#
+       for c in $(substring "$LAYOUT" "$start" "$(( end - start ))" | sed 's:.:&#:g');
+       do
+               i=$(( i + 1 ))
+
+               if test "$c" = " "
+               then
+                       continue
+               fi
+
+               if test "$c" = "("
+               then
+                       nested=$(( nested + 1 ))
+                       continue
+               fi
+
+               if test "$c" = ")"
+               then
+                       nested=$(( nested - 1 ))
+                       continue
+               fi
+
+               if test "$nested" = 0
+               then
+                       current=$c
+
+                       if test "$current" = "+"
+                       then
+                               if test -z "$index_new_tab"
+                               then
+                                       index_new_tab=$i
+                               fi
+
+                       elif test "$current" = "/"
+                       then
+                               if test -z "$index_horizontal_split"
+                               then
+                                       index_horizontal_split=$i
+                               fi
+
+                       elif test "$current" = ","
+                       then
+                               if test -z "$index_vertical_split"
+                               then
+                                       index_vertical_split=$i
+                               fi
+                       fi
+               fi
+       done
+       IFS=$oldIFS
+
+
+       # Step 3:
+       #
+       # Process the separator with the highest order of precedence
+       # (";" has the highest precedence and "|" the lowest one).
+       #
+       # By "process" I mean recursively call this function twice: the first
+       # one with the substring at the left of the separator and the second one
+       # with the one at its right.
+
+       terminate="false"
+
+       if ! test -z "$index_new_tab"
+       then
+               before="-tabnew"
+               after="tabnext"
+               index=$index_new_tab
+               terminate="true"
+
+       elif ! test -z "$index_horizontal_split"
+       then
+               before="split"
+               after="wincmd j"
+               index=$index_horizontal_split
+               terminate="true"
+
+       elif ! test -z "$index_vertical_split"
+       then
+               before="vertical split"
+               after="wincmd l"
+               index=$index_vertical_split
+               terminate="true"
+       fi
+
+       if  test "$terminate" = "true"
+       then
+               CMD="$CMD | $before"
+               CMD=$(gen_cmd_aux "$(substring "$LAYOUT" "$start" "$(( index - start ))")" "$CMD")
+               CMD="$CMD | $after"
+               CMD=$(gen_cmd_aux "$(substring "$LAYOUT" "$(( index + 1 ))" "$(( ${#LAYOUT} - index ))")" "$CMD")
+               echo "$CMD"
+               return
+       fi
+
+
+       # Step 4:
+       #
+       # If we reach this point, it means there are no separators and we just
+       # need to print the command to display the specified buffer
+
+       target=$(substring "$LAYOUT" "$start" "$(( end - start ))" | sed 's:[ @();|-]::g')
+
+       if test "$target" = "LOCAL"
+       then
+               CMD="$CMD | 1b"
+
+       elif test "$target" = "BASE"
+       then
+               CMD="$CMD | 2b"
+
+       elif test "$target" = "REMOTE"
+       then
+               CMD="$CMD | 3b"
+
+       elif test "$target" = "MERGED"
+       then
+               CMD="$CMD | 4b"
+
+       else
+               CMD="$CMD | ERROR: >$target<"
+       fi
+
+       echo "$CMD"
+       return
+}
+
+
+gen_cmd () {
+       # This function returns (in global variable FINAL_CMD) the string that
+       # you can use when invoking "vim" (as shown next) to obtain a given
+       # layout:
+       #
+       #   $ vim -f $FINAL_CMD "$LOCAL" "$BASE" "$REMOTE" "$MERGED"
+       #
+       # It takes a single argument: a string containing the desired layout
+       # definition.
+       #
+       # The syntax of the "layout definitions" is explained in "Documentation/
+       # mergetools/vimdiff.txt" but you can already intuitively understand how
+       # it works by knowing that...
+       #
+       #   * "+" means "a new vim tab"
+       #   * "/" means "a new vim horizontal split"
+       #   * "," means "a new vim vertical split"
+       #
+       # It also returns (in global variable FINAL_TARGET) the name ("LOCAL",
+       # "BASE", "REMOTE" or "MERGED") of the file that is marked with an "@",
+       # or "MERGED" if none of them is.
+       #
+       # Example:
+       #
+       #     gen_cmd "@LOCAL , REMOTE"
+       #     |
+       #     `-> FINAL_CMD    == "-c \"echo | vertical split | 1b | wincmd l | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       #         FINAL_TARGET == "LOCAL"
+
+       LAYOUT=$1
+
+
+       # Search for a "@" in one of the file identifiers ("LOCAL", "BASE",
+       # "REMOTE", "MERGED"). If not found, use "MERGED" as the default file
+       # where changes will be saved.
+
+       if echo "$LAYOUT" | grep @LOCAL >/dev/null
+       then
+               FINAL_TARGET="LOCAL"
+       elif echo "$LAYOUT" | grep @BASE >/dev/null
+       then
+               FINAL_TARGET="BASE"
+       else
+               FINAL_TARGET="MERGED"
+       fi
+
+
+       # Obtain the first part of the vim "-c" option that creates the desired layout
+
+       CMD=$(gen_cmd_aux "$LAYOUT")
+
+
+       # Adjust the script we just obtained depending on whether more than
+       # one window is visible
+
+       if echo "$LAYOUT" | grep ",\|/" >/dev/null
+       then
+               CMD="$CMD | tabdo windo diffthis"
+       else
+               CMD="$CMD | bufdo diffthis"
+       fi
+
+
+       # Add an extra "-c" option to move to the first tab (notice that we
+       # can't simply append the command to the previous "-c" string as
+       # explained here: https://github.com/vim/vim/issues/9076
+
+       FINAL_CMD="-c \"$CMD\" -c \"tabfirst\""
+}
+
+
+################################################################################
+## API functions (called from "git-mergetool--lib.sh")
+################################################################################
+
 diff_cmd () {
        "$merge_tool_path" -R -f -d \
                -c 'wincmd l' -c 'cd $GIT_PREFIX' "$LOCAL" "$REMOTE"
 }
 
+
+diff_cmd_help () {
+       TOOL=$1
+
+       case "$TOOL" in
+       nvimdiff*)
+               printf "Use Neovim"
+               ;;
+       gvimdiff*)
+               printf "Use gVim (requires a graphical session)"
+               ;;
+       vimdiff*)
+               printf "Use Vim"
+               ;;
+       esac
+
+       return 0
+}
+
+
 merge_cmd () {
+       layout=$(git config mergetool.vimdiff.layout)
+
        case "$1" in
        *vimdiff)
-               if $base_present
+               if test -z "$layout"
                then
-                       "$merge_tool_path" -f -d -c '4wincmd w | wincmd J' \
-                               "$LOCAL" "$BASE" "$REMOTE" "$MERGED"
-               else
-                       "$merge_tool_path" -f -d -c 'wincmd l' \
-                               "$LOCAL" "$MERGED" "$REMOTE"
+                       # Default layout when none is specified
+                       layout="(LOCAL,BASE,REMOTE)/MERGED"
                fi
                ;;
        *vimdiff1)
-               "$merge_tool_path" -f -d \
-                       -c 'echon "Resolve conflicts leftward then save. Use :cq to abort."' \
-                       "$LOCAL" "$REMOTE"
-               ret="$?"
-               if test "$ret" -eq 0
-               then
-                       cp -- "$LOCAL" "$MERGED"
-               fi
-               return "$ret"
+               layout="@LOCAL,REMOTE"
                ;;
        *vimdiff2)
-               "$merge_tool_path" -f -d -c 'wincmd l' \
-                       "$LOCAL" "$MERGED" "$REMOTE"
+               layout="LOCAL,MERGED,REMOTE"
                ;;
        *vimdiff3)
-               if $base_present
+               layout="MERGED"
+               ;;
+       esac
+
+       gen_cmd "$layout"
+
+       debug_print ""
+       debug_print "FINAL CMD : $FINAL_CMD"
+       debug_print "FINAL TAR : $FINAL_TARGET"
+
+       if $base_present
+       then
+               eval "$merge_tool_path" \
+                       -f "$FINAL_CMD" "$LOCAL" "$BASE" "$REMOTE" "$MERGED"
+       else
+               # If there is no BASE (for example, a merge conflict on a new
+               # file that didn't exist before and was created with the same
+               # name in both branches), close all BASE windows using vim's
+               # "quit" command
+
+               FINAL_CMD=$(echo "$FINAL_CMD" | \
+                       sed -e 's:2b:quit:g' -e 's:3b:2b:g' -e 's:4b:3b:g')
+
+               eval "$merge_tool_path" \
+                       -f "$FINAL_CMD" "$LOCAL" "$REMOTE" "$MERGED"
+       fi
+
+       ret="$?"
+
+       if test "$ret" -eq 0
+       then
+               case "$FINAL_TARGET" in
+               LOCAL)
+                       source_path="$LOCAL"
+                       ;;
+               REMOTE)
+                       source_path="$REMOTE"
+                       ;;
+               MERGED|*)
+                       # Do nothing
+                       source_path=
+                       ;;
+               esac
+
+               if test -n "$source_path"
                then
-                       "$merge_tool_path" -f -d -c 'hid | hid | hid' \
-                               "$LOCAL" "$REMOTE" "$BASE" "$MERGED"
-               else
-                       "$merge_tool_path" -f -d -c 'hid | hid' \
-                               "$LOCAL" "$REMOTE" "$MERGED"
+                       cp "$source_path" "$MERGED"
                fi
+       fi
+
+       return "$ret"
+}
+
+
+merge_cmd_help () {
+       TOOL=$1
+
+       case "$TOOL" in
+       nvimdiff*)
+               printf "Use Neovim "
+               ;;
+       gvimdiff*)
+               printf "Use gVim (requires a graphical session) "
+               ;;
+       vimdiff*)
+               printf "Use Vim "
+               ;;
+       esac
+
+       case "$TOOL" in
+       *1)
+               echo "with a 2 panes layout (LOCAL and REMOTE)"
+               ;;
+       *2)
+               echo "with a 3 panes layout (LOCAL, MERGED and REMOTE)"
+               ;;
+       *3)
+               echo "where only the MERGED file is shown"
+               ;;
+       *)
+               echo "with a custom layout (see \`git help mergetool\`'s \`BACKEND SPECIFIC HINTS\` section)"
                ;;
        esac
+
+       return 0
 }
 
-translate_merge_tool_path() {
+
+translate_merge_tool_path () {
        case "$1" in
        nvimdiff*)
                echo nvim
@@ -57,14 +503,121 @@ translate_merge_tool_path() {
        esac
 }
 
+
 exit_code_trustable () {
        true
 }
 
+
 list_tool_variants () {
-       for prefix in '' g n; do
-               for suffix in '' 1 2 3; do
-                       echo "${prefix}vimdiff${suffix}"
+       if test "$TOOL_MODE" = "diff"
+       then
+               for prefix in '' g n
+               do
+                       echo "${prefix}vimdiff"
+               done
+       else
+               for prefix in '' g n
+               do
+                       for suffix in '' 1 2 3
+                       do
+                               echo "${prefix}vimdiff${suffix}"
+                       done
                done
+       fi
+}
+
+
+################################################################################
+## Unit tests (called from scripts inside the "t" folder)
+################################################################################
+
+run_unit_tests () {
+       # Function to make sure that we don't break anything when modifying this
+       # script.
+
+       NUMBER_OF_TEST_CASES=16
+
+       TEST_CASE_01="(LOCAL,BASE,REMOTE)/MERGED"   # default behaviour
+       TEST_CASE_02="@LOCAL,REMOTE"                # when using vimdiff1
+       TEST_CASE_03="LOCAL,MERGED,REMOTE"          # when using vimdiff2
+       TEST_CASE_04="MERGED"                       # when using vimdiff3
+       TEST_CASE_05="LOCAL/MERGED/REMOTE"
+       TEST_CASE_06="(LOCAL/REMOTE),MERGED"
+       TEST_CASE_07="MERGED,(LOCAL/REMOTE)"
+       TEST_CASE_08="(LOCAL,REMOTE)/MERGED"
+       TEST_CASE_09="MERGED/(LOCAL,REMOTE)"
+       TEST_CASE_10="(LOCAL/BASE/REMOTE),MERGED"
+       TEST_CASE_11="(LOCAL,BASE,REMOTE)/MERGED+BASE,LOCAL+BASE,REMOTE+(LOCAL/BASE/REMOTE),MERGED"
+       TEST_CASE_12="((LOCAL,REMOTE)/BASE),MERGED"
+       TEST_CASE_13="((LOCAL,REMOTE)/BASE),((LOCAL/REMOTE),MERGED)"
+       TEST_CASE_14="BASE,REMOTE+BASE,LOCAL"
+       TEST_CASE_15="  ((  (LOCAL , BASE , REMOTE) / MERGED))   +(BASE)   , LOCAL+ BASE , REMOTE+ (((LOCAL / BASE / REMOTE)) ,    MERGED   )  "
+       TEST_CASE_16="LOCAL,BASE,REMOTE / MERGED + BASE,LOCAL + BASE,REMOTE + (LOCAL / BASE / REMOTE),MERGED"
+
+       EXPECTED_CMD_01="-c \"echo | split | vertical split | 1b | wincmd l | vertical split | 2b | wincmd l | 3b | wincmd j | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_02="-c \"echo | vertical split | 1b | wincmd l | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_03="-c \"echo | vertical split | 1b | wincmd l | vertical split | 4b | wincmd l | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_04="-c \"echo | 4b | bufdo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_05="-c \"echo | split | 1b | wincmd j | split | 4b | wincmd j | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_06="-c \"echo | vertical split | split | 1b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_07="-c \"echo | vertical split | 4b | wincmd l | split | 1b | wincmd j | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_08="-c \"echo | split | vertical split | 1b | wincmd l | 3b | wincmd j | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_09="-c \"echo | split | 4b | wincmd j | vertical split | 1b | wincmd l | 3b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_10="-c \"echo | vertical split | split | 1b | wincmd j | split | 2b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_11="-c \"echo | -tabnew | split | vertical split | 1b | wincmd l | vertical split | 2b | wincmd l | 3b | wincmd j | 4b | tabnext | -tabnew | vertical split | 2b | wincmd l | 1b | tabnext | -tabnew | vertical split | 2b | wincmd l | 3b | tabnext | vertical split | split | 1b | wincmd j | split | 2b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_12="-c \"echo | vertical split | split | vertical split | 1b | wincmd l | 3b | wincmd j | 2b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_13="-c \"echo | vertical split | split | vertical split | 1b | wincmd l | 3b | wincmd j | 2b | wincmd l | vertical split | split | 1b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_14="-c \"echo | -tabnew | vertical split | 2b | wincmd l | 3b | tabnext | vertical split | 2b | wincmd l | 1b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_15="-c \"echo | -tabnew | split | vertical split | 1b | wincmd l | vertical split | 2b | wincmd l | 3b | wincmd j | 4b | tabnext | -tabnew | vertical split | 2b | wincmd l | 1b | tabnext | -tabnew | vertical split | 2b | wincmd l | 3b | tabnext | vertical split | split | 1b | wincmd j | split | 2b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+       EXPECTED_CMD_16="-c \"echo | -tabnew | split | vertical split | 1b | wincmd l | vertical split | 2b | wincmd l | 3b | wincmd j | 4b | tabnext | -tabnew | vertical split | 2b | wincmd l | 1b | tabnext | -tabnew | vertical split | 2b | wincmd l | 3b | tabnext | vertical split | split | 1b | wincmd j | split | 2b | wincmd j | 3b | wincmd l | 4b | tabdo windo diffthis\" -c \"tabfirst\""
+
+       EXPECTED_TARGET_01="MERGED"
+       EXPECTED_TARGET_02="LOCAL"
+       EXPECTED_TARGET_03="MERGED"
+       EXPECTED_TARGET_04="MERGED"
+       EXPECTED_TARGET_05="MERGED"
+       EXPECTED_TARGET_06="MERGED"
+       EXPECTED_TARGET_07="MERGED"
+       EXPECTED_TARGET_08="MERGED"
+       EXPECTED_TARGET_09="MERGED"
+       EXPECTED_TARGET_10="MERGED"
+       EXPECTED_TARGET_11="MERGED"
+       EXPECTED_TARGET_12="MERGED"
+       EXPECTED_TARGET_13="MERGED"
+       EXPECTED_TARGET_14="MERGED"
+       EXPECTED_TARGET_15="MERGED"
+       EXPECTED_TARGET_16="MERGED"
+
+       at_least_one_ko="false"
+
+       for i in $(seq -w 1 99)
+       do
+               if test "$i" -gt $NUMBER_OF_TEST_CASES
+               then
+                       break
+               fi
+
+               gen_cmd "$(eval echo \${TEST_CASE_"$i"})"
+
+               if test "$FINAL_CMD" = "$(eval echo \${EXPECTED_CMD_"$i"})" \
+                       && test "$FINAL_TARGET" = "$(eval echo \${EXPECTED_TARGET_"$i"})"
+               then
+                       printf "Test Case #%02d: OK\n" "$(echo "$i" | sed 's/^0*//')"
+               else
+                       printf "Test Case #%02d: KO !!!!\n" "$(echo "$i" | sed 's/^0*//')"
+                       echo "  FINAL_CMD              : $FINAL_CMD"
+                       echo "  FINAL_CMD (expected)   : $(eval echo \${EXPECTED_CMD_"$i"})"
+                       echo "  FINAL_TARGET           : $FINAL_TARGET"
+                       echo "  FINAL_TARGET (expected): $(eval echo \${EXPECTED_TARGET_"$i"})"
+                       at_least_one_ko="true"
+               fi
        done
+
+       if test "$at_least_one_ko" = "true"
+       then
+               return 255
+       else
+               return 0
+       fi
 }
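
The rewritten vimdiff backend derives its window layout from the mergetool.vimdiff.layout configuration documented above ("+" opens a new tab, "/" a horizontal split, "," a vertical split, and "@" marks the buffer whose contents are saved). A usage sketch, assuming a git built with this series; the layout string is only an example:

    # Pick a custom layout for the vimdiff merge tool.
    git config mergetool.vimdiff.layout "(LOCAL,BASE,REMOTE)/MERGED"

    # Resolve conflicts with it; GIT_MERGETOOL_VIMDIFF_DEBUG makes the script
    # print the generated vim "-c" command on stderr.
    GIT_MERGETOOL_VIMDIFF_DEBUG=true git mergetool --tool=vimdiff

The same layout strings are exercised by run_unit_tests above, which compares the generated commands and targets against the expected ones.
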
index 74d03259fdf157c9ee07eec7b2c40727f5ce49dc..36c72dde6e3ff53ed0204166343fde3ae9af9809 100644 (file)
@@ -3,6 +3,10 @@ diff_cmd () {
        return 0
 }
 
+diff_cmd_help () {
+       echo "Use WinMerge (requires a graphical session)"
+}
+
 merge_cmd () {
        # mergetool.winmerge.trustExitCode is implicitly false.
        # touch $BACKUP so that we can check_unchanged.
@@ -13,3 +17,7 @@ merge_cmd () {
 translate_merge_tool_path() {
        mergetool_find_win32_cmd "WinMergeU.exe" "WinMerge"
 }
+
+merge_cmd_help () {
+       echo "Use WinMerge (requires a graphical session)"
+}
index d5ce467995653e07be726c307fcf3b61fc5ddea4..cd205f98425dfc66a41b8b5ea536bf81b374e6c7 100644 (file)
@@ -12,6 +12,10 @@ diff_cmd () {
        fi
 }
 
+diff_cmd_help () {
+       echo "Use xxdiff (requires a graphical session)"
+}
+
 merge_cmd () {
        if $base_present
        then
@@ -28,3 +32,7 @@ merge_cmd () {
                        --merged-file "$MERGED" "$LOCAL" "$REMOTE"
        fi
 }
+
+merge_cmd_help () {
+       echo "Use xxdiff (requires a graphical session)"
+}
diff --git a/midx.c b/midx.c
index 107365d2114ce2945d4221ff853df97829b22d05..3db0e47735f600c936ec8a9566d5587228534481 100644 (file)
--- a/midx.c
+++ b/midx.c
@@ -1132,17 +1132,26 @@ cleanup:
 static struct multi_pack_index *lookup_multi_pack_index(struct repository *r,
                                                        const char *object_dir)
 {
+       struct multi_pack_index *result = NULL;
        struct multi_pack_index *cur;
+       char *obj_dir_real = real_pathdup(object_dir, 1);
+       struct strbuf cur_path_real = STRBUF_INIT;
 
        /* Ensure the given object_dir is local, or a known alternate. */
-       find_odb(r, object_dir);
+       find_odb(r, obj_dir_real);
 
        for (cur = get_multi_pack_index(r); cur; cur = cur->next) {
-               if (!strcmp(object_dir, cur->object_dir))
-                       return cur;
+               strbuf_realpath(&cur_path_real, cur->object_dir, 1);
+               if (!strcmp(obj_dir_real, cur_path_real.buf)) {
+                       result = cur;
+                       goto cleanup;
+               }
        }
 
-       return NULL;
+cleanup:
+       free(obj_dir_real);
+       strbuf_release(&cur_path_real);
+       return result;
 }
 
 static int write_midx_internal(const char *object_dir,
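
lookup_multi_pack_index() now compares realpath-normalized object directories, so an object store reached through a symlink matches the multi-pack-index git already has loaded instead of being treated as a different directory. A hedged illustration; the paths and the symlink are invented for the example:

    # Hypothetical: the same object store is reachable under two spellings.
    ln -s /srv/repos/project.git /srv/repos/alias.git

    # With realpath-based comparison, the symlinked spelling resolves to the
    # same cached multi-pack-index as the canonical one.
    git -C /srv/repos/alias.git multi-pack-index write
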
index 5ffbf3d4fd46de4242bd1ce00c6b3c2dad17322f..d35c1820e7af51691e500e9da8fdc0282157749c 100644 (file)
@@ -1728,7 +1728,7 @@ void *read_object_file_extended(struct repository *r,
                die(_("loose object %s (stored in %s) is corrupt"),
                    oid_to_hex(repl), path);
 
-       if ((p = has_packed_and_bad(r, repl)) != NULL)
+       if ((p = has_packed_and_bad(r, repl)))
                die(_("packed object %s (stored in %s) is corrupt"),
                    oid_to_hex(repl), p->pack_name);
        obj_read_unlock();
@@ -2623,8 +2623,12 @@ int read_loose_object(const char *path,
                goto out;
        }
 
-       if (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr),
-                               NULL) < 0) {
+       switch (unpack_loose_header(&stream, map, mapsize, hdr, sizeof(hdr),
+                                   NULL)) {
+       case ULHR_OK:
+               break;
+       case ULHR_BAD:
+       case ULHR_TOO_LONG:
                error(_("unable to unpack header of %s"), path);
                goto out;
        }
index f0e327f91f57ac9dabaec840cd1eb77a4b4293ec..4d2746574cde0b813a73772c13d71a30db696128 100644 (file)
@@ -1832,7 +1832,8 @@ static void diagnose_invalid_index_path(struct repository *r,
                pos = -pos - 1;
        if (pos < istate->cache_nr) {
                ce = istate->cache[pos];
-               if (ce_namelen(ce) == namelen &&
+               if (!S_ISSPARSEDIR(ce->ce_mode) &&
+                   ce_namelen(ce) == namelen &&
                    !memcmp(ce->name, filename, namelen))
                        die(_("path '%s' is in the index, but not at stage %d\n"
                            "hint: Did you mean ':%d:%s'?"),
@@ -1848,7 +1849,8 @@ static void diagnose_invalid_index_path(struct repository *r,
                pos = -pos - 1;
        if (pos < istate->cache_nr) {
                ce = istate->cache[pos];
-               if (ce_namelen(ce) == fullname.len &&
+               if (!S_ISSPARSEDIR(ce->ce_mode) &&
+                   ce_namelen(ce) == fullname.len &&
                    !memcmp(ce->name, fullname.buf, fullname.len))
                        die(_("path '%s' is in the index, but not '%s'\n"
                            "hint: Did you mean ':%d:%s' aka ':%d:./%s'?"),
@@ -1881,6 +1883,20 @@ static char *resolve_relative_path(struct repository *r, const char *rel)
                           rel);
 }
 
+static int reject_tree_in_index(struct repository *repo,
+                               int only_to_die,
+                               const struct cache_entry *ce,
+                               int stage,
+                               const char *prefix,
+                               const char *cp)
+{
+       if (!S_ISSPARSEDIR(ce->ce_mode))
+               return 0;
+       if (only_to_die)
+               diagnose_invalid_index_path(repo, stage, prefix, cp);
+       return -1;
+}
+
 static enum get_oid_result get_oid_with_context_1(struct repository *repo,
                                  const char *name,
                                  unsigned flags,
@@ -1955,9 +1971,12 @@ static enum get_oid_result get_oid_with_context_1(struct repository *repo,
                            memcmp(ce->name, cp, namelen))
                                break;
                        if (ce_stage(ce) == stage) {
+                               free(new_path);
+                               if (reject_tree_in_index(repo, only_to_die, ce,
+                                                        stage, prefix, cp))
+                                       return -1;
                                oidcpy(oid, &ce->oid);
                                oc->mode = ce->ce_mode;
-                               free(new_path);
                                return 0;
                        }
                        pos++;
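
With a sparse index, a directory outside the sparse-checkout cone is stored as a single sparse-directory entry, and reject_tree_in_index() now refuses to resolve an ":<path>" that lands on such an entry (emitting the usual hint when only_to_die is set) instead of silently handing back the directory's tree. A hedged illustration; the repository layout is assumed:

    # Hypothetical repository using the sparse index, where "outside/" is not
    # in the cone and is therefore stored as a sparse directory entry.
    git sparse-checkout init --cone --sparse-index
    git sparse-checkout set in-cone

    # Resolving the sparse directory through the index now fails with a hint
    # instead of quietly yielding its tree object.
    git rev-parse :outside/
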
index bd2322ed8ce3368d8b06ab570f2f1f4c2217d757..53996018c1106b1552898c718f7fb903fdb294b2 100644 (file)
@@ -312,10 +312,6 @@ int has_object(struct repository *r, const struct object_id *oid,
  * These functions can be removed once all callers have migrated to
  * has_object() and/or oid_object_info_extended().
  */
-#ifndef NO_THE_REPOSITORY_COMPATIBILITY_MACROS
-#define has_sha1_file_with_flags(sha1, flags) repo_has_sha1_file_with_flags(the_repository, sha1, flags)
-#define has_sha1_file(sha1) repo_has_sha1_file(the_repository, sha1)
-#endif
 int repo_has_object_file(struct repository *r, const struct object_id *oid);
 int repo_has_object_file_with_flags(struct repository *r,
                                    const struct object_id *oid, int flags);
index 97909d48da381f81622d36fb5889fd5d7013b04a..6a7cdca2317b0a1d00cbfa80b1c974f39df06585 100644 (file)
@@ -111,7 +111,7 @@ static struct ewah_bitmap *lookup_stored_bitmap(struct stored_bitmap *st)
        struct ewah_bitmap *parent;
        struct ewah_bitmap *composed;
 
-       if (st->xor == NULL)
+       if (!st->xor)
                return st->root;
 
        composed = ewah_pool_new();
@@ -279,7 +279,7 @@ static int load_bitmap_entries_v1(struct bitmap_index *index)
                if (xor_offset > 0) {
                        xor_bitmap = recent_bitmaps[(i - xor_offset) % MAX_XOR_OFFSET];
 
-                       if (xor_bitmap == NULL)
+                       if (!xor_bitmap)
                                return error("Invalid XOR offset in bitmap pack index");
                }
 
@@ -728,7 +728,7 @@ static int add_commit_to_bitmap(struct bitmap_index *bitmap_git,
        if (!or_with)
                return 0;
 
-       if (*base == NULL)
+       if (!*base)
                *base = ewah_to_bitmap(or_with);
        else
                bitmap_or_ewah(*base, or_with);
@@ -771,7 +771,7 @@ static struct bitmap *find_objects(struct bitmap_index *bitmap_git,
         * Best case scenario: We found bitmaps for all the roots,
         * so the resulting `or` bitmap has the full reachability analysis
         */
-       if (not_mapped == NULL)
+       if (!not_mapped)
                return base;
 
        roots = not_mapped;
@@ -805,7 +805,7 @@ static struct bitmap *find_objects(struct bitmap_index *bitmap_git,
                struct include_data incdata;
                struct bitmap_show_data show_data;
 
-               if (base == NULL)
+               if (!base)
                        base = bitmap_new();
 
                incdata.bitmap_git = bitmap_git;
@@ -1299,7 +1299,7 @@ struct bitmap_index *prepare_bitmap_walk(struct rev_info *revs,
                reset_revision_walk();
                revs->ignore_missing_links = 0;
 
-               if (haves_bitmap == NULL)
+               if (!haves_bitmap)
                        BUG("failed to perform bitmap walk");
        }
 
@@ -1698,7 +1698,7 @@ void test_bitmap_walk(struct rev_info *revs)
                result = ewah_to_bitmap(bm);
        }
 
-       if (result == NULL)
+       if (!result)
                die("Commit %s doesn't have an indexed bitmap", oid_to_hex(&root->oid));
 
        revs->tag_objects = 1;
index 835b2d271645ce08b7f98214748d3ffe4296edb8..6b88a56025c3f189f72e859c7bbcafd81cea84ef 100644 (file)
@@ -116,7 +116,7 @@ int load_idx(const char *path, const unsigned int hashsz, void *idx_map,
 
        if (idx_size < 4 * 256 + hashsz + hashsz)
                return error("index file %s is too small", path);
-       if (idx_map == NULL)
+       if (!idx_map)
                return error("empty data");
 
        if (hdr->idx_signature == htonl(PACK_IDX_SIGNATURE)) {
diff --git a/path.c b/path.c
index d73146b6cd266a9d051c334ae63bb91426da0a33..3236122aad2066c6047139920d00ad155435c221 100644 (file)
--- a/path.c
+++ b/path.c
@@ -733,7 +733,7 @@ char *interpolate_path(const char *path, int real_home)
        struct strbuf user_path = STRBUF_INIT;
        const char *to_copy = path;
 
-       if (path == NULL)
+       if (!path)
                goto return_null;
 
        if (skip_prefix(path, "%(prefix)/", &path))
index d3f488cb05f29bc90e4e03810d9bf4b972d0834f..d31b48e725083654222842049e121a35fe8d511b 100644 (file)
@@ -19,7 +19,7 @@ void prio_queue_reverse(struct prio_queue *queue)
 {
        int i, j;
 
-       if (queue->compare != NULL)
+       if (queue->compare)
                BUG("prio_queue_reverse() on non-LIFO queue");
        for (i = 0; i < (j = (queue->nr - 1) - i); i++)
                swap(queue, i, j);
index db2ebdc66ef2fe465456e39c4d33c6e5f6bd36de..8d6695681c1040aa2c1941228f325607c0c81e6b 100644 (file)
@@ -84,7 +84,7 @@ static void promisor_remote_move_to_tail(struct promisor_remote_config *config,
                                         struct promisor_remote *r,
                                         struct promisor_remote *previous)
 {
-       if (r->next == NULL)
+       if (!r->next)
                return;
 
        if (previous)
index 4df97e185e9c80cd061ab1d00143eceed81a0242..60355f5ad6a9a689fd02cd98f126e084c2c74216 100644 (file)
@@ -2260,6 +2260,20 @@ static unsigned long load_cache_entries_threaded(struct index_state *istate, con
        return consumed;
 }
 
+static void set_new_index_sparsity(struct index_state *istate)
+{
+       /*
+        * If the index's repo exists, mark it sparse according to
+        * repo settings.
+        */
+       if (istate->repo) {
+               prepare_repo_settings(istate->repo);
+               if (!istate->repo->settings.command_requires_full_index &&
+                   is_sparse_index_allowed(istate, 0))
+                       istate->sparse_index = 1;
+       }
+}
+
 /* remember to discard_cache() before reading a different cache! */
 int do_read_index(struct index_state *istate, const char *path, int must_exist)
 {
@@ -2281,8 +2295,10 @@ int do_read_index(struct index_state *istate, const char *path, int must_exist)
        istate->timestamp.nsec = 0;
        fd = open(path, O_RDONLY);
        if (fd < 0) {
-               if (!must_exist && errno == ENOENT)
+               if (!must_exist && errno == ENOENT) {
+                       set_new_index_sparsity(istate);
                        return 0;
+               }
                die_errno(_("%s: index file open failed"), path);
        }
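
set_new_index_sparsity() handles the case where no index file exists yet: when do_read_index() sees ENOENT, the fresh in-memory index is marked sparse up front if the repository's settings allow it, rather than starting out full and being converted later. A hedged sketch of a setup where this path is taken; the configuration values are assumptions for illustration:

    # Hypothetical fresh repository configured for the sparse index before any
    # index file has been written.
    git init demo && cd demo
    git config core.sparseCheckout true
    git config core.sparseCheckoutCone true
    git config index.sparse true

    # The first command that reads the (still missing) index now starts from a
    # sparse in-memory index.
    git sparse-checkout set in-cone
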
 
index 7838bd22b8db1bd52859361a4f3b7789c2cf4b5b..2413f889f48315c14d06a64c308da0aad8f2dcd3 100644 (file)
@@ -1261,7 +1261,7 @@ static void grab_date(const char *buf, struct atom_value *v, const char *atomnam
         * ":" means no format is specified, and use the default.
         */
        formatp = strchr(atomname, ':');
-       if (formatp != NULL) {
+       if (formatp) {
                formatp++;
                parse_date_format(formatp, &date_mode);
        }
@@ -1509,7 +1509,7 @@ static void fill_missing_values(struct atom_value *val)
        int i;
        for (i = 0; i < used_atom_cnt; i++) {
                struct atom_value *v = &val[i];
-               if (v->s == NULL)
+               if (!v->s)
                        v->s = xstrdup("");
        }
 }
@@ -1619,7 +1619,7 @@ static const char *rstrip_ref_components(const char *refname, int len)
 
        while (remaining-- > 0) {
                char *p = strrchr(start, '/');
-               if (p == NULL) {
+               if (!p) {
                        free((char *)to_free);
                        return xstrdup("");
                } else
diff --git a/refs.c b/refs.c
index 9db66e995518c9f85acd0de0c5fa44658fdcf523..90bcb2716873592864e2496951f913618521cb45 100644 (file)
--- a/refs.c
+++ b/refs.c
@@ -1109,8 +1109,10 @@ int ref_transaction_create(struct ref_transaction *transaction,
                           unsigned int flags, const char *msg,
                           struct strbuf *err)
 {
-       if (!new_oid || is_null_oid(new_oid))
-               BUG("create called without valid new_oid");
+       if (!new_oid || is_null_oid(new_oid)) {
+               strbuf_addf(err, "'%s' has a null OID", refname);
+               return 1;
+       }
        return ref_transaction_update(transaction, refname, new_oid,
                                      null_oid(), flags, msg, err);
 }
index 3080ef944d998f87c7a42d1e2be33d5c97d8cfaf..32afd8a40b0faf22d1ef2de5bb2a2990fa6eebb3 100644 (file)
@@ -134,7 +134,7 @@ int search_ref_dir(struct ref_dir *dir, const char *refname, size_t len)
        r = bsearch(&key, dir->entries, dir->nr, sizeof(*dir->entries),
                    ref_entry_cmp_sslice);
 
-       if (r == NULL)
+       if (!r)
                return -1;
 
        return r - dir->entries;
index 2605371c28d86f638dcd5079466e038e3eb7a38a..8331b34e82300630b68c574e4f68071f3ebf261a 100644 (file)
@@ -15,7 +15,8 @@ https://developers.google.com/open-source/licenses/bsd
 
 static void strbuf_return_block(void *b, struct reftable_block *dest)
 {
-       memset(dest->data, 0xff, dest->len);
+       if (dest->len)
+               memset(dest->data, 0xff, dest->len);
        reftable_free(dest->data);
 }
 
@@ -56,7 +57,8 @@ void block_source_from_strbuf(struct reftable_block_source *bs,
 
 static void malloc_return_block(void *b, struct reftable_block *dest)
 {
-       memset(dest->data, 0xff, dest->len);
+       if (dest->len)
+               memset(dest->data, 0xff, dest->len);
        reftable_free(dest->data);
 }
 
@@ -85,7 +87,8 @@ static uint64_t file_size(void *b)
 
 static void file_return_block(void *b, struct reftable_block *dest)
 {
-       memset(dest->data, 0xff, dest->len);
+       if (dest->len)
+               memset(dest->data, 0xff, dest->len);
        reftable_free(dest->data);
 }
 
index 19fe4e200859683b13b1f0b0a21c8b1685ad4a08..d0b717510fa7d6dd8832bf77490630964e33b23b 100644 (file)
@@ -35,7 +35,7 @@ static int count_dir_entries(const char *dirname)
        DIR *dir = opendir(dirname);
        int len = 0;
        struct dirent *d;
-       if (dir == NULL)
+       if (!dir)
                return 0;
 
        while ((d = readdir(dir))) {
index 82db7995dd68ec9f8f8b7756582d9be749f13a9b..b8899e060abdd6629d73510a8b126b0c00c90b8d 100644 (file)
@@ -16,7 +16,7 @@ struct tree_node *tree_search(void *key, struct tree_node **rootp,
                              int insert)
 {
        int res;
-       if (*rootp == NULL) {
+       if (!*rootp) {
                if (!insert) {
                        return NULL;
                } else {
@@ -50,7 +50,7 @@ void infix_walk(struct tree_node *t, void (*action)(void *arg, void *key),
 
 void tree_free(struct tree_node *t)
 {
-       if (t == NULL) {
+       if (!t) {
                return;
        }
        if (t->left) {
index 427f1317c6be6121a733fa206ce6e27ab5e029b8..2e322a5683d081eea60fc85dfc4d792ce89b7a93 100644 (file)
@@ -183,7 +183,7 @@ static void writer_index_hash(struct reftable_writer *w, struct strbuf *hash)
        struct tree_node *node = tree_search(&want, &w->obj_index_tree,
                                             &obj_index_tree_node_compare, 0);
        struct obj_index_tree_node *key = NULL;
-       if (node == NULL) {
+       if (!node) {
                struct obj_index_tree_node empty = OBJ_INDEX_TREE_NODE_INIT;
                key = reftable_malloc(sizeof(struct obj_index_tree_node));
                *key = empty;
@@ -222,7 +222,7 @@ static int writer_add_record(struct reftable_writer *w,
 
        strbuf_reset(&w->last_key);
        strbuf_addbuf(&w->last_key, &key);
-       if (w->block_writer == NULL) {
+       if (!w->block_writer) {
                writer_reinit_block_writer(w, reftable_record_type(rec));
        }
 
@@ -263,7 +263,7 @@ int reftable_writer_add_ref(struct reftable_writer *w,
        };
        int err = 0;
 
-       if (ref->refname == NULL)
+       if (!ref->refname)
                return REFTABLE_API_ERROR;
        if (ref->update_index < w->min_update_index ||
            ref->update_index > w->max_update_index)
@@ -336,7 +336,7 @@ int reftable_writer_add_log(struct reftable_writer *w,
        if (log->value_type == REFTABLE_LOG_DELETION)
                return reftable_writer_add_log_verbatim(w, log);
 
-       if (log->refname == NULL)
+       if (!log->refname)
                return REFTABLE_API_ERROR;
 
        input_log_message = log->value.update.message;
@@ -545,7 +545,7 @@ static int writer_finish_public_section(struct reftable_writer *w)
        uint8_t typ = 0;
        int err = 0;
 
-       if (w->block_writer == NULL)
+       if (!w->block_writer)
                return 0;
 
        typ = block_writer_type(w->block_writer);
@@ -694,7 +694,7 @@ static int writer_flush_nonempty_block(struct reftable_writer *w)
 
 static int writer_flush_block(struct reftable_writer *w)
 {
-       if (w->block_writer == NULL)
+       if (!w->block_writer)
                return 0;
        if (w->block_writer->entries == 0)
                return 0;
index d26627c59329151ef196e26444a64cb44ad699d4..876ab435da949c8359c315636225d9fd7fd962ab 100644 (file)
--- a/rerere.c
+++ b/rerere.c
@@ -591,7 +591,7 @@ int rerere_remaining(struct repository *r, struct string_list *merge_rr)
                else if (conflict_type == RESOLVED) {
                        struct string_list_item *it;
                        it = string_list_lookup(merge_rr, (const char *)e->name);
-                       if (it != NULL) {
+                       if (it) {
                                free_rerere_id(it);
                                it->util = RERERE_RESOLVED;
                        }
index 7d435f80480ef1b7684fdc43b51057d20e79966f..090a967bf46aaec77f3bce578af58ca42f42d288 100644 (file)
@@ -1440,6 +1440,9 @@ static int limit_list(struct rev_info *revs)
                if (revs->min_age != -1 && (commit->date > revs->min_age) &&
                    !revs->line_level_traverse)
                        continue;
+               if (revs->max_age_as_filter != -1 &&
+                       (commit->date < revs->max_age_as_filter) && !revs->line_level_traverse)
+                       continue;
                date = commit->date;
                p = &commit_list_insert(commit, p)->next;
 
@@ -1838,6 +1841,7 @@ void repo_init_revisions(struct repository *r,
        revs->dense = 1;
        revs->prefix = prefix;
        revs->max_age = -1;
+       revs->max_age_as_filter = -1;
        revs->min_age = -1;
        revs->skip_count = -1;
        revs->max_count = -1;
@@ -2218,6 +2222,9 @@ static int handle_revision_opt(struct rev_info *revs, int argc, const char **arg
        } else if ((argcount = parse_long_opt("since", argv, &optarg))) {
                revs->max_age = approxidate(optarg);
                return argcount;
+       } else if ((argcount = parse_long_opt("since-as-filter", argv, &optarg))) {
+               revs->max_age_as_filter = approxidate(optarg);
+               return argcount;
        } else if ((argcount = parse_long_opt("after", argv, &optarg))) {
                revs->max_age = approxidate(optarg);
                return argcount;
@@ -2833,7 +2840,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
        }
        strvec_clear(&prune_data);
 
-       if (revs->def == NULL)
+       if (!revs->def)
                revs->def = opt ? opt->def : NULL;
        if (opt && opt->tweak)
                opt->tweak(revs, opt);
@@ -3652,7 +3659,7 @@ static enum rewrite_result rewrite_one_1(struct rev_info *revs,
                        return rewrite_one_ok;
                if (!p->parents)
                        return rewrite_one_noparents;
-               if ((p = one_relevant_parent(revs, p->parents)) == NULL)
+               if (!(p = one_relevant_parent(revs, p->parents)))
                        return rewrite_one_ok;
                *pp = p;
        }
@@ -3862,6 +3869,9 @@ enum commit_action get_commit_action(struct rev_info *revs, struct commit *commi
        if (revs->min_age != -1 &&
            comparison_date(revs, commit) > revs->min_age)
                        return commit_ignore;
+       if (revs->max_age_as_filter != -1 &&
+           comparison_date(revs, commit) < revs->max_age_as_filter)
+                       return commit_ignore;
        if (revs->min_parents || (revs->max_parents >= 0)) {
                int n = commit_list_count(commit->parents);
                if ((n < revs->min_parents) ||
index 5bc59c7bfe1e35efe7fb24e93f9a093baa28b658..e80c148b1943e7cbedf9b4bd6bd0f0a698e78437 100644 (file)
@@ -263,6 +263,7 @@ struct rev_info {
        int skip_count;
        int max_count;
        timestamp_t max_age;
+       timestamp_t max_age_as_filter;
        timestamp_t min_age;
        int min_parents;
        int max_parents;
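
Together, the revision.c and revision.h hunks above add `--since-as-filter` (stored in `max_age_as_filter`): commits older than the given date are skipped, but, unlike `--since`, the revision walk does not stop at the first commit that is too old — which is what the new t4217 test below relies on. A small illustration, assuming a history whose committer dates are not monotonic:

        # --since stops the walk at the first sufficiently old commit;
        # --since-as-filter keeps walking and only hides the old ones
        git log --oneline --since="2022-01-01"
        git log --oneline --since-as-filter="2022-01-01"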
index 07bed6c31b4e9f065b9ae0a4490cfd56adada0c7..5bd0c933e80291264abb537b7cd39b761e964a82 100644 (file)
@@ -142,7 +142,6 @@ struct child_process {
        unsigned clean_on_exit:1;
        unsigned wait_after_clean:1;
        void (*clean_on_exit_handler)(struct child_process *process);
-       void *clean_on_exit_handler_cbdata;
 };
 
 #define CHILD_PROCESS_INIT { \
index a5f678f4529b1953af8dd7b6e168a4a03333e2e9..8c3ed3532acff7d93fec585f7fd9bc3423d0612d 100644 (file)
@@ -1327,7 +1327,6 @@ void print_commit_summary(struct repository *r,
        get_commit_format(format.buf, &rev);
        rev.always_show_header = 0;
        rev.diffopt.detect_rename = DIFF_DETECT_RENAME;
-       rev.diffopt.break_opt = 0;
        diff_setup_done(&rev.diffopt);
 
        refs = get_main_ref_store(the_repository);
diff --git a/serve.c b/serve.c
index b3fe9b5126a3347784d501e296102ab87cc4c5ab..733347f602aa1ede3e45fee94050163790d65bf9 100644 (file)
--- a/serve.c
+++ b/serve.c
@@ -3,7 +3,6 @@
 #include "config.h"
 #include "pkt-line.h"
 #include "version.h"
-#include "strvec.h"
 #include "ls-refs.h"
 #include "protocol-caps.h"
 #include "serve.h"
diff --git a/setup.c b/setup.c
index a7b36f3ffbfbf272a319005828961d9f7db8afe8..f818dd858c6e624987663c6abd8f23107cbf4750 100644 (file)
--- a/setup.c
+++ b/setup.c
@@ -1470,7 +1470,7 @@ int git_config_perm(const char *var, const char *value)
        int i;
        char *endptr;
 
-       if (value == NULL)
+       if (!value)
                return PERM_GROUP;
 
        if (!strcmp(value, "umask"))
index 6cd307ac2c65a2825e467aeb9c1e6abfe8cc7e70..133496bd4d9f2979dfe2765e98a50186463dde1d 100644 (file)
@@ -397,7 +397,7 @@ subst_from_stdin (void)
                  /* Substitute the variable's value from the environment.  */
                  const char *env_value = getenv (buffer);
 
-                 if (env_value != NULL)
+                 if (env_value)
                    fputs (env_value, stdout);
                }
              else
index e158be58b05591820d645a6a44423d2da97097e1..8ad5f22832cabc33217a185650f41a56a18b9bae 100644 (file)
--- a/shallow.c
+++ b/shallow.c
@@ -560,7 +560,7 @@ static void paint_down(struct paint_info *info, const struct object_id *oid,
                else
                        c->object.flags |= SEEN;
 
-               if (*refs == NULL)
+               if (!*refs)
                        *refs = bitmap;
                else {
                        memcpy(tmp, *refs, bitmap_size);
index 8636af72de59f38ff9de871c34bd44f8aab75286..ffbab7d35f1d1681a23bfd50da7401b315739451 100644 (file)
@@ -118,7 +118,7 @@ static int index_has_unmerged_entries(struct index_state *istate)
        return 0;
 }
 
-static int is_sparse_index_allowed(struct index_state *istate, int flags)
+int is_sparse_index_allowed(struct index_state *istate, int flags)
 {
        if (!core_apply_sparse_checkout || !core_sparse_checkout_cone)
                return 0;
index 633d4fb7e318afcd69257e5d85850138da9d7348..f57c65d972f474a38a8add549248df834a157c06 100644 (file)
@@ -3,6 +3,7 @@
 
 struct index_state;
 #define SPARSE_INDEX_MEMORY_ONLY (1 << 0)
+int is_sparse_index_allowed(struct index_state *istate, int flags);
 int convert_to_sparse(struct index_state *istate, int flags);
 void ensure_correct_sparsity(struct index_state *istate);
 void clear_skip_worktree_from_present_files(struct index_state *istate);
index fa229a8b97a12ab4bd354db6ed0de5eddd098af5..28a8ca6bf46845906cb4bc2071a594a383eefc72 100644 (file)
@@ -38,7 +38,7 @@ struct submodule {
        const char *path;
        const char *name;
        const char *url;
-       int fetch_recurse;
+       enum submodule_recurse_mode fetch_recurse;
        const char *ignore;
        const char *branch;
        struct submodule_update_strategy update_strategy;
index 40c14452377521b9ca46b44e64e3580539c2d1cc..437bc96e05eb2fe3845b998be584737c6d67147d 100644 (file)
@@ -13,7 +13,7 @@ struct repository;
 struct string_list;
 struct strbuf;
 
-enum {
+enum submodule_recurse_mode {
        RECURSE_SUBMODULES_ONLY = -5,
        RECURSE_SUBMODULES_CHECK = -4,
        RECURSE_SUBMODULES_ERROR = -3,
index 09e86f9ba0804f70d6561b5bda03d69535f11dcf..cc01d891504eb117eb9c367ebc4e89b516157012 100644 (file)
@@ -56,6 +56,10 @@ check_count () {
        ' "$@" <actual
 }
 
+get_progress_result () {
+       tr '\015' '\012' | tail -n 1
+}
+
 test_expect_success 'setup A lines' '
        echo "1A quick brown fox jumps over the" >file &&
        echo "lazy dog" >>file &&
@@ -604,3 +608,39 @@ test_expect_success 'blame -L X,-N (non-numeric N)' '
 test_expect_success 'blame -L ,^/RE/' '
        test_must_fail $PROG -L1,^/99/ file
 '
+
+test_expect_success 'blame progress on a full file' '
+       cat >expect <<-\EOF &&
+       Blaming lines: 100% (10/10), done.
+       EOF
+
+       GIT_PROGRESS_DELAY=0 \
+       git blame --progress hello.c 2>stderr &&
+
+       get_progress_result <stderr >actual &&
+       test_cmp expect actual
+'
+
+test_expect_success 'blame progress on a single range' '
+       cat >expect <<-\EOF &&
+       Blaming lines: 100% (4/4), done.
+       EOF
+
+       GIT_PROGRESS_DELAY=0 \
+       git blame --progress -L 3,6 hello.c 2>stderr &&
+
+       get_progress_result <stderr >actual &&
+       test_cmp expect actual
+'
+
+test_expect_success 'blame progress on multiple ranges' '
+       cat >expect <<-\EOF &&
+       Blaming lines: 100% (7/7), done.
+       EOF
+
+       GIT_PROGRESS_DELAY=0 \
+       git blame --progress -L 3,6 -L 8,10 hello.c 2>stderr &&
+
+       get_progress_result <stderr >actual &&
+       test_cmp expect actual
+'
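
The `get_progress_result` helper added above splits the carriage-return-separated progress updates on stderr into separate lines and keeps only the last one, so the tests can compare the final "Blaming lines: 100% (N/N), done." message. A quick sketch of what it does to a typical progress stream:

        printf 'Blaming lines:  40%% (4/10)\rBlaming lines: 100%% (10/10), done.\n' |
        get_progress_result
        # prints: Blaming lines: 100% (10/10), done.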
index 5aff2abe8b5490156e7745beb5ad01d9cacc0d58..2a5b8738ea37da950b65cd8a3357b9ebdb59363e 100644 (file)
@@ -142,10 +142,11 @@ start_p4d () {
 
 p4_add_user () {
        name=$1 &&
+       fullname="${2:-Dr. $1}"
        p4 user -f -i <<-EOF
        User: $name
        Email: $name@example.com
-       FullName: Dr. $name
+       FullName: $fullname
        EOF
 }
 
index 382716cfca909e82c0bc5bf04e3bb3726dc2f9f1..76710cbef355c43226c7f9dcdb309c0aedc7dbd6 100755 (executable)
@@ -106,6 +106,8 @@ test_perf_on_all () {
 }
 
 test_perf_on_all git status
+test_perf_on_all 'git stash && git stash pop'
+test_perf_on_all 'echo >>new && git stash -u && git stash pop'
 test_perf_on_all git add -A
 test_perf_on_all git add .
 test_perf_on_all git commit -a -m A
index 6c3e1f7159d4dcc6a2e61228fa7c4f3cec42ef3b..6c33a4369015c813a73384ff68149463c5189105 100755 (executable)
@@ -181,7 +181,7 @@ for cmd in git "git help"
 do
        test_expect_success "'$cmd' section spacing" '
                test_section_spacing_trailer git help <<-\EOF &&
-               usage: git [--version] [--help] [-C <path>] [-c <name>=<value>]
+               usage: git [-v | --version] [-h | --help] [-C <path>] [-c <name>=<value>]
 
                These are common Git commands used in various situations:
 
index 0feb41a23f2c27db155fc49b6fb3ff7f9106cc53..7f80f463930407410c6d3f671fb1f7833ec3b01e 100755 (executable)
@@ -77,12 +77,12 @@ create_NNO_MIX_files () {
 
 check_warning () {
        case "$1" in
-       LF_CRLF) echo "warning: LF will be replaced by CRLF" >"$2".expect ;;
-       CRLF_LF) echo "warning: CRLF will be replaced by LF" >"$2".expect ;;
-       '')                                                      >"$2".expect ;;
+       LF_CRLF) echo "LF will be replaced by CRLF" >"$2".expect ;;
+       CRLF_LF) echo "CRLF will be replaced by LF" >"$2".expect ;;
+       '')                                         >"$2".expect ;;
        *) echo >&2 "Illegal 1": "$1" ; return false ;;
        esac
-       grep "will be replaced by" "$2" | sed -e "s/\(.*\) in [^ ]*$/\1/" | uniq  >"$2".actual
+       sed -e "s/^.* \([^ ]* will be replaced by [^ ]*\) .*$/\1/" "$2" | uniq  >"$2".actual
        test_cmp "$2".expect "$2".actual
 }
 
index 239d93f4d21141f5991f9fb409a4675ece0f98bd..238b25f91a34d745016045a9570063086a5eec1f 100755 (executable)
@@ -9,13 +9,41 @@ export GIT_TEST_ASSUME_DIFFERENT_OWNER
 
 expect_rejected_dir () {
        test_must_fail git status 2>err &&
-       grep "safe.directory" err
+       grep "unsafe repository" err
 }
 
 test_expect_success 'safe.directory is not set' '
        expect_rejected_dir
 '
 
+test_expect_success 'ignoring safe.directory on the command line' '
+       test_must_fail git -c safe.directory="$(pwd)" status 2>err &&
+       grep "unsafe repository" err
+'
+
+test_expect_success 'ignoring safe.directory in the environment' '
+       test_must_fail env GIT_CONFIG_COUNT=1 \
+               GIT_CONFIG_KEY_0="safe.directory" \
+               GIT_CONFIG_VALUE_0="$(pwd)" \
+               git status 2>err &&
+       grep "unsafe repository" err
+'
+
+test_expect_success 'ignoring safe.directory in GIT_CONFIG_PARAMETERS' '
+       test_must_fail env \
+               GIT_CONFIG_PARAMETERS="${SQ}safe.directory${SQ}=${SQ}$(pwd)${SQ}" \
+               git status 2>err &&
+       grep "unsafe repository" err
+'
+
+test_expect_success 'ignoring safe.directory in repo config' '
+       (
+               unset GIT_TEST_ASSUME_DIFFERENT_OWNER &&
+               git config safe.directory "$(pwd)"
+       ) &&
+       expect_rejected_dir
+'
+
 test_expect_success 'safe.directory does not match' '
        git config --global safe.directory bogus &&
        expect_rejected_dir
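
The t0033 additions above pin down that `safe.directory` is ignored when it arrives via the command line (`-c`), via `GIT_CONFIG_*`/`GIT_CONFIG_PARAMETERS` in the environment, or via the repository's own config; only the usual global/system configuration is consulted, as the existing `--global` tests show. A hedged example of the one form that still takes effect (the path is illustrative):

        # must live in the global (or system) config to be honored
        git config --global safe.directory /srv/shared/repo.git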
index 2fe6ae6a4e544cb4f58ad96e22ee6b24c076de94..aa35350b6f396f562463a16ffcba364ab94ed5d5 100755 (executable)
@@ -542,7 +542,7 @@ test_lazy_prereq CAN_EXEC_IN_PWD '
        ./git rev-parse
 '
 
-test_expect_success RUNTIME_PREFIX,CAN_EXEC_IN_PWD 'RUNTIME_PREFIX works' '
+test_expect_success !VALGRIND,RUNTIME_PREFIX,CAN_EXEC_IN_PWD 'RUNTIME_PREFIX works' '
        mkdir -p pretend/bin pretend/libexec/git-core &&
        echo "echo HERE" | write_script pretend/libexec/git-core/git-here &&
        cp "$GIT_EXEC_PATH"/git$X pretend/bin/ &&
@@ -550,7 +550,7 @@ test_expect_success RUNTIME_PREFIX,CAN_EXEC_IN_PWD 'RUNTIME_PREFIX works' '
        echo HERE >expect &&
        test_cmp expect actual'
 
-test_expect_success RUNTIME_PREFIX,CAN_EXEC_IN_PWD '%(prefix)/ works' '
+test_expect_success !VALGRIND,RUNTIME_PREFIX,CAN_EXEC_IN_PWD '%(prefix)/ works' '
        mkdir -p pretend/bin &&
        cp "$GIT_EXEC_PATH"/git$X pretend/bin/ &&
        git config yes.path "%(prefix)/yes" &&
index 1b8520769446d217c757d8286e8c46c54dcafb97..dadf3b14583bec460a51cbf69362a875dd3cc867 100755 (executable)
@@ -681,7 +681,7 @@ test_expect_success 'cat-file -t and -s on corrupt loose object' '
 
                # Setup and create the empty blob and its path
                empty_path=$(git rev-parse --git-path objects/$(test_oid_to_path "$EMPTY_BLOB")) &&
-               git hash-object -w --stdin </dev/null &&
+               empty_blob=$(git hash-object -w --stdin </dev/null) &&
 
                # Create another blob and its path
                echo other >other.blob &&
@@ -722,7 +722,13 @@ test_expect_success 'cat-file -t and -s on corrupt loose object' '
                # content out as-is. Try to make it zlib-invalid.
                mv -f other.blob "$empty_path" &&
                test_must_fail git fsck 2>err.fsck &&
-               grep "^error: inflate: data stream error (" err.fsck
+               cat >expect <<-EOF &&
+               error: inflate: data stream error (incorrect header check)
+               error: unable to unpack header of ./$empty_path
+               error: $empty_blob: object corrupt or missing: ./$empty_path
+               EOF
+               grep "^error: " err.fsck >actual &&
+               test_cmp expect actual
        )
 '
 
index dd957be1b78c5690fc79495f501eea52e4467600..63a553d7b32224550a48008772e396e88455af11 100755 (executable)
@@ -57,8 +57,8 @@ test_expect_success 'read-tree with .git/info/sparse-checkout but disabled' '
        read_tree_u_must_succeed -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt result &&
-       test -f init.t &&
-       test -f sub/added
+       test_path_is_file init.t &&
+       test_path_is_file sub/added
 '
 
 test_expect_success 'read-tree --no-sparse-checkout with empty .git/info/sparse-checkout and enabled' '
@@ -67,8 +67,8 @@ test_expect_success 'read-tree --no-sparse-checkout with empty .git/info/sparse-
        read_tree_u_must_succeed --no-sparse-checkout -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt result &&
-       test -f init.t &&
-       test -f sub/added
+       test_path_is_file init.t &&
+       test_path_is_file sub/added
 '
 
 test_expect_success 'read-tree with empty .git/info/sparse-checkout' '
@@ -85,8 +85,8 @@ test_expect_success 'read-tree with empty .git/info/sparse-checkout' '
        S subsub/added
        EOF
        test_cmp expected.swt result &&
-       ! test -f init.t &&
-       ! test -f sub/added
+       test_path_is_missing init.t &&
+       test_path_is_missing sub/added
 '
 
 test_expect_success 'match directories with trailing slash' '
@@ -101,8 +101,8 @@ test_expect_success 'match directories with trailing slash' '
        read_tree_u_must_succeed -m -u HEAD &&
        git ls-files -t > result &&
        test_cmp expected.swt-noinit result &&
-       test ! -f init.t &&
-       test -f sub/added
+       test_path_is_missing init.t &&
+       test_path_is_file sub/added
 '
 
 test_expect_success 'match directories without trailing slash' '
@@ -110,8 +110,8 @@ test_expect_success 'match directories without trailing slash' '
        read_tree_u_must_succeed -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt-noinit result &&
-       test ! -f init.t &&
-       test -f sub/added
+       test_path_is_missing init.t &&
+       test_path_is_file sub/added
 '
 
 test_expect_success 'match directories with negated patterns' '
@@ -129,9 +129,9 @@ EOF
        git read-tree -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt-negation result &&
-       test ! -f init.t &&
-       test ! -f sub/added &&
-       test -f sub/addedtoo
+       test_path_is_missing init.t &&
+       test_path_is_missing sub/added &&
+       test_path_is_file sub/addedtoo
 '
 
 test_expect_success 'match directories with negated patterns (2)' '
@@ -150,9 +150,9 @@ EOF
        git read-tree -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt-negation2 result &&
-       test -f init.t &&
-       test -f sub/added &&
-       test ! -f sub/addedtoo
+       test_path_is_file init.t &&
+       test_path_is_file sub/added &&
+       test_path_is_missing sub/addedtoo
 '
 
 test_expect_success 'match directory pattern' '
@@ -160,8 +160,8 @@ test_expect_success 'match directory pattern' '
        read_tree_u_must_succeed -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt-noinit result &&
-       test ! -f init.t &&
-       test -f sub/added
+       test_path_is_missing init.t &&
+       test_path_is_file sub/added
 '
 
 test_expect_success 'checkout area changes' '
@@ -176,15 +176,15 @@ test_expect_success 'checkout area changes' '
        read_tree_u_must_succeed -m -u HEAD &&
        git ls-files -t >result &&
        test_cmp expected.swt-nosub result &&
-       test -f init.t &&
-       test ! -f sub/added
+       test_path_is_file init.t &&
+       test_path_is_missing sub/added
 '
 
 test_expect_success 'read-tree updates worktree, absent case' '
        echo sub/added >.git/info/sparse-checkout &&
        git checkout -f top &&
        read_tree_u_must_succeed -m -u HEAD^ &&
-       test ! -f init.t
+       test_path_is_missing init.t
 '
 
 test_expect_success 'read-tree will not throw away dirty changes, non-sparse' '
@@ -229,7 +229,7 @@ test_expect_success 'read-tree adds to worktree, absent case' '
        echo init.t >.git/info/sparse-checkout &&
        git checkout -f removed &&
        read_tree_u_must_succeed -u -m HEAD^ &&
-       test ! -f sub/added
+       test_path_is_missing sub/added
 '
 
 test_expect_success 'read-tree adds to worktree, dirty case' '
@@ -248,7 +248,7 @@ test_expect_success 'index removal and worktree narrowing at the same time' '
        echo init.t >.git/info/sparse-checkout &&
        git checkout removed &&
        git ls-files sub/added >result &&
-       test ! -f sub/added &&
+       test_path_is_missing sub/added &&
        test_must_be_empty result
 '
 
index 236ab5302844b21d5688ddaca5e98625c33406fa..6f778cf28c53b4371349dafb0f46788735e233fc 100755 (executable)
@@ -1034,6 +1034,55 @@ test_expect_success 'cherry-pick with conflicts' '
        test_all_match test_must_fail git cherry-pick to-cherry-pick
 '
 
+test_expect_success 'stash' '
+       init_repos &&
+
+       write_script edit-contents <<-\EOF &&
+       echo text >>$1
+       EOF
+
+       # Stash a sparse directory (folder1)
+       test_all_match git checkout -b test-branch rename-base &&
+       test_all_match git reset --soft rename-out-to-out &&
+       test_all_match git stash &&
+       test_all_match git status --porcelain=v2 &&
+
+       # Apply the sparse directory stash without reinstating the index
+       test_all_match git stash apply -q &&
+       test_all_match git status --porcelain=v2 &&
+
+       # Reset to state where stash can be applied
+       test_sparse_match git sparse-checkout reapply &&
+       test_all_match git reset --hard rename-out-to-out &&
+
+       # Apply the sparse directory stash *with* reinstating the index
+       test_all_match git stash apply --index -q &&
+       test_all_match git status --porcelain=v2 &&
+
+       # Reset to state where we will get a conflict applying the stash
+       test_sparse_match git sparse-checkout reapply &&
+       test_all_match git reset --hard update-folder1 &&
+
+       # Apply the sparse directory stash with conflicts
+       test_all_match test_must_fail git stash apply --index -q &&
+       test_all_match test_must_fail git stash apply -q &&
+       test_all_match git status --porcelain=v2 &&
+
+       # Reset to base branch
+       test_sparse_match git sparse-checkout reapply &&
+       test_all_match git reset --hard base &&
+
+       # Stash & unstash an untracked file outside of the sparse checkout
+       # definition.
+       run_on_sparse mkdir -p folder1 &&
+       run_on_all ../edit-contents folder1/new &&
+       test_all_match git stash -u &&
+       test_all_match git status --porcelain=v2 &&
+
+       test_all_match git stash pop -q &&
+       test_all_match git status --porcelain=v2
+'
+
 test_expect_success 'checkout-index inside sparse definition' '
        init_repos &&
 
@@ -1151,6 +1200,33 @@ test_expect_success 'clean' '
        test_sparse_match test_path_is_dir folder1
 '
 
+for builtin in show rev-parse
+do
+       test_expect_success "$builtin (cached blobs/trees)" "
+               init_repos &&
+
+               test_all_match git $builtin :a &&
+               test_all_match git $builtin :deep/a &&
+               test_sparse_match git $builtin :folder1/a &&
+
+               # The error message differs depending on whether
+               # the directory exists in the worktree.
+               test_all_match test_must_fail git $builtin :deep/ &&
+               test_must_fail git -C full-checkout $builtin :folder1/ &&
+               test_sparse_match test_must_fail git $builtin :folder1/ &&
+
+               # Change the sparse cone for an extra case:
+               run_on_sparse git sparse-checkout set deep/deeper1 &&
+
+               # deep/deeper2 is a sparse directory in the sparse index.
+               test_sparse_match test_must_fail git $builtin :deep/deeper2/ &&
+
+               # deep/deeper2/deepest is not in the sparse index, but
+               # will trigger an index expansion.
+               test_sparse_match test_must_fail git $builtin :deep/deeper2/deepest/
+       "
+done
+
 test_expect_success 'submodule handling' '
        init_repos &&
 
@@ -1222,7 +1298,10 @@ test_expect_success 'index.sparse disabled inline uses full index' '
 
 ensure_not_expanded () {
        rm -f trace2.txt &&
-       echo >>sparse-index/untracked.txt &&
+       if test -z "$WITHOUT_UNTRACKED_TXT"
+       then
+               echo >>sparse-index/untracked.txt
+       fi &&
 
        if test "$1" = "!"
        then
@@ -1326,6 +1405,30 @@ test_expect_success 'sparse-index is not expanded: merge conflict in cone' '
        )
 '
 
+test_expect_success 'sparse-index is not expanded: stash' '
+       init_repos &&
+
+       echo >>sparse-index/a &&
+       ensure_not_expanded stash &&
+       ensure_not_expanded stash list &&
+       ensure_not_expanded stash show stash@{0} &&
+       ensure_not_expanded stash apply stash@{0} &&
+       ensure_not_expanded stash drop stash@{0} &&
+
+       echo >>sparse-index/deep/new &&
+       ensure_not_expanded stash -u &&
+       (
+               WITHOUT_UNTRACKED_TXT=1 &&
+               ensure_not_expanded stash pop
+       ) &&
+
+       ensure_not_expanded stash create &&
+       oid=$(git -C sparse-index stash create) &&
+       ensure_not_expanded stash store -m "test" $oid &&
+       ensure_not_expanded reset --hard &&
+       ensure_not_expanded stash pop
+'
+
 test_expect_success 'sparse index is not expanded: diff' '
        init_repos &&
 
@@ -1372,6 +1475,15 @@ test_expect_success 'sparse index is not expanded: diff' '
        ensure_not_expanded diff --cached
 '
 
+test_expect_success 'sparse index is not expanded: show and rev-parse' '
+       init_repos &&
+
+       ensure_not_expanded show :a &&
+       ensure_not_expanded show :deep/a &&
+       ensure_not_expanded rev-parse :a &&
+       ensure_not_expanded rev-parse :deep/a
+'
+
 test_expect_success 'sparse index is not expanded: update-index' '
        init_repos &&
 
index de50c0ea018cfa981312c9f146f8265544e64711..ab7f31f1dcd5b5b7224ded4099f8316171bdea7d 100755 (executable)
@@ -774,10 +774,19 @@ test_expect_success 'fsck finds problems in duplicate loose objects' '
                # no "-d" here, so we end up with duplicates
                git repack &&
                # now corrupt the loose copy
-               file=$(sha1_file "$(git rev-parse HEAD)") &&
+               oid="$(git rev-parse HEAD)" &&
+               file=$(sha1_file "$oid") &&
                rm "$file" &&
                echo broken >"$file" &&
-               test_must_fail git fsck
+               test_must_fail git fsck 2>err &&
+
+               cat >expect <<-EOF &&
+               error: inflate: data stream error (incorrect header check)
+               error: unable to unpack header of $file
+               error: $oid: object corrupt or missing: $file
+               EOF
+               grep "^error: " err >actual &&
+               test_cmp expect actual
        )
 '
 
index db7ca55998666138f3580fe430dfe53ece43be13..ebf58db2d1827209464c94ac621f16f1693367d5 100755 (executable)
@@ -2,6 +2,7 @@
 
 test_description='Intent to add'
 
+TEST_PASSES_SANITIZE_LEAK=true
 . ./test-lib.sh
 
 test_expect_success 'intent to add' '
index 7a1be73ce8771b851dfc2c7a95d80dece4c2b5cb..f2b9199007752eded932a8da541e1a86e350d10a 100755 (executable)
@@ -161,4 +161,18 @@ test_expect_success 'show branch --reflog=2' '
        test_cmp actual expect
 '
 
+# incompatible options
+while read combo
+do
+       test_expect_success "show-branch $combo (should fail)" '
+               test_must_fail git show-branch $combo 2>error &&
+               grep -e "cannot be used together" -e "usage:" error
+       '
+done <<\EOF
+--all --reflog
+--merge-base --reflog
+--list --merge-base
+--reflog --current
+EOF
+
 test_done
index 3716a42e81284f562cef9ba951edbbd1ff6701d6..3e04802cb003b027c0d70319465af4363877de06 100755 (executable)
@@ -129,6 +129,20 @@ test_expect_success 'rebase --keep-base main from topic' '
        test_cmp expect actual
 '
 
+test_expect_success 'rebase --keep-base main topic from main' '
+       git checkout main &&
+       git branch -f topic G &&
+
+       git rebase --keep-base main topic &&
+       git rev-parse C >base.expect &&
+       git merge-base main HEAD >base.actual &&
+       test_cmp base.expect base.actual &&
+
+       git rev-parse HEAD~2 >actual &&
+       git rev-parse C^0 >expect &&
+       test_cmp expect actual
+'
+
 test_expect_success 'rebase --keep-base main from side' '
        git reset --hard &&
        git checkout side &&
@@ -153,6 +167,21 @@ test_expect_success 'rebase -i --keep-base main from topic' '
        test_cmp expect actual
 '
 
+test_expect_success 'rebase -i --keep-base main topic from main' '
+       git checkout main &&
+       git branch -f topic G &&
+
+       set_fake_editor &&
+       EXPECT_COUNT=2 git rebase -i --keep-base main topic &&
+       git rev-parse C >base.expect &&
+       git merge-base main HEAD >base.actual &&
+       test_cmp base.expect base.actual &&
+
+       git rev-parse HEAD~2 >actual &&
+       git rev-parse C^0 >expect &&
+       test_cmp expect actual
+'
+
 test_expect_success 'rebase -i --keep-base main from side' '
        git reset --hard &&
        git checkout side &&
index 8617efaaf1e66f6f5d8246a817085ebd2d43c477..9eb19204ac7e3167511aca8c8ae8583928be3692 100755 (executable)
@@ -66,8 +66,7 @@ test_expect_success 'cherry-pick after renaming branch' '
 
        git checkout rename2 &&
        git cherry-pick added &&
-       test $(git rev-parse HEAD^) = $(git rev-parse rename2) &&
-       test -f opos &&
+       test_cmp_rev rename2 HEAD^ &&
        grep "Add extra line at the end" opos &&
        git reflog -1 | grep cherry-pick
 
@@ -77,9 +76,9 @@ test_expect_success 'revert after renaming branch' '
 
        git checkout rename1 &&
        git revert added &&
-       test $(git rev-parse HEAD^) = $(git rev-parse rename1) &&
-       test -f spoo &&
-       ! grep "Add extra line at the end" spoo &&
+       test_cmp_rev rename1 HEAD^ &&
+       test_path_is_file spoo &&
+       test_cmp_rev initial:oops HEAD:spoo &&
        git reflog -1 | grep revert
 
 '
index 1219f8bd4c05f69ca159717c9b2e92f9510e76da..858a5522f96b63b971daf85f20e5d66aaebc48e6 100755 (executable)
@@ -206,17 +206,17 @@ test_expect_success 'GIT_EXTERNAL_DIFF path counter/total' '
 '
 
 test_expect_success 'GIT_EXTERNAL_DIFF generates pretty paths' '
+       test_when_finished "git rm -f file.ext" &&
        touch file.ext &&
        git add file.ext &&
        echo with extension > file.ext &&
 
        cat >expect <<-EOF &&
-       file.ext file $(git rev-parse --verify HEAD:file) 100644 file.ext $(test_oid zero) 100644
+       file.ext
        EOF
        GIT_EXTERNAL_DIFF=echo git diff file.ext >out &&
-       cut -d" " -f1,3- <out >actual &&
-       git update-index --force-remove file.ext &&
-       rm file.ext
+       basename $(cut -d" " -f2 <out) >actual &&
+       test_cmp expect actual
 '
 
 echo "#!$SHELL_PATH" >fake-diff.sh
index be07407f855597ca276deccfbc17c3b692aa0642..6e66352558212e9a40404ef892a0944ab3d09db3 100755 (executable)
@@ -1992,10 +1992,13 @@ test_expect_success GPG 'log --show-signature for merged tag with GPG failure' '
        git tag -s -m signed_tag_msg signed_tag_fail &&
        git checkout plain-fail &&
        git merge --no-ff -m msg signed_tag_fail &&
-       TMPDIR="$(pwd)/bogus" git log --show-signature -n1 plain-fail >actual &&
-       grep "^merged tag" actual &&
-       grep "^No signature" actual &&
-       ! grep "^gpg: Signature made" actual
+       if ! test_have_prereq VALGRIND
+       then
+               TMPDIR="$(pwd)/bogus" git log --show-signature -n1 plain-fail >actual &&
+               grep "^merged tag" actual &&
+               grep "^No signature" actual &&
+               ! grep "^gpg: Signature made" actual
+       fi
 '
 
 test_expect_success GPGSM 'log --graph --show-signature for merged tag x509' '
diff --git a/t/t4217-log-limit.sh b/t/t4217-log-limit.sh
new file mode 100755 (executable)
index 0000000..6e01e26
--- /dev/null
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+test_description='git log with filter options limiting the output'
+
+. ./test-lib.sh
+
+test_expect_success 'setup test' '
+       git init &&
+       echo a >file &&
+       git add file &&
+       GIT_COMMITTER_DATE="2021-02-01 00:00" git commit -m init &&
+       echo a >>file &&
+       git add file &&
+       GIT_COMMITTER_DATE="2022-02-01 00:00" git commit -m first &&
+       echo a >>file &&
+       git add file &&
+       GIT_COMMITTER_DATE="2021-03-01 00:00" git commit -m second &&
+       echo a >>file &&
+       git add file &&
+       GIT_COMMITTER_DATE="2022-03-01 00:00" git commit -m third
+'
+
+test_expect_success 'git log --since-as-filter=...' '
+       git log --since-as-filter="2022-01-01" --format=%s >actual &&
+       cat >expect <<-\EOF &&
+       third
+       first
+       EOF
+       test_cmp expect actual
+'
+
+test_expect_success 'git log --children --since-as-filter=...' '
+       git log --children --since-as-filter="2022-01-01" --format=%s >actual &&
+       cat >expect <<-\EOF &&
+       third
+       first
+       EOF
+       test_cmp expect actual
+'
+
+test_done
index fa6b4cca65c8c687bf83f9eaf0ecb5568c9373f6..a35396fadf5ec2acea618be47c9cec5b5a108fa3 100755 (executable)
@@ -107,6 +107,32 @@ test_expect_success " --[no-]recurse-submodule and submodule.recurse" '
        test_path_is_file super/sub/merge_strategy_4.t
 '
 
+test_expect_success "fetch.recurseSubmodules option triggers recursive fetch (but not recursive update)" '
+       test_commit -C child merge_strategy_5 &&
+       # Omit the parent commit, otherwise this passes with the
+       # default "pull" behavior.
+
+       git -C super -c fetch.recursesubmodules=true pull --no-rebase &&
+       # Check that the submodule commit was fetched
+       sub_oid=$(git -C child rev-parse HEAD) &&
+       git -C super/sub cat-file -e $sub_oid &&
+       # Check that the submodule worktree did not update
+       ! test_path_is_file super/sub/merge_strategy_5.t
+'
+
+test_expect_success "fetch.recurseSubmodules takes precedence over submodule.recurse" '
+       test_commit -C child merge_strategy_6 &&
+       # Omit the parent commit, otherwise this passes with the
+       # default "pull" behavior.
+
+       git -C super -c submodule.recurse=false -c fetch.recursesubmodules=true pull --no-rebase &&
+       # Check that the submodule commit was fetched
+       sub_oid=$(git -C child rev-parse HEAD) &&
+       git -C super/sub cat-file -e $sub_oid &&
+       # Check that the submodule worktree did not update
+       ! test_path_is_file super/sub/merge_strategy_6.t
+'
+
 test_expect_success 'pull --rebase --recurse-submodules (remote superproject submodule changes, local submodule changes)' '
        # This tests the following scenario :
        # - local submodule has new commits
index 7d63365f93a0a50eda1d4ec9b55c19890f610102..21ab6192839a006060b6e2e864aab93989a418f1 100755 (executable)
@@ -141,4 +141,13 @@ test_expect_success 'cloning locally respects "-u" for fetching refs' '
        test_must_fail git clone --bare -u false a should_not_work.git
 '
 
+test_expect_success 'local clone from repo with corrupt refs fails gracefully' '
+       git init corrupt &&
+       test_commit -C corrupt one &&
+       echo a >corrupt/.git/refs/heads/topic &&
+
+       test_must_fail git clone corrupt working 2>err &&
+       grep "has a null OID" err
+'
+
 test_done
index 5382e5d21620aea6bd74fdd4a84fb14f0ac644dc..83931d482fb23b0b774310a49e8dcd6ad7eac4b9 100755 (executable)
@@ -1025,4 +1025,32 @@ test_expect_success 'bisect visualize with a filename with dash and space' '
        git bisect visualize -p -- "-hello 2"
 '
 
+test_expect_success 'bisect state output with multiple good commits' '
+       git bisect reset &&
+       git bisect start >output &&
+       grep "waiting for both good and bad commits" output &&
+       git bisect log >output &&
+       grep "waiting for both good and bad commits" output &&
+       git bisect good "$HASH1" >output &&
+       grep "waiting for bad commit, 1 good commit known" output &&
+       git bisect log >output &&
+       grep "waiting for bad commit, 1 good commit known" output &&
+       git bisect good "$HASH2" >output &&
+       grep "waiting for bad commit, 2 good commits known" output &&
+       git bisect log >output &&
+       grep "waiting for bad commit, 2 good commits known" output
+'
+
+test_expect_success 'bisect state output with bad commit' '
+       git bisect reset &&
+       git bisect start >output &&
+       grep "waiting for both good and bad commits" output &&
+       git bisect log >output &&
+       grep "waiting for both good and bad commits" output &&
+       git bisect bad "$HASH4" >output &&
+       grep -F "waiting for good commit(s), bad commit known" output &&
+       git bisect log >output &&
+       grep -F "waiting for good commit(s), bad commit known" output
+'
+
 test_done
index 1761a2b1b99bc80e43ac22b0ec57656ec38b9ad5..4adac5acd57c56835608631c4ffe98f772d455bd 100755 (executable)
@@ -5,6 +5,7 @@
 
 test_description='skip-worktree bit test'
 
+TEST_PASSES_SANITIZE_LEAK=true
 . ./test-lib.sh
 
 cat >expect.full <<EOF
index ca90ee805e7b3411c6e7c8c5868c3aa2063dba05..9936cc329ecd3f3d6cf96994de0209efd945dd55 100755 (executable)
@@ -190,6 +190,119 @@ test_expect_success 'untracked cache after second status' '
        test_cmp ../dump.expect ../actual
 '
 
+cat >../status_uall.expect <<EOF &&
+A  done/one
+A  one
+A  two
+?? dthree/three
+?? dtwo/two
+?? three
+EOF
+
+# Bypassing the untracked cache here is not desirable from an
+# end-user perspective, but is expected in the current design.
+# The untracked cache data stored for a -unormal run cannot be
+# correctly used in a -uall run - it would yield incorrect output.
+test_expect_success 'untracked cache is bypassed with -uall' '
+       : >../trace.output &&
+       GIT_TRACE2_PERF="$TRASH_DIRECTORY/trace.output" \
+       git status -uall --porcelain >../actual &&
+       iuc status -uall --porcelain >../status.iuc &&
+       test_cmp ../status_uall.expect ../status.iuc &&
+       test_cmp ../status_uall.expect ../actual &&
+       get_relevant_traces ../trace.output ../trace.relevant &&
+       cat >../trace.expect <<EOF &&
+ ....path:
+EOF
+       test_cmp ../trace.expect ../trace.relevant
+'
+
+test_expect_success 'untracked cache remains after bypass' '
+       test-tool dump-untracked-cache >../actual &&
+       test_cmp ../dump.expect ../actual
+'
+
+test_expect_success 'if -uall is configured, untracked cache gets populated by default' '
+       test_config status.showuntrackedfiles all &&
+       : >../trace.output &&
+       GIT_TRACE2_PERF="$TRASH_DIRECTORY/trace.output" \
+       git status --porcelain >../actual &&
+       iuc status --porcelain >../status.iuc &&
+       test_cmp ../status_uall.expect ../status.iuc &&
+       test_cmp ../status_uall.expect ../actual &&
+       get_relevant_traces ../trace.output ../trace.relevant &&
+       cat >../trace.expect <<EOF &&
+ ....path:
+ ....node-creation:3
+ ....gitignore-invalidation:1
+ ....directory-invalidation:0
+ ....opendir:4
+EOF
+       test_cmp ../trace.expect ../trace.relevant
+'
+
+cat >../dump_uall.expect <<EOF &&
+info/exclude $EMPTY_BLOB
+core.excludesfile $ZERO_OID
+exclude_per_dir .gitignore
+flags 00000000
+/ $ZERO_OID recurse valid
+three
+/done/ $ZERO_OID recurse valid
+/dthree/ $ZERO_OID recurse valid
+three
+/dtwo/ $ZERO_OID recurse valid
+two
+EOF
+
+test_expect_success 'if -uall was configured, untracked cache is populated' '
+       test-tool dump-untracked-cache >../actual &&
+       test_cmp ../dump_uall.expect ../actual
+'
+
+test_expect_success 'if -uall is configured, untracked cache is used by default' '
+       test_config status.showuntrackedfiles all &&
+       : >../trace.output &&
+       GIT_TRACE2_PERF="$TRASH_DIRECTORY/trace.output" \
+       git status --porcelain >../actual &&
+       iuc status --porcelain >../status.iuc &&
+       test_cmp ../status_uall.expect ../status.iuc &&
+       test_cmp ../status_uall.expect ../actual &&
+       get_relevant_traces ../trace.output ../trace.relevant &&
+       cat >../trace.expect <<EOF &&
+ ....path:
+ ....node-creation:0
+ ....gitignore-invalidation:0
+ ....directory-invalidation:0
+ ....opendir:0
+EOF
+       test_cmp ../trace.expect ../trace.relevant
+'
+
+# Bypassing the untracked cache here is not desirable from an
+# end-user perspective, but is expected in the current design.
+# The untracked cache data stored for a -all run cannot be
+# correctly used in a -unormal run - it would yield incorrect
+# output.
+test_expect_success 'if -uall is configured, untracked cache is bypassed with -unormal' '
+       test_config status.showuntrackedfiles all &&
+       : >../trace.output &&
+       GIT_TRACE2_PERF="$TRASH_DIRECTORY/trace.output" \
+       git status -unormal --porcelain >../actual &&
+       iuc status -unormal --porcelain >../status.iuc &&
+       test_cmp ../status.expect ../status.iuc &&
+       test_cmp ../status.expect ../actual &&
+       get_relevant_traces ../trace.output ../trace.relevant &&
+       cat >../trace.expect <<EOF &&
+ ....path:
+EOF
+       test_cmp ../trace.expect ../trace.relevant
+'
+
+test_expect_success 'repopulate untracked cache for -unormal' '
+       git status --porcelain
+'
+
 test_expect_success 'modify in root directory, one dir invalidation' '
        : >four &&
        test-tool chmtime =-240 four &&
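
The new t7063 cases above tie the untracked cache to the untracked-files mode: with `status.showuntrackedfiles=all` configured, a plain `git status` populates and then reuses the cache, while a run whose mode differs from the cached one (`-uall` over a `-unormal` cache, or the reverse) bypasses it for that run. A rough sketch of the setup the tests assume, with the cache already enabled:

        git update-index --untracked-cache       # enable the cache for this repository
        git config status.showuntrackedfiles all
        git status                               # populates and then reuses the cache
        git status -unormal                      # different mode: cache bypassed this run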
diff --git a/t/t7524-commit-summary.sh b/t/t7524-commit-summary.sh
new file mode 100755 (executable)
index 0000000..47b2f1d
--- /dev/null
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+test_description='git commit summary'
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+       test_seq 101 200 >file &&
+       git add file &&
+       git commit -m initial &&
+       git tag initial
+'
+
+test_expect_success 'commit summary ignores rewrites' '
+       git reset --hard initial &&
+       test_seq 200 300 >file &&
+
+       git diff --stat >diffstat &&
+       git diff --stat --break-rewrites >diffstatrewrite &&
+
+       # make sure this scenario is a detectable rewrite
+       ! test_cmp_bin diffstat diffstatrewrite &&
+
+       git add file &&
+       git commit -m second >actual &&
+
+       grep "1 file" <actual >actual.total &&
+       grep "1 file" <diffstat >diffstat.total &&
+       test_cmp diffstat.total actual.total
+'
+
+test_done
diff --git a/t/t7609-mergetool--lib.sh b/t/t7609-mergetool--lib.sh
new file mode 100755 (executable)
index 0000000..d848fe6
--- /dev/null
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+test_description='git mergetool
+
+Testing basic merge tools options'
+
+. ./test-lib.sh
+
+test_expect_success 'mergetool --tool=vimdiff creates the expected layout' '
+       . $GIT_BUILD_DIR/mergetools/vimdiff &&
+       run_unit_tests
+'
+
+test_done
index 9047d665a1049195ec03b64504c973559a677c36..ac7be5471452a0a8b2123dc39be80caf3520e9db 100755 (executable)
@@ -4,6 +4,10 @@ test_description='grep icase on non-English locales'
 
 . ./lib-gettext.sh
 
+doalarm () {
+       perl -e 'alarm shift; exec @ARGV' -- "$@"
+}
+
 test_expect_success GETTEXT_LOCALE 'setup' '
        test_write_lines "TILRAUN: Halló Heimur!" >file &&
        git add file &&
@@ -139,4 +143,10 @@ test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: grep non-literal ASCII fro
        test_cmp expected actual
 '
 
+test_expect_success GETTEXT_LOCALE,LIBPCRE2 'PCRE v2: grep avoid endless loop bug' '
+       echo " Halló" >leading-whitespace &&
+       git add leading-whitespace &&
+       doalarm 1 git grep --perl-regexp "^\s" leading-whitespace
+'
+
 test_done
index 8b30062c0cfcc80a0cdff0f73367be43246280b3..dc88d0e064931aab48a3e93002cc67416090282c 100755 (executable)
@@ -74,6 +74,91 @@ test_expect_success 'git p4 sync new branch' '
        )
 '
 
+#
+# Setup as before, and then explicitly sync imported branch, using a
+# different ref format.
+#
+test_expect_success 'git p4 sync existing branch without changes' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=depot //depot@all &&
+               git p4 sync --branch=refs/remotes/p4/depot >out &&
+               test_i18ngrep "No changes to import!" out
+       )
+'
+
+#
+# Same as before, relative branch name.
+#
+test_expect_success 'git p4 sync existing branch with relative name' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=branch1 //depot@all &&
+               git p4 sync --branch=p4/branch1 >out &&
+               test_i18ngrep "No changes to import!" out
+       )
+'
+
+#
+# Same as before, with a nested branch path, referenced different ways.
+#
+test_expect_success 'git p4 sync existing branch with nested path' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=p4/some/path //depot@all &&
+               git p4 sync --branch=some/path >out &&
+               test_i18ngrep "No changes to import!" out
+       )
+'
+
+#
+# Same as before, with a full ref path outside the p4/* namespace.
+#
+test_expect_success 'git p4 sync branch explicit ref without p4 in path' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=refs/remotes/someremote/depot //depot@all &&
+               git p4 sync --branch=refs/remotes/someremote/depot >out &&
+               test_i18ngrep "No changes to import!" out
+       )
+'
+
+test_expect_success 'git p4 sync nonexistent ref' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=depot //depot@all &&
+               test_must_fail git p4 sync --branch=depot2 2>errs &&
+               test_i18ngrep "Perhaps you never did" errs
+       )
+'
+
+test_expect_success 'git p4 sync existing non-p4-imported ref' '
+       test_create_repo "$git" &&
+       test_when_finished cleanup_git &&
+       (
+               cd "$git" &&
+               test_commit head &&
+               git p4 sync --branch=depot //depot@all &&
+               test_must_fail git p4 sync --branch=refs/heads/master 2>errs &&
+               test_i18ngrep "Perhaps you never did" errs
+       )
+'
+
 test_expect_success 'clone two dirs' '
        (
                cd "$cli" &&
index 50a6f8bad5c5752f6b73829fd657ebb3700db006..759a14fa87ce6ae540b2c98cfd3b83a7bf6928f2 100755 (executable)
@@ -129,6 +129,16 @@ test_expect_success 'import depot, branch detection' '
        )
 '
 
+test_expect_success 'sync specific detected branch' '
+       test_when_finished cleanup_git &&
+       git p4 clone --dest="$git" --detect-branches //depot@all &&
+       (
+               cd "$git" &&
+               git p4 sync --branch=depot/branch2 >out &&
+               test_i18ngrep "No changes to import!" out
+       )
+'
+
 test_expect_success 'import depot, branch detection, branchList branch definition' '
        test_when_finished cleanup_git &&
        test_create_repo "$git" &&
index 19073c6e9f8485f4d76c46cafd9c0a98c0fbce96..2a6ee2a46787f07c763c6d7608628c65d8181526 100755 (executable)
@@ -333,4 +333,38 @@ test_expect_success SYMLINKS 'empty symlink target' '
        )
 '
 
+test_expect_success SYMLINKS 'utf-8 with and without BOM in text file' '
+       (
+               cd "$cli" &&
+
+               # some utf8 content
+               echo some tǣxt >utf8-nobom-test &&
+
+               # same utf8 content as before but with bom
+               echo some tǣxt | sed '\''s/^/\xef\xbb\xbf/'\'' >utf8-bom-test &&
+
+               # bom only
+               dd bs=1 count=3 if=utf8-bom-test of=utf8-bom-empty-test &&
+
+               p4 add utf8-nobom-test utf8-bom-test utf8-bom-empty-test &&
+               p4 submit -d "add utf8 test files"
+       ) &&
+       test_when_finished cleanup_git &&
+
+       git p4 clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git checkout refs/remotes/p4/master &&
+
+               echo some tǣxt >utf8-nobom-check &&
+               test_cmp utf8-nobom-check utf8-nobom-test &&
+
+               echo some tǣxt | sed '\''s/^/\xef\xbb\xbf/'\'' >utf8-bom-check &&
+               test_cmp utf8-bom-check utf8-bom-test &&
+
+               dd bs=1 count=3 if=utf8-bom-check of=utf8-bom-empty-check &&
+               test_cmp utf8-bom-empty-check utf8-bom-empty-test
+       )
+'
+
 test_done
diff --git a/t/t9835-git-p4-metadata-encoding-python2.sh b/t/t9835-git-p4-metadata-encoding-python2.sh
new file mode 100755 (executable)
index 0000000..036bf79
--- /dev/null
@@ -0,0 +1,213 @@
+#!/bin/sh
+
+test_description='git p4 metadata encoding
+
+This test checks that the import process handles inconsistent text
+encoding in p4 metadata (author names, commit messages, etc) without
+failing, and produces maximally sane output in git.'
+
+. ./lib-git-p4.sh
+
+python_target_version='2'
+
+###############################
+## SECTION REPEATED IN t9836 ##
+###############################
+
+# Please note: this test calls "git-p4.py" rather than "git-p4", because the
+# latter references a specific path so we can't easily force it to run under
+# the python version we need to.
+
+python_major_version=$(python -V 2>&1 | cut -c  8)
+python_target_binary=$(which python$python_target_version)
+if ! test "$python_major_version" = "$python_target_version" && test "$python_target_binary"
+then
+       mkdir temp_python
+       PATH="$(pwd)/temp_python:$PATH" && export PATH
+       ln -s $python_target_binary temp_python/python
+fi
+
+python_major_version=$(python -V 2>&1 | cut -c  8)
+if ! test "$python_major_version" = "$python_target_version"
+then
+       skip_all="skipping python$python_target_version-specific git p4 tests; python$python_target_version not available"
+       test_done
+fi
+
+remove_user_cache () {
+       rm "$HOME/.gitp4-usercache.txt" || true
+}
+
+test_expect_success 'start p4d' '
+       start_p4d
+'
+
+test_expect_success 'init depot' '
+       (
+               cd "$cli" &&
+
+               p4_add_user "utf8_author" "ǣuthor" &&
+               P4USER=utf8_author &&
+               touch file1 &&
+               p4 add file1 &&
+               p4 submit -d "first CL has some utf-8 tǣxt" &&
+
+               p4_add_user "latin1_author" "$(echo æuthor |
+                       iconv -f utf8 -t latin1)" &&
+               P4USER=latin1_author &&
+               touch file2 &&
+               p4 add file2 &&
+               p4 submit -d "$(echo second CL has some latin-1 tæxt |
+                       iconv -f utf8 -t latin1)" &&
+
+               p4_add_user "cp1252_author" "$(echo æuthœr |
+                       iconv -f utf8 -t cp1252)" &&
+               P4USER=cp1252_author &&
+               touch file3 &&
+               p4 add file3 &&
+               p4 submit -d "$(echo third CL has sœme cp-1252 tæxt |
+                 iconv -f utf8 -t cp1252)" &&
+
+               p4_add_user "cp850_author" "$(echo Åuthor |
+                       iconv -f utf8 -t cp850)" &&
+               P4USER=cp850_author &&
+               touch file4 &&
+               p4 add file4 &&
+               p4 submit -d "$(echo fourth CL hÅs some cp850 text |
+                       iconv -f utf8 -t cp850)"
+       )
+'
+
+test_expect_success 'clone non-utf8 repo with strict encoding' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       test_must_fail git -c git-p4.metadataDecodingStrategy=strict p4.py clone --dest="$git" //depot@all 2>err &&
+       grep "Decoding perforce metadata failed!" err
+'
+
+test_expect_success 'check utf-8 contents with passthrough strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=passthrough p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some utf-8 tǣxt" actual &&
+               grep "ǣuthor" actual
+       )
+'
+
+test_expect_success 'check latin-1 contents corrupted in git with passthrough strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=passthrough p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               badly_encoded_in_git=$(echo "some latin-1 tæxt" | iconv -f utf8 -t latin1) &&
+               grep "$badly_encoded_in_git" actual &&
+               bad_author_in_git="$(echo æuthor | iconv -f utf8 -t latin1)" &&
+               grep "$bad_author_in_git" actual
+       )
+'
+
+test_expect_success 'check utf-8 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some utf-8 tǣxt" actual &&
+               grep "ǣuthor" actual
+       )
+'
+
+test_expect_success 'check latin-1 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some latin-1 tæxt" actual &&
+               grep "æuthor" actual
+       )
+'
+
+test_expect_success 'check cp-1252 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "sœme cp-1252 tæxt" actual &&
+               grep "æuthœr" actual
+       )
+'
+
+test_expect_success 'check cp850 contents parsed with correct fallback' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback -c git-p4.metadataFallbackEncoding=cp850 p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "hÅs some cp850 text" actual &&
+               grep "Åuthor" actual
+       )
+'
+
+test_expect_success 'check cp850-only contents escaped when cp1252 is fallback' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "h%8Fs some cp850 text" actual &&
+               grep "%8Futhor" actual
+       )
+'
+
+test_expect_success 'check cp-1252 contents on later sync after clone with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$cli" &&
+               P4USER=cp1252_author &&
+               touch file10 &&
+               p4 add file10 &&
+               p4 submit -d "$(echo later CL has sœme more cp-1252 tæxt |
+                       iconv -f utf8 -t cp1252)"
+       ) &&
+       (
+               cd "$git" &&
+
+               git p4.py sync --branch=master &&
+
+               git log p4/master >actual &&
+               grep "sœme more cp-1252 tæxt" actual &&
+               grep "æuthœr" actual
+       )
+'
+
+############################
+## / END REPEATED SECTION ##
+############################
+
+test_expect_success 'passthrough (latin-1 contents corrupted in git) is the default with python2' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               badly_encoded_in_git=$(echo "some latin-1 tæxt" | iconv -f utf8 -t latin1) &&
+               grep "$badly_encoded_in_git" actual
+       )
+'
+
+test_done
diff --git a/t/t9836-git-p4-metadata-encoding-python3.sh b/t/t9836-git-p4-metadata-encoding-python3.sh
new file mode 100755 (executable)
index 0000000..63350dc
--- /dev/null
@@ -0,0 +1,214 @@
+#!/bin/sh
+
+test_description='git p4 metadata encoding
+
+This test checks that the import process handles inconsistent text
+encoding in p4 metadata (author names, commit messages, etc) without
+failing, and produces maximally sane output in git.'
+
+. ./lib-git-p4.sh
+
+python_target_version='3'
+
+###############################
+## SECTION REPEATED IN t9835 ##
+###############################
+
+# Please note: this test calls "git-p4.py" rather than "git-p4", because the
+# latter references a specific path so we can't easily force it to run under
+# the python version we need to.
+
+python_major_version=$(python -V 2>&1 | cut -c  8)
+python_target_binary=$(which python$python_target_version)
+if ! test "$python_major_version" = "$python_target_version" && test "$python_target_binary"
+then
+       mkdir temp_python
+       PATH="$(pwd)/temp_python:$PATH" && export PATH
+       ln -s $python_target_binary temp_python/python
+fi
+
+python_major_version=$(python -V 2>&1 | cut -c  8)
+if ! test "$python_major_version" = "$python_target_version"
+then
+       skip_all="skipping python$python_target_version-specific git p4 tests; python$python_target_version not available"
+       test_done
+fi
+
+remove_user_cache () {
+       rm "$HOME/.gitp4-usercache.txt" || true
+}
+
+test_expect_success 'start p4d' '
+       start_p4d
+'
+
+test_expect_success 'init depot' '
+       (
+               cd "$cli" &&
+
+               p4_add_user "utf8_author" "ǣuthor" &&
+               P4USER=utf8_author &&
+               touch file1 &&
+               p4 add file1 &&
+               p4 submit -d "first CL has some utf-8 tǣxt" &&
+
+               p4_add_user "latin1_author" "$(echo æuthor |
+                       iconv -f utf8 -t latin1)" &&
+               P4USER=latin1_author &&
+               touch file2 &&
+               p4 add file2 &&
+               p4 submit -d "$(echo second CL has some latin-1 tæxt |
+                       iconv -f utf8 -t latin1)" &&
+
+               p4_add_user "cp1252_author" "$(echo æuthœr |
+                       iconv -f utf8 -t cp1252)" &&
+               P4USER=cp1252_author &&
+               touch file3 &&
+               p4 add file3 &&
+               p4 submit -d "$(echo third CL has sœme cp-1252 tæxt |
+                 iconv -f utf8 -t cp1252)" &&
+
+               p4_add_user "cp850_author" "$(echo Åuthor |
+                       iconv -f utf8 -t cp850)" &&
+               P4USER=cp850_author &&
+               touch file4 &&
+               p4 add file4 &&
+               p4 submit -d "$(echo fourth CL hÅs some cp850 text |
+                       iconv -f utf8 -t cp850)"
+       )
+'
+
+test_expect_success 'clone non-utf8 repo with strict encoding' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       test_must_fail git -c git-p4.metadataDecodingStrategy=strict p4.py clone --dest="$git" //depot@all 2>err &&
+       grep "Decoding perforce metadata failed!" err
+'
+
+test_expect_success 'check utf-8 contents with passthrough strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=passthrough p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some utf-8 tǣxt" actual &&
+               grep "ǣuthor" actual
+       )
+'
+
+test_expect_success 'check latin-1 contents corrupted in git with passthrough strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=passthrough p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               badly_encoded_in_git=$(echo "some latin-1 tæxt" | iconv -f utf8 -t latin1) &&
+               grep "$badly_encoded_in_git" actual &&
+               bad_author_in_git="$(echo æuthor | iconv -f utf8 -t latin1)" &&
+               grep "$bad_author_in_git" actual
+       )
+'
+
+test_expect_success 'check utf-8 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some utf-8 tǣxt" actual &&
+               grep "ǣuthor" actual
+       )
+'
+
+test_expect_success 'check latin-1 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "some latin-1 tæxt" actual &&
+               grep "æuthor" actual
+       )
+'
+
+test_expect_success 'check cp-1252 contents with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "sœme cp-1252 tæxt" actual &&
+               grep "æuthœr" actual
+       )
+'
+
+test_expect_success 'check cp850 contents parsed with correct fallback' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback -c git-p4.metadataFallbackEncoding=cp850 p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "hÅs some cp850 text" actual &&
+               grep "Åuthor" actual
+       )
+'
+
+test_expect_success 'check cp850-only contents escaped when cp1252 is fallback' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "h%8Fs some cp850 text" actual &&
+               grep "%8Futhor" actual
+       )
+'
+
+test_expect_success 'check cp-1252 contents on later sync after clone with fallback strategy' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git -c git-p4.metadataDecodingStrategy=fallback p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$cli" &&
+               P4USER=cp1252_author &&
+               touch file10 &&
+               p4 add file10 &&
+               p4 submit -d "$(echo later CL has sœme more cp-1252 tæxt |
+                       iconv -f utf8 -t cp1252)"
+       ) &&
+       (
+               cd "$git" &&
+
+               git p4.py sync --branch=master &&
+
+               git log p4/master >actual &&
+               grep "sœme more cp-1252 tæxt" actual &&
+               grep "æuthœr" actual
+       )
+'
+
+############################
+## / END REPEATED SECTION ##
+############################
+
+
+test_expect_success 'fallback (both utf-8 and cp-1252 contents handled) is the default with python3' '
+       test_when_finished cleanup_git &&
+       test_when_finished remove_user_cache &&
+       git p4.py clone --dest="$git" //depot@all &&
+       (
+               cd "$git" &&
+               git log >actual &&
+               grep "sœme cp-1252 tæxt" actual &&
+               grep "æuthœr" actual
+       )
+'
+
+test_done
index 531cef097db0c6110ba2c6dce403912f402a6c95..8ba5ca1534518a5f35e50a6d27a81d1a06282720 100644 (file)
@@ -535,9 +535,10 @@ case $GIT_TEST_FSYNC in
        ;;
 esac
 
-# Add libc MALLOC and MALLOC_PERTURB test
-# only if we are not executing the test with valgrind
+# Add libc MALLOC and MALLOC_PERTURB test only if we are not executing
+# the test with valgrind and have not compiled with SANITIZE=address.
 if test -n "$valgrind" ||
+   test -n "$SANITIZE_ADDRESS" ||
    test -n "$TEST_NO_MALLOC_CHECK"
 then
        setup_malloc_check () {
@@ -1666,6 +1667,7 @@ test -n "$USE_LIBPCRE2" && test_set_prereq PCRE
 test -n "$USE_LIBPCRE2" && test_set_prereq LIBPCRE2
 test -z "$NO_GETTEXT" && test_set_prereq GETTEXT
 test -n "$SANITIZE_LEAK" && test_set_prereq SANITIZE_LEAK
+test -n "$GIT_VALGRIND_ENABLED" && test_set_prereq VALGRIND
 
 if test -z "$GIT_TEST_CHECK_CACHE_TREE"
 then
index 94aa18f3f7db211482f8392a5ecc2a3da786fc18..2024c82691fe4a1a1c9b5c8ebd31d6c382fe8bfc 100644 (file)
 
 static VOLATILE_LIST_HEAD(tempfile_list);
 
+static void remove_template_directory(struct tempfile *tempfile,
+                                     int in_signal_handler)
+{
+       if (tempfile->directorylen > 0 &&
+           tempfile->directorylen < tempfile->filename.len &&
+           tempfile->filename.buf[tempfile->directorylen] == '/') {
+               strbuf_setlen(&tempfile->filename, tempfile->directorylen);
+               if (in_signal_handler)
+                       rmdir(tempfile->filename.buf);
+               else
+                       rmdir_or_warn(tempfile->filename.buf);
+       }
+}
+
 static void remove_tempfiles(int in_signal_handler)
 {
        pid_t me = getpid();
@@ -74,6 +88,7 @@ static void remove_tempfiles(int in_signal_handler)
                        unlink(p->filename.buf);
                else
                        unlink_or_warn(p->filename.buf);
+               remove_template_directory(p, in_signal_handler);
 
                p->active = 0;
        }
@@ -100,6 +115,7 @@ static struct tempfile *new_tempfile(void)
        tempfile->owner = 0;
        INIT_LIST_HEAD(&tempfile->list);
        strbuf_init(&tempfile->filename, 0);
+       tempfile->directorylen = 0;
        return tempfile;
 }
 
@@ -198,6 +214,52 @@ struct tempfile *mks_tempfile_tsm(const char *filename_template, int suffixlen,
        return tempfile;
 }
 
+struct tempfile *mks_tempfile_dt(const char *directory_template,
+                                const char *filename)
+{
+       struct tempfile *tempfile;
+       const char *tmpdir;
+       struct strbuf sb = STRBUF_INIT;
+       int fd;
+       size_t directorylen;
+
+       if (!ends_with(directory_template, "XXXXXX")) {
+               errno = EINVAL;
+               return NULL;
+       }
+
+       tmpdir = getenv("TMPDIR");
+       if (!tmpdir)
+               tmpdir = "/tmp";
+
+       strbuf_addf(&sb, "%s/%s", tmpdir, directory_template);
+       directorylen = sb.len;
+       if (!mkdtemp(sb.buf)) {
+               int orig_errno = errno;
+               strbuf_release(&sb);
+               errno = orig_errno;
+               return NULL;
+       }
+
+       strbuf_addf(&sb, "/%s", filename);
+       fd = open(sb.buf, O_CREAT | O_EXCL | O_RDWR, 0600);
+       if (fd < 0) {
+               int orig_errno = errno;
+               strbuf_setlen(&sb, directorylen);
+               rmdir(sb.buf);
+               strbuf_release(&sb);
+               errno = orig_errno;
+               return NULL;
+       }
+
+       tempfile = new_tempfile();
+       strbuf_swap(&tempfile->filename, &sb);
+       tempfile->directorylen = directorylen;
+       tempfile->fd = fd;
+       activate_tempfile(tempfile);
+       return tempfile;
+}
+
 struct tempfile *xmks_tempfile_m(const char *filename_template, int mode)
 {
        struct tempfile *tempfile;
@@ -316,6 +378,7 @@ void delete_tempfile(struct tempfile **tempfile_p)
 
        close_tempfile_gently(tempfile);
        unlink_or_warn(tempfile->filename.buf);
+       remove_template_directory(tempfile, 0);
        deactivate_tempfile(tempfile);
        *tempfile_p = NULL;
 }
index 4de3bc77d246ef5ceceabc42e64ae35a9960b26a..d7804a214abb60ee60496ef34a9fc8be110344ea 100644 (file)
@@ -82,6 +82,7 @@ struct tempfile {
        FILE *volatile fp;
        volatile pid_t owner;
        struct strbuf filename;
+       size_t directorylen;
 };
 
 /*
@@ -198,6 +199,18 @@ static inline struct tempfile *xmks_tempfile(const char *filename_template)
        return xmks_tempfile_m(filename_template, 0600);
 }
 
+/*
+ * Attempt to create a temporary directory in $TMPDIR and to create and
+ * open a file in that new directory. Derive the directory name from the
+ * template in the manner of mkdtemp(). Arrange for directory and file
+ * to be deleted if the program exits before they are deleted
+ * explicitly. On success return a tempfile whose "filename" member
+ * contains the full path of the file and its "fd" member is open for
+ * writing the file. On error return NULL and set errno appropriately.
+ */
+struct tempfile *mks_tempfile_dt(const char *directory_template,
+                                const char *filename);
+
 /*
  * Associate a stdio stream with the temporary file (which must still
  * be open). Return `NULL` (*without* deleting the file) on error. The
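
The comment above documents the new mks_tempfile_dt() entry point. As a minimal illustrative sketch (not part of this patch), a caller might use it roughly as follows; the directory template "git-scratch-XXXXXX", the file name "payload", and the helper write_scratch_file() are invented for this example, while get_tempfile_fd(), write_in_full(), close_tempfile_gently() and delete_tempfile() are existing git helpers:

    #include "cache.h"
    #include "tempfile.h"

    /*
     * Create $TMPDIR/git-scratch-XXXXXX/payload, write "data" into it,
     * then clean up.  For tempfiles created with mks_tempfile_dt(),
     * delete_tempfile() removes the containing directory along with the
     * file, and both are also cleaned up on unexpected exit.
     */
    static int write_scratch_file(const char *data, size_t len)
    {
            struct tempfile *t = mks_tempfile_dt("git-scratch-XXXXXX", "payload");

            if (!t)
                    return error_errno("could not create temporary directory");

            if (write_in_full(get_tempfile_fd(t), data, len) < 0 ||
                close_tempfile_gently(t) < 0) {
                    delete_tempfile(&t);
                    return -1;
            }

            delete_tempfile(&t);
            return 0;
    }
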
index 1b12f77d945f423aae5b8141d9494c06424f7180..926a3729df594027e67d709e1887560eb6bb7c94 100644 (file)
--- a/trailer.c
+++ b/trailer.c
@@ -1029,7 +1029,7 @@ static FILE *create_in_place_tempfile(const char *file)
 
        /* Create temporary file in the same directory as the original */
        tail = strrchr(file, '/');
-       if (tail != NULL)
+       if (tail)
                strbuf_add(&filename_template, file, tail - file + 1);
        strbuf_addstr(&filename_template, "git-interpret-trailers-XXXXXX");
 
index 3d64a43ab394b1861fee34b465f0cfb973bfe5fb..01e24bd578f29f79b5afad8bad04f6c64fde590d 100644 (file)
@@ -438,7 +438,7 @@ static int fetch_refs_via_pack(struct transport *transport,
                args.self_contained_and_connected;
        data->options.connectivity_checked = args.connectivity_checked;
 
-       if (refs == NULL)
+       if (!refs)
                ret = -1;
        if (report_unmatched_refs(to_fetch, nr_heads))
                ret = -1;
index 7f528d35cc263ad2d87444d2b9c0081ba6e1da74..a1d0ff3a4d318ce12493eb35cc758963d92608e0 100644 (file)
@@ -11,6 +11,7 @@
 #include "refs.h"
 #include "attr.h"
 #include "split-index.h"
+#include "sparse-index.h"
 #include "submodule.h"
 #include "submodule-config.h"
 #include "fsmonitor.h"
@@ -1839,6 +1840,11 @@ int unpack_trees(unsigned len, struct tree_desc *t, struct unpack_trees_options
        o->result.fsmonitor_last_update =
                xstrdup_or_null(o->src_index->fsmonitor_last_update);
 
+       if (!o->src_index->initialized &&
+           !repo->settings.command_requires_full_index &&
+           is_sparse_index_allowed(&o->result, 0))
+               o->result.sparse_index = 1;
+
        /*
         * Sparse checkout loop #1: set NEW_SKIP_WORKTREE on existing entries
         */
index 9e9e2a2f955d242cf994c5451c537a7f88539379..7e5a7ea1eaa9d9d49219537b70256f1d947633ea 100644 (file)
@@ -113,7 +113,7 @@ static int dowild(const uchar *p, const uchar *text, unsigned int flags)
                                /* Trailing "**" matches everything.  Trailing "*" matches
                                 * only if there are no more slash characters. */
                                if (!match_slash) {
-                                       if (strchr((char*)text, '/') != NULL)
+                                       if (strchr((char *)text, '/'))
                                                return WM_NOMATCH;
                                }
                                return WM_MATCH;
index 90fc085f76b4cc2ad89587cfc2bfc718f57144d9..257ba4cf1ee5b71de8be0bff85511415f6600347 100644 (file)
@@ -483,7 +483,7 @@ int submodule_uses_worktrees(const char *path)
                return 0;
 
        d = readdir_skip_dot_and_dotdot(dir);
-       if (d != NULL)
+       if (d)
                ret = 1;
        closedir(dir);
        return ret;
index f512994690b0292587d819c6b042c18e3b9675a4..1c3c970080b0e00169802b8de4d3f70b709b44ad 100644 (file)
--- a/wrapper.c
+++ b/wrapper.c
+++ b/wrapper.c
@@ -393,7 +393,7 @@ FILE *xfopen(const char *path, const char *mode)
 FILE *xfdopen(int fd, const char *mode)
 {
        FILE *stream = fdopen(fd, mode);
-       if (stream == NULL)
+       if (!stream)
                die_errno("Out of memory? fdopen failed");
        return stream;
 }
index 2e3a5a2943e7fc28e79a425fcbdb01da0b0567b0..e87950de32e5602765de26953ce7ec812ef30925 100644 (file)
@@ -159,7 +159,7 @@ int read_mmfile(mmfile_t *ptr, const char *filename)
 
        if (stat(filename, &st))
                return error_errno("Could not stat %s", filename);
-       if ((f = fopen(filename, "rb")) == NULL)
+       if (!(f = fopen(filename, "rb")))
                return error_errno("Could not open %s", filename);
        sz = xsize_t(st.st_size);
        ptr->ptr = xmalloc(sz ? sz : 1);
index 1cbf2b9829e759dd20f5e714d1390b26fe8cdd3d..c4ccd68d4760bc08735d6ca3b7dcdd3440abaf16 100644 (file)
@@ -65,7 +65,7 @@ xdchange_t *xdl_get_hunk(xdchange_t **xscr, xdemitconf_t const *xecfg)
                        *xscr = xch;
        }
 
-       if (*xscr == NULL)
+       if (!*xscr)
                return NULL;
 
        lxch = *xscr;
index 2809a28ca960147c285bc5a224ed377a0964663a..ae4636c2477cc640eae84578805d9722d5e28d1b 100644 (file)
@@ -34,7 +34,6 @@
 #define XDL_ADDBITS(v,b)       ((v) + ((v) >> (b)))
 #define XDL_MASKBITS(b)                ((1UL << (b)) - 1)
 #define XDL_HASHLONG(v,b)      (XDL_ADDBITS((unsigned long)(v), b) & XDL_MASKBITS(b))
-#define XDL_PTRFREE(p) do { if (p) { xdl_free(p); (p) = NULL; } } while (0)
 #define XDL_LE32_PUT(p, v) \
 do { \
        unsigned char *__p = (unsigned char *) (p); \
index 4527a4a07c4e0986cb377015d09f9a13309c04d8..105752758f2f3870448f1ec1cf0070c3a29b36e6 100644 (file)
@@ -188,7 +188,7 @@ static int xdl_prepare_ctx(unsigned int pass, mmfile_t *mf, long narec, xpparam_
        memset(rhash, 0, hsize * sizeof(xrecord_t *));
 
        nrec = 0;
-       if ((cur = blk = xdl_mmfile_first(mf, &bsize)) != NULL) {
+       if ((cur = blk = xdl_mmfile_first(mf, &bsize))) {
                for (top = blk + bsize; cur < top; ) {
                        prev = cur;
                        hav = xdl_hash_record(&cur, top, xpp->flags);
index cfa6e2220ffd0461ce3293911c86366f6ccb1b05..115b2b1640b4504d1b7eb1bc4dc1428b109f6380 100644 (file)
@@ -122,7 +122,7 @@ long xdl_guess_lines(mmfile_t *mf, long sample) {
        long nl = 0, size, tsize = 0;
        char const *data, *cur, *top;
 
-       if ((cur = data = xdl_mmfile_first(mf, &size)) != NULL) {
+       if ((cur = data = xdl_mmfile_first(mf, &size))) {
                for (top = data + size; nl < sample && cur < top; ) {
                        nl++;
                        if (!(cur = memchr(cur, '\n', top - cur)))