merge v1.12.37

1. 4350791 On project cleanup, don't remove nested projects
2. Revert 76a4a9d ("project: Set config option to skip lfs smudge filter")
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..45ab656
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length=80
+ignore=E111,E114,E402
diff --git a/.gitattributes b/.gitattributes
index d65028a..cdd8546 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,4 +1,4 @@
 # Prevent /bin/sh scripts from being clobbered by autocrlf=true
 git_ssh text eol=lf
-main.py text eol=lf
 repo text eol=lf
+hooks/* text eol=lf
diff --git a/.mailmap b/.mailmap
new file mode 100644
index 0000000..eb64bd2
--- /dev/null
+++ b/.mailmap
@@ -0,0 +1,11 @@
+Anthony Newnam <anthony.newnam@garmin.com>    Anthony <anthony@bnovc.com>
+Hu Xiuyun <xiuyun.hu@hisilicon.com>           Hu xiuyun <xiuyun.hu@hisilicon.com>
+Hu Xiuyun <xiuyun.hu@hisilicon.com>           Hu Xiuyun <clouds08@qq.com>
+Jelly Chen <chenguodong@huawei.com>           chenguodong <chenguodong@huawei.com>
+Jia Bi <bijia@xiaomi.com>                     bijia <bijia@xiaomi.com>
+JoonCheol Park <jooncheol@gmail.com>          Jooncheol Park <jooncheol@gmail.com>
+Sergii Pylypenko <x.pelya.x@gmail.com>        pelya <x.pelya.x@gmail.com>
+Shawn Pearce <sop@google.com>                 Shawn O. Pearce <sop@google.com>
+Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>  Ulrik Sjolin <ulrik.sjolin@gmail.com>
+Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>  Ulrik Sjolin <ulrik.sjolin@sonyericsson.com>
+Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>  Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index c6be743..0000000
--- a/.pylintrc
+++ /dev/null
@@ -1,298 +0,0 @@
-# lint Python modules using external checkers.
-#
-# This is the main checker controling the other ones and the reports
-# generation. It is itself both a raw checker and an astng checker in order
-# to:
-# * handle message activation / deactivation at the module level
-# * handle some basic but necessary stats'data (number of classes, methods...)
-#
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add <file or directory> to the black list. It should be a base name, not a
-# path. You may set this option multiple times.
-ignore=SVN
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# Set the cache size for astng objects.
-cache-size=500
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable only checker(s) with the given id(s). This option conflicts with the
-# disable-checker option
-#enable-checker=
-
-# Enable all checker(s) except those with the given id(s). This option
-# conflicts with the enable-checker option
-#disable-checker=
-
-# Enable all messages in the listed categories.
-#enable-msg-cat=
-
-# Disable all messages in the listed categories.
-#disable-msg-cat=
-
-# Enable the message(s) with the given id(s).
-enable=RP0004
-
-# Disable the message(s) with the given id(s).
-disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801,F0401,E0611,R0801,I0011
-
-[REPORTS]
-
-# set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=text
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note).You have access to the variables errors warning, statement which
-# respectivly contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (R0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (R0004).
-comment=no
-
-# checks for
-# * unused variables / imports
-# * undefined variables
-# * redefinition of variable from builtins or from an outer scope
-# * use of variable before assigment
-#
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching names used for dummy variables (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-# try to find bugs in the code using type inference
-#
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamicaly set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, consider the acquired-members option to ignore
-# access to some undefined attributes.
-zope=no
-
-# List of members which are usually get through zope's acquisition mecanism and
-# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
-acquired-members=REQUEST,acl_users,aq_parent
-
-
-# checks for :
-# * doc strings
-# * modules / classes / functions / methods / arguments / variables name
-# * number of arguments, local variables, branchs, returns and statements in
-# functions, methods
-# * required module attributes
-# * dangerous default values as arguments
-# * redefinition of function / method / class
-# * uses of the global statement
-#
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=_main|__.*__
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=map,filter,apply,input
-
-
-# checks for sign of poor/misdesign:
-# * number of methods, attributes, local variables...
-# * size, complexity of functions, methods
-#
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=20
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=30
-
-
-# checks for
-# * external modules dependencies
-# * relative / wildcard imports
-# * cyclic imports
-# * uses of deprecated modules
-#
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report R0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report R0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report R0402 must
-# not be disabled)
-int-import-graph=
-
-
-# checks for :
-# * methods without self as first argument
-# * overridden methods signature
-# * access only to existant members via self
-# * attributes not defined in the __init__ method
-# * supported interfaces implementation
-# * unreachable code
-#
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-
-# checks for similarities and duplicated code. This computation may be
-# memory / CPU intensive, so you should disable it if you experiments some
-# problems.
-#
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-# checks for:
-# * warning notes in the code like FIXME, XXX
-# * PEP 263: source code with non ascii character but no encoding declaration
-#
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-# checks for :
-# * unauthorized constructions
-# * strict indentation
-# * line length
-# * use of <> instead of !=
-#
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).  In repo it is 2 spaces.
-indent-string='  '
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e35f8e9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,14 @@
+# repo
+
+Repo is a tool built on top of Git.  Repo helps manage many Git repositories,
+does the uploads to revision control systems, and automates parts of the
+development workflow.  Repo is not meant to replace Git, only to make it
+easier to work with Git.  The repo command is an executable Python script
+that you can put anywhere in your path.
+
+* Homepage: https://code.google.com/p/git-repo/
+* Bug reports: https://code.google.com/p/git-repo/issues/
+* Source: https://code.google.com/p/git-repo/
+* Overview: https://source.android.com/source/developing.html
+* Docs: https://source.android.com/source/using-repo.html
+* [Submitting patches](./SUBMITTING_PATCHES.md)
diff --git a/SUBMITTING_PATCHES b/SUBMITTING_PATCHES.md
similarity index 70%
rename from SUBMITTING_PATCHES
rename to SUBMITTING_PATCHES.md
index 8656ee7..07f7661 100644
--- a/SUBMITTING_PATCHES
+++ b/SUBMITTING_PATCHES.md
@@ -1,17 +1,17 @@
-Short Version:
+# Short Version
 
  - Make small logical changes.
  - Provide a meaningful commit message.
- - Check for coding errors with pylint
+ - Check for coding errors and style nits with pyflakes and flake8
  - Make sure all code is under the Apache License, 2.0.
  - Publish your changes for review.
  - Make corrections if requested.
  - Verify your changes on gerrit so they can be submitted.
 
-   git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master
+   `git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`
 
 
-Long Version:
+# Long Version
 
 I wanted a file describing how to submit patches for repo,
 so I started with the one found in the core Git distribution
@@ -19,10 +19,10 @@
 patch submission guidelines for the Linux kernel.
 
 However there are some differences, so please review and familiarize
-yourself with the following relevant bits:
+yourself with the following relevant bits.
 
 
-(1) Make separate commits for logically separate changes.
+## Make separate commits for logically separate changes.
 
 Unless your patch is really trivial, you should not be sending
 out a patch that was generated between your working tree and your
@@ -36,14 +36,34 @@
 probably need to split up your commit to finer grained pieces.
 
 
-(2) Check for coding errors with pylint
+## Check for coding errors and style nits with pyflakes and flake8
 
-Run pylint on changed modules using the provided configuration:
+### Coding errors
 
-  pylint --rcfile=.pylintrc file.py
+Run `pyflakes` on changed modules:
 
+    pyflakes file.py
 
-(3) Check the license
+Ideally there should be no new errors or warnings introduced.
+
+### Style violations
+
+Run `flake8` on changed modules:
+
+    flake8 file.py
+
+Note that repo generally follows [Google's python style guide]
+(https://google.github.io/styleguide/pyguide.html) rather than [PEP 8]
+(https://www.python.org/dev/peps/pep-0008/), so it's possible that
+the output of `flake8` will be quite noisy. It's not mandatory to
+avoid all warnings, but at least the maximum line length should be
+followed.
+
+If there are many occurrences of the same warning that cannot be
+avoided without going against the Google style guide, these may be
+suppressed in the included `.flake8` file.
+
+## Check the license
 
 repo is licensed under the Apache License, 2.0.
 
@@ -59,7 +79,7 @@
 has been applied and pushed out.
 
 
-(4) Sending your patches.
+## Sending your patches.
 
 Do not email your patches to anyone.
 
@@ -91,23 +111,23 @@
 Push your patches over HTTPS to the review server, possibly through
 a remembered remote to make this easier in the future:
 
-   git config remote.review.url https://gerrit-review.googlesource.com/git-repo
-   git config remote.review.push HEAD:refs/for/master
+    git config remote.review.url https://gerrit-review.googlesource.com/git-repo
+    git config remote.review.push HEAD:refs/for/master
 
-   git push review
+    git push review
 
 You will be automatically emailed a copy of your commits, and any
 comments made by the project maintainers.
 
 
-(5) Make changes if requested
+## Make changes if requested
 
 The project maintainer who reviews your changes might request changes to your
 commit. If you make the requested changes you will need to amend your commit
 and push it to the review server again.
 
 
-(6) Verify your changes on gerrit
+## Verify your changes on gerrit
 
 After you receive a Code-Review+2 from the maintainer, select the Verified
 button on the gerrit page for the change. This verifies that you have tested
diff --git a/command.py b/command.py
index cd5e3c3..2ff0a34 100644
--- a/command.py
+++ b/command.py
@@ -31,7 +31,7 @@
   manifest = None
   _optparse = None
 
-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return False
 
   def ReadEnvironmentOptions(self, opts):
@@ -63,7 +63,7 @@
         usage = self.helpUsage.strip().replace('%prog', me)
       except AttributeError:
         usage = 'repo %s' % self.NAME
-      self._optparse = optparse.OptionParser(usage = usage)
+      self._optparse = optparse.OptionParser(usage=usage)
       self._Options(self._optparse)
     return self._optparse
 
@@ -110,15 +110,20 @@
     project = None
     if os.path.exists(path):
       oldpath = None
-      while path \
-        and path != oldpath \
-        and path != manifest.topdir:
+      while path and \
+            path != oldpath and \
+            path != manifest.topdir:
         try:
           project = self._by_path[path]
           break
         except KeyError:
           oldpath = path
           path = os.path.dirname(path)
+      if not project and path == manifest.topdir:
+        try:
+          project = self._by_path[path]
+        except KeyError:
+          pass
     else:
       try:
         project = self._by_path[path]
@@ -138,7 +143,7 @@
     mp = manifest.manifestProject
 
     if not groups:
-        groups = mp.config.GetString('manifest.groups')
+      groups = mp.config.GetString('manifest.groups')
     if not groups:
       groups = 'default,platform-' + platform.system().lower()
     groups = [x for x in re.split(r'[,\s]+', groups) if x]
@@ -151,8 +156,7 @@
                                   for p in project.GetDerivedSubprojects())
       all_projects_list.extend(derived_projects.values())
       for project in all_projects_list:
-        if ((missing_ok or project.Exists) and
-            project.MatchesGroups(groups)):
+        if (missing_ok or project.Exists) and project.MatchesGroups(groups):
           result.append(project)
     else:
       self._ResetPathToProjectMap(all_projects_list)
@@ -166,8 +170,8 @@
 
           # If it's not a derived project, update path->project mapping and
           # search again, as arg might actually point to a derived subproject.
-          if (project and not project.Derived and
-              (submodules_ok or project.sync_s)):
+          if (project and not project.Derived and (submodules_ok or
+                                                   project.sync_s)):
             search_again = False
             for subproject in project.GetDerivedSubprojects():
               self._UpdatePathToProjectMap(subproject)
@@ -194,17 +198,24 @@
     result.sort(key=_getpath)
     return result
 
-  def FindProjects(self, args):
+  def FindProjects(self, args, inverse=False):
     result = []
     patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args]
     for project in self.GetProjects(''):
       for pattern in patterns:
-        if pattern.search(project.name) or pattern.search(project.relpath):
+        match = pattern.search(project.name) or pattern.search(project.relpath)
+        if not inverse and match:
           result.append(project)
           break
+        if inverse and match:
+          break
+      else:
+        if inverse:
+          result.append(project)
     result.sort(key=lambda project: project.relpath)
     return result
 
+
 # pylint: disable=W0223
 # Pylint warns that the `InteractiveCommand` and `PagedCommand` classes do not
 # override method `Execute` which is abstract in `Command`.  Since that method
@@ -214,28 +225,32 @@
   """Command which requires user interaction on the tty and
      must not run within a pager, even if the user asks to.
   """
-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return False
 
+
 class PagedCommand(Command):
   """Command which defaults to output in a pager, as its
      display tends to be larger than one screen full.
   """
-  def WantPager(self, opt):
+  def WantPager(self, _opt):
     return True
 
 # pylint: enable=W0223
 
+
 class MirrorSafeCommand(object):
   """Command permits itself to run within a mirror,
      and does not require a working directory.
   """
 
+
 class GitcAvailableCommand(object):
   """Command that requires GITC to be available, but does
      not require the local client to be a GITC client.
   """
 
+
 class GitcClientCommand(object):
   """Command that requires the local client to be a GITC
      client.
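
Note: a minimal sketch of how the new `inverse` flag to `FindProjects` behaves.
The command instance and pattern below are purely illustrative, not taken from
any real manifest; patterns are regexes matched against both project.name and
project.relpath, as in the hunk above:

    # Hypothetical _Command instance `cmd`:
    matching = cmd.FindProjects(['^platform/'])                    # projects matching a pattern
    others = cmd.FindProjects(['^platform/'], inverse=True)        # projects matching no pattern

With `inverse=True` the for/else appends a project only when none of the
patterns matched it, so the two calls partition the project list.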
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index 140a782..2a07f19 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@
     <!ATTLIST remote name         ID    #REQUIRED>
     <!ATTLIST remote alias        CDATA #IMPLIED>
     <!ATTLIST remote fetch        CDATA #REQUIRED>
+    <!ATTLIST remote pushurl      CDATA #IMPLIED>
     <!ATTLIST remote review       CDATA #IMPLIED>
     <!ATTLIST remote revision     CDATA #IMPLIED>
 
@@ -125,6 +126,12 @@
 this remote.  Each project's name is appended to this prefix to
 form the actual URL used to clone the project.
 
+Attribute `pushurl`: The Git "push" URL prefix for all projects
+which use this remote.  Each project's name is appended to this
+prefix to form the actual URL used to "git push" the project.
+This attribute is optional; if not specified then "git push"
+will use the same URL as the `fetch` attribute.
+
 Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`.  This attribute is optional;
 if not specified then `repo upload` will not function.
@@ -175,7 +182,8 @@
   GetApprovedManifest(branch, target)
 
 Return a manifest in which each project is pegged to a known good revision
-for the current branch and target.
+for the current branch and target. This is used by repo sync when the
+--smart-sync option is given.
 
 The target to use is defined by environment variables TARGET_PRODUCT
 and TARGET_BUILD_VARIANT. These variables are used to create a string
@@ -187,7 +195,8 @@
   GetManifest(tag)
 
 Return a manifest in which each project is pegged to the revision at
-the specified tag.
+the specified tag. This is used by repo sync when the --smart-tag option
+is given.
 
 
 Element project
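
Note: to illustrate the new `pushurl` attribute documented above, a remote
element might look like the following (hostnames are made up for the example).
Repo appends each project's name to both prefixes, so a project named
platform/build would be fetched from https://git.example.com/platform/build
but pushed to ssh://git.example.com/platform/build:

    <remote name="origin"
            fetch="https://git.example.com"
            pushurl="ssh://git.example.com"
            review="https://review.example.com" />
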
diff --git a/git_config.py b/git_config.py
index 0379181..e223678 100644
--- a/git_config.py
+++ b/git_config.py
@@ -464,9 +464,13 @@
              % (host,port, str(e)), file=sys.stderr)
       return False
 
+    time.sleep(1)
+    ssh_died = (p.poll() is not None)
+    if ssh_died:
+      return False
+
     _master_processes.append(p)
     _master_keys.add(key)
-    time.sleep(1)
     return True
   finally:
     _master_keys_lock.release()
@@ -568,6 +572,7 @@
     self._config = config
     self.name = name
     self.url = self._Get('url')
+    self.pushUrl = self._Get('pushurl')
     self.review = self._Get('review')
     self.projectname = self._Get('projectname')
     self.fetch = list(map(RefSpec.FromString,
@@ -694,6 +699,10 @@
     """Save this remote to the configuration.
     """
     self._Set('url', self.url)
+    if self.pushUrl is not None:
+      self._Set('pushurl', self.pushUrl + '/' + self.projectname)
+    else:
+      self._Set('pushurl', self.pushUrl)
     self._Set('review', self.review)
     self._Set('projectname', self.projectname)
     self._Set('fetch', list(map(str, self.fetch)))
diff --git a/gitc_utils.py b/gitc_utils.py
index 0f3e181..0d4a5c3 100644
--- a/gitc_utils.py
+++ b/gitc_utils.py
@@ -24,7 +24,9 @@
 import git_config
 import wrapper
 
-NUM_BATCH_RETRIEVE_REVISIONID = 300
+from error import ManifestParseError
+
+NUM_BATCH_RETRIEVE_REVISIONID = 32
 
 def get_gitc_manifest_dir():
   return wrapper.Wrapper().get_gitc_manifest_dir()
@@ -54,7 +56,11 @@
     if gitcmd.Wait():
       print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
       sys.exit(1)
-    proj.revisionExpr = gitcmd.stdout.split('\t')[0]
+    revisionExpr = gitcmd.stdout.split('\t')[0]
+    if not revisionExpr:
+      raise(ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
+                               (proj.remote.url, proj.revisionExpr)))
+    proj.revisionExpr = revisionExpr
 
 def _manifest_groups(manifest):
   """Returns the manifest group string that should be synced
@@ -127,7 +133,7 @@
         repo_proj.revisionExpr = None
 
   # Convert URLs from relative to absolute.
-  for name, remote in manifest.remotes.iteritems():
+  for _name, remote in manifest.remotes.iteritems():
     remote.fetchUrl = remote.resolvedFetchUrl
 
   # Save the manifest.
diff --git a/hooks/commit-msg b/hooks/commit-msg
index d8f009b..40ac237 100755
--- a/hooks/commit-msg
+++ b/hooks/commit-msg
@@ -1,6 +1,7 @@
 #!/bin/sh
+# From Gerrit Code Review 2.12.1
 #
-# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
+# Part of Gerrit Code Review (https://www.gerritcodereview.com/)
 #
 # Copyright (C) 2009 The Android Open Source Project
 #
@@ -19,7 +20,7 @@
 
 unset GREP_OPTIONS
 
-CHANGE_ID_AFTER="Bug|Issue"
+CHANGE_ID_AFTER="Bug|Issue|Test"
 MSG="$1"
 
 # Check for, and add if missing, a unique Change-Id
@@ -38,6 +39,12 @@
 		return
 	fi
 
+	# Do not add Change-Id to temp commits
+	if echo "$clean_message" | head -1 | grep -q '^\(fixup\|squash\)!'
+	then
+		return
+	fi
+
 	if test "false" = "`git config --bool --get gerrit.createChangeId`"
 	then
 		return
@@ -57,6 +64,10 @@
 		AWK=/usr/xpg4/bin/awk
 	fi
 
+	# Get core.commentChar from git config or use default symbol
+	commentChar=`git config --get core.commentChar`
+	commentChar=${commentChar:-#}
+
 	# How this works:
 	# - parse the commit message as (textLine+ blankLine*)*
 	# - assume textLine+ to be a footer until proven otherwise
@@ -75,8 +86,8 @@
 		blankLines = 0
 	}
 
-	# Skip lines starting with "#" without any spaces before it.
-	/^#/ { next }
+	# Skip lines starting with commentChar without any spaces before it.
+	/^'"$commentChar"'/ { next }
 
 	# Skip the line starting with the diff command and everything after it,
 	# up to the end of the file, assuming it is only patch data.
diff --git a/main.py b/main.py
index 4f4eb9f..c5f1e9c 100755
--- a/main.py
+++ b/main.py
@@ -379,7 +379,7 @@
     self.context = None
     self.handler_order = urllib.request.BaseHandler.handler_order - 50
 
-  def http_error_401(self, req, fp, code, msg, headers):
+  def http_error_401(self, req, fp, code, msg, headers): # pylint:disable=unused-argument
     host = req.get_host()
     retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
     return retry
diff --git a/manifest_xml.py b/manifest_xml.py
index 41d92e9..8bcc616 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -64,11 +64,13 @@
                name,
                alias=None,
                fetch=None,
+               pushUrl=None,
                manifestUrl=None,
                review=None,
                revision=None):
     self.name = name
     self.fetchUrl = fetch
+    self.pushUrl = pushUrl
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
@@ -102,7 +104,11 @@
     remoteName = self.name
     if self.remoteAlias:
       remoteName = self.remoteAlias
-    return RemoteSpec(remoteName, url, self.reviewUrl)
+    return RemoteSpec(remoteName,
+                      url=url,
+                      pushUrl=self.pushUrl,
+                      review=self.reviewUrl,
+                      orig_name=self.name)
 
 class XmlManifest(object):
   """manages the repo configuration file"""
@@ -157,6 +163,8 @@
     root.appendChild(e)
     e.setAttribute('name', r.name)
     e.setAttribute('fetch', r.fetchUrl)
+    if r.pushUrl is not None:
+      e.setAttribute('pushurl', r.pushUrl)
     if r.remoteAlias is not None:
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
@@ -249,9 +257,9 @@
         e.setAttribute('path', relpath)
       remoteName = None
       if d.remote:
-        remoteName = d.remote.remoteAlias or d.remote.name
-      if not d.remote or p.remote.name != remoteName:
-        remoteName = p.remote.name
+        remoteName = d.remote.name
+      if not d.remote or p.remote.orig_name != remoteName:
+        remoteName = p.remote.orig_name
         e.setAttribute('remote', remoteName)
       if peg_rev:
         if self.IsMirror:
@@ -267,7 +275,7 @@
             # isn't our value
             e.setAttribute('upstream', p.revisionExpr)
       else:
-        revision = self.remotes[remoteName].revision or d.revisionExpr
+        revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr
         if not revision or revision != p.revisionExpr:
           e.setAttribute('revision', p.revisionExpr)
         if p.upstream and p.upstream != p.revisionExpr:
@@ -639,6 +647,9 @@
     if alias == '':
       alias = None
     fetch = self._reqatt(node, 'fetch')
+    pushUrl = node.getAttribute('pushurl')
+    if pushUrl == '':
+      pushUrl = None
     review = node.getAttribute('review')
     if review == '':
       review = None
@@ -646,7 +657,7 @@
     if revision == '':
       revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
+    return _XmlRemote(name, alias, fetch, pushUrl, manifestUrl, review, revision)
 
   def _ParseDefault(self, node):
     """
@@ -978,5 +989,5 @@
   def _output_manifest_project_extras(self, p, e):
     """Output GITC Specific Project attributes"""
     if p.old_revision:
-        e.setAttribute('old-revision', str(p.old_revision))
+      e.setAttribute('old-revision', str(p.old_revision))
 
diff --git a/project.py b/project.py
index 878d227..5479985 100644
--- a/project.py
+++ b/project.py
@@ -30,7 +30,8 @@
 
 from color import Coloring
 from git_command import GitCommand, git_require
-from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, ID_RE
+from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \
+    ID_RE
 from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
@@ -39,11 +40,18 @@
 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
 
 from pyversion import is_python3
-if not is_python3():
+if is_python3():
+  import urllib.parse
+else:
+  import imp
+  import urlparse
+  urllib = imp.new_module('urllib')
+  urllib.parse = urlparse
   # pylint:disable=W0622
   input = raw_input
   # pylint:enable=W0622
 
+
 def _lwrite(path, content):
   lock = '%s.lock' % path
 
@@ -59,21 +67,27 @@
     os.remove(lock)
     raise
 
+
 def _error(fmt, *args):
   msg = fmt % args
   print('error: %s' % msg, file=sys.stderr)
 
+
 def _warn(fmt, *args):
   msg = fmt % args
   print('warn: %s' % msg, file=sys.stderr)
 
+
 def not_rev(r):
   return '^' + r
 
+
 def sq(r):
   return "'" + r.replace("'", "'\''") + "'"
 
 _project_hook_list = None
+
+
 def _ProjectHooks():
   """List the hooks present in the 'hooks' directory.
 
@@ -107,15 +121,14 @@
   @property
   def commits(self):
     if self._commit_cache is None:
-      self._commit_cache = self.project.bare_git.rev_list(
-        '--abbrev=8',
-        '--abbrev-commit',
-        '--pretty=oneline',
-        '--reverse',
-        '--date-order',
-        not_rev(self.base),
-        self.commit,
-        '--')
+      self._commit_cache = self.project.bare_git.rev_list('--abbrev=8',
+                                                          '--abbrev-commit',
+                                                          '--pretty=oneline',
+                                                          '--reverse',
+                                                          '--date-order',
+                                                          not_rev(self.base),
+                                                          self.commit,
+                                                          '--')
     return self._commit_cache
 
 
@@ -134,36 +147,36 @@
   @property
   def commits(self):
     if self._commit_cache is None:
-      self._commit_cache = self.project.bare_git.rev_list(
-        '--abbrev=8',
-        '--abbrev-commit',
-        '--pretty=oneline',
-        '--reverse',
-        '--date-order',
-        not_rev(self.base),
-        R_HEADS + self.name,
-        '--')
+      self._commit_cache = self.project.bare_git.rev_list('--abbrev=8',
+                                                          '--abbrev-commit',
+                                                          '--pretty=oneline',
+                                                          '--reverse',
+                                                          '--date-order',
+                                                          not_rev(self.base),
+                                                          R_HEADS + self.name,
+                                                          '--')
     return self._commit_cache
 
   @property
   def unabbrev_commits(self):
     r = dict()
-    for commit in self.project.bare_git.rev_list(
-        not_rev(self.base),
-        R_HEADS + self.name,
-        '--'):
+    for commit in self.project.bare_git.rev_list(not_rev(self.base),
+                                                 R_HEADS + self.name,
+                                                 '--'):
       r[commit[0:8]] = commit
     return r
 
   @property
   def date(self):
-    return self.project.bare_git.log(
-      '--pretty=format:%cd',
-      '-n', '1',
-      R_HEADS + self.name,
-      '--')
+    return self.project.bare_git.log('--pretty=format:%cd',
+                                     '-n', '1',
+                                     R_HEADS + self.name,
+                                     '--')
 
-  def UploadForReview(self, people, auto_topic=False, draft=False, dest_branch=None):
+  def UploadForReview(self, people,
+                      auto_topic=False,
+                      draft=False,
+                      dest_branch=None):
     self.project.UploadForReview(self.name,
                                  people,
                                  auto_topic=auto_topic,
@@ -173,8 +186,8 @@
   def GetPublishedRefs(self):
     refs = {}
     output = self.project.bare_git.ls_remote(
-      self.branch.remote.SshReviewUrl(self.project.UserEmail),
-      'refs/changes/*')
+        self.branch.remote.SshReviewUrl(self.project.UserEmail),
+        'refs/changes/*')
     for line in output.split('\n'):
       try:
         (sha, ref) = line.split()
@@ -184,7 +197,9 @@
 
     return refs
 
+
 class StatusColoring(Coloring):
+
   def __init__(self, config):
     Coloring.__init__(self, config, 'status')
     self.project = self.printer('header', attr='bold')
@@ -198,17 +213,22 @@
 
 
 class DiffColoring(Coloring):
+
   def __init__(self, config):
     Coloring.__init__(self, config, 'diff')
     self.project = self.printer('header', attr='bold')
 
+
 class _Annotation(object):
+
   def __init__(self, name, value, keep):
     self.name = name
     self.value = value
     self.keep = keep
 
+
 class _CopyFile(object):
+
   def __init__(self, src, dest, abssrc, absdest):
     self.src = src
     self.dest = dest
@@ -236,7 +256,9 @@
       except IOError:
         _error('Cannot copy file %s to %s', src, dest)
 
+
 class _LinkFile(object):
+
   def __init__(self, git_worktree, src, dest, relsrc, absdest):
     self.git_worktree = git_worktree
     self.src = src
@@ -275,7 +297,7 @@
       absDestDir = self.abs_dest
       if os.path.exists(absDestDir) and not os.path.isdir(absDestDir):
         _error('Link error: src with wildcard, %s must be a directory',
-            absDestDir)
+               absDestDir)
       else:
         absSrcFiles = glob.glob(absSrc)
         for absSrcFile in absSrcFiles:
@@ -292,18 +314,26 @@
           relSrc = os.path.join(relSrcDir, srcFile)
           self.__linkIt(relSrc, absDest)
 
+
 class RemoteSpec(object):
+
   def __init__(self,
                name,
                url=None,
+               pushUrl=None,
                review=None,
-               revision=None):
+               revision=None,
+               orig_name=None):
     self.name = name
     self.url = url
+    self.pushUrl = pushUrl
     self.review = review
     self.revision = revision
+    self.orig_name = orig_name
+
 
 class RepoHook(object):
+
   """A RepoHook contains information about a script to run as a hook.
 
   Hooks are used to run a python script before running an upload (for instance,
@@ -316,10 +346,12 @@
   Hooks are always python.  When a hook is run, we will load the hook into the
   interpreter and execute its main() function.
   """
+
   def __init__(self,
                hook_type,
                hooks_project,
                topdir,
+               manifest_url,
                abort_if_user_denies=False):
     """RepoHook constructor.
 
@@ -333,11 +365,13 @@
       topdir: Repo's top directory (the one containing the .repo directory).
           Scripts will run with CWD as this directory.  If you have a manifest,
           this is manifest.topdir
+      manifest_url: The URL to the manifest git repo.
       abort_if_user_denies: If True, we'll throw a HookError() if the user
           doesn't allow us to run the hook.
     """
     self._hook_type = hook_type
     self._hooks_project = hooks_project
+    self._manifest_url = manifest_url
     self._topdir = topdir
     self._abort_if_user_denies = abort_if_user_denies
 
@@ -386,9 +420,9 @@
   def _CheckForHookApproval(self):
     """Check to see whether this hook has been approved.
 
-    We'll look at the hash of all of the hooks.  If this matches the hash that
-    the user last approved, we're done.  If it doesn't, we'll ask the user
-    about approval.
+    We'll accept approval of manifest URLs if they're using secure transports.
+    This way the user can say they trust the manifest hoster.  For insecure
+    hosts, we fall back to checking the hash of the hooks repo.
 
     Note that we ask permission for each individual hook even though we use
     the hash of all hooks when detecting changes.  We'd like the user to be
@@ -402,44 +436,58 @@
       HookError: Raised if the user doesn't approve and abort_if_user_denies
           was passed to the consturctor.
     """
-    hooks_config = self._hooks_project.config
-    git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
+    if self._ManifestUrlHasSecureScheme():
+      return self._CheckForHookApprovalManifest()
+    else:
+      return self._CheckForHookApprovalHash()
+
+  def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
+                                  changed_prompt):
+    """Check for approval for a particular attribute and hook.
 
-    # Get the last hash that the user approved for this hook; may be None.
-    old_hash = hooks_config.GetString(git_approval_key)
+    Args:
+      subkey: The git config key under [repo.hooks.<hook_type>] to store the
+          last approved string.
+      new_val: The new value to compare against the last approved one.
+      main_prompt: Message to display to the user to ask for approval.
+      changed_prompt: Message explaining why we're re-asking for approval.
 
-    # Get the current hash so we can tell if scripts changed since approval.
-    new_hash = self._GetHash()
+    Returns:
+      True if this hook is approved to run; False otherwise.
 
-    if old_hash is not None:
+    Raises:
+      HookError: Raised if the user doesn't approve and abort_if_user_denies
+          was passed to the consturctor.
+    """
+    hooks_config = self._hooks_project.config
+    git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
+
+    # Get the last value that the user approved for this hook; may be None.
+    old_val = hooks_config.GetString(git_approval_key)
+
+    if old_val is not None:
       # User previously approved hook and asked not to be prompted again.
-      if new_hash == old_hash:
+      if new_val == old_val:
         # Approval matched.  We're done.
         return True
       else:
         # Give the user a reason why we're prompting, since they last told
         # us to "never ask again".
-        prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
-            self._hook_type)
+        prompt = 'WARNING: %s\n\n' % (changed_prompt,)
     else:
       prompt = ''
 
     # Prompt the user if we're not on a tty; on a tty we'll assume "no".
     if sys.stdout.isatty():
-      prompt += ('Repo %s run the script:\n'
-                 '  %s\n'
-                 '\n'
-                 'Do you want to allow this script to run '
-                 '(yes/yes-never-ask-again/NO)? ') % (
-                 self._GetMustVerb(), self._script_fullpath)
+      prompt += main_prompt + ' (yes/always/NO)? '
       response = input(prompt).lower()
       print()
 
       # User is doing a one-time approval.
       if response in ('y', 'yes'):
         return True
-      elif response == 'yes-never-ask-again':
-        hooks_config.SetString(git_approval_key, new_hash)
+      elif response == 'always':
+        hooks_config.SetString(git_approval_key, new_val)
         return True
 
     # For anything else, we'll assume no approval.
@@ -449,6 +497,40 @@
 
     return False
 
+  def _ManifestUrlHasSecureScheme(self):
+    """Check if the URI for the manifest is a secure transport."""
+    secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
+    parse_results = urllib.parse.urlparse(self._manifest_url)
+    return parse_results.scheme in secure_schemes
+
+  def _CheckForHookApprovalManifest(self):
+    """Check whether the user has approved this manifest host.
+
+    Returns:
+      True if this hook is approved to run; False otherwise.
+    """
+    return self._CheckForHookApprovalHelper(
+        'approvedmanifest',
+        self._manifest_url,
+        'Run hook scripts from %s' % (self._manifest_url,),
+        'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
+
+  def _CheckForHookApprovalHash(self):
+    """Check whether the user has approved the hooks repo.
+
+    Returns:
+      True if this hook is approved to run; False otherwise.
+    """
+    prompt = ('Repo %s run the script:\n'
+              '  %s\n'
+              '\n'
+              'Do you want to allow this script to run')
+    return self._CheckForHookApprovalHelper(
+        'approvedhash',
+        self._GetHash(),
+        prompt % (self._GetMustVerb(), self._script_fullpath),
+        'Scripts have changed since %s was allowed.' % (self._hook_type,))
+
   def _ExecuteHook(self, **kwargs):
     """Actually execute the given hook.
 
@@ -475,19 +557,18 @@
 
       # Exec, storing global context in the context dict.  We catch exceptions
       # and  convert to a HookError w/ just the failing traceback.
-      context = {}
+      context = {'__file__': self._script_fullpath}
       try:
         exec(compile(open(self._script_fullpath).read(),
                      self._script_fullpath, 'exec'), context)
       except Exception:
-        raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
-                        traceback.format_exc(), self._hook_type))
+        raise HookError('%s\nFailed to import %s hook; see traceback above.' %
+                        (traceback.format_exc(), self._hook_type))
 
       # Running the script should have defined a main() function.
       if 'main' not in context:
         raise HookError('Missing main() in: "%s"' % self._script_fullpath)
 
-
       # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
       # We don't actually want hooks to define their main with this argument--
       # it's there to remind them that their hook should always take **kwargs.
@@ -505,8 +586,8 @@
         context['main'](**kwargs)
       except Exception:
         raise HookError('%s\nFailed to run main() for %s hook; see traceback '
-                        'above.' % (
-                        traceback.format_exc(), self._hook_type))
+                        'above.' % (traceback.format_exc(),
+                                    self._hook_type))
     finally:
       # Restore sys.path and CWD.
       sys.path = orig_syspath
@@ -530,8 +611,8 @@
           to run a required hook (from _CheckForHookApproval).
     """
     # No-op if there is no hooks project or if hook is disabled.
-    if ((not self._hooks_project) or
-        (self._hook_type not in self._hooks_project.enabled_repo_hooks)):
+    if ((not self._hooks_project) or (self._hook_type not in
+                                      self._hooks_project.enabled_repo_hooks)):
       return
 
     # Bail with a nice error if we can't find the hook.
@@ -553,6 +634,7 @@
   # These objects can only be used by a single working tree.
   working_tree_files = ['config', 'packed-refs', 'shallow']
   working_tree_dirs = ['logs', 'refs']
+
   def __init__(self,
                manifest,
                name,
@@ -607,15 +689,15 @@
     self.gitdir = gitdir.replace('\\', '/')
     self.objdir = objdir.replace('\\', '/')
     if worktree:
-      self.worktree = worktree.replace('\\', '/')
+      self.worktree = os.path.normpath(worktree.replace('\\', '/'))
     else:
       self.worktree = None
     self.relpath = relpath
     self.revisionExpr = revisionExpr
 
-    if   revisionId is None \
-     and revisionExpr \
-     and IsId(revisionExpr):
+    if revisionId is None \
+            and revisionExpr \
+            and IsId(revisionExpr):
       self.revisionId = revisionExpr
     else:
       self.revisionId = revisionId
@@ -635,9 +717,8 @@
     self.copyfiles = []
     self.linkfiles = []
     self.annotations = []
-    self.config = GitConfig.ForRepository(
-                    gitdir=self.gitdir,
-                    defaults=self.manifest.globalConfig)
+    self.config = GitConfig.ForRepository(gitdir=self.gitdir,
+                                          defaults=self.manifest.globalConfig)
 
     if self.worktree:
       self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
@@ -776,7 +857,7 @@
     """
     expanded_manifest_groups = manifest_groups or ['default']
     expanded_project_groups = ['all'] + (self.groups or [])
-    if not 'notdefault' in expanded_project_groups:
+    if 'notdefault' not in expanded_project_groups:
       expanded_project_groups += ['default']
 
     matched = False
@@ -788,7 +869,7 @@
 
     return matched
 
-## Status Display ##
+# Status Display ##
   def UncommitedFiles(self, get_all=True):
     """Returns a list of strings, uncommitted files in the git tree.
 
@@ -840,7 +921,7 @@
       output: If specified, redirect the output to this object.
     """
     if not os.path.isdir(self.worktree):
-      if output_redir == None:
+      if output_redir is None:
         output_redir = sys.stdout
       print(file=output_redir)
       print('project %s/' % self.relpath, file=output_redir)
@@ -859,7 +940,7 @@
       return 'CLEAN'
 
     out = StatusColoring(self.config)
-    if not output_redir == None:
+    if output_redir is not None:
       out.redirect(output_redir)
     out.project('project %-40s', self.relpath + '/ ')
 
@@ -902,7 +983,7 @@
 
       if i and i.src_path:
         line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
-                                        i.src_path, p, i.level)
+                                           i.src_path, p, i.level)
       else:
         line = ' %s%s\t%s' % (i_status, f_status, p)
 
@@ -945,7 +1026,7 @@
     p.Wait()
 
 
-## Publish / Upload ##
+# Publish / Upload ##
 
   def WasPublished(self, branch, all_refs=None):
     """Was the branch published (uploaded) for code review?
@@ -1088,7 +1169,6 @@
                             message=msg)
 
 
-## Sync ##
   def __FetchLfsObjects(self, name, refs):
     if 'refs/heads/*' in refs:
       refs = []
@@ -1114,6 +1194,7 @@
       except OSError:
         pass
       _lwrite(ref, '%s\n' % revid)
+# Sync ##
 
   def _ExtractArchive(self, tarpath, path=None):
     """Extract the given tar on its current location
@@ -1131,15 +1212,15 @@
     return False
 
   def Sync_NetworkHalf(self,
-      quiet=False,
-      is_new=None,
-      current_branch_only=False,
-      force_sync=False,
-      clone_bundle=True,
-      no_tags=False,
-      archive=False,
-      optimized_fetch=False,
-      prune=False):
+                       quiet=False,
+                       is_new=None,
+                       current_branch_only=False,
+                       force_sync=False,
+                       clone_bundle=True,
+                       no_tags=False,
+                       archive=False,
+                       optimized_fetch=False,
+                       prune=False):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
@@ -1192,8 +1273,8 @@
       alt_dir = None
 
     if clone_bundle \
-    and alt_dir is None \
-    and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
+            and alt_dir is None \
+            and self._ApplyCloneBundle(initial=is_new, quiet=quiet):
       is_new = False
 
     if not current_branch_only:
@@ -1205,12 +1286,13 @@
       elif self.manifest.default.sync_c:
         current_branch_only = True
 
-    need_to_fetch = not (optimized_fetch and \
-      (ID_RE.match(self.revisionExpr) and self._CheckForSha1()))
-    if (need_to_fetch
-        and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
-                                  current_branch_only=current_branch_only,
-                                  no_tags=no_tags, prune=prune)):
+    need_to_fetch = not (optimized_fetch and
+                         (ID_RE.match(self.revisionExpr) and
+                          self._CheckForSha1()))
+    if (need_to_fetch and
+        not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
+                              current_branch_only=current_branch_only,
+                              no_tags=no_tags, prune=prune)):
       return False
 
     if self.worktree:
@@ -1276,9 +1358,8 @@
     try:
       return self.bare_git.rev_list(self.revisionExpr, '-1')[0]
     except GitError:
-      raise ManifestInvalidRevisionError(
-        'revision %s in %s not found' % (self.revisionExpr,
-                                         self.name))
+      raise ManifestInvalidRevisionError('revision %s in %s not found' %
+                                         (self.revisionExpr, self.name))
 
   def GetRevisionId(self, all_refs=None):
     if self.revisionId:
@@ -1293,9 +1374,8 @@
     try:
       return self.bare_git.rev_parse('--verify', '%s^0' % rev)
     except GitError:
-      raise ManifestInvalidRevisionError(
-        'revision %s in %s not found' % (self.revisionExpr,
-                                         self.name))
+      raise ManifestInvalidRevisionError('revision %s in %s not found' %
+                                         (self.revisionExpr, self.name))
 
   def Sync_LocalHalf(self, syncbuf, force_sync=False):
     """Perform only the local IO portion of the sync process.
@@ -1384,8 +1464,8 @@
           # to rewrite the published commits so we punt.
           #
           syncbuf.fail(self,
-                       "branch %s is published (but not merged) and is now %d commits behind"
-                       % (branch.name, len(upstream_gain)))
+                       "branch %s is published (but not merged) and is now "
+                       "%d commits behind" % (branch.name, len(upstream_gain)))
         return
       elif pub == head:
         # All published commits are merged, and thus we are a
@@ -1479,7 +1559,7 @@
     remote = self.GetRemote(self.remote.name)
 
     cmd = ['fetch', remote.name]
-    cmd.append('refs/changes/%2.2d/%d/%d' \
+    cmd.append('refs/changes/%2.2d/%d/%d'
                % (change_id % 100, change_id, patch_id))
     if GitCommand(self, cmd, bare=True).Wait() != 0:
       return None
@@ -1490,7 +1570,7 @@
                             self.bare_git.rev_parse('FETCH_HEAD'))
 
 
-## Branch Management ##
+# Branch Management ##
 
   def StartBranch(self, name, branch_merge=''):
     """Create a new branch off the manifest's revision.
@@ -1680,10 +1760,11 @@
     return kept
 
 
-## Submodule Management ##
+# Submodule Management ##
 
   def GetRegisteredSubprojects(self):
     result = []
+
     def rec(subprojects):
       if not subprojects:
         return
@@ -1718,6 +1799,7 @@
 
     re_path = re.compile(r'^submodule\.([^.]+)\.path=(.*)$')
     re_url = re.compile(r'^submodule\.([^.]+)\.url=(.*)$')
+
     def parse_gitmodules(gitdir, rev):
       cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
       try:
@@ -1805,6 +1887,7 @@
 
       remote = RemoteSpec(self.remote.name,
                           url=url,
+                          pushUrl=self.remote.pushUrl,
                           review=self.remote.review,
                           revision=self.remote.revision)
       subproject = Project(manifest=self.manifest,
@@ -1814,7 +1897,7 @@
                            objdir=objdir,
                            worktree=worktree,
                            relpath=relpath,
-                           revisionExpr=self.revisionExpr,
+                           revisionExpr=rev,
                            revisionId=rev,
                            rebase=self.rebase,
                            groups=self.groups,
@@ -1827,7 +1910,7 @@
     return result
 
 
-## Direct Git Commands ##
+# Direct Git Commands ##
   def _CheckForSha1(self):
     try:
       # if revision (sha or tag) is not present then following function
@@ -1851,7 +1934,6 @@
     if command.Wait() != 0:
       raise GitError('git archive %s: %s' % (self.name, command.stderr))
 
-
   def _RemoteFetch(self, name=None,
                    current_branch_only=False,
                    initial=False,
@@ -1898,7 +1980,10 @@
         #   will fail.
         # * otherwise, fetch all branches to make sure we end up with the
         #   specific commit.
-        current_branch_only = self.upstream and not ID_RE.match(self.upstream)
+        if self.upstream:
+          current_branch_only = not ID_RE.match(self.upstream)
+        else:
+          current_branch_only = False
 
     if not name:
       name = self.remote.name
@@ -2019,9 +2104,9 @@
           break
         continue
       elif current_branch_only and is_sha1 and ret == 128:
-        # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
-        # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
-        # abort the optimization attempt and do a full sync.
+        # Exit code 128 means "couldn't find the ref you asked for"; if we're
+        # in sha1 mode, we just tried sync'ing from the upstream field; it
+        # doesn't exist, thus abort the optimization attempt and do a full sync.
         break
       elif ret < 0:
         # Git died with a signal, exit immediately
@@ -2048,21 +2133,25 @@
                                    initial=False, quiet=quiet, alt_dir=alt_dir)
         if self.clone_depth:
           self.clone_depth = None
-          return self._RemoteFetch(name=name, current_branch_only=current_branch_only,
+          return self._RemoteFetch(name=name,
+                                   current_branch_only=current_branch_only,
                                    initial=False, quiet=quiet, alt_dir=alt_dir)
     if self.lfs_fetch:
       self.__FetchLfsObjects(name, refs)
     return ok
 
   def _ApplyCloneBundle(self, initial=False, quiet=False):
-    if initial and (self.manifest.manifestProject.config.GetString('repo.depth') or self.clone_depth):
+    if initial and \
+        (self.manifest.manifestProject.config.GetString('repo.depth') or
+         self.clone_depth):
       return False
 
     remote = self.GetRemote(self.remote.name)
     bundle_url = remote.url + '/clone.bundle'
     bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
-    if GetSchemeFromUrl(bundle_url) not in (
-        'http', 'https', 'persistent-http', 'persistent-https'):
+    if GetSchemeFromUrl(bundle_url) not in ('http', 'https',
+                                            'persistent-http',
+                                            'persistent-https'):
       return False
 
     bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
@@ -2111,7 +2200,7 @@
         os.remove(tmpPath)
     if 'http_proxy' in os.environ and 'darwin' == sys.platform:
       cmd += ['--proxy', os.environ['http_proxy']]
-    with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
+    with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, _proxy):
       if cookiefile:
         cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
       if srcUrl.startswith('persistent-'):
@@ -2230,11 +2319,12 @@
           self._CheckDirReference(self.objdir, self.gitdir, share_refs=False)
         except GitError as e:
           if force_sync:
-            print("Retrying clone after deleting %s" % self.gitdir, file=sys.stderr)
+            print("Retrying clone after deleting %s" %
+                  self.gitdir, file=sys.stderr)
             try:
               shutil.rmtree(os.path.realpath(self.gitdir))
-              if self.worktree and os.path.exists(
-                  os.path.realpath(self.worktree)):
+              if self.worktree and os.path.exists(os.path.realpath
+                                                  (self.worktree)):
                 shutil.rmtree(os.path.realpath(self.worktree))
               return self._InitGitDir(mirror_git=mirror_git, force_sync=False)
             except:
@@ -2269,7 +2359,7 @@
         m = self.manifest.manifestProject.config
         for key in ['user.name', 'user.email']:
           if m.Has(key, include_defaults=False):
-            self.config.SetString(key, m.GetString(key))
+            self.config.SetString(key, m.GetString(key))
         if self.manifest.IsMirror:
           self.config.SetString('core.bare', 'true')
         else:
@@ -2299,7 +2389,7 @@
       name = os.path.basename(stock_hook)
 
       if name in ('commit-msg',) and not self.remote.review \
-            and not self is self.manifest.manifestProject:
+              and self is not self.manifest.manifestProject:
         # Don't install a Gerrit Code Review hook if this
         # project does not appear to use it for reviews.
         #
@@ -2314,7 +2404,8 @@
         if filecmp.cmp(stock_hook, dst, shallow=False):
           os.remove(dst)
         else:
-          _warn("%s: Not replacing locally modified %s hook", self.relpath, name)
+          _warn("%s: Not replacing locally modified %s hook",
+                self.relpath, name)
           continue
       try:
         os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
@@ -2328,6 +2419,7 @@
     if self.remote.url:
       remote = self.GetRemote(self.remote.name)
       remote.url = self.remote.url
+      remote.pushUrl = self.remote.pushUrl
       remote.review = self.remote.review
       remote.projectname = self.name
 
@@ -2360,8 +2452,8 @@
         self.bare_git.symbolic_ref('-m', msg, ref, dst)
 
   def _CheckDirReference(self, srcdir, destdir, share_refs):
-    symlink_files = self.shareable_files
-    symlink_dirs = self.shareable_dirs
+    symlink_files = self.shareable_files[:]
+    symlink_dirs = self.shareable_dirs[:]
     if share_refs:
       symlink_files += self.working_tree_files
       symlink_dirs += self.working_tree_dirs
@@ -2389,8 +2481,8 @@
       copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
           This saves you the effort of initializing |dotgit| yourself.
     """
-    symlink_files = self.shareable_files
-    symlink_dirs = self.shareable_dirs
+    symlink_files = self.shareable_files[:]
+    symlink_dirs = self.shareable_dirs[:]
     if share_refs:
       symlink_files += self.working_tree_files
       symlink_dirs += self.working_tree_dirs
@@ -2482,7 +2574,7 @@
   def _allrefs(self):
     return self.bare_ref.all
 
-  def _getLogs(self, rev1, rev2, oneline=False, color=True):
+  def _getLogs(self, rev1, rev2, oneline=False, color=True, pretty_format=None):
     """Get logs between two revisions of this project."""
     comp = '..'
     if rev1:
@@ -2493,6 +2585,8 @@
       out = DiffColoring(self.config)
       if out.is_on and color:
         cmd.append('--color')
+      if pretty_format is not None:
+        cmd.append('--pretty=format:%s' % pretty_format)
       if oneline:
         cmd.append('--oneline')
 
@@ -2509,17 +2603,21 @@
           raise
     return None
 
-  def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True):
+  def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True,
+                             pretty_format=None):
     """Get the list of logs from this revision to given revisionId"""
     logs = {}
     selfId = self.GetRevisionId(self._allrefs)
     toId = toProject.GetRevisionId(toProject._allrefs)
 
-    logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color)
-    logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color)
+    logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color,
+                                  pretty_format=pretty_format)
+    logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color,
+                                    pretty_format=pretty_format)
     return logs
 
   class _GitGetByExec(object):
+
     def __init__(self, project, bare, gitdir):
       self._project = project
       self._bare = bare
@@ -2538,8 +2636,8 @@
       if p.Wait() == 0:
         out = p.stdout
         if out:
+          # Backslash is not anomalous
           return out[:-1].split('\0')  # pylint: disable=W1401
-                                       # Backslash is not anomalous
       return []
 
     def DiffZ(self, name, *args):
@@ -2565,6 +2663,7 @@
               break
 
             class _Info(object):
+
               def __init__(self, path, omode, nmode, oid, nid, state):
                 self.path = path
                 self.src_path = None
@@ -2667,10 +2766,8 @@
           line = line[:-1]
         r.append(line)
       if p.Wait() != 0:
-        raise GitError('%s rev-list %s: %s' % (
-                       self._project.name,
-                       str(args),
-                       p.stderr))
+        raise GitError('%s rev-list %s: %s' %
+                       (self._project.name, str(args), p.stderr))
       return r
 
     def __getattr__(self, name):
@@ -2693,6 +2790,7 @@
         A callable object that will try to call git with the named command.
       """
       name = name.replace('_', '-')
+
       def runner(*args, **kwargs):
         cmdv = []
         config = kwargs.pop('config', None)
@@ -2715,10 +2813,8 @@
                        capture_stdout=True,
                        capture_stderr=True)
         if p.Wait() != 0:
-          raise GitError('%s %s: %s' % (
-                         self._project.name,
-                         name,
-                         p.stderr))
+          raise GitError('%s %s: %s' %
+                         (self._project.name, name, p.stderr))
         r = p.stdout
         try:
           r = r.decode('utf-8')
@@ -2731,14 +2827,19 @@
 
 
 class _PriorSyncFailedError(Exception):
+
   def __str__(self):
     return 'prior sync failed; rebase still in progress'
 
+
 class _DirtyError(Exception):
+
   def __str__(self):
     return 'contains uncommitted changes'
 
+
 class _InfoMessage(object):
+
   def __init__(self, project, text):
     self.project = project
     self.text = text
@@ -2747,7 +2848,9 @@
     syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
     syncbuf.out.nl()
 
+
 class _Failure(object):
+
   def __init__(self, project, why):
     self.project = project
     self.why = why
@@ -2758,7 +2861,9 @@
                      str(self.why))
     syncbuf.out.nl()
 
+
 class _Later(object):
+
   def __init__(self, project, action):
     self.project = project
     self.action = action
@@ -2775,14 +2880,18 @@
       out.nl()
       return False
 
+
 class _SyncColoring(Coloring):
+
   def __init__(self, config):
     Coloring.__init__(self, config, 'reposync')
     self.project = self.printer('header', attr='bold')
     self.info = self.printer('info')
     self.fail = self.printer('fail', fg='red')
 
+
 class SyncBuffer(object):
+
   def __init__(self, config, detach_head=False):
     self._messages = []
     self._failures = []
@@ -2838,8 +2947,10 @@
 
 
 class MetaProject(Project):
+
   """A special project housed under .repo.
   """
+
   def __init__(self, manifest, name, gitdir, worktree):
     Project.__init__(self,
                      manifest=manifest,
@@ -2873,10 +2984,9 @@
     syncbuf.Finish()
 
     return GitCommand(self,
-                        ['update-ref', '-d', 'refs/heads/default'],
-                        capture_stdout=True,
-                        capture_stderr=True).Wait() == 0
-
+                      ['update-ref', '-d', 'refs/heads/default'],
+                      capture_stdout=True,
+                      capture_stderr=True).Wait() == 0
 
   @property
   def LastFetch(self):
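
Note on the hunks above that slice shareable_files/shareable_dirs: the [:] copy matters because += on an aliased list mutates it in place, so without the copy every call that shares refs would keep appending the working-tree entries to the project's own list. A minimal sketch of the failure mode, using a hypothetical stand-in class rather than the real Project:

    class Demo(object):
        def __init__(self):
            self.shareable_files = ['description', 'info']

        def check(self, share_refs, use_copy):
            files = self.shareable_files[:] if use_copy else self.shareable_files
            if share_refs:
                files += ['HEAD']  # with no copy, += grows self.shareable_files itself
            return files

    a = Demo()
    a.check(share_refs=True, use_copy=False)
    print(a.shareable_files)  # ['description', 'info', 'HEAD'] -- the aliased list was mutated
    b = Demo()
    b.check(share_refs=True, use_copy=True)
    print(b.shareable_files)  # ['description', 'info'] -- the copy absorbed the +=
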
diff --git a/repo b/repo
index 4721174..acaa9c4 100755
--- a/repo
+++ b/repo
@@ -23,10 +23,13 @@
 # limitations under the License.
 
 # increment this whenever we make important changes to this script
-VERSION = (1, 22)
+VERSION = (1, 23)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1, 2)
+
+# Each individual key entry is created by using:
+# gpg --armor --export keyid
 MAINTAINER_KEYS = """
 
      Repo Maintainer <repo@android.kernel.org>
@@ -196,6 +199,9 @@
                  help='restrict manifest projects to ones with a specified '
                       'platform group [auto|all|none|linux|darwin|...]',
                  metavar='PLATFORM')
+group.add_option('--no-clone-bundle',
+                 dest='no_clone_bundle', action='store_true',
+                 help='disable use of /clone.bundle on HTTP/HTTPS')
 
 
 # Tool
@@ -339,7 +345,7 @@
       can_verify = True
 
     dst = os.path.abspath(os.path.join(repodir, S_repo))
-    _Clone(url, dst, opt.quiet)
+    _Clone(url, dst, opt.quiet, not opt.no_clone_bundle)
 
     if can_verify and not opt.no_repo_verify:
       rev = _Verify(dst, branch, opt.quiet)
@@ -432,7 +438,10 @@
       sys.exit(1)
 
   env = os.environ.copy()
-  env['GNUPGHOME'] = gpg_dir.encode()
+  try:
+    env['GNUPGHOME'] = gpg_dir
+  except UnicodeEncodeError:
+    env['GNUPGHOME'] = gpg_dir.encode()
 
   cmd = ['gpg', '--import']
   try:
@@ -543,7 +552,7 @@
     try:
       r = urllib.request.urlopen(url)
     except urllib.error.HTTPError as e:
-      if e.code in [401, 403, 404]:
+      if e.code in [401, 403, 404, 501]:
         return False
       _print('fatal: Cannot get %s' % url, file=sys.stderr)
       _print('fatal: HTTP error %s' % e.code, file=sys.stderr)
@@ -574,7 +583,7 @@
     os.remove(path)
 
 
-def _Clone(url, local, quiet):
+def _Clone(url, local, quiet, clone_bundle):
   """Clones a git repository to a new subdirectory of repodir
   """
   try:
@@ -604,7 +613,7 @@
   _SetConfig(local,
              'remote.origin.fetch',
              '+refs/heads/*:refs/remotes/origin/*')
-  if _DownloadBundle(url, local, quiet):
+  if clone_bundle and _DownloadBundle(url, local, quiet):
     _ImportBundle(local)
   _Fetch(url, local, 'origin', quiet)
 
@@ -638,7 +647,10 @@
       _print(file=sys.stderr)
 
   env = os.environ.copy()
-  env['GNUPGHOME'] = gpg_dir.encode()
+  try:
+    env['GNUPGHOME'] = gpg_dir
+  except UnicodeEncodeError:
+    env['GNUPGHOME'] = gpg_dir.encode()
 
   cmd = [GIT, 'tag', '-v', cur]
   proc = subprocess.Popen(cmd,
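
On the clone.bundle probe above (_DownloadBundle): 501 joins 401/403/404 as a "no bundle offered here" answer rather than a fatal error, so a host that replies Not Implemented simply falls back to a normal git fetch. A minimal sketch of that decision, with a hypothetical helper name and Python 3 urllib spelling:

    import urllib.error
    import urllib.request

    def has_clone_bundle(base_url):
        # Probe $URL/clone.bundle; these codes just mean "not available" and the
        # caller falls back to a plain fetch; anything else is a real error.
        try:
            urllib.request.urlopen(base_url + '/clone.bundle')
        except urllib.error.HTTPError as e:
            if e.code in (401, 403, 404, 501):
                return False
            raise
        return True
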
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 0599868..751a202 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -71,6 +71,10 @@
     p.add_option('--no-color',
                  dest='color', action='store_false', default=True,
                  help='does not display the diff in color.')
+    p.add_option('--pretty-format',
+                 dest='pretty_format', action='store',
+                 metavar='<FORMAT>',
+                 help='print the log using a custom git pretty format string')
 
   def _printRawDiff(self, diff):
     for project in diff['added']:
@@ -92,7 +96,7 @@
                                      otherProject.revisionExpr))
       self.out.nl()
 
-  def _printDiff(self, diff, color=True):
+  def _printDiff(self, diff, color=True, pretty_format=None):
     if diff['added']:
       self.out.nl()
       self.printText('added projects : \n')
@@ -124,7 +128,8 @@
         self.printText(' to ')
         self.printRevision(otherProject.revisionExpr)
         self.out.nl()
-        self._printLogs(project, otherProject, raw=False, color=color)
+        self._printLogs(project, otherProject, raw=False, color=color,
+                        pretty_format=pretty_format)
         self.out.nl()
 
     if diff['unreachable']:
@@ -139,9 +144,13 @@
         self.printText(' not found')
         self.out.nl()
 
-  def _printLogs(self, project, otherProject, raw=False, color=True):
-    logs = project.getAddedAndRemovedLogs(otherProject, oneline=True,
-                                          color=color)
+  def _printLogs(self, project, otherProject, raw=False, color=True,
+                 pretty_format=None):
+
+    logs = project.getAddedAndRemovedLogs(otherProject,
+                                          oneline=(pretty_format is None),
+                                          color=color,
+                                          pretty_format=pretty_format)
     if logs['removed']:
       removedLogs = logs['removed'].split('\n')
       for log in removedLogs:
@@ -192,4 +201,4 @@
     if opt.raw:
       self._printRawDiff(diff)
     else:
-      self._printDiff(diff, color=opt.color)
+      self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
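
The new --pretty-format value travels from _printDiff through _printLogs into Project._getLogs, where it becomes a --pretty=format: argument and suppresses --oneline (oneline is only used when no format is given). A simplified sketch of the command assembly, using a hypothetical helper rather than the real _getLogs:

    def build_log_cmd(rev1, rev2, pretty_format=None, color=True):
        cmd = ['log', '%s..%s' % (rev1, rev2)]
        if color:
            cmd.append('--color')
        if pretty_format is not None:
            cmd.append('--pretty=format:%s' % pretty_format)
        else:
            cmd.append('--oneline')
        return cmd

    # e.g.:  repo diffmanifests old.xml new.xml --pretty-format="%h %aN: %s"
    print(build_log_cmd('abc123', 'def456', pretty_format='%h %aN: %s'))
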
diff --git a/subcmds/forall.py b/subcmds/forall.py
index b10f34b..07ee8d5 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -120,6 +120,9 @@
     p.add_option('-r', '--regex',
                  dest='regex', action='store_true',
                  help="Execute the command only on projects matching regex or wildcard expression")
+    p.add_option('-i', '--inverse-regex',
+                 dest='inverse_regex', action='store_true',
+                 help="Execute the command only on projects not matching regex or wildcard expression")
     p.add_option('-g', '--groups',
                  dest='groups',
                  help="Execute the command only on projects matching the specified groups")
@@ -215,10 +218,12 @@
     if os.path.isfile(smart_sync_manifest_path):
       self.manifest.Override(smart_sync_manifest_path)
 
-    if not opt.regex:
-      projects = self.GetProjects(args, groups=opt.groups)
-    else:
+    if opt.regex:
       projects = self.FindProjects(args)
+    elif opt.inverse_regex:
+      projects = self.FindProjects(args, inverse=True)
+    else:
+      projects = self.GetProjects(args, groups=opt.groups)
 
     os.environ['REPO_COUNT'] = str(len(projects))
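
Project selection in forall now has three branches: --regex keeps matching projects, the new --inverse-regex keeps the non-matching ones via FindProjects(args, inverse=True), and otherwise the group filter applies. A minimal sketch of the inverse selection over project names (hypothetical helper, not the real FindProjects):

    import re

    def select_projects(names, patterns, inverse=False):
        regexes = [re.compile(p) for p in patterns]
        def hit(name):
            return any(r.search(name) for r in regexes)
        # Normal mode keeps matches; inverse mode keeps everything else.
        return [n for n in names if hit(n) != inverse]

    # e.g.:  repo forall -i '^external/' -c 'git status --short'
    print(select_projects(['external/zlib', 'frameworks/base'], ['^external/'], inverse=True))
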
 
diff --git a/subcmds/init.py b/subcmds/init.py
index b8e3de5..45d69b7 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -61,6 +61,11 @@
 directory when fetching from the server. This will make the sync
 go a lot faster by reducing data traffic on the network.
 
+The --no-clone-bundle option disables any attempt to use
+$URL/clone.bundle to bootstrap a new Git repository from a
+resumeable bundle file on a content delivery network. This
+may be necessary if there are problems with the local Python
+HTTP client or proxy configuration, but the Git binary works.
 
 Switching Manifest Branches
 ---------------------------
@@ -113,6 +118,9 @@
                  help='restrict manifest projects to ones with a specified '
                       'platform group [auto|all|none|linux|darwin|...]',
                  metavar='PLATFORM')
+    g.add_option('--no-clone-bundle',
+                 dest='no_clone_bundle', action='store_true',
+                 help='disable use of /clone.bundle on HTTP/HTTPS')
 
     # Tool
     g = p.add_option_group('repo Version options')
@@ -222,7 +230,8 @@
               'in another location.', file=sys.stderr)
         sys.exit(1)
 
-    if not m.Sync_NetworkHalf(is_new=is_new):
+    if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
+        clone_bundle=not opt.no_clone_bundle):
       r = m.GetRemote(m.remote.name)
       print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
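
With the hunk above, the flag reaches the manifest project as a keyword argument, clone_bundle=not opt.no_clone_bundle. A hedged usage example, with a placeholder manifest URL:

    repo init -u https://example.com/manifest.git --no-clone-bundle

skips the $URL/clone.bundle probe for the manifest checkout; the matching option added to the repo launcher earlier covers the bootstrap clone of repo itself.
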
 
diff --git a/subcmds/start.py b/subcmds/start.py
index d1430a9..290b689 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -54,8 +54,7 @@
     if not opt.all:
       projects = args[1:]
       if len(projects) < 1:
-        print("error: at least one project must be specified", file=sys.stderr)
-        sys.exit(1)
+        projects = ['.',]  # start it in the local project by default
 
     all_projects = self.GetProjects(projects,
                                     missing_ok=bool(self.gitc_manifest))
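
With this change, running start without naming projects no longer aborts; the ['.'] default scopes the new branch to the project containing the current directory. A hedged usage example (branch name is a placeholder):

    repo start my-topic          # branch only the project you are standing in
    repo start --all my-topic    # branch every project, as before

The --all path is untouched, so scripts that relied on it keep working.
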
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 6c2f320..138eaf2 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -244,7 +244,7 @@
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
-                   help='smart sync using manifest from a known good build')
+                   help='smart sync using manifest from the latest known good build')
       p.add_option('-t', '--smart-tag',
                    dest='smart_tag', action='store',
                    help='smart sync using manifest from a known tag')
@@ -402,9 +402,12 @@
     return fetched
 
   def _GCProjects(self, projects):
-    gitdirs = {}
+    gc_gitdirs = {}
     for project in projects:
-      gitdirs[project.gitdir] = project.bare_git
+      if len(project.manifest.GetProjectsWithName(project.name)) > 1:
+        print('Shared project %s found, disabling pruning.' % project.name)
+        project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
+      gc_gitdirs[project.gitdir] = project.bare_git
 
     has_dash_c = git_require((1, 7, 2))
     if multiprocessing and has_dash_c:
@@ -414,7 +417,7 @@
     jobs = min(self.jobs, cpu_count)
 
     if jobs < 2:
-      for bare_git in gitdirs.values():
+      for bare_git in gc_gitdirs.values():
         bare_git.gc('--auto')
       return
 
@@ -436,7 +439,7 @@
       finally:
         sem.release()
 
-    for bare_git in gitdirs.values():
+    for bare_git in gc_gitdirs.values():
       if err_event.isSet():
         break
       sem.acquire()
@@ -459,6 +462,65 @@
     else:
       self.manifest._Unload()
 
+  def _DeleteProject(self, path):
+    print('Deleting obsolete path %s' % path, file=sys.stderr)
+
+    # Delete the .git directory first, so we're less likely to have a partially
+    # working git repository around. There shouldn't be any git projects here,
+    # so rmtree works.
+    try:
+      shutil.rmtree(os.path.join(path, '.git'))
+    except OSError:
+      print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr)
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Delete everything under the worktree, except for directories that contain
+    # another git project
+    dirs_to_remove = []
+    failed = False
+    for root, dirs, files in os.walk(path):
+      for f in files:
+        try:
+          os.remove(os.path.join(root, f))
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr)
+          failed = True
+      dirs[:] = [d for d in dirs
+                 if not os.path.lexists(os.path.join(root, d, '.git'))]
+      dirs_to_remove += [os.path.join(root, d) for d in dirs
+                         if os.path.join(root, d) not in dirs_to_remove]
+    for d in reversed(dirs_to_remove):
+      if os.path.islink(d):
+        try:
+          os.remove(d)
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+          failed = True
+      elif len(os.listdir(d)) == 0:
+        try:
+          os.rmdir(d)
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+          failed = True
+          continue
+    if failed:
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Try deleting parent dirs if they are empty
+    project_dir = path
+    while project_dir != self.manifest.topdir:
+      if len(os.listdir(project_dir)) == 0:
+        os.rmdir(project_dir)
+      else:
+        break
+      project_dir = os.path.dirname(project_dir)
+
+    return 0
+
   def UpdateProjectList(self):
     new_project_paths = []
     for project in self.GetProjects(None, missing_ok=True):
@@ -479,8 +541,8 @@
           continue
         if path not in new_project_paths:
           # If the path has already been deleted, we don't need to do it
-          if os.path.exists(self.manifest.topdir + '/' + path):
-            gitdir = os.path.join(self.manifest.topdir, path, '.git')
+          gitdir = os.path.join(self.manifest.topdir, path, '.git')
+          if os.path.exists(gitdir):
             project = Project(
                            manifest = self.manifest,
                            name = path,
@@ -500,18 +562,10 @@
                     file=sys.stderr)
               return -1
             else:
-              print('Deleting obsolete path %s' % project.worktree,
-                    file=sys.stderr)
-              shutil.rmtree(project.worktree)
-              # Try deleting parent subdirs if they are empty
-              project_dir = os.path.dirname(project.worktree)
-              while project_dir != self.manifest.topdir:
-                try:
-                  os.rmdir(project_dir)
-                except OSError:
-                  break
-                project_dir = os.path.dirname(project_dir)
-              project.RemoveOldCopyAndLinkFiles(os.path.join(self.manifest.repodir, 'projects', '%s.git' % path))
+              if self._DeleteProject(project.worktree) == 0:
+                project.RemoveOldCopyAndLinkFiles(os.path.join(self.manifest.repodir, 'projects', '%s.git' % path))
+              else:
+                return -1
 
     new_project_paths.sort()
     fd = open(file_path, 'w')
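
The new _DeleteProject walks the obsolete worktree itself instead of calling shutil.rmtree on the whole path, which is what lets nested checkouts survive: assigning to dirs[:] inside os.walk stops the descent into any directory that has its own .git. A minimal sketch of that pruning idiom (hypothetical helper; it only collects directories and deletes nothing):

    import os

    def removable_dirs(path):
        # Collect directories under |path| that may be deleted, skipping (and not
        # descending into) anything that contains another project's .git.
        found = []
        for root, dirs, _files in os.walk(path):
            dirs[:] = [d for d in dirs
                       if not os.path.lexists(os.path.join(root, d, '.git'))]
            found.extend(os.path.join(root, d) for d in dirs)
        # Deepest paths come last, so reversed(found) can rmdir children before parents.
        return found
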
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 674fc17..1172dad 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -454,9 +454,15 @@
       if avail:
         pending.append((project, avail))
 
-    if pending and (not opt.bypass_hooks):
+    if not pending:
+      print("no branches ready for upload", file=sys.stderr)
+      return
+
+    if not opt.bypass_hooks:
       hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
-                      self.manifest.topdir, abort_if_user_denies=True)
+                      self.manifest.topdir,
+                      self.manifest.manifestProject.GetRemote('origin').url,
+                      abort_if_user_denies=True)
       pending_proj_names = [project.name for (project, avail) in pending]
       pending_worktrees = [project.worktree for (project, avail) in pending]
       try:
@@ -472,9 +478,7 @@
       cc = _SplitEmails(opt.cc)
     people = (reviewers, cc)
 
-    if not pending:
-      print("no branches ready for upload", file=sys.stderr)
-    elif len(pending) == 1 and len(pending[0][1]) == 1:
+    if len(pending) == 1 and len(pending[0][1]) == 1:
       self._SingleBranch(opt, pending[0][1][0], people)
     else:
       self._MultipleBranches(opt, pending, people)