Merge "Add a check and more output to protect against invalid REPO_URLs"
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..45ab656
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length=80
+ignore=E111,E114,E402
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index 413d66a..0000000
--- a/.pylintrc
+++ /dev/null
@@ -1,298 +0,0 @@
-# lint Python modules using external checkers.
-#
-# This is the main checker controling the other ones and the reports
-# generation. It is itself both a raw checker and an astng checker in order
-# to:
-# * handle message activation / deactivation at the module level
-# * handle some basic but necessary stats'data (number of classes, methods...)
-#
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add <file or directory> to the black list. It should be a base name, not a
-# path. You may set this option multiple times.
-ignore=SVN
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# Set the cache size for astng objects.
-cache-size=500
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable only checker(s) with the given id(s). This option conflicts with the
-# disable-checker option
-#enable-checker=
-
-# Enable all checker(s) except those with the given id(s). This option
-# conflicts with the enable-checker option
-#disable-checker=
-
-# Enable all messages in the listed categories.
-#enable-msg-cat=
-
-# Disable all messages in the listed categories.
-#disable-msg-cat=
-
-# Enable the message(s) with the given id(s).
-enable=RP0004
-
-# Disable the message(s) with the given id(s).
-disable=C0326,R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801,F0401,E0611,R0801,I0011
-
-[REPORTS]
-
-# set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=text
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note).You have access to the variables errors warning, statement which
-# respectivly contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (R0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (R0004).
-comment=no
-
-# checks for
-# * unused variables / imports
-# * undefined variables
-# * redefinition of variable from builtins or from an outer scope
-# * use of variable before assigment
-#
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching names used for dummy variables (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-# try to find bugs in the code using type inference
-#
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamicaly set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, consider the acquired-members option to ignore
-# access to some undefined attributes.
-zope=no
-
-# List of members which are usually get through zope's acquisition mecanism and
-# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
-acquired-members=REQUEST,acl_users,aq_parent
-
-
-# checks for :
-# * doc strings
-# * modules / classes / functions / methods / arguments / variables name
-# * number of arguments, local variables, branchs, returns and statements in
-# functions, methods
-# * required module attributes
-# * dangerous default values as arguments
-# * redefinition of function / method / class
-# * uses of the global statement
-#
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=_main|__.*__
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=map,filter,apply,input
-
-
-# checks for sign of poor/misdesign:
-# * number of methods, attributes, local variables...
-# * size, complexity of functions, methods
-#
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=20
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=30
-
-
-# checks for
-# * external modules dependencies
-# * relative / wildcard imports
-# * cyclic imports
-# * uses of deprecated modules
-#
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report R0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report R0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report R0402 must
-# not be disabled)
-int-import-graph=
-
-
-# checks for :
-# * methods without self as first argument
-# * overridden methods signature
-# * access only to existant members via self
-# * attributes not defined in the __init__ method
-# * supported interfaces implementation
-# * unreachable code
-#
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-
-# checks for similarities and duplicated code. This computation may be
-# memory / CPU intensive, so you should disable it if you experiments some
-# problems.
-#
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-# checks for:
-# * warning notes in the code like FIXME, XXX
-# * PEP 263: source code with non ascii character but no encoding declaration
-#
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-# checks for :
-# * unauthorized constructions
-# * strict indentation
-# * line length
-# * use of <> instead of !=
-#
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).  In repo it is 2 spaces.
-indent-string='  '
diff --git a/SUBMITTING_PATCHES.md b/SUBMITTING_PATCHES.md
index 085ae06..07f7661 100644
--- a/SUBMITTING_PATCHES.md
+++ b/SUBMITTING_PATCHES.md
@@ -2,7 +2,7 @@
 
  - Make small logical changes.
  - Provide a meaningful commit message.
- - Check for coding errors with pylint
+ - Check for coding errors and style nits with pyflakes and flake8
  - Make sure all code is under the Apache License, 2.0.
  - Publish your changes for review.
  - Make corrections if requested.
@@ -36,12 +36,32 @@
 probably need to split up your commit to finer grained pieces.
 
 
-## Check for coding errors with pylint
+## Check for coding errors and style nits with pyflakes and flake8
 
-Run pylint on changed modules using the provided configuration:
+### Coding errors
 
-    pylint --rcfile=.pylintrc file.py
+Run `pyflakes` on changed modules:
 
+    pyflakes file.py
+
+Ideally there should be no new errors or warnings introduced.
+
+### Style violations
+
+Run `flake8` on changed modules:
+
+    flake8 file.py
+
+Note that repo generally follows [Google's python style
+guide](https://google.github.io/styleguide/pyguide.html) rather than
+[PEP 8](https://www.python.org/dev/peps/pep-0008/), so it's possible
+that the output of `flake8` will be quite noisy. It's not mandatory
+to avoid all warnings, but at least the maximum line length should
+be followed.
+
+If there are many occurrences of the same warning that cannot be
+avoided without going against the Google style guide, these may be
+suppressed in the included `.flake8` file.
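+
+As a purely hypothetical example, suppressing E731 as well would only
+take one more entry on the `ignore` line of the included `.flake8`:
+
+    [flake8]
+    max-line-length=80
+    ignore=E111,E114,E402,E731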
 
 ## Check the license
 
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index 8fd9137..2a07f19 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@
     <!ATTLIST remote name         ID    #REQUIRED>
     <!ATTLIST remote alias        CDATA #IMPLIED>
     <!ATTLIST remote fetch        CDATA #REQUIRED>
+    <!ATTLIST remote pushurl      CDATA #IMPLIED>
     <!ATTLIST remote review       CDATA #IMPLIED>
     <!ATTLIST remote revision     CDATA #IMPLIED>
 
@@ -125,6 +126,12 @@
 this remote.  Each project's name is appended to this prefix to
 form the actual URL used to clone the project.
 
+Attribute `pushurl`: The Git "push" URL prefix for all projects
+which use this remote.  Each project's name is appended to this
+prefix to form the actual URL used to "git push" the project.
+This attribute is optional; if not specified then "git push"
+will use the same URL as the `fetch` attribute.
+
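+For example, a remote may be fetched over HTTPS but pushed to over SSH
+(the URLs below are purely illustrative):
+
+    <remote name="example"
+            fetch="https://git.example.com/"
+            pushurl="ssh://git@git.example.com/"
+            review="review.example.com" />
+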
 Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`.  This attribute is optional;
 if not specified then `repo upload` will not function.
diff --git a/git_config.py b/git_config.py
index 0379181..e2236785 100644
--- a/git_config.py
+++ b/git_config.py
@@ -464,9 +464,13 @@
              % (host,port, str(e)), file=sys.stderr)
       return False
 
+    time.sleep(1)
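+    # If the master process already exited during the one second grace
+    # period, the connection failed; do not record it as a live master.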
+    ssh_died = (p.poll() is not None)
+    if ssh_died:
+      return False
+
     _master_processes.append(p)
     _master_keys.add(key)
-    time.sleep(1)
     return True
   finally:
     _master_keys_lock.release()
@@ -568,6 +572,7 @@
     self._config = config
     self.name = name
     self.url = self._Get('url')
+    self.pushUrl = self._Get('pushurl')
     self.review = self._Get('review')
     self.projectname = self._Get('projectname')
     self.fetch = list(map(RefSpec.FromString,
@@ -694,6 +699,10 @@
     """Save this remote to the configuration.
     """
     self._Set('url', self.url)
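+    # remote.<name>.pushurl is stored per project: append the project
+    # name to the configured push URL prefix, or unset the key when no
+    # pushurl was given in the manifest.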
+    if self.pushUrl is not None:
+      self._Set('pushurl', self.pushUrl + '/' + self.projectname)
+    else:
+      self._Set('pushurl', self.pushUrl)
     self._Set('review', self.review)
     self._Set('projectname', self.projectname)
     self._Set('fetch', list(map(str, self.fetch)))
diff --git a/manifest_xml.py b/manifest_xml.py
index 295493d..0859e1f 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -40,8 +40,18 @@
 LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
 
 # urljoin gets confused if the scheme is not known.
-urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
-urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_relative.extend([
+    'ssh',
+    'git',
+    'persistent-https',
+    'sso',
+    'rpc'])
+urllib.parse.uses_netloc.extend([
+    'ssh',
+    'git',
+    'persistent-https',
+    'sso',
+    'rpc'])
 
 class _Default(object):
   """Project defaults within the manifest."""
@@ -64,11 +74,13 @@
                name,
                alias=None,
                fetch=None,
+               pushUrl=None,
                manifestUrl=None,
                review=None,
                revision=None):
     self.name = name
     self.fetchUrl = fetch
+    self.pushUrl = pushUrl
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
@@ -104,6 +116,7 @@
       remoteName = self.remoteAlias
     return RemoteSpec(remoteName,
                       url=url,
+                      pushUrl=self.pushUrl,
                       review=self.reviewUrl,
                       orig_name=self.name)
 
@@ -160,6 +173,8 @@
     root.appendChild(e)
     e.setAttribute('name', r.name)
     e.setAttribute('fetch', r.fetchUrl)
+    if r.pushUrl is not None:
+      e.setAttribute('pushurl', r.pushUrl)
     if r.remoteAlias is not None:
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
@@ -639,6 +654,9 @@
     if alias == '':
       alias = None
     fetch = self._reqatt(node, 'fetch')
+    pushUrl = node.getAttribute('pushurl')
+    if pushUrl == '':
+      pushUrl = None
     review = node.getAttribute('review')
     if review == '':
       review = None
@@ -646,7 +664,7 @@
     if revision == '':
       revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
+    return _XmlRemote(name, alias, fetch, pushUrl, manifestUrl, review,
+                      revision)
 
   def _ParseDefault(self, node):
     """
diff --git a/project.py b/project.py
index 46e06bf..633ae07 100644
--- a/project.py
+++ b/project.py
@@ -320,11 +320,13 @@
   def __init__(self,
                name,
                url=None,
+               pushUrl=None,
                review=None,
                revision=None,
                orig_name=None):
     self.name = name
     self.url = url
+    self.pushUrl = pushUrl
     self.review = review
     self.revision = revision
     self.orig_name = orig_name
@@ -909,11 +911,13 @@
     else:
       return False
 
-  def PrintWorkTreeStatus(self, output_redir=None):
+  def PrintWorkTreeStatus(self, output_redir=None, quiet=False):
     """Prints the status of the repository to stdout.
 
     Args:
       output: If specified, redirect the output to this object.
+      quiet:  If True then only print the project name.  Do not print
+              the modified files, branch name, etc.
     """
     if not os.path.isdir(self.worktree):
       if output_redir is None:
@@ -939,6 +943,10 @@
       out.redirect(output_redir)
     out.project('project %-40s', self.relpath + '/ ')
 
+    if quiet:
+      out.nl()
+      return 'DIRTY'
+
     branch = self.CurrentBranch
     if branch is None:
       out.nobranch('(*** NO BRANCH ***)')
@@ -1256,13 +1264,18 @@
       elif self.manifest.default.sync_c:
         current_branch_only = True
 
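+    # A per-project clone-depth overrides the global repo.depth option
+    # (recorded by 'repo init --depth'); the result is passed down to
+    # _RemoteFetch below.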
+    if self.clone_depth:
+      depth = self.clone_depth
+    else:
+      depth = self.manifest.manifestProject.config.GetString('repo.depth')
+
     need_to_fetch = not (optimized_fetch and
                          (ID_RE.match(self.revisionExpr) and
                           self._CheckForSha1()))
     if (need_to_fetch and
         not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
                               current_branch_only=current_branch_only,
-                              no_tags=no_tags, prune=prune)):
+                              no_tags=no_tags, prune=prune, depth=depth)):
       return False
 
     if self.worktree:
@@ -1825,6 +1838,7 @@
 
       remote = RemoteSpec(self.remote.name,
                           url=url,
+                          pushUrl=self.remote.pushUrl,
                           review=self.remote.review,
                           revision=self.remote.revision)
       subproject = Project(manifest=self.manifest,
@@ -1834,7 +1848,7 @@
                            objdir=objdir,
                            worktree=worktree,
                            relpath=relpath,
-                           revisionExpr=self.revisionExpr,
+                           revisionExpr=rev,
                            revisionId=rev,
                            rebase=self.rebase,
                            groups=self.groups,
@@ -1877,23 +1891,17 @@
                    quiet=False,
                    alt_dir=None,
                    no_tags=False,
-                   prune=False):
+                   prune=False,
+                   depth=None):
 
     is_sha1 = False
     tag_name = None
-    depth = None
-
     # The depth should not be used when fetching to a mirror because
     # it will result in a shallow repository that cannot be cloned or
     # fetched from.
-    if not self.manifest.IsMirror:
-      if self.clone_depth:
-        depth = self.clone_depth
-      else:
-        depth = self.manifest.manifestProject.config.GetString('repo.depth')
-      # The repo project should never be synced with partial depth
-      if self.relpath == '.repo/repo':
-        depth = None
+    # The repo project should also never be synced with partial depth.
+    if self.manifest.IsMirror or self.relpath == '.repo/repo':
+      depth = None
 
     if depth:
       current_branch_only = True
@@ -2054,21 +2062,22 @@
           os.remove(packed_refs)
       self.bare_git.pack_refs('--all', '--prune')
 
-    if is_sha1 and current_branch_only and self.upstream:
+    if is_sha1 and current_branch_only:
       # We just synced the upstream given branch; verify we
       # got what we wanted, else trigger a second run of all
       # refs.
       if not self._CheckForSha1():
-        if not depth:
-          # Avoid infinite recursion when depth is True (since depth implies
-          # current_branch_only)
-          return self._RemoteFetch(name=name, current_branch_only=False,
-                                   initial=False, quiet=quiet, alt_dir=alt_dir)
-        if self.clone_depth:
-          self.clone_depth = None
+        if current_branch_only and depth:
+          # Sync the current branch only with depth set to None
           return self._RemoteFetch(name=name,
                                    current_branch_only=current_branch_only,
-                                   initial=False, quiet=quiet, alt_dir=alt_dir)
+                                   initial=False, quiet=quiet, alt_dir=alt_dir,
+                                   depth=None)
+        else:
+          # Avoid infinite recursion: sync all branches with depth set to None
+          return self._RemoteFetch(name=name, current_branch_only=False,
+                                   initial=False, quiet=quiet, alt_dir=alt_dir,
+                                   depth=None)
 
     return ok
 
@@ -2346,6 +2355,7 @@
     if self.remote.url:
       remote = self.GetRemote(self.remote.name)
       remote.url = self.remote.url
+      remote.pushUrl = self.remote.pushUrl
       remote.review = self.remote.review
       remote.projectname = self.name
 
@@ -2390,6 +2400,7 @@
         src = os.path.realpath(os.path.join(srcdir, name))
         # Fail if the links are pointing to the wrong place
         if src != dst:
+          _error('%s is different in %s vs %s', name, destdir, srcdir)
           raise GitError('--force-sync not enabled; cannot overwrite a local '
                          'work tree. If you\'re comfortable with the '
                          'possibility of losing the work tree\'s git metadata,'
diff --git a/repo b/repo
index 36af511..4293c79 100755
--- a/repo
+++ b/repo
@@ -27,6 +27,9 @@
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1, 2)
+
+# Each individual key entry is created by using:
+# gpg --armor --export keyid
 MAINTAINER_KEYS = """
 
      Repo Maintainer <repo@android.kernel.org>
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index b94ccdd..6f78da7 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -16,6 +16,7 @@
 from __future__ import print_function
 import sys
 from command import Command
+from collections import defaultdict
 from git_command import git
 from progress import Progress
 
@@ -23,49 +24,75 @@
   common = True
   helpSummary = "Permanently abandon a development branch"
   helpUsage = """
-%prog <branchname> [<project>...]
+%prog [--all | <branchname>] [<project>...]
 
 This subcommand permanently abandons a development branch by
 deleting it (and all its history) from your local repository.
 
 It is equivalent to "git branch -D <branchname>".
 """
+  def _Options(self, p):
+    p.add_option('--all',
+                 dest='all', action='store_true',
+                 help='delete all branches in all projects')
 
   def Execute(self, opt, args):
-    if not args:
+    if not opt.all and not args:
       self.Usage()
 
-    nb = args[0]
-    if not git.check_ref_format('heads/%s' % nb):
-      print("error: '%s' is not a valid name" % nb, file=sys.stderr)
-      sys.exit(1)
+    if not opt.all:
+      nb = args[0]
+      if not git.check_ref_format('heads/%s' % nb):
+        print("error: '%s' is not a valid name" % nb, file=sys.stderr)
+        sys.exit(1)
+    else:
+      args.insert(0, None)
+      nb = "'All local branches'"
 
-    nb = args[0]
-    err = []
-    success = []
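+    # Map each branch name to the projects where abandoning it
+    # succeeded or failed, so results can be summarized per branch.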
+    err = defaultdict(list)
+    success = defaultdict(list)
     all_projects = self.GetProjects(args[1:])
 
     pm = Progress('Abandon %s' % nb, len(all_projects))
     for project in all_projects:
       pm.update()
 
-      status = project.AbandonBranch(nb)
-      if status is not None:
-        if status:
-          success.append(project)
-        else:
-          err.append(project)
+      if opt.all:
+        branches = project.GetBranches().keys()
+      else:
+        branches = [nb]
+
+      for name in branches:
+        status = project.AbandonBranch(name)
+        if status is not None:
+          if status:
+            success[name].append(project)
+          else:
+            err[name].append(project)
     pm.end()
 
+    width = 25
+    for name in list(err.keys()) + list(success.keys()):
+      if width < len(name):
+        width = len(name)
+
     if err:
-      for p in err:
-        print("error: %s/: cannot abandon %s" % (p.relpath, nb),
-              file=sys.stderr)
+      for br in err.keys():
+        err_msg = "error: cannot abandon %s" % br
+        print(err_msg, file=sys.stderr)
+        for proj in err[br]:
+          print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
       sys.exit(1)
     elif not success:
-      print('error: no project has branch %s' % nb, file=sys.stderr)
+      print('error: no project has local branch(es): %s' % nb,
+            file=sys.stderr)
       sys.exit(1)
     else:
-      print('Abandoned in %d project(s):\n  %s'
-            % (len(success), '\n  '.join(p.relpath for p in success)),
-            file=sys.stderr)
+      print('Abandoned branches:', file=sys.stderr)
+      for br in success.keys():
+        if len(all_projects) > 1 and len(all_projects) == len(success[br]):
+          result = "all projects"
+        else:
+          result = ('\n' + ' ' * width + '| ').join(
+              p.relpath for p in success[br])
+        print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result),
+              file=sys.stderr)
diff --git a/subcmds/start.py b/subcmds/start.py
index d1430a9..290b689 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -54,8 +54,7 @@
     if not opt.all:
       projects = args[1:]
       if len(projects) < 1:
-        print("error: at least one project must be specified", file=sys.stderr)
-        sys.exit(1)
+        projects = ['.',]  # start it in the local project by default
 
     all_projects = self.GetProjects(projects,
                                     missing_ok=bool(self.gitc_manifest))
diff --git a/subcmds/status.py b/subcmds/status.py
index 38c229b..60e26ff 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -89,8 +89,10 @@
     p.add_option('-o', '--orphans',
                  dest='orphans', action='store_true',
                  help="include objects in working directory outside of repo projects")
+    p.add_option('-q', '--quiet', action='store_true',
+                 help="only print the name of modified projects")
 
-  def _StatusHelper(self, project, clean_counter, sem):
+  def _StatusHelper(self, project, clean_counter, sem, quiet):
     """Obtains the status for a specific project.
 
     Obtains the status for a project, redirecting the output to
@@ -104,7 +106,7 @@
       output: Where to output the status.
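+      quiet: If True, only print the name of modified projects.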
     """
     try:
-      state = project.PrintWorkTreeStatus()
+      state = project.PrintWorkTreeStatus(quiet=quiet)
       if state == 'CLEAN':
         next(clean_counter)
     finally:
@@ -132,7 +134,7 @@
 
     if opt.jobs == 1:
       for project in all_projects:
-        state = project.PrintWorkTreeStatus()
+        state = project.PrintWorkTreeStatus(quiet=opt.quiet)
         if state == 'CLEAN':
           next(counter)
     else:
@@ -142,13 +144,13 @@
         sem.acquire()
 
         t = _threading.Thread(target=self._StatusHelper,
-                              args=(project, counter, sem))
+                              args=(project, counter, sem, opt.quiet))
         threads.append(t)
         t.daemon = True
         t.start()
       for t in threads:
         t.join()
-    if len(all_projects) == next(counter):
+    if not opt.quiet and len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')
 
     if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9124a65..bbb166c 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -255,7 +255,7 @@
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)
 
-  def _FetchProjectList(self, opt, projects, *args, **kwargs):
+  def _FetchProjectList(self, opt, projects, sem, *args, **kwargs):
     """Main function of the fetch threads when jobs are > 1.
 
     Delegates most of the work to _FetchHelper.
@@ -263,15 +263,20 @@
     Args:
       opt: Program options returned from optparse.  See _Options().
       projects: Projects to fetch.
+      sem: We'll release() this semaphore when we exit so that another thread
+          can be started up.
       *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
           _FetchHelper docstring for details.
     """
-    for project in projects:
-      success = self._FetchHelper(opt, project, *args, **kwargs)
-      if not success and not opt.force_broken:
-        break
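+    # The caller acquires the semaphore before starting this thread;
+    # release it here, once per project list rather than per project
+    # in _FetchHelper, even if fetching stops early.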
+    try:
+      for project in projects:
+        success = self._FetchHelper(opt, project, *args, **kwargs)
+        if not success and not opt.force_broken:
+          break
+    finally:
+      sem.release()
 
-  def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
+  def _FetchHelper(self, opt, project, lock, fetched, pm, err_event):
     """Fetch git objects for a single project.
 
     Args:
@@ -283,8 +288,6 @@
           (with our lock held).
       pm: Instance of a Project object.  We will call pm.update() (with our
           lock held).
-      sem: We'll release() this semaphore when we exit so that another thread
-          can be started up.
       err_event: We'll set this event in the case of an error (after printing
           out info about the error).
 
@@ -340,7 +343,6 @@
     finally:
       if did_lock:
         lock.release()
-      sem.release()
 
     return success
 
@@ -365,10 +367,10 @@
       sem.acquire()
       kwargs = dict(opt=opt,
                     projects=project_list,
+                    sem=sem,
                     lock=lock,
                     fetched=fetched,
                     pm=pm,
-                    sem=sem,
                     err_event=err_event)
       if self.jobs > 1:
         t = _threading.Thread(target = self._FetchProjectList,
@@ -397,9 +399,12 @@
     return fetched
 
   def _GCProjects(self, projects):
-    gitdirs = {}
+    gc_gitdirs = {}
     for project in projects:
-      gitdirs[project.gitdir] = project.bare_git
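+      # Projects that share an object directory must not have their
+      # objects pruned, as that can corrupt the other checkouts, so
+      # pruning is disabled for them before gc runs.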
+      if len(project.manifest.GetProjectsWithName(project.name)) > 1:
+        print('Shared project %s found, disabling pruning.' % project.name)
+        project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
+      gc_gitdirs[project.gitdir] = project.bare_git
 
     has_dash_c = git_require((1, 7, 2))
     if multiprocessing and has_dash_c:
@@ -409,7 +414,7 @@
     jobs = min(self.jobs, cpu_count)
 
     if jobs < 2:
-      for bare_git in gitdirs.values():
+      for bare_git in gc_gitdirs.values():
         bare_git.gc('--auto')
       return
 
@@ -431,7 +436,7 @@
       finally:
         sem.release()
 
-    for bare_git in gitdirs.values():
+    for bare_git in gc_gitdirs.values():
       if err_event.isSet():
         break
       sem.acquire()
@@ -454,6 +459,65 @@
     else:
       self.manifest._Unload()
 
+  def _DeleteProject(self, path):
+    print('Deleting obsolete path %s' % path, file=sys.stderr)
+
+    # Delete the .git directory first, so we're less likely to have a partially
+    # working git repository around. There shouldn't be any git projects here,
+    # so rmtree works.
+    try:
+      shutil.rmtree(os.path.join(path, '.git'))
+    except OSError:
+      print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr)
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Delete everything under the worktree, except for directories that contain
+    # another git project
+    dirs_to_remove = []
+    failed = False
+    for root, dirs, files in os.walk(path):
+      for f in files:
+        try:
+          os.remove(os.path.join(root, f))
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr)
+          failed = True
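+      # Skip subdirectories that contain their own git checkout so that
+      # os.walk neither descends into nor deletes nested projects.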
+      dirs[:] = [d for d in dirs
+                 if not os.path.lexists(os.path.join(root, d, '.git'))]
+      dirs_to_remove += [os.path.join(root, d) for d in dirs
+                         if os.path.join(root, d) not in dirs_to_remove]
+    for d in reversed(dirs_to_remove):
+      if os.path.islink(d):
+        try:
+          os.remove(d)
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+          failed = True
+      elif len(os.listdir(d)) == 0:
+        try:
+          os.rmdir(d)
+        except OSError:
+          print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+          failed = True
+          continue
+    if failed:
+      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+      print('       remove manually, then run sync again', file=sys.stderr)
+      return -1
+
+    # Try deleting parent dirs if they are empty
+    project_dir = path
+    while project_dir != self.manifest.topdir:
+      if len(os.listdir(project_dir)) == 0:
+        os.rmdir(project_dir)
+      else:
+        break
+      project_dir = os.path.dirname(project_dir)
+
+    return 0
+
   def UpdateProjectList(self):
     new_project_paths = []
     for project in self.GetProjects(None, missing_ok=True):
@@ -474,8 +538,8 @@
           continue
         if path not in new_project_paths:
           # If the path has already been deleted, we don't need to do it
-          if os.path.exists(self.manifest.topdir + '/' + path):
-            gitdir = os.path.join(self.manifest.topdir, path, '.git')
+          gitdir = os.path.join(self.manifest.topdir, path, '.git')
+          if os.path.exists(gitdir):
             project = Project(
                            manifest = self.manifest,
                            name = path,
@@ -494,18 +558,8 @@
               print('       commit changes, then run sync again',
                     file=sys.stderr)
               return -1
-            else:
-              print('Deleting obsolete path %s' % project.worktree,
-                    file=sys.stderr)
-              shutil.rmtree(project.worktree)
-              # Try deleting parent subdirs if they are empty
-              project_dir = os.path.dirname(project.worktree)
-              while project_dir != self.manifest.topdir:
-                try:
-                  os.rmdir(project_dir)
-                except OSError:
-                  break
-                project_dir = os.path.dirname(project_dir)
+            elif self._DeleteProject(project.worktree):
+              return -1
 
     new_project_paths.sort()
     fd = open(file_path, 'w')
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 4b05f1e..1172dad 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -454,7 +454,11 @@
       if avail:
         pending.append((project, avail))
 
-    if pending and (not opt.bypass_hooks):
+    if not pending:
+      print("no branches ready for upload", file=sys.stderr)
+      return
+
+    if not opt.bypass_hooks:
       hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
                       self.manifest.topdir,
                       self.manifest.manifestProject.GetRemote('origin').url,
@@ -474,9 +478,7 @@
       cc = _SplitEmails(opt.cc)
     people = (reviewers, cc)
 
-    if not pending:
-      print("no branches ready for upload", file=sys.stderr)
-    elif len(pending) == 1 and len(pending[0][1]) == 1:
+    if len(pending) == 1 and len(pending[0][1]) == 1:
       self._SingleBranch(opt, pending[0][1][0], people)
     else:
       self._MultipleBranches(opt, pending, people)