Merge "Emit project info in case of sync exception."
diff --git a/.pylintrc b/.pylintrc
index 2ed0940..c6be743 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -61,9 +61,6 @@
 # (visual studio) and html
 output-format=text
 
-# Include message's id in output
-include-ids=yes
-
 # Put messages in a separate file for each module / package specified on the
 # command line instead of printing them on stdout. Reports (if any) will be
 # written in a file name "pylint_global.[txt|html]".
diff --git a/color.py b/color.py
index 7970198..0218aab 100644
--- a/color.py
+++ b/color.py
@@ -18,41 +18,43 @@
 
 import pager
 
-COLORS = {None     :-1,
-          'normal' :-1,
-          'black'  : 0,
-          'red'    : 1,
-          'green'  : 2,
-          'yellow' : 3,
-          'blue'   : 4,
+COLORS = {None: -1,
+          'normal': -1,
+          'black': 0,
+          'red': 1,
+          'green': 2,
+          'yellow': 3,
+          'blue': 4,
           'magenta': 5,
-          'cyan'   : 6,
-          'white'  : 7}
+          'cyan': 6,
+          'white': 7}
 
-ATTRS = {None     :-1,
-         'bold'   : 1,
-         'dim'    : 2,
-         'ul'     : 4,
-         'blink'  : 5,
+ATTRS = {None: -1,
+         'bold': 1,
+         'dim': 2,
+         'ul': 4,
+         'blink': 5,
          'reverse': 7}
 
-RESET = "\033[m"  # pylint: disable=W1401
-                  # backslash is not anomalous
+RESET = "\033[m"
+
 
 def is_color(s):
   return s in COLORS
 
+
 def is_attr(s):
   return s in ATTRS
 
-def _Color(fg = None, bg = None, attr = None):
+
+def _Color(fg=None, bg=None, attr=None):
   fg = COLORS[fg]
   bg = COLORS[bg]
   attr = ATTRS[attr]
 
   if attr >= 0 or fg >= 0 or bg >= 0:
     need_sep = False
-    code = "\033["  #pylint: disable=W1401
+    code = "\033["
 
     if attr >= 0:
       code += chr(ord('0') + attr)
@@ -71,7 +73,6 @@
     if bg >= 0:
       if need_sep:
         code += ';'
-      need_sep = True
 
       if bg < 8:
         code += '4%c' % (ord('0') + bg)
@@ -82,6 +83,27 @@
     code = ''
   return code
 
+DEFAULT = None
+
+
+def SetDefaultColoring(state):
+  """Set coloring behavior to |state|.
+
+  This is useful for overriding config options via the command line.
+  """
+  if state is None:
+    # Leave it alone -- return quick!
+    return
+
+  global DEFAULT
+  state = state.lower()
+  if state in ('auto',):
+    DEFAULT = state
+  elif state in ('always', 'yes', 'true', True):
+    DEFAULT = 'always'
+  elif state in ('never', 'no', 'false', False):
+    DEFAULT = 'never'
+
 
 class Coloring(object):
   def __init__(self, config, section_type):
@@ -89,9 +111,11 @@
     self._config = config
     self._out = sys.stdout
 
-    on = self._config.GetString(self._section)
+    on = DEFAULT
     if on is None:
-      on = self._config.GetString('color.ui')
+      on = self._config.GetString(self._section)
+      if on is None:
+        on = self._config.GetString('color.ui')
 
     if on == 'auto':
       if pager.active or os.isatty(1):
@@ -122,6 +146,7 @@
   def printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.colorer(opt, fg, bg, attr)
+
     def f(fmt, *args):
       s._out.write(c(fmt, *args))
     return f
@@ -129,6 +154,7 @@
   def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.nofmt_colorer(opt, fg, bg, attr)
+
     def f(fmt):
       s._out.write(c(fmt))
     return f
@@ -136,11 +162,13 @@
   def colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt, *args):
         output = fmt % args
         return ''.join([c, output, RESET])
       return f
     else:
+
       def f(fmt, *args):
         return fmt % args
       return f
@@ -148,6 +176,7 @@
   def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt):
         return ''.join([c, fmt, RESET])
       return f
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index e48b75f..1aa9396 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -26,6 +26,7 @@
                         manifest-server?,
                         remove-project*,
                         project*,
+                        extend-project*,
                         repo-hooks?)>
 
     <!ELEMENT notice (#PCDATA)>
@@ -35,6 +36,7 @@
     <!ATTLIST remote alias        CDATA #IMPLIED>
     <!ATTLIST remote fetch        CDATA #REQUIRED>
     <!ATTLIST remote review       CDATA #IMPLIED>
+    <!ATTLIST remote revision     CDATA #IMPLIED>
 
     <!ELEMENT default (EMPTY)>
     <!ATTLIST default remote      IDREF #IMPLIED>
@@ -66,6 +68,11 @@
     <!ATTLIST annotation value CDATA #REQUIRED>
     <!ATTLIST annotation keep  CDATA "true">
 
+    <!ELEMENT extend-project (EMPTY)>
+    <!ATTLIST extend-project name CDATA #REQUIRED>
+    <!ATTLIST extend-project path CDATA #IMPLIED>
+    <!ATTLIST extend-project groups CDATA #IMPLIED>
+
     <!ELEMENT remove-project (EMPTY)>
     <!ATTLIST remove-project name  CDATA #REQUIRED>
 
@@ -112,6 +119,10 @@
 are uploaded to by `repo upload`.  This attribute is optional;
 if not specified then `repo upload` will not function.
 
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
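+For example, a hypothetical fragment (remote name and branches are
+illustrative) in which projects on this remote default to a branch other
+than the manifest default:
+
+  <remote name="aosp" fetch=".." revision="refs/heads/upstream" />
+  <default remote="aosp" revision="refs/heads/master" />
+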
 Element default
 ---------------
 
@@ -132,14 +143,14 @@
 this value. If this value is not set, projects will use `revision`
 by default instead.
 
-Attribute `sync_j`: Number of parallel jobs to use when synching.
+Attribute `sync-j`: Number of parallel jobs to use when synching.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
-whole ref space.  Project elements lacking a sync_c element of
+whole ref space.  Project elements lacking a sync-c element of
 their own will use this value.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
 
 Element manifest-server
@@ -208,7 +219,8 @@
 (e.g. just "master") or absolute (e.g. "refs/heads/master").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested.  If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
 
 Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
 When using `repo upload`, changes will be submitted for code
@@ -226,13 +238,13 @@
 If the project has a parent element, the `name` and `path` here
 are the prefixed ones.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
 whole ref space.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
-Attribute `upstream`: Name of the Git branch in which a sha1
+Attribute `upstream`: Name of the Git ref in which a sha1
 can be found.  Used when syncing a revision locked manifest in
 -c mode to avoid having to sync the entire ref space.
 
@@ -246,6 +258,22 @@
 local mirrors syncing, it will be ignored when syncing the projects in a
 client working directory.
 
+Element extend-project
+----------------------
+
+Modify the attributes of the named project.
+
+This element is mostly useful in a local manifest file, to modify the
+attributes of an existing project without completely replacing the
+existing project definition.  This makes the local manifest more robust
+against changes to the original manifest.
+
+Attribute `path`: If specified, limit the change to projects checked out
+at the specified path, rather than all projects with the given name.
+
+Attribute `groups`: List of additional groups to which this project
+belongs.  Same syntax as the corresponding attribute of `project`.
+
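+For example, a hypothetical local manifest fragment (project name and
+group are illustrative) that adds an extra group to an already defined
+project without redefining it:
+
+  <extend-project name="platform/build" groups="tools" />
+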
 Element annotation
 ------------------
 
diff --git a/error.py b/error.py
index ff948f9..f2a7c4e 100644
--- a/error.py
+++ b/error.py
@@ -80,7 +80,7 @@
     self.name = name
 
   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name
 
@@ -93,7 +93,7 @@
     self.name = name
 
   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name
 
diff --git a/git_command.py b/git_command.py
index 354fc71..0893bff 100644
--- a/git_command.py
+++ b/git_command.py
@@ -14,7 +14,9 @@
 # limitations under the License.
 
 from __future__ import print_function
+import fcntl
 import os
+import select
 import sys
 import subprocess
 import tempfile
@@ -76,17 +78,30 @@
 
 _git_version = None
 
+class _sfd(object):
+  """select file descriptor class"""
+  def __init__(self, fd, dest, std_name):
+    assert std_name in ('stdout', 'stderr')
+    self.fd = fd
+    self.dest = dest
+    self.std_name = std_name
+
+  def fileno(self):
+    return self.fd.fileno()
+
 class _GitCall(object):
   def version(self):
     p = GitCommand(None, ['--version'], capture_stdout=True)
     if p.Wait() == 0:
-      return p.stdout
+      if hasattr(p.stdout, 'decode'):
+        return p.stdout.decode('utf-8')
+      else:
+        return p.stdout
     return None
 
   def version_tuple(self):
     global _git_version
     if _git_version is None:
-      ver_str = git.version().decode('utf-8')
+      ver_str = git.version()
       _git_version = Wrapper().ParseGitVersion(ver_str)
       if _git_version is None:
         print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
@@ -139,6 +154,9 @@
       if key in env:
         del env[key]
 
+    # If we are not capturing std* then need to print it.
+    self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
+
     if disable_editor:
       _setenv(env, 'GIT_EDITOR', ':')
     if ssh_proxy:
@@ -162,22 +180,21 @@
       if gitdir:
         _setenv(env, GIT_DIR, gitdir)
       cwd = None
-    command.extend(cmdv)
+    command.append(cmdv[0])
+    # Need to use the --progress flag for fetch/clone so output will be
+    # displayed as by default git only does progress output if stderr is a TTY.
+    if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
+      if '--progress' not in cmdv and '--quiet' not in cmdv:
+        command.append('--progress')
+    command.extend(cmdv[1:])
 
     if provide_stdin:
       stdin = subprocess.PIPE
     else:
       stdin = None
 
-    if capture_stdout:
-      stdout = subprocess.PIPE
-    else:
-      stdout = None
-
-    if capture_stderr:
-      stderr = subprocess.PIPE
-    else:
-      stderr = None
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
 
     if IsTrace():
       global LAST_CWD
@@ -226,8 +243,36 @@
   def Wait(self):
     try:
       p = self.process
-      (self.stdout, self.stderr) = p.communicate()
-      rc = p.returncode
+      rc = self._CaptureOutput()
     finally:
       _remove_ssh_client(p)
     return rc
+
+  def _CaptureOutput(self):
+    p = self.process
+    s_in = [_sfd(p.stdout, sys.stdout, 'stdout'),
+            _sfd(p.stderr, sys.stderr, 'stderr')]
+    self.stdout = ''
+    self.stderr = ''
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
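+    # Multiplex over both pipes until they close: accumulate everything into
+    # self.stdout/self.stderr and, when not capturing, tee it to the parent's
+    # stdout/stderr as it arrives.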
+    while s_in:
+      in_ready, _, _ = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s_in.remove(s)
+          continue
+        if not hasattr(buf, 'encode'):
+          buf = buf.decode()
+        if s.std_name == 'stdout':
+          self.stdout += buf
+        else:
+          self.stderr += buf
+        if self.tee[s.std_name]:
+          s.dest.write(buf)
+          s.dest.flush()
+    return p.wait()
diff --git a/git_config.py b/git_config.py
index 32879ec..8ded7c2 100644
--- a/git_config.py
+++ b/git_config.py
@@ -15,8 +15,8 @@
 
 from __future__ import print_function
 
+import json
 import os
-import pickle
 import re
 import subprocess
 import sys
@@ -80,7 +80,7 @@
     return cls(configfile = os.path.join(gitdir, 'config'),
                defaults = defaults)
 
-  def __init__(self, configfile, defaults=None, pickleFile=None):
+  def __init__(self, configfile, defaults=None, jsonFile=None):
     self.file = configfile
     self.defaults = defaults
     self._cache_dict = None
@@ -88,12 +88,11 @@
     self._remotes = {}
     self._branches = {}
 
-    if pickleFile is None:
-      self._pickle = os.path.join(
+    self._json = jsonFile
+    if self._json is None:
+      self._json = os.path.join(
         os.path.dirname(self.file),
-        '.repopickle_' + os.path.basename(self.file))
-    else:
-      self._pickle = pickleFile
+        '.repo_' + os.path.basename(self.file) + '.json')
 
   def Has(self, name, include_defaults = True):
     """Return true if this configuration file has the key.
@@ -217,9 +216,9 @@
     """Resolve any url.*.insteadof references.
     """
     for new_url in self.GetSubSections('url'):
-      old_url = self.GetString('url.%s.insteadof' % new_url)
-      if old_url is not None and url.startswith(old_url):
-        return new_url + url[len(old_url):]
+      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
+        if old_url is not None and url.startswith(old_url):
+          return new_url + url[len(old_url):]
     return url
 
   @property
@@ -248,50 +247,41 @@
     return self._cache_dict
 
   def _Read(self):
-    d = self._ReadPickle()
+    d = self._ReadJson()
     if d is None:
       d = self._ReadGit()
-      self._SavePickle(d)
+      self._SaveJson(d)
     return d
 
-  def _ReadPickle(self):
+  def _ReadJson(self):
     try:
-      if os.path.getmtime(self._pickle) \
+      if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._pickle)
+        os.remove(self._json)
         return None
     except OSError:
       return None
     try:
-      Trace(': unpickle %s', self.file)
-      fd = open(self._pickle, 'rb')
+      Trace(': parsing %s', self.file)
+      fd = open(self._json)
       try:
-        return pickle.load(fd)
+        return json.load(fd)
       finally:
         fd.close()
-    except EOFError:
-      os.remove(self._pickle)
-      return None
-    except IOError:
-      os.remove(self._pickle)
-      return None
-    except pickle.PickleError:
-      os.remove(self._pickle)
+    except (IOError, ValueError):
+      os.remove(self._json)
       return None
 
-  def _SavePickle(self, cache):
+  def _SaveJson(self, cache):
     try:
-      fd = open(self._pickle, 'wb')
+      fd = open(self._json, 'w')
       try:
-        pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+        json.dump(cache, fd, indent=2)
       finally:
         fd.close()
-    except IOError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
-    except pickle.PickleError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
+    except (IOError, TypeError):
+      if os.path.exists(self._json):
+        os.remove(self._json)
 
   def _ReadGit(self):
     """
@@ -576,6 +566,8 @@
         return None
 
       u = self.review
+      if u.startswith('persistent-'):
+        u = u[len('persistent-'):]
       if u.split(':')[0] not in ('http', 'https', 'sso'):
         u = 'http://%s' % u
       if u.endswith('/Gerrit'):
@@ -627,9 +619,7 @@
   def ToLocal(self, rev):
     """Convert a remote revision string to something we have locally.
     """
-    if IsId(rev):
-      return rev
-    if rev.startswith(R_TAGS):
+    if self.name == '.' or IsId(rev):
       return rev
 
     if not rev.startswith('refs/'):
@@ -638,6 +628,10 @@
     for spec in self.fetch:
       if spec.SourceMatches(rev):
         return spec.MapSource(rev)
+
+    if not rev.startswith(R_HEADS):
+      return rev
+
     raise GitError('remote %s does not have %s' % (self.name, rev))
 
   def WritesTo(self, ref):
@@ -707,7 +701,7 @@
       self._Set('merge', self.merge)
 
     else:
-      fd = open(self._config.file, 'ab')
+      fd = open(self._config.file, 'a')
       try:
         fd.write('[branch "%s"]\n' % self.name)
         if self.remote:
diff --git a/hooks/commit-msg b/hooks/commit-msg
index 5ca2b11..d8f009b 100755
--- a/hooks/commit-msg
+++ b/hooks/commit-msg
@@ -1,5 +1,4 @@
 #!/bin/sh
-# From Gerrit Code Review 2.6
 #
 # Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
 #
@@ -27,7 +26,7 @@
 #
 add_ChangeId() {
 	clean_message=`sed -e '
-		/^diff --git a\/.*/{
+		/^diff --git .*/{
 			s///
 			q
 		}
@@ -39,6 +38,11 @@
 		return
 	fi
 
+	if test "false" = "`git config --bool --get gerrit.createChangeId`"
+	then
+		return
+	fi
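+	# Note: the check above honors a per-repository opt-out, e.g.
+	#   git config gerrit.createChangeId false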
+
 	# Does Change-Id: already exist? if so, exit (no change).
 	if grep -i '^Change-Id:' "$MSG" >/dev/null
 	then
@@ -77,7 +81,7 @@
 	# Skip the line starting with the diff command and everything after it,
 	# up to the end of the file, assuming it is only patch data.
 	# If more than one line before the diff was empty, strip all but one.
-	/^diff --git a/ {
+	/^diff --git / {
 		blankLines = 0
 		while (getline) { }
 		next
diff --git a/main.py b/main.py
index 72fb39b..6736abc 100755
--- a/main.py
+++ b/main.py
@@ -36,6 +36,7 @@
 except ImportError:
   kerberos = None
 
+from color import SetDefaultColoring
 from trace import SetTrace
 from git_command import git, GitCommand
 from git_config import init_ssh, close_ssh
@@ -44,6 +45,7 @@
 from subcmds.version import Version
 from editor import Editor
 from error import DownloadError
+from error import InvalidProjectGroupsError
 from error import ManifestInvalidRevisionError
 from error import ManifestParseError
 from error import NoManifestException
@@ -69,6 +71,9 @@
 global_options.add_option('--no-pager',
                           dest='no_pager', action='store_true',
                           help='disable the pager')
+global_options.add_option('--color',
+                          choices=('auto', 'always', 'never'), default=None,
+                          help='control color usage: auto, always, never')
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution')
@@ -113,6 +118,8 @@
         print('fatal: invalid usage of --version', file=sys.stderr)
         return 1
 
+    SetDefaultColoring(gopts.color)
+
     try:
       cmd = self.commands[name]
     except KeyError:
@@ -167,6 +174,12 @@
       else:
         print('error: no project in current directory', file=sys.stderr)
       result = 1
+    except InvalidProjectGroupsError as e:
+      if e.name:
+        print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
+      else:
+        print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
+      result = 1
     finally:
       elapsed = time.time() - start
       hours, remainder = divmod(elapsed, 3600)
diff --git a/manifest_xml.py b/manifest_xml.py
index 3c8fadd..130e17c 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -38,8 +38,9 @@
 LOCAL_MANIFEST_NAME = 'local_manifest.xml'
 LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
 
-urllib.parse.uses_relative.extend(['ssh', 'git'])
-urllib.parse.uses_netloc.extend(['ssh', 'git'])
+# urljoin gets confused if the scheme is not known.
+urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
 
 class _Default(object):
   """Project defaults within the manifest."""
@@ -63,12 +64,14 @@
                alias=None,
                fetch=None,
                manifestUrl=None,
-               review=None):
+               review=None,
+               revision=None):
     self.name = name
     self.fetchUrl = fetch
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
+    self.revision = revision
     self.resolvedFetchUrl = self._resolveFetchUrl()
 
   def __eq__(self, other):
@@ -83,17 +86,14 @@
     # urljoin will gets confused over quite a few things.  The ones we care
     # about here are:
     # * no scheme in the base url, like <hostname:port>
-    # * persistent-https://
-    # We handle this by replacing these with obscure protocols
-    # and then replacing them with the original when we are done.
-    # gopher -> <none>
-    # wais -> persistent-https
+    # We handle the no-scheme case by temporarily prefixing the URL with an
+    # obscure protocol, gopher, and stripping it off again when we are done.
+
     if manifestUrl.find(':') != manifestUrl.find('/') - 1:
-      manifestUrl = 'gopher://' + manifestUrl
-    manifestUrl = re.sub(r'^persistent-https://', 'wais://', manifestUrl)
-    url = urllib.parse.urljoin(manifestUrl, url)
-    url = re.sub(r'^gopher://', '', url)
-    url = re.sub(r'^wais://', 'persistent-https://', url)
+      url = urllib.parse.urljoin('gopher://' + manifestUrl, url)
+      url = re.sub(r'^gopher://', '', url)
+    else:
+      url = urllib.parse.urljoin(manifestUrl, url)
     return url
 
   def ToRemoteSpec(self, projectName):
@@ -159,6 +159,11 @@
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
       e.setAttribute('review', r.reviewUrl)
+    if r.revision is not None:
+      e.setAttribute('revision', r.revision)
+
+  def _ParseGroups(self, groups):
+    return [x for x in re.split(r'[,\s]+', groups) if x]
 
   def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
     """Write the current manifest out to the given file descriptor.
@@ -167,7 +172,7 @@
 
     groups = mp.config.GetString('manifest.groups')
     if groups:
-      groups = [x for x in re.split(r'[,\s]+', groups) if x]
+      groups = self._ParseGroups(groups)
 
     doc = xml.dom.minidom.Document()
     root = doc.createElement('manifest')
@@ -240,20 +245,27 @@
       if d.remote:
         remoteName = d.remote.remoteAlias or d.remote.name
       if not d.remote or p.remote.name != remoteName:
-        e.setAttribute('remote', p.remote.name)
+        remoteName = p.remote.name
+        e.setAttribute('remote', remoteName)
       if peg_rev:
         if self.IsMirror:
           value = p.bare_git.rev_parse(p.revisionExpr + '^0')
         else:
           value = p.work_git.rev_parse(HEAD + '^0')
         e.setAttribute('revision', value)
-        if peg_rev_upstream and value != p.revisionExpr:
-          # Only save the origin if the origin is not a sha1, and the default
-          # isn't our value, and the if the default doesn't already have that
-          # covered.
-          e.setAttribute('upstream', p.revisionExpr)
-      elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
-        e.setAttribute('revision', p.revisionExpr)
+        if peg_rev_upstream:
+          if p.upstream:
+            e.setAttribute('upstream', p.upstream)
+          elif value != p.revisionExpr:
+            # Only save the origin if the origin is not a sha1, and the default
+            # isn't our value
+            e.setAttribute('upstream', p.revisionExpr)
+      else:
+        revision = self.remotes[remoteName].revision or d.revisionExpr
+        if not revision or revision != p.revisionExpr:
+          e.setAttribute('revision', p.revisionExpr)
+        if p.upstream and p.upstream != p.revisionExpr:
+          e.setAttribute('upstream', p.upstream)
 
       for c in p.copyfiles:
         ce = doc.createElement('copyfile')
@@ -261,6 +273,12 @@
         ce.setAttribute('dest', c.dest)
         e.appendChild(ce)
 
+      for l in p.linkfiles:
+        le = doc.createElement('linkfile')
+        le.setAttribute('src', l.src)
+        le.setAttribute('dest', l.dest)
+        e.appendChild(le)
+
       default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
       egroups = [g for g in p.groups if g not in default_groups]
       if egroups:
@@ -304,7 +322,7 @@
   @property
   def projects(self):
     self._Load()
-    return self._paths.values()
+    return list(self._paths.values())
 
   @property
   def remotes(self):
@@ -492,6 +510,23 @@
       if node.nodeName == 'project':
         project = self._ParseProject(node)
         recursively_add_projects(project)
+      if node.nodeName == 'extend-project':
+        name = self._reqatt(node, 'name')
+
+        if name not in self._projects:
+          raise ManifestParseError('extend-project element specifies non-existent '
+                                   'project: %s' % name)
+
+        path = node.getAttribute('path')
+        groups = node.getAttribute('groups')
+        if groups:
+          groups = self._ParseGroups(groups)
+
+        for p in self._projects[name]:
+          if path and p.relpath != path:
+            continue
+          if groups:
+            p.groups.extend(groups)
       if node.nodeName == 'repo-hooks':
         # Get the name of the project and the (space-separated) list of enabled.
         repo_hooks_project = self._reqatt(node, 'in-project')
@@ -586,8 +621,11 @@
     review = node.getAttribute('review')
     if review == '':
       review = None
+    revision = node.getAttribute('revision')
+    if revision == '':
+      revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review)
+    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
 
   def _ParseDefault(self, node):
     """
@@ -680,7 +718,7 @@
       raise ManifestParseError("no remote for project %s within %s" %
             (name, self.manifestFile))
 
-    revisionExpr = node.getAttribute('revision')
+    revisionExpr = node.getAttribute('revision') or remote.revision
     if not revisionExpr:
       revisionExpr = self._default.revisionExpr
     if not revisionExpr:
@@ -729,7 +767,7 @@
     groups = ''
     if node.hasAttribute('groups'):
       groups = node.getAttribute('groups')
-    groups = [x for x in re.split(r'[,\s]+', groups) if x]
+    groups = self._ParseGroups(groups)
 
     if parent is None:
       relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
@@ -765,6 +803,8 @@
     for n in node.childNodes:
       if n.nodeName == 'copyfile':
         self._ParseCopyFile(project, n)
+      if n.nodeName == 'linkfile':
+        self._ParseLinkFile(project, n)
       if n.nodeName == 'annotation':
         self._ParseAnnotation(project, n)
       if n.nodeName == 'project':
@@ -814,6 +854,14 @@
       # dest is relative to the top of the tree
       project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
 
+  def _ParseLinkFile(self, project, node):
+    src = self._reqatt(node, 'src')
+    dest = self._reqatt(node, 'dest')
+    if not self.IsMirror:
+      # src is project relative;
+      # dest is relative to the top of the tree
+      project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
   def _ParseAnnotation(self, project, node):
     name = self._reqatt(node, 'name')
     value = self._reqatt(node, 'value')
@@ -856,10 +904,8 @@
     fromProjects = self.paths
     toProjects = manifest.paths
 
-    fromKeys = fromProjects.keys()
-    fromKeys.sort()
-    toKeys = toProjects.keys()
-    toKeys.sort()
+    fromKeys = sorted(fromProjects.keys())
+    toKeys = sorted(toProjects.keys())
 
     diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
 
diff --git a/project.py b/project.py
index 023cf73..868425c 100644
--- a/project.py
+++ b/project.py
@@ -13,9 +13,10 @@
 # limitations under the License.
 
 from __future__ import print_function
-import traceback
+import contextlib
 import errno
 import filecmp
+import glob
 import os
 import random
 import re
@@ -26,11 +27,12 @@
 import tarfile
 import tempfile
 import time
+import traceback
 
 from color import Coloring
 from git_command import GitCommand, git_require
 from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
-from error import GitError, HookError, UploadError
+from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
 from trace import IsTrace, Trace
@@ -46,7 +48,7 @@
 def _lwrite(path, content):
   lock = '%s.lock' % path
 
-  fd = open(lock, 'wb')
+  fd = open(lock, 'w')
   try:
     fd.write(content)
   finally:
@@ -84,7 +86,7 @@
   global _project_hook_list
   if _project_hook_list is None:
     d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
-    d = os.path.join(d , 'hooks')
+    d = os.path.join(d, 'hooks')
     _project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
   return _project_hook_list
 
@@ -182,28 +184,28 @@
 class StatusColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'status')
-    self.project   = self.printer('header',    attr = 'bold')
-    self.branch    = self.printer('header',    attr = 'bold')
-    self.nobranch  = self.printer('nobranch',  fg = 'red')
-    self.important = self.printer('important', fg = 'red')
+    self.project = self.printer('header', attr='bold')
+    self.branch = self.printer('header', attr='bold')
+    self.nobranch = self.printer('nobranch', fg='red')
+    self.important = self.printer('important', fg='red')
 
-    self.added     = self.printer('added',     fg = 'green')
-    self.changed   = self.printer('changed',   fg = 'red')
-    self.untracked = self.printer('untracked', fg = 'red')
+    self.added = self.printer('added', fg='green')
+    self.changed = self.printer('changed', fg='red')
+    self.untracked = self.printer('untracked', fg='red')
 
 
 class DiffColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'diff')
-    self.project   = self.printer('header',    attr = 'bold')
+    self.project = self.printer('header', attr='bold')
 
-class _Annotation:
+class _Annotation(object):
   def __init__(self, name, value, keep):
     self.name = name
     self.value = value
     self.keep = keep
 
-class _CopyFile:
+class _CopyFile(object):
   def __init__(self, src, dest, abssrc, absdest):
     self.src = src
     self.dest = dest
@@ -231,14 +233,72 @@
       except IOError:
         _error('Cannot copy file %s to %s', src, dest)
 
+class _LinkFile(object):
+  def __init__(self, git_worktree, src, dest, relsrc, absdest):
+    self.git_worktree = git_worktree
+    self.src = src
+    self.dest = dest
+    self.src_rel_to_dest = relsrc
+    self.abs_dest = absdest
+
+  def __linkIt(self, relSrc, absDest):
+    # link file if it does not exist or is out of date
+    if not os.path.islink(absDest) or (os.readlink(absDest) != relSrc):
+      try:
+        # remove existing file first, since it might be read-only
+        if os.path.exists(absDest):
+          os.remove(absDest)
+        else:
+          dest_dir = os.path.dirname(absDest)
+          if not os.path.isdir(dest_dir):
+            os.makedirs(dest_dir)
+        os.symlink(relSrc, absDest)
+      except IOError:
+        _error('Cannot link file %s to %s', relSrc, absDest)
+
+  def _Link(self):
+    """Link the self.rel_src_to_dest and self.abs_dest. Handles wild cards
+    on the src linking all of the files in the source in to the destination
+    directory.
+    """
+    # We use the absSrc to handle the situation where the current directory
+    # is not the root of the repo
+    absSrc = os.path.join(self.git_worktree, self.src)
+    if os.path.exists(absSrc):
+      # Entity exists, so just do a simple one-to-one link operation
+      self.__linkIt(self.src_rel_to_dest, self.abs_dest)
+    else:
+      # Entity doesn't exist; assume there is a wildcard
+      absDestDir = self.abs_dest
+      if os.path.exists(absDestDir) and not os.path.isdir(absDestDir):
+        _error('Link error: src with wildcard, %s must be a directory',
+            absDestDir)
+      else:
+        absSrcFiles = glob.glob(absSrc)
+        for absSrcFile in absSrcFiles:
+          # Create a relative path from source dir to destination dir
+          absSrcDir = os.path.dirname(absSrcFile)
+          relSrcDir = os.path.relpath(absSrcDir, absDestDir)
+
+          # Get the source file name
+          srcFile = os.path.basename(absSrcFile)
+
+          # Now form the final full paths to srcFile. They will be
+          # absolute for the destination and relative for the source.
+          absDest = os.path.join(absDestDir, srcFile)
+          relSrc = os.path.join(relSrcDir, srcFile)
+          self.__linkIt(relSrc, absDest)
+
 class RemoteSpec(object):
   def __init__(self,
                name,
-               url = None,
-               review = None):
+               url=None,
+               review=None,
+               revision=None):
     self.name = name
     self.url = url
     self.review = review
+    self.revision = revision
 
 class RepoHook(object):
   """A RepoHook contains information about a script to run as a hook.
@@ -414,7 +474,8 @@
       # and  convert to a HookError w/ just the failing traceback.
       context = {}
       try:
-        execfile(self._script_fullpath, context)
+        exec(compile(open(self._script_fullpath).read(),
+                     self._script_fullpath, 'exec'), context)
       except Exception:
         raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
                         traceback.format_exc(), self._hook_type))
@@ -483,6 +544,12 @@
 
 
 class Project(object):
+  # These objects can be shared between several working trees.
+  shareable_files = ['description', 'info']
+  shareable_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
+  # These objects can only be used by a single working tree.
+  working_tree_files = ['config', 'packed-refs', 'shallow']
+  working_tree_dirs = ['logs', 'refs']
   def __init__(self,
                manifest,
                name,
@@ -493,15 +560,16 @@
                relpath,
                revisionExpr,
                revisionId,
-               rebase = True,
-               groups = None,
-               sync_c = False,
-               sync_s = False,
-               clone_depth = None,
-               upstream = None,
-               parent = None,
-               is_derived = False,
-               dest_branch = None):
+               rebase=True,
+               groups=None,
+               sync_c=False,
+               sync_s=False,
+               clone_depth=None,
+               upstream=None,
+               parent=None,
+               is_derived=False,
+               dest_branch=None,
+               optimized_fetch=False):
     """Init a Project object.
 
     Args:
@@ -523,6 +591,8 @@
       is_derived: False if the project was explicitly defined in the manifest;
                   True if the project is a discovered submodule.
       dest_branch: The branch to which to push changes for review by default.
+      optimized_fetch: If True, when a project is set to a sha1 revision, only
+                       fetch from the remote if the sha1 is not present locally.
     """
     self.manifest = manifest
     self.name = name
@@ -551,14 +621,16 @@
     self.upstream = upstream
     self.parent = parent
     self.is_derived = is_derived
+    self.optimized_fetch = optimized_fetch
     self.subprojects = []
 
     self.snapshots = {}
     self.copyfiles = []
+    self.linkfiles = []
     self.annotations = []
     self.config = GitConfig.ForRepository(
-                    gitdir = self.gitdir,
-                    defaults =  self.manifest.globalConfig)
+                    gitdir=self.gitdir,
+                    defaults=self.manifest.globalConfig)
 
     if self.worktree:
       self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
@@ -579,7 +651,7 @@
 
   @property
   def Exists(self):
-    return os.path.isdir(self.gitdir)
+    return os.path.isdir(self.gitdir) and os.path.isdir(self.objdir)
 
   @property
   def CurrentBranch(self):
@@ -708,27 +780,49 @@
     return matched
 
 ## Status Display ##
+  def UncommitedFiles(self, get_all=True):
+    """Returns a list of strings, uncommitted files in the git tree.
 
-  def HasChanges(self):
-    """Returns true if there are uncommitted changes.
+    Args:
+      get_all: a boolean; if True, get information about all the different
+               kinds of uncommitted files. If False, return as soon as any
+               kind of uncommitted file is detected.
     """
+    details = []
     self.work_git.update_index('-q',
                                '--unmerged',
                                '--ignore-missing',
                                '--refresh')
     if self.IsRebaseInProgress():
-      return True
+      details.append("rebase in progress")
+      if not get_all:
+        return details
 
-    if self.work_git.DiffZ('diff-index', '--cached', HEAD):
-      return True
+    changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys()
+    if changes:
+      details.extend(changes)
+      if not get_all:
+        return details
 
-    if self.work_git.DiffZ('diff-files'):
-      return True
+    changes = self.work_git.DiffZ('diff-files').keys()
+    if changes:
+      details.extend(changes)
+      if not get_all:
+        return details
 
-    if self.work_git.LsOthers():
-      return True
+    changes = self.work_git.LsOthers()
+    if changes:
+      details.extend(changes)
 
-    return False
+    return details
+
+  def HasChanges(self):
+    """Returns true if there are uncommitted changes.
+    """
+    if self.UncommitedFiles(get_all=False):
+      return True
+    else:
+      return False
 
   def PrintWorkTreeStatus(self, output_redir=None):
     """Prints the status of the repository to stdout.
@@ -758,7 +852,7 @@
     out = StatusColoring(self.config)
     if not output_redir == None:
       out.redirect(output_redir)
-    out.project('project %-40s', self.relpath + '/')
+    out.project('project %-40s', self.relpath + '/ ')
 
     branch = self.CurrentBranch
     if branch is None:
@@ -829,8 +923,8 @@
     cmd.append('--')
     p = GitCommand(self,
                    cmd,
-                   capture_stdout = True,
-                   capture_stderr = True)
+                   capture_stdout=True,
+                   capture_stderr=True)
     has_diff = False
     for line in p.process.stdout:
       if not has_diff:
@@ -915,7 +1009,7 @@
     return None
 
   def UploadForReview(self, branch=None,
-                      people=([],[]),
+                      people=([], []),
                       auto_topic=False,
                       draft=False,
                       dest_branch=None):
@@ -976,13 +1070,13 @@
         ref_spec = ref_spec + '%' + ','.join(rp)
     cmd.append(ref_spec)
 
-    if GitCommand(self, cmd, bare = True).Wait() != 0:
+    if GitCommand(self, cmd, bare=True).Wait() != 0:
       raise UploadError('Upload failed')
 
     msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
     self.bare_git.UpdateRef(R_PUB + branch.name,
                             R_HEADS + branch.name,
-                            message = msg)
+                            message=msg)
 
 
 ## Sync ##
@@ -1007,9 +1101,11 @@
       quiet=False,
       is_new=None,
       current_branch_only=False,
+      force_sync=False,
       clone_bundle=True,
       no_tags=False,
-      archive=False):
+      archive=False,
+      optimized_fetch=False):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
@@ -1040,13 +1136,12 @@
       except OSError as e:
         print("warn: Cannot remove archive %s: "
               "%s" % (tarpath, str(e)), file=sys.stderr)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return True
-
     if is_new is None:
       is_new = not self.Exists
     if is_new:
-      self._InitGitDir()
+      self._InitGitDir(force_sync=force_sync)
     else:
       self._UpdateHooks()
     self._InitRemote()
@@ -1078,16 +1173,12 @@
       elif self.manifest.default.sync_c:
         current_branch_only = True
 
-    is_sha1 = False
-    if ID_RE.match(self.revisionExpr) is not None:
-      is_sha1 = True
-    if is_sha1 and self._CheckForSha1():
-      # Don't need to fetch since we already have this revision
-      return True
-
-    if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
-                             current_branch_only=current_branch_only,
-                             no_tags=no_tags):
+    need_to_fetch = not (optimized_fetch and \
+      (ID_RE.match(self.revisionExpr) and self._CheckForSha1()))
+    if (need_to_fetch
+        and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
+                                  current_branch_only=current_branch_only,
+                                  no_tags=no_tags)):
       return False
 
     if self.worktree:
@@ -1103,9 +1194,11 @@
   def PostRepoUpgrade(self):
     self._InitHooks()
 
-  def _CopyFiles(self):
+  def _CopyAndLinkFiles(self):
     for copyfile in self.copyfiles:
       copyfile._Copy()
+    for linkfile in self.linkfiles:
+      linkfile._Link()
 
   def GetCommitRevisionId(self):
     """Get revisionId of a commit.
@@ -1141,18 +1234,18 @@
         'revision %s in %s not found' % (self.revisionExpr,
                                          self.name))
 
-  def Sync_LocalHalf(self, syncbuf):
+  def Sync_LocalHalf(self, syncbuf, force_sync=False):
     """Perform only the local IO portion of the sync process.
        Network access is not required.
     """
-    self._InitWorkTree()
+    self._InitWorkTree(force_sync=force_sync)
     all_refs = self.bare_ref.all
     self.CleanPublishedCache(all_refs)
     revid = self.GetRevisionId(all_refs)
 
     def _doff():
       self._FastForward(revid)
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
 
     head = self.work_git.GetHead()
     if head.startswith(R_HEADS):
@@ -1188,7 +1281,7 @@
       except GitError as e:
         syncbuf.fail(self, e)
         return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return
 
     if head == revid:
@@ -1210,7 +1303,7 @@
       except GitError as e:
         syncbuf.fail(self, e)
         return
-      self._CopyFiles()
+      self._CopyAndLinkFiles()
       return
 
     upstream_gain = self._revlist(not_rev(HEAD), revid)
@@ -1278,17 +1371,19 @@
     if not ID_RE.match(self.revisionExpr):
       # in case of manifest sync the revisionExpr might be a SHA1
       branch.merge = self.revisionExpr
+      if not branch.merge.startswith('refs/'):
+        branch.merge = R_HEADS + branch.merge
     branch.Save()
 
     if cnt_mine > 0 and self.rebase:
       def _dorebase():
-        self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
-        self._CopyFiles()
+        self._Rebase(upstream='%s^1' % last_mine, onto=revid)
+        self._CopyAndLinkFiles()
       syncbuf.later2(self, _dorebase)
     elif local_changes:
       try:
         self._ResetHard(revid)
-        self._CopyFiles()
+        self._CopyAndLinkFiles()
       except GitError as e:
         syncbuf.fail(self, e)
         return
@@ -1301,6 +1396,13 @@
     abssrc = os.path.join(self.worktree, src)
     self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
 
+  def AddLinkFile(self, src, dest, absdest):
+    # dest should already be an absolute path, but src is project relative
+    # make src relative path to dest
+    absdestdir = os.path.dirname(absdest)
+    relsrc = os.path.relpath(os.path.join(self.worktree, src), absdestdir)
+    self.linkfiles.append(_LinkFile(self.worktree, src, dest, relsrc, absdest))
+
   def AddAnnotation(self, name, value, keep):
     self.annotations.append(_Annotation(name, value, keep))
 
@@ -1331,15 +1433,17 @@
       return True
 
     all_refs = self.bare_ref.all
-    if (R_HEADS + name) in all_refs:
+    if R_HEADS + name in all_refs:
       return GitCommand(self,
                         ['checkout', name, '--'],
-                        capture_stdout = True,
-                        capture_stderr = True).Wait() == 0
+                        capture_stdout=True,
+                        capture_stderr=True).Wait() == 0
 
     branch = self.GetBranch(name)
     branch.remote = self.GetRemote(self.remote.name)
     branch.merge = self.revisionExpr
+    if not branch.merge.startswith('refs/') and not ID_RE.match(self.revisionExpr):
+      branch.merge = R_HEADS + self.revisionExpr
     revid = self.GetRevisionId(all_refs)
 
     if head.startswith(R_HEADS):
@@ -1362,8 +1466,8 @@
 
     if GitCommand(self,
                   ['checkout', '-b', branch.name, revid],
-                  capture_stdout = True,
-                  capture_stderr = True).Wait() == 0:
+                  capture_stdout=True,
+                  capture_stderr=True).Wait() == 0:
       branch.Save()
       return True
     return False
@@ -1409,8 +1513,8 @@
 
     return GitCommand(self,
                       ['checkout', name, '--'],
-                      capture_stdout = True,
-                      capture_stderr = True).Wait() == 0
+                      capture_stdout=True,
+                      capture_stderr=True).Wait() == 0
 
   def AbandonBranch(self, name):
     """Destroy a local topic branch.
@@ -1444,8 +1548,8 @@
 
     return GitCommand(self,
                       ['branch', '-D', name],
-                      capture_stdout = True,
-                      capture_stderr = True).Wait() == 0
+                      capture_stdout=True,
+                      capture_stderr=True).Wait() == 0
 
   def PruneHeads(self):
     """Prune any topic branches already merged into upstream.
@@ -1462,7 +1566,7 @@
     rev = self.GetRevisionId(left)
     if cb is not None \
        and not self._revlist(HEAD + '...' + rev) \
-       and not self.IsDirty(consider_untracked = False):
+       and not self.IsDirty(consider_untracked=False):
       self.work_git.DetachHead(HEAD)
       kill.append(cb)
 
@@ -1495,7 +1599,7 @@
 
     kept = []
     for branch in kill:
-      if (R_HEADS + branch) in left:
+      if R_HEADS + branch in left:
         branch = self.GetBranch(branch)
         base = branch.LocalMerge
         if not base:
@@ -1545,8 +1649,8 @@
     def parse_gitmodules(gitdir, rev):
       cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev]
       try:
-        p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
-                       bare = True, gitdir = gitdir)
+        p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
+                       bare=True, gitdir=gitdir)
       except GitError:
         return [], []
       if p.Wait() != 0:
@@ -1558,8 +1662,8 @@
         os.write(fd, p.stdout)
         os.close(fd)
         cmd = ['config', '--file', temp_gitmodules_path, '--list']
-        p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
-                       bare = True, gitdir = gitdir)
+        p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
+                       bare=True, gitdir=gitdir)
         if p.Wait() != 0:
           return [], []
         gitmodules_lines = p.stdout.split('\n')
@@ -1592,8 +1696,8 @@
       cmd = ['ls-tree', rev, '--']
       cmd.extend(paths)
       try:
-        p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True,
-                       bare = True, gitdir = gitdir)
+        p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
+                       bare=True, gitdir=gitdir)
       except GitError:
         return []
       if p.Wait() != 0:
@@ -1628,23 +1732,24 @@
         continue
 
       remote = RemoteSpec(self.remote.name,
-                          url = url,
-                          review = self.remote.review)
-      subproject = Project(manifest = self.manifest,
-                           name = name,
-                           remote = remote,
-                           gitdir = gitdir,
-                           objdir = objdir,
-                           worktree = worktree,
-                           relpath = relpath,
-                           revisionExpr = self.revisionExpr,
-                           revisionId = rev,
-                           rebase = self.rebase,
-                           groups = self.groups,
-                           sync_c = self.sync_c,
-                           sync_s = self.sync_s,
-                           parent = self,
-                           is_derived = True)
+                          url=url,
+                          review=self.remote.review,
+                          revision=self.remote.revision)
+      subproject = Project(manifest=self.manifest,
+                           name=name,
+                           remote=remote,
+                           gitdir=gitdir,
+                           objdir=objdir,
+                           worktree=worktree,
+                           relpath=relpath,
+                           revisionExpr=self.revisionExpr,
+                           revisionId=rev,
+                           rebase=self.rebase,
+                           groups=self.groups,
+                           sync_c=self.sync_c,
+                           sync_s=self.sync_s,
+                           parent=self,
+                           is_derived=True)
       result.append(subproject)
       result.extend(subproject.GetDerivedSubprojects())
     return result
@@ -1674,6 +1779,7 @@
     if command.Wait() != 0:
       raise GitError('git archive %s: %s' % (self.name, command.stderr))
 
+
   def _RemoteFetch(self, name=None,
                    current_branch_only=False,
                    initial=False,
@@ -1683,26 +1789,43 @@
 
     is_sha1 = False
     tag_name = None
+    depth = None
 
-    if self.clone_depth:
-      depth = self.clone_depth
-    else:
-      depth = self.manifest.manifestProject.config.GetString('repo.depth')
+    # The depth should not be used when fetching to a mirror because
+    # it will result in a shallow repository that cannot be cloned or
+    # fetched from.
+    if not self.manifest.IsMirror:
+      if self.clone_depth:
+        depth = self.clone_depth
+      else:
+        depth = self.manifest.manifestProject.config.GetString('repo.depth')
+      # The repo project should never be synced with partial depth
+      if self.relpath == '.repo/repo':
+        depth = None
+
     if depth:
       current_branch_only = True
 
+    if ID_RE.match(self.revisionExpr) is not None:
+      is_sha1 = True
+
     if current_branch_only:
-      if ID_RE.match(self.revisionExpr) is not None:
-        is_sha1 = True
-      elif self.revisionExpr.startswith(R_TAGS):
+      if self.revisionExpr.startswith(R_TAGS):
         # this is a tag and its sha1 value should never change
         tag_name = self.revisionExpr[len(R_TAGS):]
 
       if is_sha1 or tag_name is not None:
         if self._CheckForSha1():
           return True
-      if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)):
-        current_branch_only = False
+      if is_sha1 and not depth:
+        # When syncing a specific commit and --depth is not set:
+        # * if upstream is explicitly specified and is not a sha1, fetch only
+        #   upstream, as users expect only upstream to be fetched.
+        #   Note: The commit might not be in upstream, in which case the sync
+        #   will fail.
+        # * otherwise, fetch all branches to make sure we end up with the
+        #   specific commit.
+        current_branch_only = self.upstream and not ID_RE.match(self.upstream)
 
     if not name:
       name = self.remote.name
@@ -1752,9 +1875,7 @@
 
     cmd = ['fetch']
 
-    # The --depth option only affects the initial fetch; after that we'll do
-    # full fetches of changes.
-    if depth and initial:
+    if depth:
       cmd.append('--depth=%s' % depth)
 
     if quiet:
@@ -1763,46 +1884,74 @@
       cmd.append('--update-head-ok')
     cmd.append(name)
 
+    # If using depth then we should not get all the tags since they may
+    # be outside of the depth.
+    if no_tags or depth:
+      cmd.append('--no-tags')
+    else:
+      cmd.append('--tags')
+
+    spec = []
     if not current_branch_only:
       # Fetch whole repo
-      # If using depth then we should not get all the tags since they may
-      # be outside of the depth.
-      if no_tags or depth:
-        cmd.append('--no-tags')
-      else:
-        cmd.append('--tags')
-
-      cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
+      spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
     elif tag_name is not None:
-      cmd.append('tag')
-      cmd.append(tag_name)
-    else:
+      spec.append('tag')
+      spec.append(tag_name)
+
+    if not self.manifest.IsMirror:
       branch = self.revisionExpr
-      if is_sha1:
-        branch = self.upstream
-      if branch.startswith(R_HEADS):
-        branch = branch[len(R_HEADS):]
-      cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+      if is_sha1 and depth and git_require((1, 8, 3)):
+        # Shallow checkout of a specific commit: fetch from that commit
+        # directly, and not only from the heads, as the commit might be
+        # deeper in the history.
+        spec.append(branch)
+      else:
+        if is_sha1:
+          branch = self.upstream
+        if branch is not None and branch.strip():
+          if not branch.startswith('refs/'):
+            branch = R_HEADS + branch
+          spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))
+    cmd.extend(spec)
+
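+    # repo.shallowfetch records the refspecs of the last shallow fetch; if
+    # they changed, unshallow those refs first by re-fetching them with an
+    # effectively infinite depth.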
+    shallowfetch = self.config.GetString('repo.shallowfetch')
+    if shallowfetch and shallowfetch != ' '.join(spec):
+      GitCommand(self, ['fetch', '--depth=2147483647', name]
+                 + shallowfetch.split(),
+                 bare=True, ssh_proxy=ssh_proxy).Wait()
+    if depth:
+      self.config.SetString('repo.shallowfetch', ' '.join(spec))
+    else:
+      self.config.SetString('repo.shallowfetch', None)
 
     ok = False
     for _i in range(2):
-      ret = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait()
+      gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy)
+      ret = gitcmd.Wait()
       if ret == 0:
         ok = True
         break
+      # If needed, run 'git remote prune' the first time through the loop
+      elif (not _i and
+            "error:" in gitcmd.stderr and
+            "git remote prune" in gitcmd.stderr):
+        prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
+                              ssh_proxy=ssh_proxy)
+        ret = prunecmd.Wait()
+        if ret:
+          break
+        continue
       elif current_branch_only and is_sha1 and ret == 128:
         # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1
         # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus
         # abort the optimization attempt and do a full sync.
         break
+      elif ret < 0:
+        # Git died with a signal, exit immediately
+        break
       time.sleep(random.randint(30, 45))
 
     if initial:
-      # Ensure that some refs exist.  Otherwise, we probably aren't looking
-      # at a real git repository and may have a bad url.
-      if not self.bare_ref.all:
-          ok = False
-
       if alt_dir:
         if old_packed != '':
           _lwrite(packed_refs, old_packed)
@@ -1815,8 +1964,15 @@
       # got what we wanted, else trigger a second run of all
       # refs.
       if not self._CheckForSha1():
-        return self._RemoteFetch(name=name, current_branch_only=False,
-                                 initial=False, quiet=quiet, alt_dir=alt_dir)
+        if not depth:
+          # Avoid infinite recursion when depth is set (since depth implies
+          # current_branch_only)
+          return self._RemoteFetch(name=name, current_branch_only=False,
+                                   initial=False, quiet=quiet, alt_dir=alt_dir)
+        if self.clone_depth:
+          self.clone_depth = None
+          return self._RemoteFetch(name=name, current_branch_only=current_branch_only,
+                                   initial=False, quiet=quiet, alt_dir=alt_dir)
 
     return ok
 
@@ -1877,34 +2033,34 @@
         os.remove(tmpPath)
     if 'http_proxy' in os.environ and 'darwin' == sys.platform:
       cmd += ['--proxy', os.environ['http_proxy']]
-    cookiefile = self._GetBundleCookieFile(srcUrl)
-    if cookiefile:
-      cmd += ['--cookie', cookiefile]
-    if srcUrl.startswith('persistent-'):
-      srcUrl = srcUrl[len('persistent-'):]
-    cmd += [srcUrl]
+    with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile:
+      if cookiefile:
+        cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
+      if srcUrl.startswith('persistent-'):
+        srcUrl = srcUrl[len('persistent-'):]
+      cmd += [srcUrl]
 
-    if IsTrace():
-      Trace('%s', ' '.join(cmd))
-    try:
-      proc = subprocess.Popen(cmd)
-    except OSError:
-      return False
+      if IsTrace():
+        Trace('%s', ' '.join(cmd))
+      try:
+        proc = subprocess.Popen(cmd)
+      except OSError:
+        return False
 
-    curlret = proc.wait()
+      curlret = proc.wait()
 
-    if curlret == 22:
-      # From curl man page:
-      # 22: HTTP page not retrieved. The requested url was not found or
-      # returned another error with the HTTP error code being 400 or above.
-      # This return code only appears if -f, --fail is used.
-      if not quiet:
-        print("Server does not provide clone.bundle; ignoring.",
-              file=sys.stderr)
-      return False
+      if curlret == 22:
+        # From curl man page:
+        # 22: HTTP page not retrieved. The requested url was not found or
+        # returned another error with the HTTP error code being 400 or above.
+        # This return code only appears if -f, --fail is used.
+        if not quiet:
+          print("Server does not provide clone.bundle; ignoring.",
+                file=sys.stderr)
+        return False
 
     if os.path.exists(tmpPath):
-      if curlret == 0 and self._IsValidBundle(tmpPath):
+      if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
         os.rename(tmpPath, dstPath)
         return True
       else:
@@ -1913,45 +2069,51 @@
     else:
       return False
 
-  def _IsValidBundle(self, path):
+  def _IsValidBundle(self, path, quiet):
     try:
       with open(path) as f:
         if f.read(16) == '# v2 git bundle\n':
           return True
         else:
-          print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
+          if not quiet:
+            print("Invalid clone.bundle file; ignoring.", file=sys.stderr)
           return False
     except OSError:
       return False
 
-  def _GetBundleCookieFile(self, url):
+  @contextlib.contextmanager
+  def _GetBundleCookieFile(self, url, quiet):
     if url.startswith('persistent-'):
       try:
         p = subprocess.Popen(
             ['git-remote-persistent-https', '-print_config', url],
             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
             stderr=subprocess.PIPE)
-        p.stdin.close()  # Tell subprocess it's ok to close.
-        prefix = 'http.cookiefile='
-        cookiefile = None
-        for line in p.stdout:
-          line = line.strip()
-          if line.startswith(prefix):
-            cookiefile = line[len(prefix):]
-            break
-        if p.wait():
-          err_msg = p.stderr.read()
-          if ' -print_config' in err_msg:
-            pass  # Persistent proxy doesn't support -print_config.
-          else:
-            print(err_msg, file=sys.stderr)
-        if cookiefile:
-          return cookiefile
+        try:
+          prefix = 'http.cookiefile='
+          cookiefile = None
+          for line in p.stdout:
+            line = line.strip()
+            if line.startswith(prefix):
+              cookiefile = line[len(prefix):]
+              break
+          # Leave subprocess open, as cookie file may be transient.
+          if cookiefile:
+            yield cookiefile
+            return
+        finally:
+          p.stdin.close()
+          if p.wait():
+            err_msg = p.stderr.read()
+            if ' -print_config' in err_msg:
+              pass  # Persistent proxy doesn't support -print_config.
+            elif not quiet:
+              print(err_msg, file=sys.stderr)
       except OSError as e:
         if e.errno == errno.ENOENT:
           pass  # No persistent proxy.
         raise
-    return GitConfig.ForUser().GetString('http.cookiefile')
+    yield GitConfig.ForUser().GetString('http.cookiefile')
 
   def _Checkout(self, rev, quiet=False):
     cmd = ['checkout']
@@ -1963,7 +2125,7 @@
       if self._allrefs:
         raise GitError('%s checkout %s ' % (self.name, rev))
 
-  def _CherryPick(self, rev, quiet=False):
+  def _CherryPick(self, rev):
     cmd = ['cherry-pick']
     cmd.append(rev)
     cmd.append('--')
@@ -1971,7 +2133,7 @@
       if self._allrefs:
         raise GitError('%s cherry-pick %s ' % (self.name, rev))
 
-  def _Revert(self, rev, quiet=False):
+  def _Revert(self, rev):
     cmd = ['revert']
     cmd.append('--no-edit')
     cmd.append(rev)
@@ -1988,7 +2150,7 @@
     if GitCommand(self, cmd).Wait() != 0:
       raise GitError('%s reset --hard %s ' % (self.name, rev))
 
-  def _Rebase(self, upstream, onto = None):
+  def _Rebase(self, upstream, onto=None):
     cmd = ['rebase']
     if onto is not None:
       cmd.extend(['--onto', onto])
@@ -2003,64 +2165,80 @@
     if GitCommand(self, cmd).Wait() != 0:
       raise GitError('%s merge %s ' % (self.name, head))
 
-  def _InitGitDir(self, mirror_git=None):
-    if not os.path.exists(self.gitdir):
-
+  def _InitGitDir(self, mirror_git=None, force_sync=False):
+    init_git_dir = not os.path.exists(self.gitdir)
+    init_obj_dir = not os.path.exists(self.objdir)
+    try:
       # Initialize the bare repository, which contains all of the objects.
-      if not os.path.exists(self.objdir):
+      if init_obj_dir:
         os.makedirs(self.objdir)
         self.bare_objdir.init()
 
       # If we have a separate directory to hold refs, initialize it as well.
       if self.objdir != self.gitdir:
-        os.makedirs(self.gitdir)
-        self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False,
-                              copy_all=True)
+        if init_git_dir:
+          os.makedirs(self.gitdir)
 
-      mp = self.manifest.manifestProject
-      ref_dir = mp.config.GetString('repo.reference') or ''
+        if init_obj_dir or init_git_dir:
+          self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False,
+                                copy_all=True)
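+        # Verify the links in gitdir still point at this objdir; a mismatch
+        # means the checkout was created against a different object store.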
+        try:
+          self._CheckDirReference(self.objdir, self.gitdir, share_refs=False)
+        except GitError as e:
+          if force_sync:
+            print("Retrying clone after deleting %s" % self.gitdir,
+                  file=sys.stderr)
+            try:
+              shutil.rmtree(os.path.realpath(self.gitdir))
+              if self.worktree and os.path.exists(
+                  os.path.realpath(self.worktree)):
+                shutil.rmtree(os.path.realpath(self.worktree))
+              return self._InitGitDir(mirror_git=mirror_git, force_sync=False)
+            except:
+              raise e
+          raise e
 
-      if ref_dir or mirror_git:
-        if not mirror_git:
-          mirror_git = os.path.join(ref_dir, self.name + '.git')
-        repo_git = os.path.join(ref_dir, '.repo', 'projects',
-                                self.relpath + '.git')
+      if init_git_dir:
+        mp = self.manifest.manifestProject
+        ref_dir = mp.config.GetString('repo.reference') or ''
 
-        if os.path.exists(mirror_git):
-          ref_dir = mirror_git
+        if ref_dir or mirror_git:
+          if not mirror_git:
+            mirror_git = os.path.join(ref_dir, self.name + '.git')
+          repo_git = os.path.join(ref_dir, '.repo', 'projects',
+                                  self.relpath + '.git')
 
-        elif os.path.exists(repo_git):
-          ref_dir = repo_git
+          if os.path.exists(mirror_git):
+            ref_dir = mirror_git
 
+          elif os.path.exists(repo_git):
+            ref_dir = repo_git
+
+          else:
+            ref_dir = None
+
+          if ref_dir:
+            _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
+                    os.path.join(ref_dir, 'objects') + '\n')
+
+        self._UpdateHooks()
+
+        m = self.manifest.manifestProject.config
+        for key in ['user.name', 'user.email']:
+          if m.Has(key, include_defaults=False):
+            self.config.SetString(key, m.GetString(key))
+        if self.manifest.IsMirror:
+          self.config.SetString('core.bare', 'true')
         else:
-          ref_dir = None
-
-        if ref_dir:
-          _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
-                  os.path.join(ref_dir, 'objects') + '\n')
-
-      self._UpdateHooks()
-
-      m = self.manifest.manifestProject.config
-      for key in ['user.name', 'user.email']:
-        if m.Has(key, include_defaults = False):
-          self.config.SetString(key, m.GetString(key))
-      if self.manifest.IsMirror:
-        self.config.SetString('core.bare', 'true')
-      else:
-        self.config.SetString('core.bare', None)
+          self.config.SetString('core.bare', None)
+    except Exception:
+      if init_obj_dir and os.path.exists(self.objdir):
+        shutil.rmtree(self.objdir)
+      if init_git_dir and os.path.exists(self.gitdir):
+        shutil.rmtree(self.gitdir)
+      raise
 
   def _UpdateHooks(self):
     if os.path.exists(self.gitdir):
-      # Always recreate hooks since they can have been changed
-      # since the latest update.
-      hooks = self._gitdir_path('hooks')
-      try:
-        to_rm = os.listdir(hooks)
-      except OSError:
-        to_rm = []
-      for old_hook in to_rm:
-        os.remove(os.path.join(hooks, old_hook))
       self._InitHooks()
 
   def _InitHooks(self):
@@ -2123,7 +2301,7 @@
       if cur != '' or self.bare_ref.get(ref) != self.revisionId:
         msg = 'manifest set to %s' % self.revisionId
         dst = self.revisionId + '^0'
-        self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
+        self.bare_git.UpdateRef(ref, dst, message=msg, detach=True)
     else:
       remote = self.GetRemote(self.remote.name)
       dst = remote.ToLocal(self.revisionExpr)
@@ -2131,6 +2309,22 @@
         msg = 'manifest set to %s' % self.revisionExpr
         self.bare_git.symbolic_ref('-m', msg, ref, dst)
 
+  def _CheckDirReference(self, srcdir, destdir, share_refs):
+    symlink_files = self.shareable_files
+    symlink_dirs = self.shareable_dirs
+    if share_refs:
+      symlink_files += self.working_tree_files
+      symlink_dirs += self.working_tree_dirs
+    to_symlink = symlink_files + symlink_dirs
+    for name in set(to_symlink):
+      dst = os.path.realpath(os.path.join(destdir, name))
+      if os.path.lexists(dst):
+        src = os.path.realpath(os.path.join(srcdir, name))
+        # Fail if the links are pointing to the wrong place
+        if src != dst:
+          raise GitError('--force-sync not enabled; cannot overwrite a local '
+                         'work tree')
+
   def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all):
     """Update |dotgit| to reference |gitdir|, using symlinks where possible.
 
@@ -2142,13 +2336,11 @@
       copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
           This saves you the effort of initializing |dotgit| yourself.
     """
-    # These objects can be shared between several working trees.
-    symlink_files = ['description', 'info']
-    symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
+    symlink_files = self.shareable_files
+    symlink_dirs = self.shareable_dirs
     if share_refs:
-      # These objects can only be used by a single working tree.
-      symlink_files += ['config', 'packed-refs']
-      symlink_dirs += ['logs', 'refs']
+      symlink_files += self.working_tree_files
+      symlink_dirs += self.working_tree_dirs
     to_symlink = symlink_files + symlink_dirs
 
     to_copy = []
@@ -2160,13 +2352,21 @@
         src = os.path.realpath(os.path.join(gitdir, name))
         dst = os.path.realpath(os.path.join(dotgit, name))
 
-        if os.path.lexists(dst) and not os.path.islink(dst):
-          raise GitError('cannot overwrite a local work tree')
+        if os.path.lexists(dst):
+          continue
 
         # If the source dir doesn't exist, create an empty dir.
         if name in symlink_dirs and not os.path.lexists(src):
           os.makedirs(src)
 
+        # If the source file doesn't exist, ensure the destination
+        # file doesn't either.
+        if name in symlink_files and not os.path.lexists(src):
+          try:
+            os.remove(dst)
+          except OSError:
+            pass
+
         if name in to_symlink:
           os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
         elif copy_all and not os.path.islink(dst):
@@ -2176,26 +2376,44 @@
             shutil.copy(src, dst)
       except OSError as e:
         if e.errno == errno.EPERM:
-          raise GitError('filesystem must support symlinks')
+          raise DownloadError('filesystem must support symlinks')
         else:
           raise
 
-  def _InitWorkTree(self):
+  def _InitWorkTree(self, force_sync=False):
     dotgit = os.path.join(self.worktree, '.git')
-    if not os.path.exists(dotgit):
-      os.makedirs(dotgit)
-      self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
-                            copy_all=False)
+    init_dotgit = not os.path.exists(dotgit)
+    try:
+      if init_dotgit:
+        os.makedirs(dotgit)
+        self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True,
+                              copy_all=False)
 
-      _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
+      try:
+        self._CheckDirReference(self.gitdir, dotgit, share_refs=True)
+      except GitError as e:
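+        # The existing .git does not reference this project's gitdir; only
+        # rebuild it when --force-sync was given.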
+        if force_sync:
+          try:
+            shutil.rmtree(dotgit)
+            return self._InitWorkTree(force_sync=False)
+          except:
+            raise e
+        raise e
 
-      cmd = ['read-tree', '--reset', '-u']
-      cmd.append('-v')
-      cmd.append(HEAD)
-      if GitCommand(self, cmd).Wait() != 0:
-        raise GitError("cannot initialize work tree")
+      if init_dotgit:
+        _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
 
-      self._CopyFiles()
+        cmd = ['read-tree', '--reset', '-u']
+        cmd.append('-v')
+        cmd.append(HEAD)
+        if GitCommand(self, cmd).Wait() != 0:
+          raise GitError("cannot initialize work tree")
+
+        self._CopyAndLinkFiles()
+    except Exception:
+      if init_dotgit:
+        shutil.rmtree(dotgit)
+      raise
 
   def _gitdir_path(self, path):
     return os.path.realpath(os.path.join(self.gitdir, path))
@@ -2259,10 +2477,10 @@
                       '-z',
                       '--others',
                       '--exclude-standard'],
-                     bare = False,
+                     bare=False,
                      gitdir=self._gitdir,
-                     capture_stdout = True,
-                     capture_stderr = True)
+                     capture_stdout=True,
+                     capture_stderr=True)
       if p.Wait() == 0:
         out = p.stdout
         if out:
@@ -2277,9 +2495,9 @@
       p = GitCommand(self._project,
                      cmd,
                      gitdir=self._gitdir,
-                     bare = False,
-                     capture_stdout = True,
-                     capture_stderr = True)
+                     bare=False,
+                     capture_stdout=True,
+                     capture_stderr=True)
       try:
         out = p.process.stdout.read()
         r = {}
@@ -2287,8 +2505,8 @@
           out = iter(out[:-1].split('\0'))  # pylint: disable=W1401
           while out:
             try:
-              info = out.next()
-              path = out.next()
+              info = next(out)
+              path = next(out)
             except StopIteration:
               break
 
@@ -2314,7 +2532,7 @@
             info = _Info(path, *info)
             if info.status in ('R', 'C'):
               info.src_path = info.path
-              info.path = out.next()
+              info.path = next(out)
             r[info.path] = info
         return r
       finally:
@@ -2385,10 +2603,10 @@
       cmdv.extend(args)
       p = GitCommand(self._project,
                      cmdv,
-                     bare = self._bare,
+                     bare=self._bare,
                      gitdir=self._gitdir,
-                     capture_stdout = True,
-                     capture_stderr = True)
+                     capture_stdout=True,
+                     capture_stderr=True)
       r = []
       for line in p.process.stdout:
         if line[-1] == '\n':
@@ -2438,10 +2656,10 @@
         cmdv.extend(args)
         p = GitCommand(self._project,
                        cmdv,
-                       bare = self._bare,
+                       bare=self._bare,
                        gitdir=self._gitdir,
-                       capture_stdout = True,
-                       capture_stderr = True)
+                       capture_stdout=True,
+                       capture_stderr=True)
         if p.Wait() != 0:
           raise GitError('%s %s: %s' % (
                          self._project.name,
@@ -2506,9 +2724,9 @@
 class _SyncColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'reposync')
-    self.project   = self.printer('header', attr = 'bold')
-    self.info      = self.printer('info')
-    self.fail      = self.printer('fail', fg='red')
+    self.project = self.printer('header', attr='bold')
+    self.info = self.printer('info')
+    self.fail = self.printer('fail', fg='red')
 
 class SyncBuffer(object):
   def __init__(self, config, detach_head=False):
@@ -2570,16 +2788,16 @@
   """
   def __init__(self, manifest, name, gitdir, worktree):
     Project.__init__(self,
-                     manifest = manifest,
-                     name = name,
-                     gitdir = gitdir,
-                     objdir = gitdir,
-                     worktree = worktree,
-                     remote = RemoteSpec('origin'),
-                     relpath = '.repo/%s' % name,
-                     revisionExpr = 'refs/heads/master',
-                     revisionId = None,
-                     groups = None)
+                     manifest=manifest,
+                     name=name,
+                     gitdir=gitdir,
+                     objdir=gitdir,
+                     worktree=worktree,
+                     remote=RemoteSpec('origin'),
+                     relpath='.repo/%s' % name,
+                     revisionExpr='refs/heads/master',
+                     revisionId=None,
+                     groups=None)
 
   def PreSync(self):
     if self.Exists:
@@ -2590,20 +2808,20 @@
           self.revisionExpr = base
           self.revisionId = None
 
-  def MetaBranchSwitch(self, target):
+  def MetaBranchSwitch(self):
     """ Prepare MetaProject for manifest branch switch
     """
 
     # detach and delete manifest branch, allowing a new
     # branch to take over
-    syncbuf = SyncBuffer(self.config, detach_head = True)
+    syncbuf = SyncBuffer(self.config, detach_head=True)
     self.Sync_LocalHalf(syncbuf)
     syncbuf.Finish()
 
     return GitCommand(self,
                         ['update-ref', '-d', 'refs/heads/default'],
-                        capture_stdout = True,
-                        capture_stderr = True).Wait() == 0
+                        capture_stdout=True,
+                        capture_stderr=True).Wait() == 0
 
 
   @property
diff --git a/repo b/repo
index 768f11f..f12354a 100755
--- a/repo
+++ b/repo
@@ -114,6 +114,7 @@
 import optparse
 import os
 import re
+import shutil
 import stat
 import subprocess
 import sys
@@ -138,10 +139,6 @@
 
 # Python version check
 ver = sys.version_info
-if ver[0] == 3:
-  _print('warning: Python 3 support is currently experimental. YMMV.\n'
-         'Please use Python 2.6 - 2.7 instead.',
-         file=sys.stderr)
 if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
   _print('error: Python version %s unsupported.\n'
          'Please use Python 2.6 - 2.7 instead.'
@@ -465,7 +462,7 @@
     try:
       r = urllib.request.urlopen(url)
     except urllib.error.HTTPError as e:
-      if e.code in [403, 404]:
+      if e.code in [401, 403, 404]:
         return False
       _print('fatal: Cannot get %s' % url, file=sys.stderr)
       _print('fatal: HTTP error %s' % e.code, file=sys.stderr)
@@ -741,12 +738,7 @@
       try:
         _Init(args)
       except CloneFailure:
-        for root, dirs, files in os.walk(repodir, topdown=False):
-          for name in files:
-            os.remove(os.path.join(root, name))
-          for name in dirs:
-            os.rmdir(os.path.join(root, name))
-        os.rmdir(repodir)
+        shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
         sys.exit(1)
       repo_main, rel_repo_dir = _FindRepo()
     else:
@@ -772,4 +764,8 @@
 
 
 if __name__ == '__main__':
+  if ver[0] == 3:
+    _print('warning: Python 3 support is currently experimental. YMMV.\n'
+           'Please use Python 2.6 - 2.7 instead.',
+           file=sys.stderr)
   main(sys.argv[1:])
diff --git a/subcmds/branches.py b/subcmds/branches.py
index f714c1e..2902684 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -47,6 +47,10 @@
     return self.current > 0
 
   @property
+  def IsSplitCurrent(self):
+    return self.current != 0 and self.current != len(self.projects)
+
+  @property
   def IsPublished(self):
     return self.published > 0
 
@@ -139,10 +143,14 @@
       if in_cnt < project_cnt:
         fmt = out.write
         paths = []
-        if in_cnt < project_cnt - in_cnt:
+        non_cur_paths = []
+        if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
           in_type = 'in'
           for b in i.projects:
-            paths.append(b.project.relpath)
+            if not i.IsSplitCurrent or b.current:
+              paths.append(b.project.relpath)
+            else:
+              non_cur_paths.append(b.project.relpath)
         else:
           fmt = out.notinproject
           in_type = 'not in'
@@ -154,13 +162,19 @@
               paths.append(p.relpath)
 
         s = ' %s %s' % (in_type, ', '.join(paths))
-        if width + 7 + len(s) < 80:
+        if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
+          fmt = out.current if i.IsCurrent else fmt
           fmt(s)
         else:
           fmt(' %s:' % in_type)
+          fmt = out.current if i.IsCurrent else out.write
           for p in paths:
             out.nl()
             fmt(width*' ' + '          %s' % p)
+          fmt = out.write
+          for p in non_cur_paths:
+            out.nl()
+            fmt(width*' ' + '          %s' % p)
       else:
         out.write(' in all projects')
       out.nl()
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py
index 520e4c3..1f7dffd 100644
--- a/subcmds/cherry_pick.py
+++ b/subcmds/cherry_pick.py
@@ -76,6 +76,7 @@
                      capture_stdout = True,
                      capture_stderr = True)
       p.stdin.write(new_msg)
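+      # Close stdin so the child sees EOF instead of blocking for more input.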
+      p.stdin.close()
       if p.Wait() != 0:
         print("error: Failed to update commit message", file=sys.stderr)
         sys.exit(1)
diff --git a/subcmds/download.py b/subcmds/download.py
index 098d8b4..a029462 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -93,6 +93,7 @@
         except GitError:
           print('[%s] Could not complete the cherry-pick of %s' \
                 % (project.name, dl.commit), file=sys.stderr)
+          sys.exit(1)
 
       elif opt.revert:
         project._Revert(dl.commit)
diff --git a/subcmds/forall.py b/subcmds/forall.py
index e2a420a..b93cd6d 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -14,10 +14,13 @@
 # limitations under the License.
 
 from __future__ import print_function
+import errno
 import fcntl
+import multiprocessing
 import re
 import os
 import select
+import signal
 import sys
 import subprocess
 
@@ -31,6 +34,7 @@
   'log',
 ]
 
+
 class ForallColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'forall')
@@ -87,6 +91,12 @@
 REPO_RREV is the name of the revision from the manifest, exactly
 as written in the manifest.
 
+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
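+
+For example, the following adds a simple progress prefix to each
+project's output:
+
+  repo forall -c 'echo "[$REPO_I/$REPO_COUNT] $REPO_PROJECT"'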
+
 REPO__* are any extra environment variables, specified by the
 "annotation" element under any project element.  This can be useful
 for differentiating trees based on user-specific criteria, or simply
@@ -126,9 +136,35 @@
     g.add_option('-v', '--verbose',
                  dest='verbose', action='store_true',
                  help='Show command error messages')
+    g.add_option('-j', '--jobs',
+                 dest='jobs', action='store', type='int', default=1,
+                 help='number of commands to execute simultaneously')
 
   def WantPager(self, opt):
-    return opt.project_header
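+    # Only page serial runs; parallel jobs interleave their output.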
+    return opt.project_header and opt.jobs == 1
+
+  def _SerializeProject(self, project):
+    """ Serialize a project._GitGetByExec instance.
+
+    project._GitGetByExec is not pickle-able. Instead of trying to pass it
+    around between processes, make a dict ourselves containing only the
+    attributes that we need.
+
+    """
+    if not self.manifest.IsMirror:
+      lrev = project.GetRevisionId()
+    else:
+      lrev = None
+    return {
+      'name': project.name,
+      'relpath': project.relpath,
+      'remote_name': project.remote.name,
+      'lrev': lrev,
+      'rrev': project.revisionExpr,
+      'annotations': dict((a.name, a.value) for a in project.annotations),
+      'gitdir': project.gitdir,
+      'worktree': project.worktree,
+    }
 
   def Execute(self, opt, args):
     if not opt.command:
@@ -167,123 +203,188 @@
       # pylint: enable=W0631
 
     mirror = self.manifest.IsMirror
-    out = ForallColoring(self.manifest.manifestProject.config)
-    out.redirect(sys.stdout)
-
     rc = 0
-    first = True
+
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+      self.manifest.manifestProject.worktree, smart_sync_manifest_name)
+
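+    # If a previous smart sync left an override manifest, apply it here too.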
+    if os.path.isfile(smart_sync_manifest_path):
+      self.manifest.Override(smart_sync_manifest_path)
 
     if not opt.regex:
       projects = self.GetProjects(args)
     else:
       projects = self.FindProjects(args)
 
-    for project in projects:
-      env = os.environ.copy()
-      def setenv(name, val):
-        if val is None:
-          val = ''
-        env[name] = val.encode()
+    os.environ['REPO_COUNT'] = str(len(projects))
 
-      setenv('REPO_PROJECT', project.name)
-      setenv('REPO_PATH', project.relpath)
-      setenv('REPO_REMOTE', project.remote.name)
-      setenv('REPO_LREV', project.GetRevisionId())
-      setenv('REPO_RREV', project.revisionExpr)
-      for a in project.annotations:
-        setenv("REPO__%s" % (a.name), a.value)
-
-      if mirror:
-        setenv('GIT_DIR', project.gitdir)
-        cwd = project.gitdir
-      else:
-        cwd = project.worktree
-
-      if not os.path.exists(cwd):
-        if (opt.project_header and opt.verbose) \
-        or not opt.project_header:
-          print('skipping %s/' % project.relpath, file=sys.stderr)
-        continue
-
-      if opt.project_header:
-        stdin = subprocess.PIPE
-        stdout = subprocess.PIPE
-        stderr = subprocess.PIPE
-      else:
-        stdin = None
-        stdout = None
-        stderr = None
-
-      p = subprocess.Popen(cmd,
-                           cwd = cwd,
-                           shell = shell,
-                           env = env,
-                           stdin = stdin,
-                           stdout = stdout,
-                           stderr = stderr)
-
-      if opt.project_header:
-        class sfd(object):
-          def __init__(self, fd, dest):
-            self.fd = fd
-            self.dest = dest
-          def fileno(self):
-            return self.fd.fileno()
-
-        empty = True
-        errbuf = ''
-
-        p.stdin.close()
-        s_in = [sfd(p.stdout, sys.stdout),
-                sfd(p.stderr, sys.stderr)]
-
-        for s in s_in:
-          flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-          fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-        while s_in:
-          in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
-          for s in in_ready:
-            buf = s.fd.read(4096)
-            if not buf:
-              s.fd.close()
-              s_in.remove(s)
-              continue
-
-            if not opt.verbose:
-              if s.fd != p.stdout:
-                errbuf += buf
-                continue
-
-            if empty:
-              if first:
-                first = False
-              else:
-                out.nl()
-
-              if mirror:
-                project_header_path = project.name
-              else:
-                project_header_path = project.relpath
-              out.project('project %s/', project_header_path)
-              out.nl()
-              out.flush()
-              if errbuf:
-                sys.stderr.write(errbuf)
-                sys.stderr.flush()
-                errbuf = ''
-              empty = False
-
-            s.dest.write(buf)
-            s.dest.flush()
-
-      r = p.wait()
-      if r != 0:
-        if r != rc:
-          rc = r
-        if opt.abort_on_errors:
-          print("error: %s: Aborting due to previous error" % project.relpath,
-                file=sys.stderr)
-          sys.exit(r)
+    pool = multiprocessing.Pool(opt.jobs, InitWorker)
+    try:
+      config = self.manifest.manifestProject.config
+      results_it = pool.imap(
+         DoWorkWrapper,
+         self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
+      pool.close()
+      for r in results_it:
+        rc = rc or r
+        if r != 0 and opt.abort_on_errors:
+          raise Exception('Aborting due to previous error')
+    except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+      # Catch KeyboardInterrupt raised inside and outside of workers
+      print('Interrupted - terminating the pool')
+      pool.terminate()
+      rc = rc or errno.EINTR
+    except Exception as e:
+      # Catch any other exceptions raised
+      print('Got an error, terminating the pool: %r' % e,
+            file=sys.stderr)
+      pool.terminate()
+      rc = rc or getattr(e, 'errno', 1)
+    finally:
+      pool.join()
     if rc != 0:
       sys.exit(rc)
+
+  def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
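+    """Yield the per-project argument lists consumed by DoWorkWrapper."""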
+    for cnt, p in enumerate(projects):
+      try:
+        project = self._SerializeProject(p)
+      except Exception as e:
+        print('Project list error: %r' % e,
+              file=sys.stderr)
+        return
+      except KeyboardInterrupt:
+        print('Project list interrupted',
+              file=sys.stderr)
+        return
+      yield [mirror, opt, cmd, shell, cnt, config, project]
+
+class WorkerKeyboardInterrupt(Exception):
+  """ Keyboard interrupt exception for worker processes. """
+  pass
+
+
+def InitWorker():
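+  # Ignore SIGINT in worker processes; the parent catches KeyboardInterrupt
+  # and terminates the pool.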
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+def DoWorkWrapper(args):
+  """ A wrapper around the DoWork() method.
+
+  Catch any KeyboardInterrupt raised here and re-raise it as a different,
+  ``Exception``-based exception, so it neither floods the console with
+  stacktraces nor leaves the parent hanging indefinitely.
+
+  """
+  project = args.pop()
+  try:
+    return DoWork(project, *args)
+  except KeyboardInterrupt:
+    print('%s: Worker interrupted' % project['name'])
+    raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+  env = os.environ.copy()
+  def setenv(name, val):
+    if val is None:
+      val = ''
+    if hasattr(val, 'encode'):
+      val = val.encode()
+    env[name] = val
+
+  setenv('REPO_PROJECT', project['name'])
+  setenv('REPO_PATH', project['relpath'])
+  setenv('REPO_REMOTE', project['remote_name'])
+  setenv('REPO_LREV', project['lrev'])
+  setenv('REPO_RREV', project['rrev'])
+  setenv('REPO_I', str(cnt + 1))
+  for name in project['annotations']:
+    setenv("REPO__%s" % (name), project['annotations'][name])
+
+  if mirror:
+    setenv('GIT_DIR', project['gitdir'])
+    cwd = project['gitdir']
+  else:
+    cwd = project['worktree']
+
+  if not os.path.exists(cwd):
+    if (opt.project_header and opt.verbose) \
+    or not opt.project_header:
+      print('skipping %s/' % project['relpath'], file=sys.stderr)
+    return
+
+  if opt.project_header:
+    stdin = subprocess.PIPE
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
+  else:
+    stdin = None
+    stdout = None
+    stderr = None
+
+  p = subprocess.Popen(cmd,
+                       cwd=cwd,
+                       shell=shell,
+                       env=env,
+                       stdin=stdin,
+                       stdout=stdout,
+                       stderr=stderr)
+
+  if opt.project_header:
+    out = ForallColoring(config)
+    out.redirect(sys.stdout)
+    class sfd(object):
+      def __init__(self, fd, dest):
+        self.fd = fd
+        self.dest = dest
+      def fileno(self):
+        return self.fd.fileno()
+
+    empty = True
+    errbuf = ''
+
+    p.stdin.close()
+    s_in = [sfd(p.stdout, sys.stdout),
+            sfd(p.stderr, sys.stderr)]
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s.fd.close()
+          s_in.remove(s)
+          continue
+
+        if not opt.verbose:
+          if s.fd != p.stdout:
+            errbuf += buf
+            continue
+
+        if empty and out:
+          if cnt != 0:
+            out.nl()
+
+          if mirror:
+            project_header_path = project['name']
+          else:
+            project_header_path = project['relpath']
+          out.project('project %s/', project_header_path)
+          out.nl()
+          out.flush()
+          if errbuf:
+            sys.stderr.write(errbuf)
+            sys.stderr.flush()
+            errbuf = ''
+          empty = False
+
+        s.dest.write(buf)
+        s.dest.flush()
+
+  r = p.wait()
+  return r
diff --git a/subcmds/info.py b/subcmds/info.py
index d42860a..ed196e9 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -59,7 +59,8 @@
                       or 'all,-notdefault')
 
     self.heading("Manifest branch: ")
-    self.headtext(self.manifest.default.revisionExpr)
+    if self.manifest.default.revisionExpr:
+      self.headtext(self.manifest.default.revisionExpr)
     self.out.nl()
     self.heading("Manifest merge branch: ")
     self.headtext(mergeBranch)
diff --git a/subcmds/init.py b/subcmds/init.py
index b1fcb69..dbb6ddd 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -27,7 +27,7 @@
   import imp
   import urlparse
   urllib = imp.new_module('urllib')
-  urllib.parse = urlparse.urlparse
+  urllib.parse = urlparse
 
 from color import Coloring
 from command import InteractiveCommand, MirrorSafeCommand
@@ -153,7 +153,7 @@
       # server where this git is located, so let's save that here.
       mirrored_manifest_git = None
       if opt.reference:
-        manifest_git_path = urllib.parse(opt.manifest_url).path[1:]
+        manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:]
         mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
         if not mirrored_manifest_git.endswith(".git"):
           mirrored_manifest_git += ".git"
@@ -233,7 +233,7 @@
       sys.exit(1)
 
     if opt.manifest_branch:
-      m.MetaBranchSwitch(opt.manifest_branch)
+      m.MetaBranchSwitch()
 
     syncbuf = SyncBuffer(m.config)
     m.Sync_LocalHalf(syncbuf)
diff --git a/subcmds/start.py b/subcmds/start.py
index 2d723fc..60ad41e 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -59,9 +59,13 @@
     for project in all_projects:
       pm.update()
       # If the current revision is a specific SHA1 then we can't push back
-      # to it so substitute the manifest default revision instead.
+      # to it, so substitute the dest_branch if defined, or the manifest
+      # default revision otherwise.
       if IsId(project.revisionExpr):
-        project.revisionExpr = self.manifest.default.revisionExpr
+        if project.dest_branch:
+          project.revisionExpr = project.dest_branch
+        else:
+          project.revisionExpr = self.manifest.default.revisionExpr
       if not project.StartBranch(nb):
         err.append(project)
     pm.end()
diff --git a/subcmds/status.py b/subcmds/status.py
index 41c4429..38c229b 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -22,15 +22,8 @@
 
 import glob
 
-from pyversion import is_python3
-if is_python3():
-  import io
-else:
-  import StringIO as io
-
 import itertools
 import os
-import sys
 
 from color import Coloring
 
@@ -97,7 +90,7 @@
                  dest='orphans', action='store_true',
                  help="include objects in working directory outside of repo projects")
 
-  def _StatusHelper(self, project, clean_counter, sem, output):
+  def _StatusHelper(self, project, clean_counter, sem):
     """Obtains the status for a specific project.
 
     Obtains the status for a project, redirecting the output to
@@ -111,9 +104,9 @@
       output: Where to output the status.
     """
     try:
-      state = project.PrintWorkTreeStatus(output)
+      state = project.PrintWorkTreeStatus()
       if state == 'CLEAN':
-        clean_counter.next()
+        next(clean_counter)
     finally:
       sem.release()
 
@@ -122,16 +115,16 @@
     status_header = ' --\t'
     for item in dirs:
       if not os.path.isdir(item):
-        outstring.write(''.join([status_header, item]))
+        outstring.append(''.join([status_header, item]))
         continue
       if item in proj_dirs:
         continue
       if item in proj_dirs_parents:
-        self._FindOrphans(glob.glob('%s/.*' % item) + \
-            glob.glob('%s/*' % item), \
+        self._FindOrphans(glob.glob('%s/.*' % item) +
+            glob.glob('%s/*' % item),
             proj_dirs, proj_dirs_parents, outstring)
         continue
-      outstring.write(''.join([status_header, item, '/']))
+      outstring.append(''.join([status_header, item, '/']))
 
   def Execute(self, opt, args):
     all_projects = self.GetProjects(args)
@@ -141,30 +134,21 @@
       for project in all_projects:
         state = project.PrintWorkTreeStatus()
         if state == 'CLEAN':
-          counter.next()
+          next(counter)
     else:
       sem = _threading.Semaphore(opt.jobs)
-      threads_and_output = []
+      threads = []
       for project in all_projects:
         sem.acquire()
 
-        class BufList(io.StringIO):
-          def dump(self, ostream):
-            for entry in self.buflist:
-              ostream.write(entry)
-
-        output = BufList()
-
         t = _threading.Thread(target=self._StatusHelper,
-                              args=(project, counter, sem, output))
-        threads_and_output.append((t, output))
+                              args=(project, counter, sem))
+        threads.append(t)
         t.daemon = True
         t.start()
-      for (t, output) in threads_and_output:
+      for t in threads:
         t.join()
-        output.dump(sys.stdout)
-        output.close()
-    if len(all_projects) == counter.next():
+    if len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')
 
     if opt.orphans:
@@ -188,23 +172,21 @@
       try:
         os.chdir(self.manifest.topdir)
 
-        outstring = io.StringIO()
-        self._FindOrphans(glob.glob('.*') + \
-            glob.glob('*'), \
+        outstring = []
+        self._FindOrphans(glob.glob('.*') +
+            glob.glob('*'),
             proj_dirs, proj_dirs_parents, outstring)
 
-        if outstring.buflist:
+        if outstring:
           output = StatusColoring(self.manifest.globalConfig)
           output.project('Objects not within a project (orphans)')
           output.nl()
-          for entry in outstring.buflist:
+          for entry in outstring:
             output.untracked(entry)
             output.nl()
         else:
           print('No orphan files or directories')
 
-        outstring.close()
-
       finally:
         # Restore CWD.
         os.chdir(orig_path)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index b50df09..43d450b 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -14,10 +14,10 @@
 # limitations under the License.
 
 from __future__ import print_function
+import json
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import pickle
 import re
 import shutil
 import socket
@@ -119,6 +119,11 @@
 The -f/--force-broken option can be used to proceed with syncing
 other projects if a project sync fails.
 
+The --force-sync option can be used to overwrite existing git
+directories if they have previously been linked to a different
+object directory. WARNING: This may cause data to be lost since
+refs may be removed when overwriting.
+
 The --no-clone-bundle option disables any attempt to use
 $URL/clone.bundle to bootstrap a new Git repository from a
 resumeable bundle file on a content delivery network. This
@@ -128,6 +133,13 @@
 The --fetch-submodules option enables fetching Git submodules
 of a project from server.
 
+The -c/--current-branch option can be used to only fetch objects that
+are on the branch specified by a project's revision.
+
+The --optimized-fetch option can be used to only fetch projects that
+are fixed to a sha1 revision if the sha1 revision does not already
+exist locally.
+
 SSH Connections
 ---------------
 
@@ -167,6 +179,11 @@
     p.add_option('-f', '--force-broken',
                  dest='force_broken', action='store_true',
                  help="continue sync even if a project fails to sync")
+    p.add_option('--force-sync',
+                 dest='force_sync', action='store_true',
+                 help="overwrite an existing git directory if it needs to "
+                      "point to a different object directory. WARNING: this "
+                      "may cause loss of data")
     p.add_option('-l', '--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
@@ -203,6 +220,9 @@
     p.add_option('--no-tags',
                  dest='no_tags', action='store_true',
                  help="don't fetch tags")
+    p.add_option('--optimized-fetch',
+                 dest='optimized_fetch', action='store_true',
+                 help='only fetch projects fixed to sha1 if revision does not exist locally')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -271,8 +291,10 @@
         success = project.Sync_NetworkHalf(
           quiet=opt.quiet,
           current_branch_only=opt.current_branch_only,
+          force_sync=opt.force_sync,
           clone_bundle=not opt.no_clone_bundle,
-          no_tags=opt.no_tags, archive=self.manifest.IsArchive)
+          no_tags=opt.no_tags, archive=self.manifest.IsArchive,
+          optimized_fetch=opt.optimized_fetch)
         self._fetch_times.Set(project, time.time() - start)
 
         # Lock around all the rest of the code, since printing, updating a set
@@ -508,6 +530,9 @@
       self.manifest.Override(opt.manifest_name)
 
     manifest_name = opt.manifest_name
+    smart_sync_manifest_name = "smart_sync_override.xml"
+    smart_sync_manifest_path = os.path.join(
+      self.manifest.manifestProject.worktree, smart_sync_manifest_name)
 
     if opt.smart_sync or opt.smart_tag:
       if not self.manifest.manifest_server:
@@ -560,7 +585,10 @@
             branch = branch[len(R_HEADS):]
 
           env = os.environ.copy()
-          if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
+          if 'SYNC_TARGET' in env:
+            target = env['SYNC_TARGET']
+            [success, manifest_str] = server.GetApprovedManifest(branch, target)
+          elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
             target = '%s-%s' % (env['TARGET_PRODUCT'],
                                 env['TARGET_BUILD_VARIANT'])
             [success, manifest_str] = server.GetApprovedManifest(branch, target)
@@ -571,17 +599,16 @@
           [success, manifest_str] = server.GetManifest(opt.smart_tag)
 
         if success:
-          manifest_name = "smart_sync_override.xml"
-          manifest_path = os.path.join(self.manifest.manifestProject.worktree,
-                                       manifest_name)
+          manifest_name = smart_sync_manifest_name
           try:
-            f = open(manifest_path, 'w')
+            f = open(smart_sync_manifest_path, 'w')
             try:
               f.write(manifest_str)
             finally:
               f.close()
-          except IOError:
-            print('error: cannot write manifest to %s' % manifest_path,
+          except IOError as e:
+            print('error: cannot write manifest to %s:\n%s'
+                  % (smart_sync_manifest_path, e),
                   file=sys.stderr)
             sys.exit(1)
           self._ReloadManifest(manifest_name)
@@ -598,6 +625,13 @@
               % (self.manifest.manifest_server, e.errcode, e.errmsg),
               file=sys.stderr)
         sys.exit(1)
+    else:  # Not smart sync or smart tag mode
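+      # Remove any smart sync override manifest left over from a previous run.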
+      if os.path.isfile(smart_sync_manifest_path):
+        try:
+          os.remove(smart_sync_manifest_path)
+        except OSError as e:
+          print('error: failed to remove existing smart sync override manifest: %s' %
+                e, file=sys.stderr)
 
     rp = self.manifest.repoProject
     rp.PreSync()
@@ -611,7 +645,8 @@
     if not opt.local_only:
       mp.Sync_NetworkHalf(quiet=opt.quiet,
                           current_branch_only=opt.current_branch_only,
-                          no_tags=opt.no_tags)
+                          no_tags=opt.no_tags,
+                          optimized_fetch=opt.optimized_fetch)
 
     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
@@ -674,7 +709,7 @@
     for project in all_projects:
       pm.update()
       if project.worktree:
-        project.Sync_LocalHalf(syncbuf)
+        project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
     pm.end()
     print(file=sys.stderr)
     if not syncbuf.Finish():
@@ -762,7 +797,7 @@
   _ALPHA = 0.5
 
   def __init__(self, manifest):
-    self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
     self._times = None
     self._seen = set()
 
@@ -781,22 +816,17 @@
   def _Load(self):
     if self._times is None:
       try:
-        f = open(self._path, 'rb')
-      except IOError:
-        self._times = {}
-        return self._times
-      try:
+        f = open(self._path)
         try:
-          self._times = pickle.load(f)
-        except IOError:
-          try:
-            os.remove(self._path)
-          except OSError:
-            pass
-          self._times = {}
-      finally:
-        f.close()
-    return self._times
+          self._times = json.load(f)
+        finally:
+          f.close()
+      except (IOError, ValueError):
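+        # A missing or corrupt cache is not fatal; delete it and start over.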
+        try:
+          os.remove(self._path)
+        except OSError:
+          pass
+        self._times = {}
 
   def Save(self):
     if self._times is None:
@@ -810,13 +840,13 @@
       del self._times[name]
 
     try:
-      f = open(self._path, 'wb')
+      f = open(self._path, 'w')
       try:
-        pickle.dump(self._times, f)
-      except (IOError, OSError, pickle.PickleError):
-        try:
-          os.remove(self._path)
-        except OSError:
-          pass
-    finally:
-      f.close()
+        json.dump(self._times, f, indent=2)
+      finally:
+        f.close()
+    except (IOError, TypeError):
+      try:
+        os.remove(self._path)
+      except OSError:
+        pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index e2fa261..674fc17 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -25,10 +25,12 @@
 from project import RepoHook
 
 from pyversion import is_python3
+# pylint:disable=W0622
 if not is_python3():
-  # pylint:disable=W0622
   input = raw_input
-  # pylint:enable=W0622
+else:
+  unicode = str
+# pylint:enable=W0622
 
 UNUSUAL_COMMIT_THRESHOLD = 5
 
@@ -337,13 +339,17 @@
         self._AppendAutoList(branch, people)
 
         # Check if there are local changes that may have been forgotten
-        if branch.project.HasChanges():
+        changes = branch.project.UncommitedFiles()
+        if changes:
           key = 'review.%s.autoupload' % branch.project.remote.review
           answer = branch.project.config.GetBoolean(key)
 
           # if they want to auto upload, let's not ask because it could be automated
           if answer is None:
-            sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ')
+            sys.stdout.write('Uncommitted changes in ' + branch.project.name)
+            sys.stdout.write(' (did you forget to amend?):\n')
+            sys.stdout.write('\n'.join(changes) + '\n')
+            sys.stdout.write('Continue uploading? (y/N) ')
             a = sys.stdin.readline().strip().lower()
             if a not in ('y', 'yes', 't', 'true', 'on'):
               print("skipping upload", file=sys.stderr)