sync: use namedtuples for internal return values
Replace tuple returns with namedtuples to simplify adding new fields.

Extend the Sync_NetworkHalf return value to:
 - success: True if successful (the former boolean return value)
 - remote_fetched: True if we called `git fetch`
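
Callers now read named fields instead of unpacking a tuple; a minimal
illustrative sketch (not code from this change):

  result = project.Sync_NetworkHalf(quiet=True)
  if not result.success:
    print('error: fetch failed')
  elif result.remote_fetched:
    # The remote was actually queried (e.g. via `git fetch`).
    pass
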
Change-Id: If63c24c2f849523f77fa19c05bbf23a5e9a20ba9
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/344534
Reviewed-by: Mike Frysinger <vapier@google.com>
Tested-by: LaMont Jones <lamontjones@google.com>
diff --git a/project.py b/project.py
index 2b57a5f..0da2118 100644
--- a/project.py
+++ b/project.py
@@ -26,6 +26,7 @@
import tarfile
import tempfile
import time
+from typing import NamedTuple
import urllib.parse
from color import Coloring
@@ -45,6 +46,14 @@
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
+class SyncNetworkHalfResult(NamedTuple):
+ """Sync_NetworkHalf return value."""
+ # True if successful.
+ success: bool
+ # Did we query the remote? False when optimized_fetch is True and the
+ # commit is already present locally.
+ remote_fetched: bool
+
# Maximum sleep time allowed during retries.
MAXIMUM_RETRY_SLEEP_SEC = 3600.0
# +-10% random jitter is added to each Fetches retry sleep duration.
@@ -1133,7 +1142,7 @@
if archive and not isinstance(self, MetaProject):
if self.remote.url.startswith(('http://', 'https://')):
_error("%s: Cannot fetch archives from http/https remotes.", self.name)
- return False
+ return SyncNetworkHalfResult(False, False)
name = self.relpath.replace('\\', '/')
name = name.replace('/', '_')
@@ -1144,19 +1153,19 @@
self._FetchArchive(tarpath, cwd=topdir)
except GitError as e:
_error('%s', e)
- return False
+ return SyncNetworkHalfResult(False, False)
# From now on, we only need absolute tarpath
tarpath = os.path.join(topdir, tarpath)
if not self._ExtractArchive(tarpath, path=topdir):
- return False
+ return SyncNetworkHalfResult(False, True)
try:
platform_utils.remove(tarpath)
except OSError as e:
_warn("Cannot remove archive %s: %s", tarpath, str(e))
self._CopyAndLinkFiles()
- return True
+ return SyncNetworkHalfResult(True, True)
# If the shared object dir already exists, don't try to rebootstrap with a
# clone bundle download. We should have the majority of objects already.
@@ -1220,9 +1229,11 @@
depth = self.manifest.manifestProject.depth
# See if we can skip the network fetch entirely.
+ remote_fetched = False
if not (optimized_fetch and
(ID_RE.match(self.revisionExpr) and
self._CheckForImmutableRevision())):
+ remote_fetched = True
if not self._RemoteFetch(
initial=is_new,
quiet=quiet, verbose=verbose, output_redir=output_redir,
@@ -1231,7 +1242,7 @@
submodules=submodules, force_sync=force_sync,
ssh_proxy=ssh_proxy,
clone_filter=clone_filter, retry_fetches=retry_fetches):
- return False
+ return SyncNetworkHalfResult(False, remote_fetched)
mp = self.manifest.manifestProject
dissociate = mp.dissociate
@@ -1244,7 +1255,7 @@
if p.stdout and output_redir:
output_redir.write(p.stdout)
if p.Wait() != 0:
- return False
+ return SyncNetworkHalfResult(False, remote_fetched)
platform_utils.remove(alternates_file)
if self.worktree:
@@ -1253,7 +1264,7 @@
self._InitMirrorHead()
platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'),
missing_ok=True)
- return True
+ return SyncNetworkHalfResult(True, remote_fetched)
def PostRepoUpgrade(self):
self._InitHooks()
@@ -3836,7 +3847,7 @@
is_new=is_new, quiet=not verbose, verbose=verbose,
clone_bundle=clone_bundle, current_branch_only=current_branch_only,
tags=tags, submodules=submodules, clone_filter=clone_filter,
- partial_clone_exclude=self.manifest.PartialCloneExclude):
+ partial_clone_exclude=self.manifest.PartialCloneExclude).success:
r = self.GetRemote()
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 282f518..898bc3f 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -51,7 +51,7 @@
_PostRepoUpgrade(self.manifest)
else:
- if not rp.Sync_NetworkHalf():
+ if not rp.Sync_NetworkHalf().success:
print("error: can't update repo", file=sys.stderr)
sys.exit(1)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9e9c8f0..1c49b46 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -26,6 +26,7 @@
import sys
import tempfile
import time
+from typing import List, NamedTuple, Set
import urllib.error
import urllib.parse
import urllib.request
@@ -71,6 +72,58 @@
_BACKUP_OBJECTS = os.environ.get(REPO_BACKUP_OBJECTS) != '0'
+class _FetchOneResult(NamedTuple):
+ """_FetchOne return value.
+
+ Attributes:
+ success (bool): True if successful.
+ project (Project): The fetched project.
+ start (float): The starting time.time().
+ finish (float): The ending time.time().
+ remote_fetched (bool): True if the remote was actually queried.
+ """
+ success: bool
+ project: Project
+ start: float
+ finish: float
+ remote_fetched: bool
+
+
+class _FetchResult(NamedTuple):
+ """_Fetch return value.
+
+ Attributes:
+ success (bool): True if successful.
+ projects (Set[str]): The names of the git directories of fetched projects.
+ """
+ success: bool
+ projects: Set[str]
+
+
+class _FetchMainResult(NamedTuple):
+ """_FetchMain return value.
+
+ Attributes:
+ all_projects (List[Project]): The fetched projects.
+ """
+ all_projects: List[Project]
+
+
+class _CheckoutOneResult(NamedTuple):
+ """_CheckoutOne return value.
+
+ Attributes:
+ success (bool): True if successful.
+ project (Project): The project.
+ start (float): The starting time.time().
+ finish (float): The ending time.time().
+ """
+ success: bool
+ project: Project
+ start: float
+ finish: float
+
+
class Sync(Command, MirrorSafeCommand):
COMMON = True
MULTI_MANIFEST_SUPPORT = True
@@ -412,7 +465,7 @@
success = False
+ remote_fetched = False
buf = io.StringIO()
try:
- success = project.Sync_NetworkHalf(
+ sync_result = project.Sync_NetworkHalf(
quiet=opt.quiet,
verbose=opt.verbose,
output_redir=buf,
@@ -426,6 +479,7 @@
ssh_proxy=self.ssh_proxy,
clone_filter=project.manifest.CloneFilter,
partial_clone_exclude=project.manifest.PartialCloneExclude)
+ success = sync_result.success
+ remote_fetched = sync_result.remote_fetched
output = buf.getvalue()
if (opt.verbose or not success) and output:
@@ -443,7 +497,8 @@
raise
finish = time.time()
- return (success, project, start, finish)
+ return _FetchOneResult(success, project, start, finish,
+ remote_fetched)
@classmethod
def _FetchInitChild(cls, ssh_proxy):
@@ -454,6 +509,7 @@
jobs = opt.jobs_network
fetched = set()
+ remote_fetched = set()
pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
objdir_project_map = dict()
@@ -464,10 +520,16 @@
def _ProcessResults(results_sets):
ret = True
for results in results_sets:
- for (success, project, start, finish) in results:
+ for result in results:
+ success = result.success
+ project = result.project
+ start = result.start
+ finish = result.finish
self._fetch_times.Set(project, finish - start)
self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
start, finish, success)
+ if result.remote_fetched:
+ remote_fetched.add(project)
# Check for any errors before running any more tasks.
# ...we'll let existing jobs finish, though.
if not success:
@@ -525,7 +587,7 @@
if not self.outer_client.manifest.IsArchive:
self._GCProjects(projects, opt, err_event)
- return (ret, fetched)
+ return _FetchResult(ret, fetched)
def _FetchMain(self, opt, args, all_projects, err_event,
ssh_proxy, manifest):
@@ -551,7 +613,9 @@
to_fetch.extend(all_projects)
to_fetch.sort(key=self._fetch_times.Get, reverse=True)
- success, fetched = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+ result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+ success = result.success
+ fetched = result.projects
if not success:
err_event.set()
@@ -561,7 +625,7 @@
if err_event.is_set():
print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
sys.exit(1)
- return
+ return _FetchMainResult([])
# Iteratively fetch missing and/or nested unregistered submodules
previously_missing_set = set()
@@ -584,12 +648,14 @@
if previously_missing_set == missing_set:
break
previously_missing_set = missing_set
- success, new_fetched = self._Fetch(missing, opt, err_event, ssh_proxy)
+ result = self._Fetch(missing, opt, err_event, ssh_proxy)
+ success = result.success
+ new_fetched = result.projects
if not success:
err_event.set()
fetched.update(new_fetched)
- return all_projects
+ return _FetchMainResult(all_projects)
def _CheckoutOne(self, detach_head, force_sync, project):
"""Checkout work tree for one project
@@ -621,7 +687,7 @@
if not success:
print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
finish = time.time()
- return (success, project, start, finish)
+ return _CheckoutOneResult(success, project, start, finish)
def _Checkout(self, all_projects, opt, err_results):
"""Checkout projects listed in all_projects
@@ -636,7 +702,11 @@
def _ProcessResults(pool, pm, results):
ret = True
- for (success, project, start, finish) in results:
+ for result in results:
+ success = result.success
+ project = result.project
+ start = result.start
+ finish = result.finish
self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
start, finish, success)
# Check for any errors before running any more tasks.
@@ -1208,8 +1278,9 @@
with ssh.ProxyManager(manager) as ssh_proxy:
# Initialize the socket dir once in the parent.
ssh_proxy.sock()
- all_projects = self._FetchMain(opt, args, all_projects, err_event,
- ssh_proxy, manifest)
+ result = self._FetchMain(opt, args, all_projects, err_event,
+ ssh_proxy, manifest)
+ all_projects = result.all_projects
if opt.network_only:
return