Mirror of https://gerrit.googlesource.com/git-repo
Synced 2026-01-12 17:40:52 +00:00

Compare commits (23 commits)
Commits in this range (SHA1):

- 1afe96a7e9
- 2719a8e203
- e4872ac8ba
- 4623264809
- 67383bdba9
- d30414bb53
- 80d1a5ad3e
- c615c964fb
- 5ed12ec81d
- 58a59fdfbc
- 38d2fe11b9
- 854fe440f2
- d534a5537f
- a64149a7a7
- 3e6acf2778
- a6e1a59ac1
- 380bf9546e
- d9cc0a1526
- 8c3585f367
- 239fad7146
- d3eec0acdd
- 7f7d70efe4
- 720bd1e96b
.github/workflows/close-pull-request.yml (vendored, 2 lines changed)

```diff
@@ -18,5 +18,5 @@ jobs:
 Thanks for your contribution!
 Unfortunately, we don't use GitHub pull requests to manage code
 contributions to this repository.
-Instead, please see [README.md](../blob/HEAD/SUBMITTING_PATCHES.md)
+Instead, please see [README.md](../blob/HEAD/CONTRIBUTING.md)
 which provides full instructions on how to get involved.
```
```diff
@@ -1,4 +1,4 @@
-# Copyright 2023 The Android Open Source Project
+# Copyright (C) 2023 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
```
```diff
@@ -14,7 +14,7 @@ that you can put anywhere in your path.
 * Docs: <https://source.android.com/source/using-repo.html>
 * [repo Manifest Format](./docs/manifest-format.md)
 * [repo Hooks](./docs/repo-hooks.md)
-* [Submitting patches](./SUBMITTING_PATCHES.md)
+* [Contributing](./CONTRIBUTING.md)
 * Running Repo in [Microsoft Windows](./docs/windows.md)
 * GitHub mirror: <https://github.com/GerritCodeReview/git-repo>
 * Postsubmit tests: <https://github.com/GerritCodeReview/git-repo/actions>
```
```diff
@@ -1,4 +1,4 @@
-# Copyright 2021 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
```
git_ssh (1 line changed)

```diff
@@ -1,5 +1,4 @@
 #!/bin/sh
-#
 # Copyright (C) 2009 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
```
```diff
@@ -190,7 +190,8 @@ class Superproject:
         message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
         if self._print_messages:
             print(message, file=sys.stderr)
-        self._git_event_log.ErrorEvent(message, fmt)
+        if self._git_event_log:
+            self._git_event_log.ErrorEvent(message, fmt)

     def _LogMessagePrefix(self):
         """Returns the prefix string to be logged in each log message"""
```
```diff
@@ -1,3 +1,19 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 """Event logging in the git trace2 EVENT format."""

 from git_command import GetEventTargetPath
 from git_command import RepoSourceVersion
 from git_trace2_event_log_base import BaseEventLog
```
hooks.py (6 lines changed)

```diff
@@ -101,12 +101,11 @@ class RepoHook:
         self._abort_if_user_denies = abort_if_user_denies

         # Store the full path to the script for convenience.
-        if self._hooks_project:
+        self._script_fullpath = None
+        if self._hooks_project and self._hooks_project.worktree:
             self._script_fullpath = os.path.join(
                 self._hooks_project.worktree, self._hook_type + ".py"
             )
-        else:
-            self._script_fullpath = None

     def _GetHash(self):
         """Return a hash of the contents of the hooks directory.
```
```diff
@@ -443,6 +442,7 @@ class RepoHook:
         if (
             self._bypass_hooks
             or not self._hooks_project
+            or not self._script_fullpath
             or self._hook_type not in self._hooks_project.enabled_repo_hooks
         ):
             return True
```
main.py (1 line changed)

```diff
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-#
 # Copyright (C) 2008 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
```
repo-smartsync(1) man page (regenerated by help2man)

```diff
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "June 2025" "repo smartsync" "Repo Manual"
+.TH REPO "1" "August 2025" "repo smartsync" "Repo Manual"
 .SH NAME
 repo \- repo smartsync - manual page for repo smartsync
 .SH SYNOPSIS
@@ -20,12 +20,11 @@ number of CPU cores)
 .TP
 \fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
 number of network jobs to run in parallel (defaults to
-\fB\-\-jobs\fR or 1). Ignored when \fB\-\-interleaved\fR is set
+\fB\-\-jobs\fR or 1). Ignored unless \fB\-\-no\-interleaved\fR is set
 .TP
 \fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
 number of local checkout jobs to run in parallel
-(defaults to \fB\-\-jobs\fR or 8). Ignored when \fB\-\-interleaved\fR
-is set
+(defaults to \fB\-\-jobs\fR or 8). Ignored unless \fB\-\-no\-interleaved\fR is set
 .TP
 \fB\-f\fR, \fB\-\-force\-broken\fR
 obsolete option (to be deleted in the future)
@@ -60,7 +59,10 @@ use the existing manifest checkout as\-is. (do not
 update to the latest revision)
 .TP
 \fB\-\-interleaved\fR
-fetch and checkout projects in parallel (experimental)
+fetch and checkout projects in parallel (default)
+.TP
+\fB\-\-no\-interleaved\fR
+fetch and checkout projects in phases
 .TP
 \fB\-n\fR, \fB\-\-network\-only\fR
 fetch only, don't update working tree
@@ -149,6 +151,16 @@ operate on this manifest and its submanifests
 .TP
 \fB\-\-no\-repo\-verify\fR
 do not verify repo source code
+.SS post\-sync hooks:
+.TP
+\fB\-\-no\-verify\fR
+Do not run the post\-sync hook.
+.TP
+\fB\-\-verify\fR
+Run the post\-sync hook without prompting.
+.TP
+\fB\-\-ignore\-hooks\fR
+Do not abort if post\-sync hooks fail.
 .PP
 Run `repo help smartsync` to view the detailed manual.
 .SH DETAILS
```
repo-sync(1) man page (regenerated by help2man)

```diff
@@ -1,5 +1,5 @@
 .\" DO NOT MODIFY THIS FILE! It was generated by help2man.
-.TH REPO "1" "June 2025" "repo sync" "Repo Manual"
+.TH REPO "1" "August 2025" "repo sync" "Repo Manual"
 .SH NAME
 repo \- repo sync - manual page for repo sync
 .SH SYNOPSIS
@@ -20,12 +20,11 @@ number of CPU cores)
 .TP
 \fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
 number of network jobs to run in parallel (defaults to
-\fB\-\-jobs\fR or 1). Ignored when \fB\-\-interleaved\fR is set
+\fB\-\-jobs\fR or 1). Ignored unless \fB\-\-no\-interleaved\fR is set
 .TP
 \fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
 number of local checkout jobs to run in parallel
-(defaults to \fB\-\-jobs\fR or 8). Ignored when \fB\-\-interleaved\fR
-is set
+(defaults to \fB\-\-jobs\fR or 8). Ignored unless \fB\-\-no\-interleaved\fR is set
 .TP
 \fB\-f\fR, \fB\-\-force\-broken\fR
 obsolete option (to be deleted in the future)
@@ -60,7 +59,10 @@ use the existing manifest checkout as\-is. (do not
 update to the latest revision)
 .TP
 \fB\-\-interleaved\fR
-fetch and checkout projects in parallel (experimental)
+fetch and checkout projects in parallel (default)
+.TP
+\fB\-\-no\-interleaved\fR
+fetch and checkout projects in phases
 .TP
 \fB\-n\fR, \fB\-\-network\-only\fR
 fetch only, don't update working tree
@@ -156,6 +158,16 @@ operate on this manifest and its submanifests
 .TP
 \fB\-\-no\-repo\-verify\fR
 do not verify repo source code
+.SS post\-sync hooks:
+.TP
+\fB\-\-no\-verify\fR
+Do not run the post\-sync hook.
+.TP
+\fB\-\-verify\fR
+Run the post\-sync hook without prompting.
+.TP
+\fB\-\-ignore\-hooks\fR
+Do not abort if post\-sync hooks fail.
 .PP
 Run `repo help sync` to view the detailed manual.
 .SH DETAILS
```
progress.py (17 lines changed)

```diff
@@ -25,7 +25,10 @@ except ImportError:
 from repo_trace import IsTraceToStderr


-_TTY = sys.stderr.isatty()
+# Capture the original stderr stream. We use this exclusively for progress
+# updates to ensure we talk to the terminal even if stderr is redirected.
+_STDERR = sys.stderr
+_TTY = _STDERR.isatty()

 # This will erase all content in the current line (wherever the cursor is).
 # It does not move the cursor, so this is usually followed by \r to move to
@@ -133,11 +136,11 @@ class Progress:
     def _write(self, s):
         s = "\r" + s
         if self._elide:
-            col = os.get_terminal_size(sys.stderr.fileno()).columns
+            col = os.get_terminal_size(_STDERR.fileno()).columns
             if len(s) > col:
                 s = s[: col - 1] + ".."
-        sys.stderr.write(s)
-        sys.stderr.flush()
+        _STDERR.write(s)
+        _STDERR.flush()

     def start(self, name):
         self._active += 1
@@ -211,9 +214,9 @@ class Progress:

         # Erase the current line, print the message with a newline,
         # and then immediately redraw the progress bar on the new line.
-        sys.stderr.write("\r" + CSI_ERASE_LINE)
-        sys.stderr.write(msg + "\n")
-        sys.stderr.flush()
+        _STDERR.write("\r" + CSI_ERASE_LINE)
+        _STDERR.write(msg + "\n")
+        _STDERR.flush()
         self.update(inc=0)

     def end(self):
```
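The progress.py change pins the real terminal stream at import time so that later redirection of `sys.stderr` cannot swallow progress drawing. A minimal standalone sketch of the pattern (not repo's actual module):

```python
import contextlib
import io
import sys

# Bind the real stderr once at import time, before any redirection.
_STDERR = sys.stderr


def show_progress(msg: str) -> None:
    """Write a progress update to the original terminal stream."""
    _STDERR.write("\r" + msg)
    _STDERR.flush()


capture = io.StringIO()
with contextlib.redirect_stderr(capture):
    print("diagnostic text", file=sys.stderr)  # captured by the buffer
    show_progress("Syncing 3/10 ...")          # still reaches the terminal

assert "diagnostic" in capture.getvalue()
```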
project.py (143 lines changed)

```diff
@@ -642,10 +642,6 @@ class Project:
         # project containing repo hooks.
         self.enabled_repo_hooks = []

-        # This will be updated later if the project has submodules and
-        # if they will be synced.
-        self.has_subprojects = False
-
     def RelPath(self, local=True):
         """Return the path for the project relative to a manifest.
```
```diff
@@ -1539,18 +1535,14 @@
         force_checkout=False,
         force_rebase=False,
         submodules=False,
         errors=None,
         verbose=False,
     ):
         """Perform only the local IO portion of the sync process.

         Network access is not required.
         """
         if errors is None:
             errors = []

         def fail(error: Exception):
             errors.append(error)
             syncbuf.fail(self, error)

         if not os.path.exists(self.gitdir):
@@ -1567,8 +1559,8 @@
         # TODO(https://git-scm.com/docs/git-worktree#_bugs): Re-evaluate if
         # submodules can be init when using worktrees once its support is
         # complete.
-        if self.has_subprojects and not self.use_git_worktrees:
-            self._InitSubmodules()
+        if self.parent and not self.use_git_worktrees:
+            self._InitSubmodule()
         all_refs = self.bare_ref.all
         self.CleanPublishedCache(all_refs)
         revid = self.GetRevisionId(all_refs)
@@ -1597,6 +1589,9 @@
             self._FastForward(revid)
             self._CopyAndLinkFiles()

+        def _dorebase():
+            self._Rebase(upstream="@{upstream}")
+
         def _dosubmodules():
             self._SyncSubmodules(quiet=True)
```
```diff
@@ -1688,19 +1683,24 @@
         if pub:
             not_merged = self._revlist(not_rev(revid), pub)
             if not_merged:
-                if upstream_gain and not force_rebase:
-                    # The user has published this branch and some of those
-                    # commits are not yet merged upstream. We do not want
-                    # to rewrite the published commits so we punt.
-                    fail(
-                        LocalSyncFail(
-                            "branch %s is published (but not merged) and is "
-                            "now %d commits behind. Fix this manually or rerun "
-                            "with the --rebase option to force a rebase."
-                            % (branch.name, len(upstream_gain)),
-                            project=self.name,
-                        )
-                    )
+                if upstream_gain:
+                    if force_rebase:
+                        # Try to rebase local published but not merged changes
+                        # on top of the upstream changes.
+                        syncbuf.later1(self, _dorebase, not verbose)
+                    else:
+                        # The user has published this branch and some of those
+                        # commits are not yet merged upstream. We do not want
+                        # to rewrite the published commits so we punt.
+                        fail(
+                            LocalSyncFail(
+                                "branch %s is published (but not merged) and "
+                                "is now %d commits behind. Fix this manually "
+                                "or rerun with the --rebase option to force a "
+                                "rebase." % (branch.name, len(upstream_gain)),
+                                project=self.name,
+                            )
+                        )
                     return
             syncbuf.later1(self, _doff, not verbose)
             return
```
```diff
@@ -2363,8 +2363,6 @@
             )
             result.append(subproject)
             result.extend(subproject.GetDerivedSubprojects())
-        if result:
-            self.has_subprojects = True
         return result

     def EnableRepositoryExtension(self, key, value="true", version=1):
```
```diff
@@ -2415,7 +2413,9 @@
         # throws an error.
         revs = [f"{self.revisionExpr}^0"]
         upstream_rev = None
-        if self.upstream:
+
+        # Only check upstream when using superproject.
+        if self.upstream and self.manifest.manifestProject.use_superproject:
             upstream_rev = self.GetRemote().ToLocal(self.upstream)
             revs.append(upstream_rev)
@@ -2427,7 +2427,9 @@
             log_as_error=False,
         )

-        if self.upstream:
+        # Only verify upstream relationship for superproject scenarios
+        # without affecting plain usage.
+        if self.upstream and self.manifest.manifestProject.use_superproject:
             self.bare_git.merge_base(
                 "--is-ancestor",
                 self.revisionExpr,
```
```diff
@@ -3030,16 +3032,39 @@
                 project=self.name,
             )

-    def _InitSubmodules(self, quiet=True):
-        """Initialize the submodules for the project."""
+    def _InitSubmodule(self, quiet=True):
+        """Initialize the submodule."""
         cmd = ["submodule", "init"]
         if quiet:
             cmd.append("-q")
-        if GitCommand(self, cmd).Wait() != 0:
-            raise GitError(
-                f"{self.name} submodule init",
-                project=self.name,
-            )
+        cmd.extend(["--", self.worktree])
+        max_retries = 3
+        base_delay_secs = 1
+        jitter_ratio = 1 / 3
+        for attempt in range(max_retries):
+            git_cmd = GitCommand(
+                None,
+                cmd,
+                cwd=self.parent.worktree,
+                capture_stdout=True,
+                capture_stderr=True,
+            )
+            if git_cmd.Wait() == 0:
+                return
+            error = git_cmd.stderr or git_cmd.stdout
+            if "lock" in error:
+                delay = base_delay_secs * (2**attempt)
+                delay += random.uniform(0, delay * jitter_ratio)
+                logger.warning(
+                    f"Attempt {attempt+1}/{max_retries}: "
+                    + f"git {' '.join(cmd)} failed."
+                    + f" Error: {error}."
+                    + f" Sleeping {delay:.2f}s before retrying."
+                )
+                time.sleep(delay)
+            else:
+                break
+        git_cmd.VerifyCommand()

     def _Rebase(self, upstream, onto=None):
         cmd = ["rebase"]
```
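The retry loop added to `_InitSubmodule` is a standard exponential-backoff-with-jitter pattern that retries only on lock contention. A standalone sketch, with `run_once` as a hypothetical stand-in for the GitCommand invocation:

```python
import random
import time


def retry_with_backoff(run_once, max_retries=3, base_delay_secs=1, jitter_ratio=1 / 3):
    """Retry a flaky operation; back off exponentially with jitter between tries.

    run_once is a hypothetical stand-in returning (ok, error_text).
    """
    for attempt in range(max_retries):
        ok, error = run_once()
        if ok:
            return True
        if "lock" not in error:
            break  # Only lock contention is treated as transient.
        delay = base_delay_secs * (2**attempt)
        delay += random.uniform(0, delay * jitter_ratio)  # De-synchronize retriers.
        time.sleep(delay)
    return False
```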
```diff
@@ -3835,10 +3860,35 @@
     def GetHead(self):
         """Return the ref that HEAD points to."""
         try:
-            return self.rev_parse("--symbolic-full-name", HEAD)
+            symbolic_head = self.rev_parse("--symbolic-full-name", HEAD)
+            if symbolic_head == HEAD:
+                # Detached HEAD. Return the commit SHA instead.
+                return self.rev_parse(HEAD)
+            return symbolic_head
         except GitError as e:
+            logger.warning(
+                "project %s: unparseable HEAD; trying to recover.\n"
+                "Check that HEAD ref in .git/HEAD is valid. The error "
+                "was: %s",
+                self._project.RelPath(local=False),
+                e,
+            )
+
+            # Fallback to direct file reading for compatibility with broken
+            # repos, e.g. if HEAD points to an unborn branch.
             path = self.GetDotgitPath(subpath=HEAD)
-            raise NoManifestException(path, str(e))
+            try:
+                with open(path) as fd:
+                    line = fd.readline()
+            except OSError:
+                raise NoManifestException(path, str(e))
+            try:
+                line = line.decode()
+            except AttributeError:
+                pass
+            if line.startswith("ref: "):
+                return line[5:-1]
+            return line[:-1]

     def SetHead(self, ref, message=None):
         cmdv = []
```
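The detached-HEAD branch added to `GetHead` relies on a git behavior: `git rev-parse --symbolic-full-name HEAD` prints the literal string `HEAD` when no branch is checked out. A standalone illustration using plain subprocess calls (not repo's wrapper):

```python
import subprocess


def head_ref_or_sha(gitdir="."):
    """Return the ref HEAD points at, or the commit SHA when detached."""
    out = subprocess.run(
        ["git", "rev-parse", "--symbolic-full-name", "HEAD"],
        cwd=gitdir, capture_output=True, text=True, check=True,
    ).stdout.strip()
    if out == "HEAD":  # detached: no symbolic name exists
        out = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            cwd=gitdir, capture_output=True, text=True, check=True,
        ).stdout.strip()
    return out
```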
```diff
@@ -4002,7 +4052,8 @@ class _Later:
                 if not self.quiet:
                     out.nl()
                 return True
-            except GitError:
+            except GitError as e:
+                syncbuf.fail(self.project, e)
                 out.nl()
                 return False
```
```diff
@@ -4018,7 +4069,12 @@ class _SyncColoring(Coloring):
 class SyncBuffer:
     def __init__(self, config, detach_head=False):
         self._messages = []
-        self._failures = []
+
+        # Failures that have not yet been printed. Cleared after printing.
+        self._pending_failures = []
+        # A persistent record of all failures during the buffer's lifetime.
+        self._all_failures = []

         self._later_queue1 = []
         self._later_queue2 = []
@@ -4033,7 +4089,9 @@ class SyncBuffer:
         self._messages.append(_InfoMessage(project, fmt % args))

     def fail(self, project, err=None):
-        self._failures.append(_Failure(project, err))
+        failure = _Failure(project, err)
+        self._pending_failures.append(failure)
+        self._all_failures.append(failure)
         self._MarkUnclean()

     def later1(self, project, what, quiet):
```
```diff
@@ -4053,6 +4111,11 @@ class SyncBuffer:
         self.recent_clean = True
         return recent_clean

+    @property
+    def errors(self):
+        """Returns a list of all exceptions accumulated in the buffer."""
+        return [f.why for f in self._all_failures if f.why]
+
     def _MarkUnclean(self):
         self.clean = False
         self.recent_clean = False
```
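SyncBuffer now splits failures into a print-once pending list and a persistent record exposed through the `errors` property. A minimal standalone sketch of that split (simplified, not the real class):

```python
class FailureBuffer:
    """Simplified stand-in for SyncBuffer's failure tracking."""

    def __init__(self):
        self._pending = []  # cleared each time messages are printed
        self._all = []      # survives for the buffer's lifetime

    def fail(self, err):
        self._pending.append(err)
        self._all.append(err)

    def print_messages(self):
        for err in self._pending:
            print(err)
        self._pending = []  # pending cleared; history kept

    @property
    def errors(self):
        return list(self._all)
```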
```diff
@@ -4071,18 +4134,18 @@ class SyncBuffer:
             return True

     def _PrintMessages(self):
-        if self._messages or self._failures:
+        if self._messages or self._pending_failures:
             if os.isatty(2):
                 self.out.write(progress.CSI_ERASE_LINE)
                 self.out.write("\r")

         for m in self._messages:
             m.Print(self)
-        for m in self._failures:
+        for m in self._pending_failures:
             m.Print(self)

         self._messages = []
-        self._failures = []
+        self._pending_failures = []


 class MetaProject(Project):
```
```diff
@@ -1,4 +1,4 @@
-# Copyright 2023 The Android Open Source Project
+# Copyright (C) 2023 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
```
release/check-metadata.py (new executable file, 152 lines added)

```python
#!/usr/bin/env python3
# Copyright (C) 2025 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Helper tool to check various metadata (e.g. licensing) in source files."""

import argparse
from pathlib import Path
import re
import sys

import util


_FILE_HEADER_RE = re.compile(
    r"""# Copyright \(C\) 20[0-9]{2} The Android Open Source Project
#
# Licensed under the Apache License, Version 2\.0 \(the "License"\);
# you may not use this file except in compliance with the License\.
# You may obtain a copy of the License at
#
# http://www\.apache\.org/licenses/LICENSE-2\.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.
# See the License for the specific language governing permissions and
# limitations under the License\.
"""
)


def check_license(path: Path, lines: list[str]) -> bool:
    """Check license header."""
    # Enforce licensing on configs & scripts.
    if not (
        path.suffix in (".bash", ".cfg", ".ini", ".py", ".toml")
        or lines[0] in ("#!/bin/bash", "#!/bin/sh", "#!/usr/bin/env python3")
    ):
        return True

    # Extract the file header.
    header_lines = []
    for line in lines:
        if line.startswith("#"):
            header_lines.append(line)
        else:
            break
    if not header_lines:
        print(
            f"error: {path.relative_to(util.TOPDIR)}: "
            "missing file header (copyright+licensing)",
            file=sys.stderr,
        )
        return False

    # Skip the shebang.
    if header_lines[0].startswith("#!"):
        header_lines.pop(0)

    # If this file is imported into the tree, then leave it be.
    if header_lines[0] == "# DO NOT EDIT THIS FILE":
        return True

    header = "".join(f"{x}\n" for x in header_lines)
    if not _FILE_HEADER_RE.match(header):
        print(
            f"error: {path.relative_to(util.TOPDIR)}: "
            "file header incorrectly formatted",
            file=sys.stderr,
        )
        print(
            "".join(f"> {x}\n" for x in header_lines), end="", file=sys.stderr
        )
        return False

    return True


def check_path(opts: argparse.Namespace, path: Path) -> bool:
    """Check a single path."""
    data = path.read_text(encoding="utf-8")
    lines = data.splitlines()
    # NB: Use list comprehension and not a generator so we run all the checks.
    return all(
        [
            check_license(path, lines),
        ]
    )


def check_paths(opts: argparse.Namespace, paths: list[Path]) -> bool:
    """Check all the paths."""
    # NB: Use list comprehension and not a generator so we check all paths.
    return all([check_path(opts, x) for x in paths])


def find_files(opts: argparse.Namespace) -> list[Path]:
    """Find all the files in the source tree."""
    result = util.run(
        opts,
        ["git", "ls-tree", "-r", "-z", "--name-only", "HEAD"],
        cwd=util.TOPDIR,
        capture_output=True,
        encoding="utf-8",
    )
    return [util.TOPDIR / x for x in result.stdout.split("\0")[:-1]]


def get_parser() -> argparse.ArgumentParser:
    """Get a CLI parser."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-n",
        "--dry-run",
        dest="dryrun",
        action="store_true",
        help="show everything that would be done",
    )
    parser.add_argument(
        "paths",
        nargs="*",
        help="the paths to scan",
    )
    return parser


def main(argv: list[str]) -> int:
    """The main func!"""
    parser = get_parser()
    opts = parser.parse_args(argv)

    paths = opts.paths
    if not opts.paths:
        paths = find_files(opts)

    return 0 if check_paths(opts, paths) else 1


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
```
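With no path arguments, the tool scans every file tracked by `git ls-tree` at HEAD. `check_license` works by slicing off the leading comment block, discarding a shebang, and matching the Apache header text verbatim. A quick standalone illustration of just the extraction step (the sample lines are hypothetical):

```python
# Hypothetical sample file contents.
lines = [
    "#!/usr/bin/env python3",
    "# Copyright (C) 2025 The Android Open Source Project",
    "import sys",
]

# Collect the leading '#' comment block.
header_lines = []
for line in lines:
    if line.startswith("#"):
        header_lines.append(line)
    else:
        break

# Drop the shebang so only the copyright/license text remains.
if header_lines and header_lines[0].startswith("#!"):
    header_lines.pop(0)

assert header_lines == ["# Copyright (C) 2025 The Android Open Source Project"]
```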
release/util.py

```diff
@@ -14,7 +14,7 @@

 """Random utility code for release tools."""

-import os
+from pathlib import Path
 import re
 import shlex
 import subprocess
@@ -24,8 +24,9 @@ import sys
 assert sys.version_info >= (3, 6), "This module requires Python 3.6+"


-TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-HOMEDIR = os.path.expanduser("~")
+THIS_FILE = Path(__file__).resolve()
+TOPDIR = THIS_FILE.parent.parent
+HOMEDIR = Path("~").expanduser()


 # These are the release keys we sign with.
@@ -54,7 +55,7 @@ def run(opts, cmd, check=True, **kwargs):
 def import_release_key(opts):
     """Import the public key of the official release repo signing key."""
     # Extract the key from our repo launcher.
-    launcher = getattr(opts, "launcher", os.path.join(TOPDIR, "repo"))
+    launcher = getattr(opts, "launcher", TOPDIR / "repo")
     print(f'Importing keys from "{launcher}" launcher script')
     with open(launcher, encoding="utf-8") as fp:
         data = fp.read()
```
repo (1 line changed)

```diff
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-#
 # Copyright (C) 2008 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
```
run_tests (38 lines changed)

```diff
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2019 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@

 import functools
 import os
+import shlex
 import shutil
 import subprocess
 import sys
@@ -26,6 +27,11 @@ from typing import List
 ROOT_DIR = os.path.dirname(os.path.realpath(__file__))


+def log_cmd(cmd: str, argv: list[str]) -> None:
+    """Log a debug message to make history easier to track."""
+    print("+", cmd, shlex.join(argv), file=sys.stderr)
+
+
 @functools.lru_cache()
 def is_ci() -> bool:
     """Whether we're running in our CI system."""
@@ -37,6 +43,7 @@ def run_pytest(argv: List[str]) -> int:
     if is_ci():
         argv = ["-m", "not skip_cq"] + argv

+    log_cmd("pytest", argv)
     return subprocess.run(
         [sys.executable, "-m", "pytest"] + argv,
         check=False,
@@ -49,6 +56,7 @@ def run_pytest_py38(argv: List[str]) -> int:
     if is_ci():
         argv = ["-m", "not skip_cq"] + argv

+    log_cmd("[vpython 3.8] pytest", argv)
     try:
         return subprocess.run(
             [
@@ -77,8 +85,10 @@ def run_black():
         "release/update-hooks",
         "release/update-manpages",
     ]
+    argv = ["--diff", "--check", ROOT_DIR] + extra_programs
+    log_cmd("black", argv)
     return subprocess.run(
-        [sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs,
+        [sys.executable, "-m", "black"] + argv,
         check=False,
         cwd=ROOT_DIR,
     ).returncode
@@ -86,8 +96,10 @@

 def run_flake8():
     """Returns the exit code from flake8."""
+    argv = [ROOT_DIR]
+    log_cmd("flake8", argv)
     return subprocess.run(
-        [sys.executable, "-m", "flake8", ROOT_DIR],
+        [sys.executable, "-m", "flake8"] + argv,
         check=False,
         cwd=ROOT_DIR,
     ).returncode
@@ -95,8 +107,21 @@

 def run_isort():
     """Returns the exit code from isort."""
+    argv = ["--check", ROOT_DIR]
+    log_cmd("isort", argv)
     return subprocess.run(
-        [sys.executable, "-m", "isort", "--check", ROOT_DIR],
+        [sys.executable, "-m", "isort"] + argv,
         check=False,
         cwd=ROOT_DIR,
     ).returncode
+
+
+def run_check_metadata():
+    """Returns the exit code from check-metadata."""
+    argv = []
+    log_cmd("release/check-metadata.py", argv)
+    return subprocess.run(
+        [sys.executable, "release/check-metadata.py"] + argv,
+        check=False,
+        cwd=ROOT_DIR,
+    ).returncode
@@ -109,8 +134,10 @@ def run_update_manpages() -> int:
         print("update-manpages: help2man not found; skipping test")
         return 0

+    argv = ["--check"]
+    log_cmd("release/update-manpages", argv)
     return subprocess.run(
-        [sys.executable, "release/update-manpages", "--check"],
+        [sys.executable, "release/update-manpages"] + argv,
         check=False,
         cwd=ROOT_DIR,
     ).returncode
@@ -124,6 +151,7 @@ def main(argv):
         run_black,
         run_flake8,
         run_isort,
+        run_check_metadata,
         run_update_manpages,
     )
     # Run all the tests all the time to get full feedback. Don't exit on the
```
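`log_cmd` uses `shlex.join` so each logged command line is copy-pasteable into a shell. For example, reproduced standalone:

```python
import shlex
import sys


def log_cmd(cmd: str, argv: list) -> None:
    """Same logging helper as above, reproduced standalone."""
    print("+", cmd, shlex.join(argv), file=sys.stderr)


log_cmd("pytest", ["-m", "not skip_cq"])
# Prints to stderr: + pytest -m 'not skip_cq'
```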
setup.py (4 lines changed)

```diff
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
-# Copyright 2019 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
 #
-# Licensed under the Apache License, Version 2.0 (the 'License");
+# Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
```
```diff
@@ -133,7 +133,7 @@ without iterating through the remaining projects.

     @staticmethod
     def _cmd_option(option, _opt_str, _value, parser):
-        setattr(parser.values, option.dest or "command", list(parser.rargs))
+        setattr(parser.values, option.dest, list(parser.rargs))
         while parser.rargs:
             del parser.rargs[0]

@@ -161,6 +161,7 @@ without iterating through the remaining projects.
         p.add_option(
             "-c",
             "--command",
+            dest="command",
             help="command (and arguments) to execute",
             action="callback",
             callback=self._cmd_option,
```
subcmds/sync.py (453 lines changed)

```diff
@@ -204,14 +204,13 @@ class _SyncResult(NamedTuple):
         relpath (str): The project's relative path from the repo client top.
         remote_fetched (bool): True if the remote was actually queried.
         fetch_success (bool): True if the fetch operation was successful.
-        fetch_error (Optional[Exception]): The Exception from a failed fetch,
-            or None.
+        fetch_errors (List[Exception]): The Exceptions from a failed fetch.
         fetch_start (Optional[float]): The time.time() when fetch started.
         fetch_finish (Optional[float]): The time.time() when fetch finished.
         checkout_success (bool): True if the checkout operation was
             successful.
-        checkout_error (Optional[Exception]): The Exception from a failed
-            checkout, or None.
+        checkout_errors (List[Exception]): The Exceptions from a failed
+            checkout.
         checkout_start (Optional[float]): The time.time() when checkout
             started.
         checkout_finish (Optional[float]): The time.time() when checkout
@@ -224,12 +223,12 @@ class _SyncResult(NamedTuple):

     remote_fetched: bool
     fetch_success: bool
-    fetch_error: Optional[Exception]
+    fetch_errors: List[Exception]
     fetch_start: Optional[float]
     fetch_finish: Optional[float]

     checkout_success: bool
-    checkout_error: Optional[Exception]
+    checkout_errors: List[Exception]
     checkout_start: Optional[float]
     checkout_finish: Optional[float]
```
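The `_SyncResult` change switches each phase from a single optional error to a list, so one sync pass can surface every failure rather than only the last. A simplified sketch of the aggregation style used downstream:

```python
from typing import List, NamedTuple


class StepResult(NamedTuple):
    """Simplified stand-in for _SyncResult's per-phase error lists."""

    success: bool
    errors: List[Exception]


def aggregate(results: List[StepResult]) -> List[Exception]:
    """Collect every error with extend(), matching the new list-based fields."""
    all_errors: List[Exception] = []
    for r in results:
        all_errors.extend(r.errors)
    return all_errors
```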
```diff
@@ -976,9 +975,6 @@ later is required to fix a server side protocol bug.
             sync_event.set()
             sync_progress_thread.join()

-        self._fetch_times.Save()
-        self._local_sync_state.Save()
-
         if not self.outer_client.manifest.IsArchive:
             self._GCProjects(projects, opt, err_event)
```
```diff
@@ -1004,53 +1000,58 @@ later is required to fix a server side protocol bug.
         to_fetch.extend(all_projects)
         to_fetch.sort(key=self._fetch_times.Get, reverse=True)

-        result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors)
-        success = result.success
-        fetched = result.projects
-        if not success:
-            err_event.set()
-
-        if opt.network_only:
-            # Bail out now; the rest touches the working tree.
-            if err_event.is_set():
-                e = SyncError(
-                    "error: Exited sync due to fetch errors.",
-                    aggregate_errors=errors,
-                )
-
-                logger.error(e)
-                raise e
-            return _FetchMainResult([])
-
-        # Iteratively fetch missing and/or nested unregistered submodules.
-        previously_missing_set = set()
-        while True:
-            self._ReloadManifest(None, manifest)
-            all_projects = self.GetProjects(
-                args,
-                missing_ok=True,
-                submodules_ok=opt.fetch_submodules,
-                manifest=manifest,
-                all_manifests=not opt.this_manifest_only,
-            )
-            missing = []
-            for project in all_projects:
-                if project.gitdir not in fetched:
-                    missing.append(project)
-            if not missing:
-                break
-            # Stop us from non-stopped fetching actually-missing repos: If set
-            # of missing repos has not been changed from last fetch, we break.
-            missing_set = {p.name for p in missing}
-            if previously_missing_set == missing_set:
-                break
-            previously_missing_set = missing_set
-            result = self._Fetch(missing, opt, err_event, ssh_proxy, errors)
-            success = result.success
-            new_fetched = result.projects
-            if not success:
-                err_event.set()
-            fetched.update(new_fetched)
+        try:
+            result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors)
+            success = result.success
+            fetched = result.projects
+            if not success:
+                err_event.set()
+
+            if opt.network_only:
+                # Bail out now; the rest touches the working tree.
+                if err_event.is_set():
+                    e = SyncError(
+                        "error: Exited sync due to fetch errors.",
+                        aggregate_errors=errors,
+                    )
+
+                    logger.error(e)
+                    raise e
+                return _FetchMainResult([])
+
+            # Iteratively fetch missing and/or nested unregistered submodules.
+            previously_missing_set = set()
+            while True:
+                self._ReloadManifest(None, manifest)
+                all_projects = self.GetProjects(
+                    args,
+                    missing_ok=True,
+                    submodules_ok=opt.fetch_submodules,
+                    manifest=manifest,
+                    all_manifests=not opt.this_manifest_only,
+                )
+                missing = []
+                for project in all_projects:
+                    if project.gitdir not in fetched:
+                        missing.append(project)
+                if not missing:
+                    break
+                # Stop us from non-stopped fetching actually-missing repos: If
+                # set of missing repos has not been changed from last fetch, we
+                # break.
+                missing_set = {p.name for p in missing}
+                if previously_missing_set == missing_set:
+                    break
+                previously_missing_set = missing_set
+                result = self._Fetch(missing, opt, err_event, ssh_proxy, errors)
+                success = result.success
+                new_fetched = result.projects
+                if not success:
+                    err_event.set()
+                fetched.update(new_fetched)
+        finally:
+            self._fetch_times.Save()
+            self._local_sync_state.Save()

         return _FetchMainResult(all_projects)
```
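The restructuring above moves state persistence into a `finally` block so fetch timing data survives partial failures. A minimal sketch of the pattern, with `fetch_times` and `do_fetch` as hypothetical stand-ins for repo's `_FetchTimes` object and `_Fetch` step:

```python
def fetch_all(projects, fetch_times, do_fetch):
    """Run fetches but always persist timing state, even on failure."""
    try:
        for project in projects:
            do_fetch(project)  # may raise part-way through
    finally:
        fetch_times.save()  # timings recorded so far still get written
```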
```diff
@@ -1092,10 +1093,10 @@ later is required to fix a server side protocol bug.
                     force_sync=force_sync,
                     force_checkout=force_checkout,
                     force_rebase=force_rebase,
-                    errors=errors,
                     verbose=verbose,
                 )
                 success = syncbuf.Finish()
+                errors.extend(syncbuf.errors)
             except KeyboardInterrupt:
                 logger.error("Keyboard interrupt while processing %s", project.name)
             except GitError as e:
@@ -1753,10 +1754,10 @@ later is required to fix a server side protocol bug.
             mp.Sync_LocalHalf(
                 syncbuf,
                 submodules=mp.manifest.HasSubmodules,
-                errors=errors,
                 verbose=opt.verbose,
             )
             clean = syncbuf.Finish()
+            errors.extend(syncbuf.errors)
             self.event_log.AddSync(
                 mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
             )
```
```diff
@@ -2210,7 +2211,7 @@ later is required to fix a server side protocol bug.
         """Syncs a single project for interleaved sync."""
         fetch_success = False
         remote_fetched = False
-        fetch_error = None
+        fetch_errors = []
         fetch_start = None
         fetch_finish = None
         network_output = ""
```
```diff
@@ -2243,16 +2244,17 @@ later is required to fix a server side protocol bug.
                 )
                 fetch_success = sync_result.success
                 remote_fetched = sync_result.remote_fetched
-                fetch_error = sync_result.error
+                if sync_result.error:
+                    fetch_errors.append(sync_result.error)
             except KeyboardInterrupt:
                 logger.error(
                     "Keyboard interrupt while processing %s", project.name
                 )
             except GitError as e:
-                fetch_error = e
+                fetch_errors.append(e)
                 logger.error("error.GitError: Cannot fetch %s", e)
             except Exception as e:
-                fetch_error = e
+                fetch_errors.append(e)
                 logger.error(
                     "error: Cannot fetch %s (%s: %s)",
                     project.name,
```
```diff
@@ -2264,56 +2266,58 @@ later is required to fix a server side protocol bug.
             network_output = network_output_capture.getvalue()

         checkout_success = False
-        checkout_error = None
+        checkout_errors = []
         checkout_start = None
         checkout_finish = None
         checkout_stderr = ""

-        if fetch_success and not opt.network_only:
-            checkout_start = time.time()
-            stderr_capture = io.StringIO()
-            try:
-                with contextlib.redirect_stderr(stderr_capture):
-                    syncbuf = SyncBuffer(
-                        project.manifest.manifestProject.config,
-                        detach_head=opt.detach_head,
-                    )
-                    local_half_errors = []
-                    project.Sync_LocalHalf(
-                        syncbuf,
-                        force_sync=opt.force_sync,
-                        force_checkout=opt.force_checkout,
-                        force_rebase=opt.rebase,
-                        errors=local_half_errors,
-                        verbose=opt.verbose,
-                    )
-                    checkout_success = syncbuf.Finish()
-                    if local_half_errors:
-                        checkout_error = SyncError(
-                            aggregate_errors=local_half_errors
-                        )
-            except KeyboardInterrupt:
-                logger.error(
-                    "Keyboard interrupt while processing %s", project.name
-                )
-            except GitError as e:
-                checkout_error = e
-                logger.error(
-                    "error.GitError: Cannot checkout %s: %s", project.name, e
-                )
-            except Exception as e:
-                checkout_error = e
-                logger.error(
-                    "error: Cannot checkout %s: %s: %s",
-                    project.name,
-                    type(e).__name__,
-                    e,
-                )
-            finally:
-                checkout_finish = time.time()
-                checkout_stderr = stderr_capture.getvalue()
-        elif fetch_success:
-            checkout_success = True
+        if fetch_success:
+            # We skip checkout if it's network-only or if the project has no
+            # working tree (e.g., a mirror).
+            if opt.network_only or not project.worktree:
+                checkout_success = True
+            else:
+                # This is a normal project that needs a checkout.
+                checkout_start = time.time()
+                stderr_capture = io.StringIO()
+                try:
+                    with contextlib.redirect_stderr(stderr_capture):
+                        syncbuf = SyncBuffer(
+                            project.manifest.manifestProject.config,
+                            detach_head=opt.detach_head,
+                        )
+                        project.Sync_LocalHalf(
+                            syncbuf,
+                            force_sync=opt.force_sync,
+                            force_checkout=opt.force_checkout,
+                            force_rebase=opt.rebase,
+                            verbose=opt.verbose,
+                        )
+                        checkout_success = syncbuf.Finish()
+                        if syncbuf.errors:
+                            checkout_errors.extend(syncbuf.errors)
+                except KeyboardInterrupt:
+                    logger.error(
+                        "Keyboard interrupt while processing %s", project.name
+                    )
+                except GitError as e:
+                    checkout_errors.append(e)
+                    logger.error(
+                        "error.GitError: Cannot checkout %s: %s",
+                        project.name,
+                        e,
+                    )
+                except Exception as e:
+                    checkout_errors.append(e)
+                    logger.error(
+                        "error: Cannot checkout %s: %s: %s",
+                        project.name,
+                        type(e).__name__,
+                        e,
+                    )
+                finally:
+                    checkout_finish = time.time()
+                    checkout_stderr = stderr_capture.getvalue()

         # Consolidate all captured output.
         captured_parts = []
```
```diff
@@ -2329,8 +2333,8 @@ later is required to fix a server side protocol bug.
             fetch_success=fetch_success,
             remote_fetched=remote_fetched,
             checkout_success=checkout_success,
-            fetch_error=fetch_error,
-            checkout_error=checkout_error,
+            fetch_errors=fetch_errors,
+            checkout_errors=checkout_errors,
             stderr_text=stderr_text.strip(),
             fetch_start=fetch_start,
             fetch_finish=fetch_finish,
```
```diff
@@ -2376,7 +2380,7 @@ later is required to fix a server side protocol bug.
     def _ProcessSyncInterleavedResults(
         self,
-        synced_relpaths: Set[str],
+        finished_relpaths: Set[str],
         err_event: _threading.Event,
         errors: List[Exception],
         opt: optparse.Values,
```
```diff
@@ -2392,7 +2396,8 @@ later is required to fix a server side protocol bug.
             pm.update()
             project = projects[result.project_index]

-            if opt.verbose and result.stderr_text:
+            success = result.fetch_success and result.checkout_success
+            if result.stderr_text and (opt.verbose or not success):
                 pm.display_message(result.stderr_text)

             if result.fetch_start:
```
```diff
@@ -2419,19 +2424,19 @@ later is required to fix a server side protocol bug.
                 result.checkout_success,
             )

-            if result.fetch_success and result.checkout_success:
-                synced_relpaths.add(result.relpath)
-            else:
+            finished_relpaths.add(result.relpath)
+
+            if not success:
                 ret = False
                 err_event.set()
-                if result.fetch_error:
-                    errors.append(result.fetch_error)
+                if result.fetch_errors:
+                    errors.extend(result.fetch_errors)
                     self._interleaved_err_network = True
                     self._interleaved_err_network_results.append(
                         result.relpath
                     )
-                if result.checkout_error:
-                    errors.append(result.checkout_error)
+                if result.checkout_errors:
+                    errors.extend(result.checkout_errors)
                     self._interleaved_err_checkout = True
                     self._interleaved_err_checkout_results.append(
                         result.relpath
```
```diff
@@ -2473,7 +2478,7 @@ later is required to fix a server side protocol bug.
         self._interleaved_err_checkout_results = []

         err_event = multiprocessing.Event()
-        synced_relpaths = set()
+        finished_relpaths = set()
         project_list = list(all_projects)
         pm = Progress(
             "Syncing",
```
```
@@ -2488,112 +2493,120 @@ later is required to fix a server side protocol bug.
(The flattened rendering below preserves the old and new bodies of this hunk in
their original order; the whole block is re-indented under an outer try/finally
that saves fetch times and local sync state.)

        sync_event = _threading.Event()
        sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)

        with multiprocessing.Manager() as manager, ssh.ProxyManager(
            manager
        ) as ssh_proxy:
            ssh_proxy.sock()
            with self.ParallelContext():
                self.get_parallel_context()["ssh_proxy"] = ssh_proxy
                # TODO(gavinmak): Use multprocessing.Queue instead of dict.
                self.get_parallel_context()[
                    "sync_dict"
                ] = multiprocessing.Manager().dict()
                sync_progress_thread.start()
        try:
            with multiprocessing.Manager() as manager, ssh.ProxyManager(
                manager
            ) as ssh_proxy:
                ssh_proxy.sock()
                with self.ParallelContext():
                    self.get_parallel_context()["ssh_proxy"] = ssh_proxy
                    # TODO(gavinmak): Use multprocessing.Queue instead of dict.
                    self.get_parallel_context()[
                        "sync_dict"
                    ] = multiprocessing.Manager().dict()
                    sync_progress_thread.start()

                try:
                    # Outer loop for dynamic project discovery. This continues
                    # until no unsynced projects remain.
                    while True:
                        projects_to_sync = [
                            p
                            for p in project_list
                            if p.relpath not in synced_relpaths
                        ]
                        if not projects_to_sync:
                            break
                    try:
                        # Outer loop for dynamic project discovery. This
                        # continues until no unsynced projects remain.
                        while True:
                            projects_to_sync = [
                                p
                                for p in project_list
                                if p.relpath not in finished_relpaths
                            ]
                            if not projects_to_sync:
                                break

                        pending_relpaths = {p.relpath for p in projects_to_sync}
                        if previously_pending_relpaths == pending_relpaths:
                            stalled_projects_str = "\n".join(
                                f"  - {path}"
                                for path in sorted(list(pending_relpaths))
                            )
                            logger.error(
                                "The following projects failed and could not "
                                "be synced:\n%s",
                                stalled_projects_str,
                            )
                            err_event.set()

                            # Include these in the final error report.
                            self._interleaved_err_checkout = True
                            self._interleaved_err_checkout_results.extend(
                                list(pending_relpaths)
                            )
                            break
                        previously_pending_relpaths = pending_relpaths

                        self.get_parallel_context()[
                            "projects"
                        ] = projects_to_sync
                        project_index_map = {
                            p: i for i, p in enumerate(projects_to_sync)
                        }

                        # Inner loop to process projects in a hierarchical
                        # order. This iterates through levels of project
                        # dependencies (e.g. 'foo' then 'foo/bar'). All projects
                        # in one level can be processed in parallel, but we must
                        # wait for a level to complete before starting the next.
                        for level_projects in _SafeCheckoutOrder(
                            projects_to_sync
                        ):
                            if not level_projects:
                                continue

                            objdir_project_map = collections.defaultdict(list)
                            for p in level_projects:
                                objdir_project_map[p.objdir].append(
                                    project_index_map[p]
                            pending_relpaths = {
                                p.relpath for p in projects_to_sync
                            }
                            if previously_pending_relpaths == pending_relpaths:
                                stalled_projects_str = "\n".join(
                                    f"  - {path}"
                                    for path in sorted(list(pending_relpaths))
                                )
                                logger.error(
                                    "The following projects failed and could "
                                    "not be synced:\n%s",
                                    stalled_projects_str,
                                )

                        work_items = list(objdir_project_map.values())
                        if not work_items:
                            continue

                        jobs = max(1, min(opt.jobs, len(work_items)))
                        callback = functools.partial(
                            self._ProcessSyncInterleavedResults,
                            synced_relpaths,
                            err_event,
                            errors,
                            opt,
                        )
                        if not self.ExecuteInParallel(
                            jobs,
                            functools.partial(self._SyncProjectList, opt),
                            work_items,
                            callback=callback,
                            output=pm,
                            chunksize=1,
                        ):
                            err_event.set()
                                break
                            previously_pending_relpaths = pending_relpaths

                        if err_event.is_set() and opt.fail_fast:
                            raise SyncFailFastError(aggregate_errors=errors)
                            self.get_parallel_context()[
                                "projects"
                            ] = projects_to_sync
                            project_index_map = {
                                p: i for i, p in enumerate(projects_to_sync)
                            }

                        self._ReloadManifest(None, manifest)
                        project_list = self.GetProjects(
                            args,
                            missing_ok=True,
                            submodules_ok=opt.fetch_submodules,
                            manifest=manifest,
                            all_manifests=not opt.this_manifest_only,
                        )
                        pm.update_total(len(project_list))
                finally:
                    sync_event.set()
                    sync_progress_thread.join()
                            # Inner loop to process projects in a hierarchical
                            # order. This iterates through levels of project
                            # dependencies (e.g. 'foo' then 'foo/bar'). All
                            # projects in one level can be processed in
                            # parallel, but we must wait for a level to complete
                            # before starting the next.
                            for level_projects in _SafeCheckoutOrder(
                                projects_to_sync
                            ):
                                if not level_projects:
                                    continue

                                objdir_project_map = collections.defaultdict(
                                    list
                                )
                                for p in level_projects:
                                    objdir_project_map[p.objdir].append(
                                        project_index_map[p]
                                    )

                                work_items = list(objdir_project_map.values())
                                if not work_items:
                                    continue

                                jobs = max(1, min(opt.jobs, len(work_items)))
                                callback = functools.partial(
                                    self._ProcessSyncInterleavedResults,
                                    finished_relpaths,
                                    err_event,
                                    errors,
                                    opt,
                                )
                                if not self.ExecuteInParallel(
                                    jobs,
                                    functools.partial(
                                        self._SyncProjectList, opt
                                    ),
                                    work_items,
                                    callback=callback,
                                    output=pm,
                                    chunksize=1,
                                    initializer=self.InitWorker,
                                ):
                                    err_event.set()

                                if err_event.is_set() and opt.fail_fast:
                                    raise SyncFailFastError(
                                        aggregate_errors=errors
                                    )

                                self._ReloadManifest(None, manifest)
                                project_list = self.GetProjects(
                                    args,
                                    missing_ok=True,
                                    submodules_ok=opt.fetch_submodules,
                                    manifest=manifest,
                                    all_manifests=not opt.this_manifest_only,
                                )
                                pm.update_total(len(project_list))
                    finally:
                        sync_event.set()
                        sync_progress_thread.join()
        finally:
            self._fetch_times.Save()
            self._local_sync_state.Save()

        pm.end()
```
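The outer loop above guards against livelock with a fixed-point check: if a full pass completes and the set of unfinished projects is unchanged, those projects are reported as stalled instead of being retried forever. A standalone sketch, with `sync_one` as a hypothetical stand-in:

```python
def sync_until_fixed_point(projects, sync_one):
    """Keep passing over unfinished projects; stop when a pass makes no progress.

    sync_one is a hypothetical stand-in returning True on success.
    """
    finished = set()
    previously_pending = None
    while True:
        pending = {p for p in projects if p not in finished}
        if not pending:
            return set()  # everything synced
        if pending == previously_pending:
            return pending  # stalled: report instead of looping forever
        previously_pending = pending
        for p in list(pending):
            if sync_one(p):
                finished.add(p)
```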
```diff
@@ -2697,17 +2710,19 @@ class _FetchTimes:
             self._saved = {}

     def Save(self):
-        if self._saved is None:
+        if not self._seen:
             return

         self._Load()

         for name, t in self._seen.items():
             # Keep a moving average across the previous/current sync runs.
             old = self._saved.get(name, t)
-            self._seen[name] = (self._ALPHA * t) + ((1 - self._ALPHA) * old)
+            self._saved[name] = (self._ALPHA * t) + ((1 - self._ALPHA) * old)

         try:
             with open(self._path, "w") as f:
-                json.dump(self._seen, f, indent=2)
+                json.dump(self._saved, f, indent=2)
         except (OSError, TypeError):
             platform_utils.remove(self._path, missing_ok=True)
```
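The moving average in `_FetchTimes.Save` is a standard exponential moving average: new_avg = alpha * latest + (1 - alpha) * old_avg. A standalone sketch (the alpha value here is illustrative; repo keeps its own `_ALPHA` constant):

```python
def update_fetch_time(saved, name, latest, alpha=0.5):
    """Blend the latest duration into the stored average (EMA)."""
    old = saved.get(name, latest)  # first sighting seeds with the new value
    saved[name] = alpha * latest + (1 - alpha) * old


times = {}
update_fetch_time(times, "platform/build", 12.0)  # -> 12.0
update_fetch_time(times, "platform/build", 4.0)   # -> 8.0 with alpha=0.5
```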
```diff
@@ -1,4 +1,4 @@
-# Copyright 2022 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-# Copyright 2021 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-# Copyright 2019 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-# Copyright 2021 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-# Copyright 2022 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-# Copyright 2019 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
```
```diff
@@ -94,7 +94,12 @@ class AllCommands(unittest.TestCase):
         """Block redundant dest= arguments."""

         def _check_dest(opt):
-            if opt.dest is None or not opt._long_opts:
+            """Check the dest= setting."""
+            # If the destination is not set, nothing to check.
+            # If long options are not set, then there's no implicit destination.
+            # If callback is used, then a destination might be needed because
+            # optparse cannot assume a value is always stored.
+            if opt.dest is None or not opt._long_opts or opt.callback:
                 return

             long = opt._long_opts[0]
```
```diff
@@ -309,6 +309,7 @@ class FakeProject:
         self.relpath = relpath
         self.name = name or relpath
         self.objdir = objdir or relpath
+        self.worktree = relpath

         self.use_git_worktrees = False
         self.UseAlternates = False
@@ -680,6 +681,9 @@ class InterleavedSyncTest(unittest.TestCase):
         # Mock _GetCurrentBranchOnly for worker tests.
         mock.patch.object(sync.Sync, "_GetCurrentBranchOnly").start()

+        self.cmd._fetch_times = mock.Mock()
+        self.cmd._local_sync_state = mock.Mock()
+
     def tearDown(self):
         """Clean up resources."""
         shutil.rmtree(self.repodir)
```
```diff
@@ -800,6 +804,7 @@ class InterleavedSyncTest(unittest.TestCase):
         with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
             mock_sync_buf_instance = mock.MagicMock()
             mock_sync_buf_instance.Finish.return_value = True
+            mock_sync_buf_instance.errors = []
             mock_sync_buffer.return_value = mock_sync_buf_instance

             result_obj = self.cmd._SyncProjectList(opt, [0])
@@ -808,8 +813,8 @@ class InterleavedSyncTest(unittest.TestCase):
         result = result_obj.results[0]
         self.assertTrue(result.fetch_success)
         self.assertTrue(result.checkout_success)
-        self.assertIsNone(result.fetch_error)
-        self.assertIsNone(result.checkout_error)
+        self.assertEqual(result.fetch_errors, [])
+        self.assertEqual(result.checkout_errors, [])
         project.Sync_NetworkHalf.assert_called_once()
         project.Sync_LocalHalf.assert_called_once()
```
```diff
@@ -831,8 +836,27 @@ class InterleavedSyncTest(unittest.TestCase):

         self.assertFalse(result.fetch_success)
         self.assertFalse(result.checkout_success)
-        self.assertEqual(result.fetch_error, fetch_error)
-        self.assertIsNone(result.checkout_error)
+        self.assertEqual(result.fetch_errors, [fetch_error])
+        self.assertEqual(result.checkout_errors, [])
         project.Sync_NetworkHalf.assert_called_once()
         project.Sync_LocalHalf.assert_not_called()

+    def test_worker_no_worktree(self):
+        """Test interleaved sync does not checkout with no worktree."""
+        opt = self._get_opts()
+        project = self.projA
+        project.worktree = None
+        project.Sync_NetworkHalf = mock.Mock(
+            return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
+        )
+        project.Sync_LocalHalf = mock.Mock()
+        self.mock_context["projects"] = [project]
+
+        result_obj = self.cmd._SyncProjectList(opt, [0])
+        result = result_obj.results[0]
+
+        self.assertTrue(result.fetch_success)
+        self.assertTrue(result.checkout_success)
+        project.Sync_NetworkHalf.assert_called_once()
+        project.Sync_LocalHalf.assert_not_called()
```
```diff
@@ -850,7 +874,7 @@ class InterleavedSyncTest(unittest.TestCase):

         self.assertFalse(result.fetch_success)
         self.assertFalse(result.checkout_success)
-        self.assertEqual(result.fetch_error, fetch_error)
+        self.assertEqual(result.fetch_errors, [fetch_error])
         project.Sync_NetworkHalf.assert_called_once()
         project.Sync_LocalHalf.assert_not_called()
```
```diff
@@ -872,8 +896,8 @@ class InterleavedSyncTest(unittest.TestCase):

         self.assertTrue(result.fetch_success)
         self.assertFalse(result.checkout_success)
-        self.assertIsNone(result.fetch_error)
-        self.assertEqual(result.checkout_error, checkout_error)
+        self.assertEqual(result.fetch_errors, [])
+        self.assertEqual(result.checkout_errors, [checkout_error])
         project.Sync_NetworkHalf.assert_called_once()
         project.Sync_LocalHalf.assert_called_once()
```
```diff
@@ -889,6 +913,7 @@ class InterleavedSyncTest(unittest.TestCase):
         with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
             mock_sync_buf_instance = mock.MagicMock()
             mock_sync_buf_instance.Finish.return_value = True
+            mock_sync_buf_instance.errors = []
             mock_sync_buffer.return_value = mock_sync_buf_instance

             result_obj = self.cmd._SyncProjectList(opt, [0])
```
```diff
@@ -1,4 +1,4 @@
-# Copyright 2022 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
```