mirror of
https://gerrit.googlesource.com/git-repo
synced 2026-01-12 09:30:28 +00:00
Compare commits
70 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
871e4c7ed1 | ||
|
|
5b0b5513d6 | ||
|
|
b5991d7128 | ||
|
|
7f87c54043 | ||
|
|
50c6226075 | ||
|
|
1e4b2887a7 | ||
|
|
31b4b19387 | ||
|
|
2b6de52a36 | ||
|
|
91ec998598 | ||
|
|
08964a1658 | ||
|
|
3073a90046 | ||
|
|
75773b8b9d | ||
|
|
412367bfaf | ||
|
|
47c24b5c40 | ||
|
|
be33106ffc | ||
|
|
5998c0b506 | ||
|
|
877ef91be2 | ||
|
|
4ab2284a94 | ||
|
|
1afe96a7e9 | ||
|
|
2719a8e203 | ||
|
|
e4872ac8ba | ||
|
|
4623264809 | ||
|
|
67383bdba9 | ||
|
|
d30414bb53 | ||
|
|
80d1a5ad3e | ||
|
|
c615c964fb | ||
|
|
5ed12ec81d | ||
|
|
58a59fdfbc | ||
|
|
38d2fe11b9 | ||
|
|
854fe440f2 | ||
|
|
d534a5537f | ||
|
|
a64149a7a7 | ||
|
|
3e6acf2778 | ||
|
|
a6e1a59ac1 | ||
|
|
380bf9546e | ||
|
|
d9cc0a1526 | ||
|
|
8c3585f367 | ||
|
|
239fad7146 | ||
|
|
d3eec0acdd | ||
|
|
7f7d70efe4 | ||
|
|
720bd1e96b | ||
|
|
25858c8b16 | ||
|
|
52bab0ba27 | ||
|
|
2e6d0881d9 | ||
|
|
74edacd8e5 | ||
|
|
5d95ba8d85 | ||
|
|
82d500eb7a | ||
|
|
21269c3eed | ||
|
|
99b5a17f2c | ||
|
|
df3c4017f9 | ||
|
|
f7a3f99dc9 | ||
|
|
6b8e9fc8db | ||
|
|
7b6ffed4ae | ||
|
|
b4b323a8bd | ||
|
|
f91f4462e6 | ||
|
|
85352825ff | ||
|
|
b262d0e461 | ||
|
|
044e52e236 | ||
|
|
0cb88a8d79 | ||
|
|
08815ad3eb | ||
|
|
3c8bae27ec | ||
|
|
06338abe79 | ||
|
|
8d37f61471 | ||
|
|
1acbc14c34 | ||
|
|
c448ba9cc7 | ||
|
|
21cbcc54e9 | ||
|
|
0f200bb3a1 | ||
|
|
c8da28c3ed | ||
|
|
c061593a12 | ||
|
|
a94457d1ce |
1
.flake8
1
.flake8
@@ -12,5 +12,6 @@ extend-ignore =
|
||||
# E731: do not assign a lambda expression, use a def
|
||||
E731,
|
||||
exclude =
|
||||
.venv,
|
||||
venv,
|
||||
.tox,
|
||||
|
||||
2
.github/workflows/close-pull-request.yml
vendored
2
.github/workflows/close-pull-request.yml
vendored
@@ -18,5 +18,5 @@ jobs:
|
||||
Thanks for your contribution!
|
||||
Unfortunately, we don't use GitHub pull requests to manage code
|
||||
contributions to this repository.
|
||||
Instead, please see [README.md](../blob/HEAD/SUBMITTING_PATCHES.md)
|
||||
Instead, please see [README.md](../blob/HEAD/CONTRIBUTING.md)
|
||||
which provides full instructions on how to get involved.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2023 The Android Open Source Project
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -14,7 +14,7 @@ that you can put anywhere in your path.
|
||||
* Docs: <https://source.android.com/source/using-repo.html>
|
||||
* [repo Manifest Format](./docs/manifest-format.md)
|
||||
* [repo Hooks](./docs/repo-hooks.md)
|
||||
* [Submitting patches](./SUBMITTING_PATCHES.md)
|
||||
* [Contributing](./CONTRIBUTING.md)
|
||||
* Running Repo in [Microsoft Windows](./docs/windows.md)
|
||||
* GitHub mirror: <https://github.com/GerritCodeReview/git-repo>
|
||||
* Postsubmit tests: <https://github.com/GerritCodeReview/git-repo/actions>
|
||||
|
||||
@@ -399,7 +399,7 @@ class Command:
|
||||
result = []
|
||||
|
||||
if not groups:
|
||||
groups = manifest.GetGroupsStr()
|
||||
groups = manifest.GetManifestGroupsStr()
|
||||
groups = [x for x in re.split(r"[,\s]+", groups) if x]
|
||||
|
||||
if not args:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2021 The Android Open Source Project
|
||||
# Copyright (C) 2021 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -59,7 +59,7 @@ following DTD:
|
||||
<!ATTLIST manifest-server url CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT submanifest EMPTY>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest remote IDREF #IMPLIED>
|
||||
<!ATTLIST submanifest project CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest manifest-name CDATA #IMPLIED>
|
||||
@@ -81,9 +81,9 @@ following DTD:
|
||||
<!ATTLIST project sync-c CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-s CDATA #IMPLIED>
|
||||
<!ATTLIST project sync-tags CDATA #IMPLIED>
|
||||
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST project clone-depth CDATA #IMPLIED>
|
||||
<!ATTLIST project force-path CDATA #IMPLIED>
|
||||
<!ATTLIST project force-path CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT annotation EMPTY>
|
||||
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||
@@ -95,19 +95,21 @@ following DTD:
|
||||
<!ATTLIST copyfile dest CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT linkfile EMPTY>
|
||||
<!ATTLIST linkfile src CDATA #REQUIRED>
|
||||
<!ATTLIST linkfile src CDATA #REQUIRED>
|
||||
<!ATTLIST linkfile dest CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT extend-project EMPTY>
|
||||
<!ATTLIST extend-project name CDATA #REQUIRED>
|
||||
<!ATTLIST extend-project path CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project dest-path CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project groups CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project revision CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project remote CDATA #IMPLIED>
|
||||
<!ELEMENT extend-project (annotation*,
|
||||
copyfile*,
|
||||
linkfile*)>
|
||||
<!ATTLIST extend-project name CDATA #REQUIRED>
|
||||
<!ATTLIST extend-project path CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project dest-path CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project groups CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project revision CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project remote CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project dest-branch CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project base-rev CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project base-rev CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT remove-project EMPTY>
|
||||
<!ATTLIST remove-project name CDATA #IMPLIED>
|
||||
@@ -116,7 +118,7 @@ following DTD:
|
||||
<!ATTLIST remove-project base-rev CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT repo-hooks EMPTY>
|
||||
<!ATTLIST repo-hooks in-project CDATA #REQUIRED>
|
||||
<!ATTLIST repo-hooks in-project CDATA #REQUIRED>
|
||||
<!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT superproject EMPTY>
|
||||
@@ -125,7 +127,7 @@ following DTD:
|
||||
<!ATTLIST superproject revision CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT contactinfo EMPTY>
|
||||
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
|
||||
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT include EMPTY>
|
||||
<!ATTLIST include name CDATA #REQUIRED>
|
||||
@@ -285,7 +287,7 @@ should be placed. If not supplied, `revision` is used.
|
||||
|
||||
`path` may not be an absolute path or use "." or ".." path components.
|
||||
|
||||
Attribute `groups`: List of additional groups to which all projects
|
||||
Attribute `groups`: Set of additional groups to which all projects
|
||||
in the included submanifest belong. This appends and recurses, meaning
|
||||
all projects in submanifests carry all parent submanifest groups.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
@@ -353,7 +355,7 @@ When using `repo upload`, changes will be submitted for code
|
||||
review on this branch. If unspecified both here and in the
|
||||
default element, `revision` is used instead.
|
||||
|
||||
Attribute `groups`: List of groups to which this project belongs,
|
||||
Attribute `groups`: Set of groups to which this project belongs,
|
||||
whitespace or comma separated. All projects belong to the group
|
||||
"all", and each project automatically belongs to a group of
|
||||
its name:`name` and path:`path`. E.g. for
|
||||
@@ -401,7 +403,7 @@ of the repo client where the Git working directory for this project
|
||||
should be placed. This is used to move a project in the checkout by
|
||||
overriding the existing `path` setting.
|
||||
|
||||
Attribute `groups`: List of additional groups to which this project
|
||||
Attribute `groups`: Set of additional groups to which this project
|
||||
belongs. Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `revision`: If specified, overrides the revision of the original
|
||||
@@ -427,19 +429,20 @@ Same syntax as the corresponding element of `project`.
|
||||
### Element annotation
|
||||
|
||||
Zero or more annotation elements may be specified as children of a
|
||||
project or remote element. Each element describes a name-value pair.
|
||||
For projects, this name-value pair will be exported into each project's
|
||||
environment during a 'forall' command, prefixed with `REPO__`. In addition,
|
||||
there is an optional attribute "keep" which accepts the case insensitive values
|
||||
"true" (default) or "false". This attribute determines whether or not the
|
||||
project element, an extend-project element, or a remote element. Each
|
||||
element describes a name-value pair. For projects, this name-value pair
|
||||
will be exported into each project's environment during a 'forall'
|
||||
command, prefixed with `REPO__`. In addition, there is an optional
|
||||
attribute "keep" which accepts the case insensitive values "true"
|
||||
(default) or "false". This attribute determines whether or not the
|
||||
annotation will be kept when exported with the manifest subcommand.
|
||||
|
||||
### Element copyfile
|
||||
|
||||
Zero or more copyfile elements may be specified as children of a
|
||||
project element. Each element describes a src-dest pair of files;
|
||||
the "src" file will be copied to the "dest" place during `repo sync`
|
||||
command.
|
||||
project element, or an extend-project element. Each element describes a
|
||||
src-dest pair of files; the "src" file will be copied to the "dest"
|
||||
place during `repo sync` command.
|
||||
|
||||
"src" is project relative, "dest" is relative to the top of the tree.
|
||||
Copying from paths outside of the project or to paths outside of the repo
|
||||
@@ -450,10 +453,14 @@ Intermediate paths must not be symlinks either.
|
||||
|
||||
Parent directories of "dest" will be automatically created if missing.
|
||||
|
||||
The files are copied in the order they are specified in the manifests.
|
||||
If multiple elements specify the same source and destination, they will
|
||||
only be applied as one, based on the first occurence. Files are copied
|
||||
before any links specified via linkfile elements are created.
|
||||
|
||||
### Element linkfile
|
||||
|
||||
It's just like copyfile and runs at the same time as copyfile but
|
||||
instead of copying it creates a symlink.
|
||||
It's just like copyfile, but instead of copying it creates a symlink.
|
||||
|
||||
The symlink is created at "dest" (relative to the top of the tree) and
|
||||
points to the path specified by "src" which is a path in the project.
|
||||
@@ -463,6 +470,11 @@ Parent directories of "dest" will be automatically created if missing.
|
||||
The symlink target may be a file or directory, but it may not point outside
|
||||
of the repo client.
|
||||
|
||||
The links are created in the order they are specified in the manifests.
|
||||
If multiple elements specify the same source and destination, they will
|
||||
only be applied as one, based on the first occurence. Links are created
|
||||
after any files specified via copyfile elements are copied.
|
||||
|
||||
### Element remove-project
|
||||
|
||||
Deletes a project from the internal manifest table, possibly
|
||||
@@ -560,13 +572,16 @@ the manifest repository's root.
|
||||
"name" may not be an absolute path or use "." or ".." path components.
|
||||
These restrictions are not enforced for [Local Manifests].
|
||||
|
||||
Attribute `groups`: List of additional groups to which all projects
|
||||
Attribute `groups`: Set of additional groups to which all projects
|
||||
in the included manifest belong. This appends and recurses, meaning
|
||||
all projects in included manifests carry all parent include groups.
|
||||
This also applies to all extend-project elements in the included manifests.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`)
|
||||
default to which all projects in the included manifest belong.
|
||||
default to which all projects in the included manifest belong. This recurses,
|
||||
meaning it will apply to all projects in all manifests included as a result of
|
||||
this element.
|
||||
|
||||
## Local Manifests {#local-manifests}
|
||||
|
||||
|
||||
@@ -133,3 +133,43 @@ def main(project_list, worktree_list=None, **kwargs):
|
||||
kwargs: Leave this here for forward-compatibility.
|
||||
"""
|
||||
```
|
||||
|
||||
### post-sync
|
||||
|
||||
This hook runs when `repo sync` completes without errors.
|
||||
|
||||
Note: This includes cases where no actual checkout may occur. The hook will still run.
|
||||
For example:
|
||||
- `repo sync -n` performs network fetches only and skips the checkout phase.
|
||||
- `repo sync <project>` only updates the specified project(s).
|
||||
- Partial failures may still result in a successful exit.
|
||||
|
||||
This hook is useful for post-processing tasks such as setting up git hooks,
|
||||
bootstrapping configuration files, or running project initialization logic.
|
||||
|
||||
The hook is defined using the existing `<repo-hooks>` manifest block and is
|
||||
optional. If the hook script fails or is missing, `repo sync` will still
|
||||
complete successfully, and the error will be printed as a warning.
|
||||
|
||||
Example:
|
||||
|
||||
```xml
|
||||
<project name="myorg/dev-tools" path="tools" revision="main" />
|
||||
<repo-hooks in-project="myorg/dev-tools" enabled-list="post-sync">
|
||||
<hook name="post-sync" />
|
||||
</repo-hooks>
|
||||
```
|
||||
|
||||
The `post-sync.py` file should be defined like:
|
||||
|
||||
```py
|
||||
def main(repo_topdir=None, **kwargs):
|
||||
"""Main function invoked directly by repo.
|
||||
|
||||
We must use the name "main" as that is what repo requires.
|
||||
|
||||
Args:
|
||||
repo_topdir: The absolute path to the top-level directory of the repo workspace.
|
||||
kwargs: Leave this here for forward-compatibility.
|
||||
"""
|
||||
```
|
||||
|
||||
1
git_ssh
1
git_ssh
@@ -1,5 +1,4 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright (C) 2009 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
||||
@@ -28,6 +28,7 @@ import os
|
||||
import sys
|
||||
import time
|
||||
from typing import NamedTuple
|
||||
import urllib.parse
|
||||
|
||||
from git_command import git_require
|
||||
from git_command import GitCommand
|
||||
@@ -128,6 +129,30 @@ class Superproject:
|
||||
"""Set the _print_messages attribute."""
|
||||
self._print_messages = value
|
||||
|
||||
@property
|
||||
def commit_id(self):
|
||||
"""Returns the commit ID of the superproject checkout."""
|
||||
cmd = ["rev-parse", self.revision]
|
||||
p = GitCommand(
|
||||
None, # project
|
||||
cmd,
|
||||
gitdir=self._work_git,
|
||||
bare=True,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
)
|
||||
retval = p.Wait()
|
||||
if retval != 0:
|
||||
self._LogWarning(
|
||||
"git rev-parse call failed, command: git {}, "
|
||||
"return code: {}, stderr: {}",
|
||||
cmd,
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return None
|
||||
return p.stdout
|
||||
|
||||
@property
|
||||
def project_commit_ids(self):
|
||||
"""Returns a dictionary of projects and their commit ids."""
|
||||
@@ -140,12 +165,33 @@ class Superproject:
|
||||
self._manifest_path if os.path.exists(self._manifest_path) else None
|
||||
)
|
||||
|
||||
@property
|
||||
def repo_id(self):
|
||||
"""Returns the repo ID for the superproject.
|
||||
|
||||
For example, if the superproject points to:
|
||||
https://android-review.googlesource.com/platform/superproject/
|
||||
Then the repo_id would be:
|
||||
android/platform/superproject
|
||||
"""
|
||||
review_url = self.remote.review
|
||||
if review_url:
|
||||
parsed_url = urllib.parse.urlparse(review_url)
|
||||
netloc = parsed_url.netloc
|
||||
if netloc:
|
||||
parts = netloc.split("-review", 1)
|
||||
host = parts[0]
|
||||
rev = GitRefs(self._work_git).get("HEAD")
|
||||
return f"{host}/{self.name}@{rev}"
|
||||
return None
|
||||
|
||||
def _LogMessage(self, fmt, *inputs):
|
||||
"""Logs message to stderr and _git_event_log."""
|
||||
message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
|
||||
if self._print_messages:
|
||||
print(message, file=sys.stderr)
|
||||
self._git_event_log.ErrorEvent(message, fmt)
|
||||
if self._git_event_log:
|
||||
self._git_event_log.ErrorEvent(message, fmt)
|
||||
|
||||
def _LogMessagePrefix(self):
|
||||
"""Returns the prefix string to be logged in each log message"""
|
||||
@@ -258,7 +304,7 @@ class Superproject:
|
||||
Works only in git repositories.
|
||||
|
||||
Returns:
|
||||
data: data returned from 'git ls-tree ...' instead of None.
|
||||
data: data returned from 'git ls-tree ...'. None on error.
|
||||
"""
|
||||
if not os.path.exists(self._work_git):
|
||||
self._LogWarning(
|
||||
@@ -288,6 +334,7 @@ class Superproject:
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return None
|
||||
return data
|
||||
|
||||
def Sync(self, git_event_log):
|
||||
@@ -375,7 +422,8 @@ class Superproject:
|
||||
)
|
||||
return None
|
||||
manifest_str = self._manifest.ToXml(
|
||||
groups=self._manifest.GetGroupsStr(), omit_local=True
|
||||
filter_groups=self._manifest.GetManifestGroupsStr(),
|
||||
omit_local=True,
|
||||
).toxml()
|
||||
manifest_path = self._manifest_path
|
||||
try:
|
||||
|
||||
@@ -1,3 +1,19 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Event logging in the git trace2 EVENT format."""
|
||||
|
||||
from git_command import GetEventTargetPath
|
||||
from git_command import RepoSourceVersion
|
||||
from git_trace2_event_log_base import BaseEventLog
|
||||
|
||||
31
hooks.py
31
hooks.py
@@ -22,6 +22,13 @@ from error import HookError
|
||||
from git_refs import HEAD
|
||||
|
||||
|
||||
# The API we've documented to hook authors. Keep in sync with repo-hooks.md.
|
||||
_API_ARGS = {
|
||||
"pre-upload": {"project_list", "worktree_list"},
|
||||
"post-sync": {"repo_topdir"},
|
||||
}
|
||||
|
||||
|
||||
class RepoHook:
|
||||
"""A RepoHook contains information about a script to run as a hook.
|
||||
|
||||
@@ -56,6 +63,7 @@ class RepoHook:
|
||||
hooks_project,
|
||||
repo_topdir,
|
||||
manifest_url,
|
||||
bug_url=None,
|
||||
bypass_hooks=False,
|
||||
allow_all_hooks=False,
|
||||
ignore_hooks=False,
|
||||
@@ -75,6 +83,7 @@ class RepoHook:
|
||||
run with CWD as this directory.
|
||||
If you have a manifest, this is manifest.topdir.
|
||||
manifest_url: The URL to the manifest git repo.
|
||||
bug_url: The URL to report issues.
|
||||
bypass_hooks: If True, then 'Do not run the hook'.
|
||||
allow_all_hooks: If True, then 'Run the hook without prompting'.
|
||||
ignore_hooks: If True, then 'Do not abort action if hooks fail'.
|
||||
@@ -85,18 +94,18 @@ class RepoHook:
|
||||
self._hooks_project = hooks_project
|
||||
self._repo_topdir = repo_topdir
|
||||
self._manifest_url = manifest_url
|
||||
self._bug_url = bug_url
|
||||
self._bypass_hooks = bypass_hooks
|
||||
self._allow_all_hooks = allow_all_hooks
|
||||
self._ignore_hooks = ignore_hooks
|
||||
self._abort_if_user_denies = abort_if_user_denies
|
||||
|
||||
# Store the full path to the script for convenience.
|
||||
if self._hooks_project:
|
||||
self._script_fullpath = None
|
||||
if self._hooks_project and self._hooks_project.worktree:
|
||||
self._script_fullpath = os.path.join(
|
||||
self._hooks_project.worktree, self._hook_type + ".py"
|
||||
)
|
||||
else:
|
||||
self._script_fullpath = None
|
||||
|
||||
def _GetHash(self):
|
||||
"""Return a hash of the contents of the hooks directory.
|
||||
@@ -414,11 +423,26 @@ class RepoHook:
|
||||
ignore the result through the option combinations as listed in
|
||||
AddHookOptionGroup().
|
||||
"""
|
||||
# Make sure our own callers use the documented API.
|
||||
exp_kwargs = _API_ARGS.get(self._hook_type, set())
|
||||
got_kwargs = set(kwargs.keys())
|
||||
if exp_kwargs != got_kwargs:
|
||||
print(
|
||||
"repo internal error: "
|
||||
f"hook '{self._hook_type}' called incorrectly\n"
|
||||
f" got: {sorted(got_kwargs)}\n"
|
||||
f" expected: {sorted(exp_kwargs)}\n"
|
||||
f"Please file a bug: {self._bug_url}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return False
|
||||
|
||||
# Do not do anything in case bypass_hooks is set, or
|
||||
# no-op if there is no hooks project or if hook is disabled.
|
||||
if (
|
||||
self._bypass_hooks
|
||||
or not self._hooks_project
|
||||
or not self._script_fullpath
|
||||
or self._hook_type not in self._hooks_project.enabled_repo_hooks
|
||||
):
|
||||
return True
|
||||
@@ -472,6 +496,7 @@ class RepoHook:
|
||||
"manifest_url": manifest.manifestProject.GetRemote(
|
||||
"origin"
|
||||
).url,
|
||||
"bug_url": manifest.contactinfo.bugurl,
|
||||
}
|
||||
)
|
||||
return cls(*args, **kwargs)
|
||||
|
||||
1
main.py
1
main.py
@@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "December 2024" "repo gc" "Repo Manual"
|
||||
.TH REPO "1" "April 2025" "repo gc" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo gc - manual page for repo gc
|
||||
.SH SYNOPSIS
|
||||
@@ -8,7 +8,7 @@ repo \- repo gc - manual page for repo gc
|
||||
.SH DESCRIPTION
|
||||
Summary
|
||||
.PP
|
||||
Cleaning up internal repo state.
|
||||
Cleaning up internal repo and Git state.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
@@ -19,6 +19,10 @@ do everything except actually delete
|
||||
.TP
|
||||
\fB\-y\fR, \fB\-\-yes\fR
|
||||
answer yes to all safe prompts
|
||||
.TP
|
||||
\fB\-\-repack\fR
|
||||
repack all projects that use partial clone with
|
||||
filter=blob:none
|
||||
.SS Logging options:
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "December 2024" "repo manifest" "Repo Manual"
|
||||
.TH REPO "1" "December 2025" "repo manifest" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo manifest - manual page for repo manifest
|
||||
.SH SYNOPSIS
|
||||
@@ -30,8 +30,8 @@ if in \fB\-r\fR mode, do not write the dest\-branch field
|
||||
(only of use if the branch names for a sha1 manifest
|
||||
are sensitive)
|
||||
.TP
|
||||
\fB\-\-json\fR
|
||||
output manifest in JSON format (experimental)
|
||||
\fB\-\-format\fR=\fI\,FORMAT\/\fR
|
||||
output format: xml, json (default: xml)
|
||||
.TP
|
||||
\fB\-\-pretty\fR
|
||||
format output for humans to read
|
||||
@@ -78,6 +78,10 @@ set to the ref we were on when the manifest was generated. The 'dest\-branch'
|
||||
attribute is set to indicate the remote ref to push changes to via 'repo
|
||||
upload'.
|
||||
.PP
|
||||
Multiple output formats are supported via \fB\-\-format\fR. The default output is XML,
|
||||
and formats are generally "condensed". Use \fB\-\-pretty\fR for more human\-readable
|
||||
variations.
|
||||
.PP
|
||||
repo Manifest Format
|
||||
.PP
|
||||
A repo manifest describes the structure of a repo client; that is the
|
||||
@@ -135,7 +139,7 @@ include*)>
|
||||
<!ATTLIST manifest\-server url CDATA #REQUIRED>
|
||||
.IP
|
||||
<!ELEMENT submanifest EMPTY>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest remote IDREF #IMPLIED>
|
||||
<!ATTLIST submanifest project CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest manifest\-name CDATA #IMPLIED>
|
||||
@@ -166,9 +170,9 @@ CDATA #IMPLIED>
|
||||
<!ATTLIST project sync\-c CDATA #IMPLIED>
|
||||
<!ATTLIST project sync\-s CDATA #IMPLIED>
|
||||
<!ATTLIST project sync\-tags CDATA #IMPLIED>
|
||||
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST project clone\-depth CDATA #IMPLIED>
|
||||
<!ATTLIST project force\-path CDATA #IMPLIED>
|
||||
<!ATTLIST project force\-path CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT annotation EMPTY>
|
||||
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||
@@ -180,19 +184,34 @@ CDATA #IMPLIED>
|
||||
<!ATTLIST copyfile dest CDATA #REQUIRED>
|
||||
.IP
|
||||
<!ELEMENT linkfile EMPTY>
|
||||
<!ATTLIST linkfile src CDATA #REQUIRED>
|
||||
<!ATTLIST linkfile src CDATA #REQUIRED>
|
||||
<!ATTLIST linkfile dest CDATA #REQUIRED>
|
||||
.TP
|
||||
<!ELEMENT extend\-project (annotation*,
|
||||
copyfile*,
|
||||
linkfile*)>
|
||||
.TP
|
||||
<!ATTLIST extend\-project name
|
||||
CDATA #REQUIRED>
|
||||
.TP
|
||||
<!ATTLIST extend\-project path
|
||||
CDATA #IMPLIED>
|
||||
.TP
|
||||
<!ATTLIST extend\-project dest\-path
|
||||
CDATA #IMPLIED>
|
||||
.TP
|
||||
<!ATTLIST extend\-project groups
|
||||
CDATA #IMPLIED>
|
||||
.TP
|
||||
<!ATTLIST extend\-project revision
|
||||
CDATA #IMPLIED>
|
||||
.TP
|
||||
<!ATTLIST extend\-project remote
|
||||
CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT extend\-project EMPTY>
|
||||
<!ATTLIST extend\-project name CDATA #REQUIRED>
|
||||
<!ATTLIST extend\-project path CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project dest\-path CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project groups CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project revision CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project remote CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project base\-rev CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project upstream CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project base\-rev CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT remove\-project EMPTY>
|
||||
<!ATTLIST remove\-project name CDATA #IMPLIED>
|
||||
@@ -201,7 +220,7 @@ CDATA #IMPLIED>
|
||||
<!ATTLIST remove\-project base\-rev CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT repo\-hooks EMPTY>
|
||||
<!ATTLIST repo\-hooks in\-project CDATA #REQUIRED>
|
||||
<!ATTLIST repo\-hooks in\-project CDATA #REQUIRED>
|
||||
<!ATTLIST repo\-hooks enabled\-list CDATA #REQUIRED>
|
||||
.IP
|
||||
<!ELEMENT superproject EMPTY>
|
||||
@@ -210,7 +229,7 @@ CDATA #IMPLIED>
|
||||
<!ATTLIST superproject revision CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT contactinfo EMPTY>
|
||||
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
|
||||
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
|
||||
.IP
|
||||
<!ELEMENT include EMPTY>
|
||||
<!ATTLIST include name CDATA #REQUIRED>
|
||||
@@ -306,25 +325,7 @@ Element manifest\-server
|
||||
At most one manifest\-server may be specified. The url attribute is used to
|
||||
specify the URL of a manifest server, which is an XML RPC service.
|
||||
.PP
|
||||
The manifest server should implement the following RPC methods:
|
||||
.IP
|
||||
GetApprovedManifest(branch, target)
|
||||
.PP
|
||||
Return a manifest in which each project is pegged to a known good revision for
|
||||
the current branch and target. This is used by repo sync when the \fB\-\-smart\-sync\fR
|
||||
option is given.
|
||||
.PP
|
||||
The target to use is defined by environment variables TARGET_PRODUCT and
|
||||
TARGET_BUILD_VARIANT. These variables are used to create a string of the form
|
||||
$TARGET_PRODUCT\-$TARGET_BUILD_VARIANT, e.g. passion\-userdebug. If one of those
|
||||
variables or both are not present, the program will call GetApprovedManifest
|
||||
without the target parameter and the manifest server should choose a reasonable
|
||||
default target.
|
||||
.IP
|
||||
GetManifest(tag)
|
||||
.PP
|
||||
Return a manifest in which each project is pegged to the revision at the
|
||||
specified tag. This is used by repo sync when the \fB\-\-smart\-tag\fR option is given.
|
||||
See the [smart sync documentation](./smart\-sync.md) for more details.
|
||||
.PP
|
||||
Element submanifest
|
||||
.PP
|
||||
@@ -376,7 +377,7 @@ supplied, `revision` is used.
|
||||
.PP
|
||||
`path` may not be an absolute path or use "." or ".." path components.
|
||||
.PP
|
||||
Attribute `groups`: List of additional groups to which all projects in the
|
||||
Attribute `groups`: Set of additional groups to which all projects in the
|
||||
included submanifest belong. This appends and recurses, meaning all projects in
|
||||
submanifests carry all parent submanifest groups. Same syntax as the
|
||||
corresponding element of `project`.
|
||||
@@ -438,7 +439,7 @@ Attribute `dest\-branch`: Name of a Git branch (e.g. `main`). When using `repo
|
||||
upload`, changes will be submitted for code review on this branch. If
|
||||
unspecified both here and in the default element, `revision` is used instead.
|
||||
.PP
|
||||
Attribute `groups`: List of groups to which this project belongs, whitespace or
|
||||
Attribute `groups`: Set of groups to which this project belongs, whitespace or
|
||||
comma separated. All projects belong to the group "all", and each project
|
||||
automatically belongs to a group of its name:`name` and path:`path`. E.g. for
|
||||
`<project name="monkeys" path="barrel\-of"/>`, that project definition is
|
||||
@@ -482,8 +483,8 @@ repo client where the Git working directory for this project should be placed.
|
||||
This is used to move a project in the checkout by overriding the existing `path`
|
||||
setting.
|
||||
.PP
|
||||
Attribute `groups`: List of additional groups to which this project belongs.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
Attribute `groups`: Set of additional groups to which this project belongs. Same
|
||||
syntax as the corresponding element of `project`.
|
||||
.PP
|
||||
Attribute `revision`: If specified, overrides the revision of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
@@ -507,19 +508,21 @@ element of `project`.
|
||||
.PP
|
||||
Element annotation
|
||||
.PP
|
||||
Zero or more annotation elements may be specified as children of a project or
|
||||
remote element. Each element describes a name\-value pair. For projects, this
|
||||
name\-value pair will be exported into each project's environment during a
|
||||
\&'forall' command, prefixed with `REPO__`. In addition, there is an optional
|
||||
attribute "keep" which accepts the case insensitive values "true" (default) or
|
||||
"false". This attribute determines whether or not the annotation will be kept
|
||||
when exported with the manifest subcommand.
|
||||
Zero or more annotation elements may be specified as children of a project
|
||||
element, an extend\-project element, or a remote element. Each element describes
|
||||
a name\-value pair. For projects, this name\-value pair will be exported into each
|
||||
project's environment during a 'forall' command, prefixed with `REPO__`. In
|
||||
addition, there is an optional attribute "keep" which accepts the case
|
||||
insensitive values "true" (default) or "false". This attribute determines
|
||||
whether or not the annotation will be kept when exported with the manifest
|
||||
subcommand.
|
||||
.PP
|
||||
Element copyfile
|
||||
.PP
|
||||
Zero or more copyfile elements may be specified as children of a project
|
||||
element. Each element describes a src\-dest pair of files; the "src" file will be
|
||||
copied to the "dest" place during `repo sync` command.
|
||||
element, or an extend\-project element. Each element describes a src\-dest pair of
|
||||
files; the "src" file will be copied to the "dest" place during `repo sync`
|
||||
command.
|
||||
.PP
|
||||
"src" is project relative, "dest" is relative to the top of the tree. Copying
|
||||
from paths outside of the project or to paths outside of the repo client is not
|
||||
@@ -530,10 +533,14 @@ Intermediate paths must not be symlinks either.
|
||||
.PP
|
||||
Parent directories of "dest" will be automatically created if missing.
|
||||
.PP
|
||||
The files are copied in the order they are specified in the manifests. If
|
||||
multiple elements specify the same source and destination, they will only be
|
||||
applied as one, based on the first occurence. Files are copied before any links
|
||||
specified via linkfile elements are created.
|
||||
.PP
|
||||
Element linkfile
|
||||
.PP
|
||||
It's just like copyfile and runs at the same time as copyfile but instead of
|
||||
copying it creates a symlink.
|
||||
It's just like copyfile, but instead of copying it creates a symlink.
|
||||
.PP
|
||||
The symlink is created at "dest" (relative to the top of the tree) and points to
|
||||
the path specified by "src" which is a path in the project.
|
||||
@@ -543,6 +550,11 @@ Parent directories of "dest" will be automatically created if missing.
|
||||
The symlink target may be a file or directory, but it may not point outside of
|
||||
the repo client.
|
||||
.PP
|
||||
The links are created in the order they are specified in the manifests. If
|
||||
multiple elements specify the same source and destination, they will only be
|
||||
applied as one, based on the first occurence. Links are created after any files
|
||||
specified via copyfile elements are copied.
|
||||
.PP
|
||||
Element remove\-project
|
||||
.PP
|
||||
Deletes a project from the internal manifest table, possibly allowing a
|
||||
@@ -634,13 +646,16 @@ repository's root.
|
||||
"name" may not be an absolute path or use "." or ".." path components. These
|
||||
restrictions are not enforced for [Local Manifests].
|
||||
.PP
|
||||
Attribute `groups`: List of additional groups to which all projects in the
|
||||
Attribute `groups`: Set of additional groups to which all projects in the
|
||||
included manifest belong. This appends and recurses, meaning all projects in
|
||||
included manifests carry all parent include groups. Same syntax as the
|
||||
included manifests carry all parent include groups. This also applies to all
|
||||
extend\-project elements in the included manifests. Same syntax as the
|
||||
corresponding element of `project`.
|
||||
.PP
|
||||
Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`)
|
||||
default to which all projects in the included manifest belong.
|
||||
default to which all projects in the included manifest belong. This recurses,
|
||||
meaning it will apply to all projects in all manifests included as a result of
|
||||
this element.
|
||||
.PP
|
||||
Local Manifests
|
||||
.PP
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "September 2024" "repo smartsync" "Repo Manual"
|
||||
.TH REPO "1" "August 2025" "repo smartsync" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo smartsync - manual page for repo smartsync
|
||||
.SH SYNOPSIS
|
||||
@@ -20,11 +20,11 @@ number of CPU cores)
|
||||
.TP
|
||||
\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
|
||||
number of network jobs to run in parallel (defaults to
|
||||
\fB\-\-jobs\fR or 1)
|
||||
\fB\-\-jobs\fR or 1). Ignored unless \fB\-\-no\-interleaved\fR is set
|
||||
.TP
|
||||
\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
|
||||
number of local checkout jobs to run in parallel
|
||||
(defaults to \fB\-\-jobs\fR or 8)
|
||||
(defaults to \fB\-\-jobs\fR or 8). Ignored unless \fB\-\-nointerleaved\fR is set
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\-broken\fR
|
||||
obsolete option (to be deleted in the future)
|
||||
@@ -58,6 +58,12 @@ only update working tree, don't fetch
|
||||
use the existing manifest checkout as\-is. (do not
|
||||
update to the latest revision)
|
||||
.TP
|
||||
\fB\-\-interleaved\fR
|
||||
fetch and checkout projects in parallel (default)
|
||||
.TP
|
||||
\fB\-\-no\-interleaved\fR
|
||||
fetch and checkout projects in phases
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-network\-only\fR
|
||||
fetch only, don't update working tree
|
||||
.TP
|
||||
@@ -145,6 +151,16 @@ operate on this manifest and its submanifests
|
||||
.TP
|
||||
\fB\-\-no\-repo\-verify\fR
|
||||
do not verify repo source code
|
||||
.SS post\-sync hooks:
|
||||
.TP
|
||||
\fB\-\-no\-verify\fR
|
||||
Do not run the post\-sync hook.
|
||||
.TP
|
||||
\fB\-\-verify\fR
|
||||
Run the post\-sync hook without prompting.
|
||||
.TP
|
||||
\fB\-\-ignore\-hooks\fR
|
||||
Do not abort if post\-sync hooks fail.
|
||||
.PP
|
||||
Run `repo help smartsync` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "September 2024" "repo sync" "Repo Manual"
|
||||
.TH REPO "1" "August 2025" "repo sync" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo sync - manual page for repo sync
|
||||
.SH SYNOPSIS
|
||||
@@ -20,11 +20,11 @@ number of CPU cores)
|
||||
.TP
|
||||
\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
|
||||
number of network jobs to run in parallel (defaults to
|
||||
\fB\-\-jobs\fR or 1)
|
||||
\fB\-\-jobs\fR or 1). Ignored unless \fB\-\-no\-interleaved\fR is set
|
||||
.TP
|
||||
\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
|
||||
number of local checkout jobs to run in parallel
|
||||
(defaults to \fB\-\-jobs\fR or 8)
|
||||
(defaults to \fB\-\-jobs\fR or 8). Ignored unless \fB\-\-nointerleaved\fR is set
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\-broken\fR
|
||||
obsolete option (to be deleted in the future)
|
||||
@@ -58,6 +58,12 @@ only update working tree, don't fetch
|
||||
use the existing manifest checkout as\-is. (do not
|
||||
update to the latest revision)
|
||||
.TP
|
||||
\fB\-\-interleaved\fR
|
||||
fetch and checkout projects in parallel (default)
|
||||
.TP
|
||||
\fB\-\-no\-interleaved\fR
|
||||
fetch and checkout projects in phases
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-network\-only\fR
|
||||
fetch only, don't update working tree
|
||||
.TP
|
||||
@@ -152,6 +158,16 @@ operate on this manifest and its submanifests
|
||||
.TP
|
||||
\fB\-\-no\-repo\-verify\fR
|
||||
do not verify repo source code
|
||||
.SS post\-sync hooks:
|
||||
.TP
|
||||
\fB\-\-no\-verify\fR
|
||||
Do not run the post\-sync hook.
|
||||
.TP
|
||||
\fB\-\-verify\fR
|
||||
Run the post\-sync hook without prompting.
|
||||
.TP
|
||||
\fB\-\-ignore\-hooks\fR
|
||||
Do not abort if post\-sync hooks fail.
|
||||
.PP
|
||||
Run `repo help sync` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
61
man/repo-wipe.1
Normal file
61
man/repo-wipe.1
Normal file
@@ -0,0 +1,61 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2025" "repo wipe" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo wipe - manual page for repo wipe
|
||||
.SH SYNOPSIS
|
||||
.B repo
|
||||
\fI\,wipe <project>\/\fR...
|
||||
.SH DESCRIPTION
|
||||
Summary
|
||||
.PP
|
||||
Wipe projects from the worktree
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
show this help message and exit
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
force wipe shared projects and uncommitted changes
|
||||
.TP
|
||||
\fB\-\-force\-uncommitted\fR
|
||||
force wipe even if there are uncommitted changes
|
||||
.TP
|
||||
\fB\-\-force\-shared\fR
|
||||
force wipe even if the project shares an object
|
||||
directory
|
||||
.SS Logging options:
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help wipe` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
.PP
|
||||
The 'repo wipe' command removes the specified projects from the worktree (the
|
||||
checked out source code) and deletes the project's git data from `.repo`.
|
||||
.PP
|
||||
This is a destructive operation and cannot be undone.
|
||||
.PP
|
||||
Projects can be specified either by name, or by a relative or absolute path to
|
||||
the project's local directory.
|
||||
.SH EXAMPLES
|
||||
.SS # Wipe the project "platform/build" by name:
|
||||
$ repo wipe platform/build
|
||||
.SS # Wipe the project at the path "build/make":
|
||||
$ repo wipe build/make
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "December 2024" "repo" "Repo Manual"
|
||||
.TH REPO "1" "November 2025" "repo" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repository management tool built on top of git
|
||||
.SH SYNOPSIS
|
||||
@@ -80,7 +80,7 @@ forall
|
||||
Run a shell command in each project
|
||||
.TP
|
||||
gc
|
||||
Cleaning up internal repo state.
|
||||
Cleaning up internal repo and Git state.
|
||||
.TP
|
||||
grep
|
||||
Print lines matching a pattern
|
||||
@@ -132,6 +132,9 @@ Upload changes for code review
|
||||
.TP
|
||||
version
|
||||
Display the version of repo
|
||||
.TP
|
||||
wipe
|
||||
Wipe projects from the worktree
|
||||
.PP
|
||||
See 'repo help <command>' for more information on a specific command.
|
||||
Bug reports: https://issues.gerritcodereview.com/issues/new?component=1370071
|
||||
|
||||
150
manifest_xml.py
150
manifest_xml.py
@@ -255,7 +255,7 @@ class _XmlSubmanifest:
|
||||
project: a string, the name of the manifest project.
|
||||
revision: a string, the commitish.
|
||||
manifestName: a string, the submanifest file name.
|
||||
groups: a list of strings, the groups to add to all projects in the
|
||||
groups: a set of strings, the groups to add to all projects in the
|
||||
submanifest.
|
||||
default_groups: a list of strings, the default groups to sync.
|
||||
path: a string, the relative path for the submanifest checkout.
|
||||
@@ -281,7 +281,7 @@ class _XmlSubmanifest:
|
||||
self.project = project
|
||||
self.revision = revision
|
||||
self.manifestName = manifestName
|
||||
self.groups = groups
|
||||
self.groups = groups or set()
|
||||
self.default_groups = default_groups
|
||||
self.path = path
|
||||
self.parent = parent
|
||||
@@ -304,7 +304,7 @@ class _XmlSubmanifest:
|
||||
self.repo_client = RepoClient(
|
||||
parent.repodir,
|
||||
linkFile,
|
||||
parent_groups=",".join(groups) or "",
|
||||
parent_groups=groups,
|
||||
submanifest_path=os.path.join(parent.path_prefix, self.relpath),
|
||||
outer_client=outer_client,
|
||||
default_groups=default_groups,
|
||||
@@ -345,7 +345,7 @@ class _XmlSubmanifest:
|
||||
manifestName = self.manifestName or "default.xml"
|
||||
revision = self.revision or self.name
|
||||
path = self.path or revision.split("/")[-1]
|
||||
groups = self.groups or []
|
||||
groups = self.groups
|
||||
|
||||
return SubmanifestSpec(
|
||||
self.name, manifestUrl, manifestName, revision, path, groups
|
||||
@@ -359,9 +359,7 @@ class _XmlSubmanifest:
|
||||
|
||||
def GetGroupsStr(self):
|
||||
"""Returns the `groups` given for this submanifest."""
|
||||
if self.groups:
|
||||
return ",".join(self.groups)
|
||||
return ""
|
||||
return ",".join(sorted(self.groups))
|
||||
|
||||
def GetDefaultGroupsStr(self):
|
||||
"""Returns the `default-groups` given for this submanifest."""
|
||||
@@ -381,7 +379,7 @@ class SubmanifestSpec:
|
||||
self.manifestName = manifestName
|
||||
self.revision = revision
|
||||
self.path = path
|
||||
self.groups = groups or []
|
||||
self.groups = groups
|
||||
|
||||
|
||||
class XmlManifest:
|
||||
@@ -393,7 +391,7 @@ class XmlManifest:
|
||||
manifest_file,
|
||||
local_manifests=None,
|
||||
outer_client=None,
|
||||
parent_groups="",
|
||||
parent_groups=None,
|
||||
submanifest_path="",
|
||||
default_groups=None,
|
||||
):
|
||||
@@ -409,7 +407,8 @@ class XmlManifest:
|
||||
manifests. This will usually be
|
||||
|repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
|
||||
outer_client: RepoClient of the outer manifest.
|
||||
parent_groups: a string, the groups to apply to this projects.
|
||||
parent_groups: a set of strings, the groups to apply to this
|
||||
manifest.
|
||||
submanifest_path: The submanifest root relative to the repo root.
|
||||
default_groups: a string, the default manifest groups to use.
|
||||
"""
|
||||
@@ -432,7 +431,7 @@ class XmlManifest:
|
||||
self.manifestFileOverrides = {}
|
||||
self.local_manifests = local_manifests
|
||||
self._load_local_manifests = True
|
||||
self.parent_groups = parent_groups
|
||||
self.parent_groups = parent_groups or set()
|
||||
self.default_groups = default_groups
|
||||
|
||||
if submanifest_path and not outer_client:
|
||||
@@ -567,21 +566,29 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
"""
|
||||
return [x for x in re.split(r"[,\s]+", field) if x]
|
||||
|
||||
def _ParseSet(self, field):
|
||||
"""Parse fields that contain flattened sets.
|
||||
|
||||
These are whitespace & comma separated. Empty elements will be
|
||||
discarded.
|
||||
"""
|
||||
return set(self._ParseList(field))
|
||||
|
||||
def ToXml(
|
||||
self,
|
||||
peg_rev=False,
|
||||
peg_rev_upstream=True,
|
||||
peg_rev_dest_branch=True,
|
||||
groups=None,
|
||||
filter_groups=None,
|
||||
omit_local=False,
|
||||
):
|
||||
"""Return the current manifest XML."""
|
||||
mp = self.manifestProject
|
||||
|
||||
if groups is None:
|
||||
groups = mp.manifest_groups
|
||||
if groups:
|
||||
groups = self._ParseList(groups)
|
||||
if filter_groups is None:
|
||||
filter_groups = mp.manifest_groups
|
||||
if filter_groups:
|
||||
filter_groups = self._ParseList(filter_groups)
|
||||
|
||||
doc = xml.dom.minidom.Document()
|
||||
root = doc.createElement("manifest")
|
||||
@@ -654,7 +661,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
output_project(parent, parent_node, project)
|
||||
|
||||
def output_project(parent, parent_node, p):
|
||||
if not p.MatchesGroups(groups):
|
||||
if not p.MatchesGroups(filter_groups):
|
||||
return
|
||||
|
||||
if omit_local and self.IsFromLocalManifest(p):
|
||||
@@ -725,10 +732,9 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
le.setAttribute("dest", lf.dest)
|
||||
e.appendChild(le)
|
||||
|
||||
default_groups = ["all", "name:%s" % p.name, "path:%s" % p.relpath]
|
||||
egroups = [g for g in p.groups if g not in default_groups]
|
||||
if egroups:
|
||||
e.setAttribute("groups", ",".join(egroups))
|
||||
groups = p.groups - {"all", f"name:{p.name}", f"path:{p.relpath}"}
|
||||
if groups:
|
||||
e.setAttribute("groups", ",".join(sorted(groups)))
|
||||
|
||||
for a in p.annotations:
|
||||
if a.keep == "true":
|
||||
@@ -1116,7 +1122,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
groups += f",platform-{platform.system().lower()}"
|
||||
return groups
|
||||
|
||||
def GetGroupsStr(self):
|
||||
def GetManifestGroupsStr(self):
|
||||
"""Returns the manifest group string that should be synced."""
|
||||
return (
|
||||
self.manifestProject.manifest_groups or self.GetDefaultGroupsStr()
|
||||
@@ -1171,12 +1177,12 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
b = b[len(R_HEADS) :]
|
||||
self.branch = b
|
||||
|
||||
parent_groups = self.parent_groups
|
||||
parent_groups = self.parent_groups.copy()
|
||||
if self.path_prefix:
|
||||
parent_groups = (
|
||||
parent_groups |= {
|
||||
f"{SUBMANIFEST_GROUP_PREFIX}:path:"
|
||||
f"{self.path_prefix},{parent_groups}"
|
||||
)
|
||||
f"{self.path_prefix}"
|
||||
}
|
||||
|
||||
# The manifestFile was specified by the user which is why we
|
||||
# allow include paths to point anywhere.
|
||||
@@ -1202,16 +1208,16 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
# Since local manifests are entirely managed by
|
||||
# the user, allow them to point anywhere the
|
||||
# user wants.
|
||||
local_group = (
|
||||
local_group = {
|
||||
f"{LOCAL_MANIFEST_GROUP_PREFIX}:"
|
||||
f"{local_file[:-4]}"
|
||||
)
|
||||
}
|
||||
nodes.append(
|
||||
self._ParseManifestXml(
|
||||
local,
|
||||
self.subdir,
|
||||
parent_groups=(
|
||||
f"{local_group},{parent_groups}"
|
||||
local_group | parent_groups
|
||||
),
|
||||
restrict_includes=False,
|
||||
)
|
||||
@@ -1262,7 +1268,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
self,
|
||||
path,
|
||||
include_root,
|
||||
parent_groups="",
|
||||
parent_groups=None,
|
||||
restrict_includes=True,
|
||||
parent_node=None,
|
||||
):
|
||||
@@ -1271,11 +1277,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
Args:
|
||||
path: The XML file to read & parse.
|
||||
include_root: The path to interpret include "name"s relative to.
|
||||
parent_groups: The groups to apply to this projects.
|
||||
parent_groups: The set of groups to apply to this manifest.
|
||||
restrict_includes: Whether to constrain the "name" attribute of
|
||||
includes.
|
||||
parent_node: The parent include node, to apply attribute to this
|
||||
projects.
|
||||
parent_node: The parent include node, to apply attributes to this
|
||||
manifest.
|
||||
|
||||
Returns:
|
||||
List of XML nodes.
|
||||
@@ -1299,6 +1305,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
|
||||
nodes = []
|
||||
for node in manifest.childNodes:
|
||||
if (
|
||||
parent_node
|
||||
and node.nodeName in ("include", "project")
|
||||
and not node.hasAttribute("revision")
|
||||
):
|
||||
node.setAttribute(
|
||||
"revision", parent_node.getAttribute("revision")
|
||||
)
|
||||
if node.nodeName == "include":
|
||||
name = self._reqatt(node, "name")
|
||||
if restrict_includes:
|
||||
@@ -1307,12 +1321,10 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
raise ManifestInvalidPathError(
|
||||
f'<include> invalid "name": {name}: {msg}'
|
||||
)
|
||||
include_groups = ""
|
||||
if parent_groups:
|
||||
include_groups = parent_groups
|
||||
include_groups = (parent_groups or set()).copy()
|
||||
if node.hasAttribute("groups"):
|
||||
include_groups = (
|
||||
node.getAttribute("groups") + "," + include_groups
|
||||
include_groups |= self._ParseSet(
|
||||
node.getAttribute("groups")
|
||||
)
|
||||
fp = os.path.join(include_root, name)
|
||||
if not os.path.isfile(fp):
|
||||
@@ -1328,33 +1340,23 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
)
|
||||
# should isolate this to the exact exception, but that's
|
||||
# tricky. actual parsing implementation may vary.
|
||||
except (
|
||||
KeyboardInterrupt,
|
||||
RuntimeError,
|
||||
SystemExit,
|
||||
ManifestParseError,
|
||||
):
|
||||
except (RuntimeError, ManifestParseError):
|
||||
raise
|
||||
except Exception as e:
|
||||
raise ManifestParseError(
|
||||
f"failed parsing included manifest {name}: {e}"
|
||||
)
|
||||
else:
|
||||
if parent_groups and node.nodeName == "project":
|
||||
nodeGroups = parent_groups
|
||||
if node.hasAttribute("groups"):
|
||||
nodeGroups = (
|
||||
node.getAttribute("groups") + "," + nodeGroups
|
||||
)
|
||||
node.setAttribute("groups", nodeGroups)
|
||||
if (
|
||||
parent_node
|
||||
and node.nodeName == "project"
|
||||
and not node.hasAttribute("revision")
|
||||
if parent_groups and node.nodeName in (
|
||||
"project",
|
||||
"extend-project",
|
||||
):
|
||||
node.setAttribute(
|
||||
"revision", parent_node.getAttribute("revision")
|
||||
)
|
||||
nodeGroups = parent_groups.copy()
|
||||
if node.hasAttribute("groups"):
|
||||
nodeGroups |= self._ParseSet(
|
||||
node.getAttribute("groups")
|
||||
)
|
||||
node.setAttribute("groups", ",".join(sorted(nodeGroups)))
|
||||
nodes.append(node)
|
||||
return nodes
|
||||
|
||||
@@ -1463,7 +1465,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
dest_path = node.getAttribute("dest-path")
|
||||
groups = node.getAttribute("groups")
|
||||
if groups:
|
||||
groups = self._ParseList(groups)
|
||||
groups = self._ParseSet(groups or "")
|
||||
revision = node.getAttribute("revision")
|
||||
remote_name = node.getAttribute("remote")
|
||||
if not remote_name:
|
||||
@@ -1484,7 +1486,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
if path and p.relpath != path:
|
||||
continue
|
||||
if groups:
|
||||
p.groups.extend(groups)
|
||||
p.groups |= groups
|
||||
if revision:
|
||||
if base_revision:
|
||||
if p.revisionExpr != base_revision:
|
||||
@@ -1514,6 +1516,14 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
p.UpdatePaths(relpath, worktree, gitdir, objdir)
|
||||
self._paths[p.relpath] = p
|
||||
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == "copyfile":
|
||||
self._ParseCopyFile(p, n)
|
||||
elif n.nodeName == "linkfile":
|
||||
self._ParseLinkFile(p, n)
|
||||
elif n.nodeName == "annotation":
|
||||
self._ParseAnnotation(p, n)
|
||||
|
||||
if node.nodeName == "repo-hooks":
|
||||
# Only one project can be the hooks project
|
||||
if repo_hooks_project is not None:
|
||||
@@ -1807,7 +1817,7 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
groups = ""
|
||||
if node.hasAttribute("groups"):
|
||||
groups = node.getAttribute("groups")
|
||||
groups = self._ParseList(groups)
|
||||
groups = self._ParseSet(groups)
|
||||
default_groups = self._ParseList(node.getAttribute("default-groups"))
|
||||
path = node.getAttribute("path")
|
||||
if path == "":
|
||||
@@ -1916,11 +1926,6 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
|
||||
upstream = node.getAttribute("upstream") or self._default.upstreamExpr
|
||||
|
||||
groups = ""
|
||||
if node.hasAttribute("groups"):
|
||||
groups = node.getAttribute("groups")
|
||||
groups = self._ParseList(groups)
|
||||
|
||||
if parent is None:
|
||||
(
|
||||
relpath,
|
||||
@@ -1935,8 +1940,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
parent, name, path
|
||||
)
|
||||
|
||||
default_groups = ["all", "name:%s" % name, "path:%s" % relpath]
|
||||
groups.extend(set(default_groups).difference(groups))
|
||||
groups = ""
|
||||
if node.hasAttribute("groups"):
|
||||
groups = node.getAttribute("groups")
|
||||
groups = self._ParseSet(groups)
|
||||
groups |= {"all", f"name:{name}", f"path:{relpath}"}
|
||||
|
||||
if self.IsMirror and node.hasAttribute("force-path"):
|
||||
if XmlBool(node, "force-path", False):
|
||||
@@ -1968,11 +1976,11 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
|
||||
for n in node.childNodes:
|
||||
if n.nodeName == "copyfile":
|
||||
self._ParseCopyFile(project, n)
|
||||
if n.nodeName == "linkfile":
|
||||
elif n.nodeName == "linkfile":
|
||||
self._ParseLinkFile(project, n)
|
||||
if n.nodeName == "annotation":
|
||||
elif n.nodeName == "annotation":
|
||||
self._ParseAnnotation(project, n)
|
||||
if n.nodeName == "project":
|
||||
elif n.nodeName == "project":
|
||||
project.subprojects.append(
|
||||
self._ParseProject(n, parent=project)
|
||||
)
|
||||
|
||||
36
progress.py
36
progress.py
@@ -25,7 +25,10 @@ except ImportError:
|
||||
from repo_trace import IsTraceToStderr
|
||||
|
||||
|
||||
_TTY = sys.stderr.isatty()
|
||||
# Capture the original stderr stream. We use this exclusively for progress
|
||||
# updates to ensure we talk to the terminal even if stderr is redirected.
|
||||
_STDERR = sys.stderr
|
||||
_TTY = _STDERR.isatty()
|
||||
|
||||
# This will erase all content in the current line (wherever the cursor is).
|
||||
# It does not move the cursor, so this is usually followed by \r to move to
|
||||
@@ -101,6 +104,7 @@ class Progress:
|
||||
self._units = units
|
||||
self._elide = elide and _TTY
|
||||
self._quiet = quiet
|
||||
self._ended = False
|
||||
|
||||
# Only show the active jobs section if we run more than one in parallel.
|
||||
self._show_jobs = False
|
||||
@@ -118,6 +122,11 @@ class Progress:
|
||||
if not quiet and show_elapsed:
|
||||
self._update_thread.start()
|
||||
|
||||
def update_total(self, new_total):
|
||||
"""Updates the total if the new total is larger."""
|
||||
if new_total > self._total:
|
||||
self._total = new_total
|
||||
|
||||
def _update_loop(self):
|
||||
while True:
|
||||
self.update(inc=0)
|
||||
@@ -127,11 +136,11 @@ class Progress:
|
||||
def _write(self, s):
|
||||
s = "\r" + s
|
||||
if self._elide:
|
||||
col = os.get_terminal_size(sys.stderr.fileno()).columns
|
||||
col = os.get_terminal_size(_STDERR.fileno()).columns
|
||||
if len(s) > col:
|
||||
s = s[: col - 1] + ".."
|
||||
sys.stderr.write(s)
|
||||
sys.stderr.flush()
|
||||
_STDERR.write(s)
|
||||
_STDERR.flush()
|
||||
|
||||
def start(self, name):
|
||||
self._active += 1
|
||||
@@ -195,7 +204,26 @@ class Progress:
|
||||
)
|
||||
)
|
||||
|
||||
def display_message(self, msg):
|
||||
"""Clears the current progress line and prints a message above it.
|
||||
|
||||
The progress bar is then redrawn on the next line.
|
||||
"""
|
||||
if not _TTY or IsTraceToStderr() or self._quiet:
|
||||
return
|
||||
|
||||
# Erase the current line, print the message with a newline,
|
||||
# and then immediately redraw the progress bar on the new line.
|
||||
_STDERR.write("\r" + CSI_ERASE_LINE)
|
||||
_STDERR.write(msg + "\n")
|
||||
_STDERR.flush()
|
||||
self.update(inc=0)
|
||||
|
||||
def end(self):
|
||||
if self._ended:
|
||||
return
|
||||
self._ended = True
|
||||
|
||||
self._update_event.set()
|
||||
if not _TTY or IsTraceToStderr() or self._quiet:
|
||||
return
|
||||
|
||||
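The progress.py hunks above capture the original stderr stream at import time and add a display_message() helper that prints a line above the live progress bar. A minimal standalone sketch of the same pattern; the wrapper function and its redraw hook are illustrative, not the module's real API:

import sys

# Capture stderr once at import time so later redirection of sys.stderr
# cannot break the terminal control sequences used for progress output.
_STDERR = sys.stderr
_TTY = _STDERR.isatty()

CSI_ERASE_LINE = "\x1b[2K"  # ANSI: erase all content on the current line


def display_message(msg, redraw=lambda: None):
    """Print |msg| above a live progress line, then redraw the bar."""
    if not _TTY:
        print(msg, file=_STDERR)
        return
    _STDERR.write("\r" + CSI_ERASE_LINE)  # wipe the progress line
    _STDERR.write(msg + "\n")             # the message scrolls up
    _STDERR.flush()
    redraw()  # the progress bar reappears on the fresh line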
project.py (321 lines changed)
@@ -12,6 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
import errno
|
||||
import filecmp
|
||||
import glob
|
||||
@@ -390,22 +391,17 @@ def _SafeExpandPath(base, subpath, skipfinal=False):
|
||||
return path
|
||||
|
||||
|
||||
class _CopyFile:
|
||||
class _CopyFile(NamedTuple):
|
||||
"""Container for <copyfile> manifest element."""
|
||||
|
||||
def __init__(self, git_worktree, src, topdir, dest):
|
||||
"""Register a <copyfile> request.
|
||||
|
||||
Args:
|
||||
git_worktree: Absolute path to the git project checkout.
|
||||
src: Relative path under |git_worktree| of file to read.
|
||||
topdir: Absolute path to the top of the repo client checkout.
|
||||
dest: Relative path under |topdir| of file to write.
|
||||
"""
|
||||
self.git_worktree = git_worktree
|
||||
self.topdir = topdir
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
# Absolute path to the git project checkout.
|
||||
git_worktree: str
|
||||
# Relative path under |git_worktree| of file to read.
|
||||
src: str
|
||||
# Absolute path to the top of the repo client checkout.
|
||||
topdir: str
|
||||
# Relative path under |topdir| of file to write.
|
||||
dest: str
|
||||
|
||||
def _Copy(self):
|
||||
src = _SafeExpandPath(self.git_worktree, self.src)
|
||||
@@ -439,22 +435,17 @@ class _CopyFile:
|
||||
logger.error("error: Cannot copy file %s to %s", src, dest)
|
||||
|
||||
|
||||
class _LinkFile:
|
||||
class _LinkFile(NamedTuple):
|
||||
"""Container for <linkfile> manifest element."""
|
||||
|
||||
def __init__(self, git_worktree, src, topdir, dest):
|
||||
"""Register a <linkfile> request.
|
||||
|
||||
Args:
|
||||
git_worktree: Absolute path to the git project checkout.
|
||||
src: Target of symlink relative to path under |git_worktree|.
|
||||
topdir: Absolute path to the top of the repo client checkout.
|
||||
dest: Relative path under |topdir| of symlink to create.
|
||||
"""
|
||||
self.git_worktree = git_worktree
|
||||
self.topdir = topdir
|
||||
self.src = src
|
||||
self.dest = dest
|
||||
# Absolute path to the git project checkout.
|
||||
git_worktree: str
|
||||
# Target of symlink relative to path under |git_worktree|.
|
||||
src: str
|
||||
# Absolute path to the top of the repo client checkout.
|
||||
topdir: str
|
||||
# Relative path under |topdir| of symlink to create.
|
||||
dest: str
|
||||
|
||||
def __linkIt(self, relSrc, absDest):
|
||||
# Link file if it does not exist or is out of date.
|
||||
@@ -471,9 +462,7 @@ class _LinkFile:
|
||||
os.makedirs(dest_dir)
|
||||
platform_utils.symlink(relSrc, absDest)
|
||||
except OSError:
|
||||
logger.error(
|
||||
"error: Cannot link file %s to %s", relSrc, absDest
|
||||
)
|
||||
logger.error("error: Cannot symlink %s to %s", absDest, relSrc)
|
||||
|
||||
def _Link(self):
|
||||
"""Link the self.src & self.dest paths.
|
||||
@@ -564,7 +553,7 @@ class Project:
|
||||
revisionExpr,
|
||||
revisionId,
|
||||
rebase=True,
|
||||
groups=None,
|
||||
groups=set(),
|
||||
sync_c=False,
|
||||
sync_s=False,
|
||||
sync_tags=True,
|
||||
@@ -633,8 +622,9 @@ class Project:
|
||||
self.subprojects = []
|
||||
|
||||
self.snapshots = {}
|
||||
self.copyfiles = []
|
||||
self.linkfiles = []
|
||||
# Use dicts to dedupe while maintaining declared order.
|
||||
self.copyfiles = {}
|
||||
self.linkfiles = {}
|
||||
self.annotations = []
|
||||
self.dest_branch = dest_branch
|
||||
|
||||
@@ -642,10 +632,6 @@ class Project:
|
||||
# project containing repo hooks.
|
||||
self.enabled_repo_hooks = []
|
||||
|
||||
# This will be updated later if the project has submodules and
|
||||
# if they will be synced.
|
||||
self.has_subprojects = False
|
||||
|
||||
def RelPath(self, local=True):
|
||||
"""Return the path for the project relative to a manifest.
|
||||
|
||||
@@ -852,9 +838,9 @@ class Project:
|
||||
"""
|
||||
default_groups = self.manifest.default_groups or ["default"]
|
||||
expanded_manifest_groups = manifest_groups or default_groups
|
||||
expanded_project_groups = ["all"] + (self.groups or [])
|
||||
expanded_project_groups = {"all"} | self.groups
|
||||
if "notdefault" not in expanded_project_groups:
|
||||
expanded_project_groups += ["default"]
|
||||
expanded_project_groups |= {"default"}
|
||||
|
||||
matched = False
|
||||
for group in expanded_manifest_groups:
|
||||
@@ -1539,18 +1525,14 @@ class Project:
|
||||
force_checkout=False,
|
||||
force_rebase=False,
|
||||
submodules=False,
|
||||
errors=None,
|
||||
verbose=False,
|
||||
):
|
||||
"""Perform only the local IO portion of the sync process.
|
||||
|
||||
Network access is not required.
|
||||
"""
|
||||
if errors is None:
|
||||
errors = []
|
||||
|
||||
def fail(error: Exception):
|
||||
errors.append(error)
|
||||
syncbuf.fail(self, error)
|
||||
|
||||
if not os.path.exists(self.gitdir):
|
||||
@@ -1567,8 +1549,8 @@ class Project:
|
||||
# TODO(https://git-scm.com/docs/git-worktree#_bugs): Re-evaluate if
|
||||
# submodules can be init when using worktrees once its support is
|
||||
# complete.
|
||||
if self.has_subprojects and not self.use_git_worktrees:
|
||||
self._InitSubmodules()
|
||||
if self.parent and not self.use_git_worktrees:
|
||||
self._InitSubmodule()
|
||||
all_refs = self.bare_ref.all
|
||||
self.CleanPublishedCache(all_refs)
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
@@ -1597,6 +1579,9 @@ class Project:
|
||||
self._FastForward(revid)
|
||||
self._CopyAndLinkFiles()
|
||||
|
||||
def _dorebase():
|
||||
self._Rebase(upstream="@{upstream}")
|
||||
|
||||
def _dosubmodules():
|
||||
self._SyncSubmodules(quiet=True)
|
||||
|
||||
@@ -1688,19 +1673,24 @@ class Project:
|
||||
if pub:
|
||||
not_merged = self._revlist(not_rev(revid), pub)
|
||||
if not_merged:
|
||||
if upstream_gain and not force_rebase:
|
||||
# The user has published this branch and some of those
|
||||
# commits are not yet merged upstream. We do not want
|
||||
# to rewrite the published commits so we punt.
|
||||
fail(
|
||||
LocalSyncFail(
|
||||
"branch %s is published (but not merged) and is "
|
||||
"now %d commits behind. Fix this manually or rerun "
|
||||
"with the --rebase option to force a rebase."
|
||||
% (branch.name, len(upstream_gain)),
|
||||
project=self.name,
|
||||
if upstream_gain:
|
||||
if force_rebase:
|
||||
# Try to rebase local published but not merged changes
|
||||
# on top of the upstream changes.
|
||||
syncbuf.later1(self, _dorebase, not verbose)
|
||||
else:
|
||||
# The user has published this branch and some of those
|
||||
# commits are not yet merged upstream. We do not want
|
||||
# to rewrite the published commits so we punt.
|
||||
fail(
|
||||
LocalSyncFail(
|
||||
"branch %s is published (but not merged) and "
|
||||
"is now %d commits behind. Fix this manually "
|
||||
"or rerun with the --rebase option to force a "
|
||||
"rebase." % (branch.name, len(upstream_gain)),
|
||||
project=self.name,
|
||||
)
|
||||
)
|
||||
)
|
||||
return
|
||||
syncbuf.later1(self, _doff, not verbose)
|
||||
return
|
||||
@@ -1794,7 +1784,7 @@ class Project:
|
||||
Paths should have basic validation run on them before being queued.
|
||||
Further checking will be handled when the actual copy happens.
|
||||
"""
|
||||
self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest))
|
||||
self.copyfiles[_CopyFile(self.worktree, src, topdir, dest)] = True
|
||||
|
||||
def AddLinkFile(self, src, dest, topdir):
|
||||
"""Mark |dest| to create a symlink (relative to |topdir|) pointing to
|
||||
@@ -1805,7 +1795,7 @@ class Project:
|
||||
Paths should have basic validation run on them before being queued.
|
||||
Further checking will be handled when the actual link happens.
|
||||
"""
|
||||
self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest))
|
||||
self.linkfiles[_LinkFile(self.worktree, src, topdir, dest)] = True
|
||||
|
||||
def AddAnnotation(self, name, value, keep):
|
||||
self.annotations.append(Annotation(name, value, keep))
|
||||
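Turning _CopyFile and _LinkFile into NamedTuples makes the request objects hashable, which is what lets copyfiles and linkfiles become dicts that drop duplicate requests while keeping declared order. A rough illustration of that combination; the CopyRequest name and sample paths are invented for the sketch:

from typing import NamedTuple


class CopyRequest(NamedTuple):
    """Hashable stand-in for the <copyfile> request container."""
    git_worktree: str
    src: str
    topdir: str
    dest: str


# Dicts keep insertion order (Python 3.7+), so one works as an ordered set:
# identical requests collapse without reordering the rest.
requests = {}
for req in (
    CopyRequest("/wt", "a.txt", "/top", "out/a.txt"),
    CopyRequest("/wt", "a.txt", "/top", "out/a.txt"),  # duplicate, dropped
    CopyRequest("/wt", "b.txt", "/top", "out/b.txt"),
):
    requests[req] = True

assert list(requests) == [
    CopyRequest("/wt", "a.txt", "/top", "out/a.txt"),
    CopyRequest("/wt", "b.txt", "/top", "out/b.txt"),
]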
@@ -2061,10 +2051,7 @@ class Project:
|
||||
if head == revid:
|
||||
# Same revision; just update HEAD to point to the new
|
||||
# target branch, but otherwise take no other action.
|
||||
_lwrite(
|
||||
self.work_git.GetDotgitPath(subpath=HEAD),
|
||||
f"ref: {R_HEADS}{name}\n",
|
||||
)
|
||||
self.work_git.SetHead(R_HEADS + name)
|
||||
return True
|
||||
|
||||
GitCommand(
|
||||
@@ -2100,9 +2087,7 @@ class Project:
|
||||
|
||||
revid = self.GetRevisionId(all_refs)
|
||||
if head == revid:
|
||||
_lwrite(
|
||||
self.work_git.GetDotgitPath(subpath=HEAD), "%s\n" % revid
|
||||
)
|
||||
self.work_git.DetachHead(revid)
|
||||
else:
|
||||
self._Checkout(revid, quiet=True)
|
||||
GitCommand(
|
||||
@@ -2368,8 +2353,6 @@ class Project:
|
||||
)
|
||||
result.append(subproject)
|
||||
result.extend(subproject.GetDerivedSubprojects())
|
||||
if result:
|
||||
self.has_subprojects = True
|
||||
return result
|
||||
|
||||
def EnableRepositoryExtension(self, key, value="true", version=1):
|
||||
@@ -2420,7 +2403,9 @@ class Project:
|
||||
# throws an error.
|
||||
revs = [f"{self.revisionExpr}^0"]
|
||||
upstream_rev = None
|
||||
if self.upstream:
|
||||
|
||||
# Only check upstream when using superproject.
|
||||
if self.upstream and self.manifest.manifestProject.use_superproject:
|
||||
upstream_rev = self.GetRemote().ToLocal(self.upstream)
|
||||
revs.append(upstream_rev)
|
||||
|
||||
@@ -2432,7 +2417,9 @@ class Project:
|
||||
log_as_error=False,
|
||||
)
|
||||
|
||||
if self.upstream:
|
||||
# Only verify upstream relationship for superproject scenarios
|
||||
# without affecting plain usage.
|
||||
if self.upstream and self.manifest.manifestProject.use_superproject:
|
||||
self.bare_git.merge_base(
|
||||
"--is-ancestor",
|
||||
self.revisionExpr,
|
||||
@@ -2585,6 +2572,16 @@ class Project:
|
||||
if os.path.exists(os.path.join(self.gitdir, "shallow")):
|
||||
cmd.append("--depth=2147483647")
|
||||
|
||||
# Use clone-depth="1" as a heuristic for repositories containing
|
||||
# large binaries and disable auto GC to prevent potential hangs.
|
||||
# Check the configured depth because the `depth` argument might be None
|
||||
# if REPO_ALLOW_SHALLOW=0 converted it to a partial clone.
|
||||
effective_depth = (
|
||||
self.clone_depth or self.manifest.manifestProject.depth
|
||||
)
|
||||
if effective_depth == 1 and git_require((2, 23, 0)):
|
||||
cmd.append("--no-auto-gc")
|
||||
|
||||
if not verbose:
|
||||
cmd.append("--quiet")
|
||||
if not quiet and sys.stdout.isatty():
|
||||
@@ -2755,6 +2752,14 @@ class Project:
|
||||
# field; it doesn't exist, thus abort the optimization attempt
|
||||
# and do a full sync.
|
||||
break
|
||||
elif depth and is_sha1 and ret == 1:
|
||||
# In sha1 mode, when depth is enabled, syncing the revision
|
||||
# from upstream may not work because some servers only allow
|
||||
# fetching named refs. Fetching a specific sha1 may result
|
||||
# in an error like 'server does not allow request for
|
||||
# unadvertised object'. In this case, attempt a full sync
|
||||
# without depth.
|
||||
break
|
||||
elif ret < 0:
|
||||
# Git died with a signal, exit immediately.
|
||||
break
|
||||
@@ -3027,16 +3032,39 @@ class Project:
|
||||
project=self.name,
|
||||
)
|
||||
|
||||
def _InitSubmodules(self, quiet=True):
|
||||
"""Initialize the submodules for the project."""
|
||||
def _InitSubmodule(self, quiet=True):
|
||||
"""Initialize the submodule."""
|
||||
cmd = ["submodule", "init"]
|
||||
if quiet:
|
||||
cmd.append("-q")
|
||||
if GitCommand(self, cmd).Wait() != 0:
|
||||
raise GitError(
|
||||
f"{self.name} submodule init",
|
||||
project=self.name,
|
||||
cmd.extend(["--", self.worktree])
|
||||
max_retries = 3
|
||||
base_delay_secs = 1
|
||||
jitter_ratio = 1 / 3
|
||||
for attempt in range(max_retries):
|
||||
git_cmd = GitCommand(
|
||||
None,
|
||||
cmd,
|
||||
cwd=self.parent.worktree,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
)
|
||||
if git_cmd.Wait() == 0:
|
||||
return
|
||||
error = git_cmd.stderr or git_cmd.stdout
|
||||
if "lock" in error:
|
||||
delay = base_delay_secs * (2**attempt)
|
||||
delay += random.uniform(0, delay * jitter_ratio)
|
||||
logger.warning(
|
||||
f"Attempt {attempt+1}/{max_retries}: "
|
||||
+ f"git {' '.join(cmd)} failed."
|
||||
+ f" Error: {error}."
|
||||
+ f" Sleeping {delay:.2f}s before retrying."
|
||||
)
|
||||
time.sleep(delay)
|
||||
else:
|
||||
break
|
||||
git_cmd.VerifyCommand()
|
||||
|
||||
def _Rebase(self, upstream, onto=None):
|
||||
cmd = ["rebase"]
|
||||
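_InitSubmodule now retries `git submodule init` when the error output mentions a lock, backing off exponentially with jitter. A generic sketch of that retry loop built on plain subprocess instead of repo's GitCommand wrapper:

import random
import subprocess
import time


def run_with_lock_retry(cmd, cwd, max_retries=3, base_delay_secs=1,
                        jitter_ratio=1 / 3):
    """Run |cmd|, retrying with exponential backoff on git lock contention."""
    for attempt in range(max_retries):
        result = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True)
        if result.returncode == 0:
            return result
        error = result.stderr or result.stdout
        if "lock" not in error:
            break  # not a transient lock conflict; fail immediately
        delay = base_delay_secs * (2 ** attempt)
        delay += random.uniform(0, delay * jitter_ratio)
        time.sleep(delay)
    result.check_returncode()  # surface the final failure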
@@ -3056,8 +3084,13 @@ class Project:
|
||||
raise GitError(f"{self.name} merge {head} ", project=self.name)
|
||||
|
||||
def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
|
||||
# Prefix for temporary directories created during gitdir initialization.
|
||||
TMP_GITDIR_PREFIX = ".tmp-project-initgitdir-"
|
||||
init_git_dir = not os.path.exists(self.gitdir)
|
||||
init_obj_dir = not os.path.exists(self.objdir)
|
||||
tmp_gitdir = None
|
||||
curr_gitdir = self.gitdir
|
||||
curr_config = self.config
|
||||
try:
|
||||
# Initialize the bare repository, which contains all of the objects.
|
||||
if init_obj_dir:
|
||||
@@ -3077,27 +3110,33 @@ class Project:
|
||||
# well.
|
||||
if self.objdir != self.gitdir:
|
||||
if init_git_dir:
|
||||
os.makedirs(self.gitdir)
|
||||
os.makedirs(os.path.dirname(self.gitdir), exist_ok=True)
|
||||
tmp_gitdir = tempfile.mkdtemp(
|
||||
prefix=TMP_GITDIR_PREFIX,
|
||||
dir=os.path.dirname(self.gitdir),
|
||||
)
|
||||
curr_config = GitConfig.ForRepository(
|
||||
gitdir=tmp_gitdir, defaults=self.manifest.globalConfig
|
||||
)
|
||||
curr_gitdir = tmp_gitdir
|
||||
|
||||
if init_obj_dir or init_git_dir:
|
||||
self._ReferenceGitDir(
|
||||
self.objdir, self.gitdir, copy_all=True
|
||||
self.objdir, curr_gitdir, copy_all=True
|
||||
)
|
||||
try:
|
||||
self._CheckDirReference(self.objdir, self.gitdir)
|
||||
self._CheckDirReference(self.objdir, curr_gitdir)
|
||||
except GitError as e:
|
||||
if force_sync:
|
||||
logger.error(
|
||||
"Retrying clone after deleting %s", self.gitdir
|
||||
)
|
||||
try:
|
||||
platform_utils.rmtree(os.path.realpath(self.gitdir))
|
||||
if self.worktree and os.path.exists(
|
||||
os.path.realpath(self.worktree)
|
||||
):
|
||||
platform_utils.rmtree(
|
||||
os.path.realpath(self.worktree)
|
||||
)
|
||||
rm_dirs = (
|
||||
tmp_gitdir,
|
||||
self.gitdir,
|
||||
self.worktree,
|
||||
)
|
||||
for d in rm_dirs:
|
||||
if d and os.path.exists(d):
|
||||
platform_utils.rmtree(os.path.realpath(d))
|
||||
return self._InitGitDir(
|
||||
mirror_git=mirror_git,
|
||||
force_sync=False,
|
||||
@@ -3148,18 +3187,21 @@ class Project:
|
||||
m = self.manifest.manifestProject.config
|
||||
for key in ["user.name", "user.email"]:
|
||||
if m.Has(key, include_defaults=False):
|
||||
self.config.SetString(key, m.GetString(key))
|
||||
curr_config.SetString(key, m.GetString(key))
|
||||
if not self.manifest.EnableGitLfs:
|
||||
self.config.SetString(
|
||||
curr_config.SetString(
|
||||
"filter.lfs.smudge", "git-lfs smudge --skip -- %f"
|
||||
)
|
||||
self.config.SetString(
|
||||
curr_config.SetString(
|
||||
"filter.lfs.process", "git-lfs filter-process --skip"
|
||||
)
|
||||
self.config.SetBoolean(
|
||||
curr_config.SetBoolean(
|
||||
"core.bare", True if self.manifest.IsMirror else None
|
||||
)
|
||||
|
||||
if tmp_gitdir:
|
||||
platform_utils.rename(tmp_gitdir, self.gitdir)
|
||||
tmp_gitdir = None
|
||||
if not init_obj_dir:
|
||||
# The project might be shared (obj_dir already initialized), but
|
||||
# such information is not available here. Instead of passing it,
|
||||
@@ -3176,6 +3218,27 @@ class Project:
|
||||
if init_git_dir and os.path.exists(self.gitdir):
|
||||
platform_utils.rmtree(self.gitdir)
|
||||
raise
|
||||
finally:
|
||||
# Clean up the temporary directory created during the process,
|
||||
# as well as any stale ones left over from previous attempts.
|
||||
if tmp_gitdir and os.path.exists(tmp_gitdir):
|
||||
platform_utils.rmtree(tmp_gitdir)
|
||||
|
||||
age_threshold = datetime.timedelta(days=1)
|
||||
now = datetime.datetime.now()
|
||||
for tmp_dir in glob.glob(
|
||||
os.path.join(
|
||||
os.path.dirname(self.gitdir), f"{TMP_GITDIR_PREFIX}*"
|
||||
)
|
||||
):
|
||||
try:
|
||||
mtime = datetime.datetime.fromtimestamp(
|
||||
os.path.getmtime(tmp_dir)
|
||||
)
|
||||
if now - mtime > age_threshold:
|
||||
platform_utils.rmtree(tmp_dir)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _UpdateHooks(self, quiet=False):
|
||||
if os.path.exists(self.objdir):
|
||||
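_InitGitDir builds the new git directory in a temporary sibling, renames it into place only after configuration succeeds, and sweeps temp directories older than a day. The essential shape is sketched below, assuming the destination does not exist yet; only the prefix and the one-day threshold come from the diff, the rest is illustrative:

import datetime
import glob
import os
import shutil
import tempfile

TMP_PREFIX = ".tmp-project-initgitdir-"


def init_dir_atomically(final_dir, populate):
    """Build |final_dir| in a temp sibling, then rename it into place.

    Assumes |final_dir| does not exist yet; the rename is atomic because the
    temp directory lives on the same filesystem as its destination.
    """
    parent = os.path.dirname(final_dir)
    os.makedirs(parent, exist_ok=True)
    tmp_dir = tempfile.mkdtemp(prefix=TMP_PREFIX, dir=parent)
    try:
        populate(tmp_dir)  # may raise; final_dir is never half-written
        os.rename(tmp_dir, final_dir)
        tmp_dir = None
    finally:
        if tmp_dir and os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir, ignore_errors=True)
        # Also sweep temp dirs left behind by interrupted earlier runs.
        cutoff = datetime.datetime.now() - datetime.timedelta(days=1)
        for stale in glob.glob(os.path.join(parent, TMP_PREFIX + "*")):
            try:
                mtime = datetime.datetime.fromtimestamp(
                    os.path.getmtime(stale)
                )
                if mtime < cutoff:
                    shutil.rmtree(stale, ignore_errors=True)
            except OSError:
                pass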
@@ -3484,9 +3547,7 @@ class Project:
|
||||
self._createDotGit(dotgit)
|
||||
|
||||
if init_dotgit:
|
||||
_lwrite(
|
||||
os.path.join(self.gitdir, HEAD), f"{self.GetRevisionId()}\n"
|
||||
)
|
||||
self.work_git.UpdateRef(HEAD, self.GetRevisionId(), detach=True)
|
||||
|
||||
# Finish checking out the worktree.
|
||||
cmd = ["read-tree", "--reset", "-u", "-v", HEAD]
|
||||
@@ -3833,19 +3894,36 @@ class Project:
|
||||
|
||||
def GetHead(self):
|
||||
"""Return the ref that HEAD points to."""
|
||||
path = self.GetDotgitPath(subpath=HEAD)
|
||||
try:
|
||||
with open(path) as fd:
|
||||
line = fd.readline()
|
||||
except OSError as e:
|
||||
raise NoManifestException(path, str(e))
|
||||
try:
|
||||
line = line.decode()
|
||||
except AttributeError:
|
||||
pass
|
||||
if line.startswith("ref: "):
|
||||
return line[5:-1]
|
||||
return line[:-1]
|
||||
symbolic_head = self.rev_parse("--symbolic-full-name", HEAD)
|
||||
if symbolic_head == HEAD:
|
||||
# Detached HEAD. Return the commit SHA instead.
|
||||
return self.rev_parse(HEAD)
|
||||
return symbolic_head
|
||||
except GitError as e:
|
||||
logger.warning(
|
||||
"project %s: unparseable HEAD; trying to recover.\n"
|
||||
"Check that HEAD ref in .git/HEAD is valid. The error "
|
||||
"was: %s",
|
||||
self._project.RelPath(local=False),
|
||||
e,
|
||||
)
|
||||
|
||||
# Fallback to direct file reading for compatibility with broken
|
||||
# repos, e.g. if HEAD points to an unborn branch.
|
||||
path = self.GetDotgitPath(subpath=HEAD)
|
||||
try:
|
||||
with open(path) as fd:
|
||||
line = fd.readline()
|
||||
except OSError:
|
||||
raise NoManifestException(path, str(e))
|
||||
try:
|
||||
line = line.decode()
|
||||
except AttributeError:
|
||||
pass
|
||||
if line.startswith("ref: "):
|
||||
return line[5:-1]
|
||||
return line[:-1]
|
||||
|
||||
def SetHead(self, ref, message=None):
|
||||
cmdv = []
|
||||
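GetHead now prefers `git rev-parse --symbolic-full-name HEAD` and falls back to reading .git/HEAD directly only when git cannot parse the ref, for example on an unborn branch. A hedged sketch of that resolve-then-fallback flow using plain subprocess calls:

import os
import subprocess


def get_head(gitdir):
    """Return the ref HEAD points at, or the commit SHA when detached."""
    def rev_parse(*args):
        return subprocess.run(
            ["git", "--git-dir", gitdir, "rev-parse", *args],
            capture_output=True, text=True, check=True,
        ).stdout.strip()

    try:
        symbolic = rev_parse("--symbolic-full-name", "HEAD")
        if symbolic in ("HEAD", ""):
            # Treat either spelling as a detached HEAD: return the SHA.
            return rev_parse("HEAD")
        return symbolic
    except subprocess.CalledProcessError:
        # Recovery path for repos git refuses to parse, e.g. an unborn
        # branch: read .git/HEAD directly, like the old implementation did.
        with open(os.path.join(gitdir, "HEAD")) as fd:
            line = fd.readline().rstrip("\n")
        return line[len("ref: "):] if line.startswith("ref: ") else line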
@@ -4009,7 +4087,8 @@ class _Later:
|
||||
if not self.quiet:
|
||||
out.nl()
|
||||
return True
|
||||
except GitError:
|
||||
except GitError as e:
|
||||
syncbuf.fail(self.project, e)
|
||||
out.nl()
|
||||
return False
|
||||
|
||||
@@ -4025,7 +4104,12 @@ class _SyncColoring(Coloring):
|
||||
class SyncBuffer:
|
||||
def __init__(self, config, detach_head=False):
|
||||
self._messages = []
|
||||
self._failures = []
|
||||
|
||||
# Failures that have not yet been printed. Cleared after printing.
|
||||
self._pending_failures = []
|
||||
# A persistent record of all failures during the buffer's lifetime.
|
||||
self._all_failures = []
|
||||
|
||||
self._later_queue1 = []
|
||||
self._later_queue2 = []
|
||||
|
||||
@@ -4040,7 +4124,9 @@ class SyncBuffer:
|
||||
self._messages.append(_InfoMessage(project, fmt % args))
|
||||
|
||||
def fail(self, project, err=None):
|
||||
self._failures.append(_Failure(project, err))
|
||||
failure = _Failure(project, err)
|
||||
self._pending_failures.append(failure)
|
||||
self._all_failures.append(failure)
|
||||
self._MarkUnclean()
|
||||
|
||||
def later1(self, project, what, quiet):
|
||||
@@ -4060,6 +4146,11 @@ class SyncBuffer:
|
||||
self.recent_clean = True
|
||||
return recent_clean
|
||||
|
||||
@property
|
||||
def errors(self):
|
||||
"""Returns a list of all exceptions accumulated in the buffer."""
|
||||
return [f.why for f in self._all_failures if f.why]
|
||||
|
||||
def _MarkUnclean(self):
|
||||
self.clean = False
|
||||
self.recent_clean = False
|
||||
@@ -4078,18 +4169,18 @@ class SyncBuffer:
|
||||
return True
|
||||
|
||||
def _PrintMessages(self):
|
||||
if self._messages or self._failures:
|
||||
if self._messages or self._pending_failures:
|
||||
if os.isatty(2):
|
||||
self.out.write(progress.CSI_ERASE_LINE)
|
||||
self.out.write("\r")
|
||||
|
||||
for m in self._messages:
|
||||
m.Print(self)
|
||||
for m in self._failures:
|
||||
for m in self._pending_failures:
|
||||
m.Print(self)
|
||||
|
||||
self._messages = []
|
||||
self._failures = []
|
||||
self._pending_failures = []
|
||||
|
||||
|
||||
class MetaProject(Project):
|
||||
|
||||
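SyncBuffer records every failure twice: in a pending list that is drained each time messages are printed, and in a persistent list that backs the new errors property. A toy version of that split:

class FailureBuffer:
    def __init__(self):
        self._pending = []   # cleared after each print
        self._all = []       # full history for error reporting

    def fail(self, err):
        self._pending.append(err)
        self._all.append(err)

    def print_pending(self):
        for err in self._pending:
            print(f"error: {err}")
        self._pending = []

    @property
    def errors(self):
        return list(self._all)


buf = FailureBuffer()
buf.fail("project-a: checkout failed")
buf.print_pending()  # prints once
buf.print_pending()  # prints nothing; already drained
assert buf.errors == ["project-a: checkout failed"]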
@@ -1,4 +1,4 @@
|
||||
# Copyright 2023 The Android Open Source Project
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
release/check-metadata.py (new executable file, 152 lines)
@@ -0,0 +1,152 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (C) 2025 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper tool to check various metadata (e.g. licensing) in source files."""
|
||||
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
|
||||
import util
|
||||
|
||||
|
||||
_FILE_HEADER_RE = re.compile(
|
||||
r"""# Copyright \(C\) 20[0-9]{2} The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2\.0 \(the "License"\);
|
||||
# you may not use this file except in compliance with the License\.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www\.apache\.org/licenses/LICENSE-2\.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License\.
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def check_license(path: Path, lines: list[str]) -> bool:
|
||||
"""Check license header."""
|
||||
# Enforce licensing on configs & scripts.
|
||||
if not (
|
||||
path.suffix in (".bash", ".cfg", ".ini", ".py", ".toml")
|
||||
or lines[0] in ("#!/bin/bash", "#!/bin/sh", "#!/usr/bin/env python3")
|
||||
):
|
||||
return True
|
||||
|
||||
# Extract the file header.
|
||||
header_lines = []
|
||||
for line in lines:
|
||||
if line.startswith("#"):
|
||||
header_lines.append(line)
|
||||
else:
|
||||
break
|
||||
if not header_lines:
|
||||
print(
|
||||
f"error: {path.relative_to(util.TOPDIR)}: "
|
||||
"missing file header (copyright+licensing)",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return False
|
||||
|
||||
# Skip the shebang.
|
||||
if header_lines[0].startswith("#!"):
|
||||
header_lines.pop(0)
|
||||
|
||||
# If this file is imported into the tree, then leave it be.
|
||||
if header_lines[0] == "# DO NOT EDIT THIS FILE":
|
||||
return True
|
||||
|
||||
header = "".join(f"{x}\n" for x in header_lines)
|
||||
if not _FILE_HEADER_RE.match(header):
|
||||
print(
|
||||
f"error: {path.relative_to(util.TOPDIR)}: "
|
||||
"file header incorrectly formatted",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print(
|
||||
"".join(f"> {x}\n" for x in header_lines), end="", file=sys.stderr
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def check_path(opts: argparse.Namespace, path: Path) -> bool:
|
||||
"""Check a single path."""
|
||||
data = path.read_text(encoding="utf-8")
|
||||
lines = data.splitlines()
|
||||
# NB: Use list comprehension and not a generator so we run all the checks.
|
||||
return all(
|
||||
[
|
||||
check_license(path, lines),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def check_paths(opts: argparse.Namespace, paths: list[Path]) -> bool:
|
||||
"""Check all the paths."""
|
||||
# NB: Use list comprehension and not a generator so we check all paths.
|
||||
return all([check_path(opts, x) for x in paths])
|
||||
|
||||
|
||||
def find_files(opts: argparse.Namespace) -> list[Path]:
|
||||
"""Find all the files in the source tree."""
|
||||
result = util.run(
|
||||
opts,
|
||||
["git", "ls-tree", "-r", "-z", "--name-only", "HEAD"],
|
||||
cwd=util.TOPDIR,
|
||||
capture_output=True,
|
||||
encoding="utf-8",
|
||||
)
|
||||
return [util.TOPDIR / x for x in result.stdout.split("\0")[:-1]]
|
||||
|
||||
|
||||
def get_parser() -> argparse.ArgumentParser:
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--dry-run",
|
||||
dest="dryrun",
|
||||
action="store_true",
|
||||
help="show everything that would be done",
|
||||
)
|
||||
parser.add_argument(
|
||||
"paths",
|
||||
nargs="*",
|
||||
help="the paths to scan",
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv: list[str]) -> int:
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
paths = opts.paths
|
||||
if not opts.paths:
|
||||
paths = find_files(opts)
|
||||
|
||||
return 0 if check_paths(opts, paths) else 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
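check-metadata.py anchors a multi-line regex for the Apache-2.0 header at the top of each checked file, skipping an optional shebang first. The check reduces to roughly the following; the pattern here is abbreviated, the real one spells out the full license text shown above:

import itertools
import re

# Abbreviated for the sketch; the real pattern covers the full header text.
HEADER_RE = re.compile(
    r"# Copyright \(C\) 20[0-9]{2} The Android Open Source Project\n"
    r"#\n"
    r"# Licensed under the Apache License, Version 2\.0 \(the \"License\"\);\n"
)


def header_ok(text):
    lines = text.splitlines()
    header = list(itertools.takewhile(lambda ln: ln.startswith("#"), lines))
    if header and header[0].startswith("#!"):
        header.pop(0)  # the shebang is allowed before the header
    return bool(HEADER_RE.match("".join(f"{ln}\n" for ln in header)))


good = (
    "#!/usr/bin/env python3\n"
    "# Copyright (C) 2025 The Android Open Source Project\n"
    "#\n"
    '# Licensed under the Apache License, Version 2.0 (the "License");\n'
)
assert header_ok(good)
assert not header_ok("print('no header')\n")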
@@ -27,9 +27,11 @@ import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import List
|
||||
|
||||
|
||||
TOPDIR = Path(__file__).resolve().parent.parent
|
||||
THIS_FILE = Path(__file__).resolve()
|
||||
TOPDIR = THIS_FILE.parent.parent
|
||||
MANDIR = TOPDIR.joinpath("man")
|
||||
|
||||
# Load repo local modules.
|
||||
@@ -42,9 +44,23 @@ def worker(cmd, **kwargs):
|
||||
subprocess.run(cmd, **kwargs)
|
||||
|
||||
|
||||
def main(argv):
|
||||
def get_parser() -> argparse.ArgumentParser:
|
||||
"""Get argument parser."""
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.parse_args(argv)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--check",
|
||||
"--dry-run",
|
||||
action="store_const",
|
||||
const=True,
|
||||
help="Check if changes are necessary; don't actually change files",
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv: List[str]) -> int:
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
if not shutil.which("help2man"):
|
||||
sys.exit("Please install help2man to continue.")
|
||||
@@ -117,6 +133,7 @@ def main(argv):
|
||||
functools.partial(worker, cwd=tempdir, check=True), cmdlist
|
||||
)
|
||||
|
||||
ret = 0
|
||||
for tmp_path in MANDIR.glob("*.1.tmp"):
|
||||
path = tmp_path.parent / tmp_path.stem
|
||||
old_data = path.read_text() if path.exists() else ""
|
||||
@@ -133,7 +150,17 @@ def main(argv):
|
||||
)
|
||||
new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r"\1", data, flags=re.M)
|
||||
if old_data != new_data:
|
||||
path.write_text(data)
|
||||
if opts.check:
|
||||
ret = 1
|
||||
print(
|
||||
f"{THIS_FILE.name}: {path.name}: "
|
||||
"man page needs regenerating",
|
||||
file=sys.stderr,
|
||||
)
|
||||
else:
|
||||
path.write_text(data)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def replace_regex(data):
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
"""Random utility code for release tools."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shlex
|
||||
import subprocess
|
||||
@@ -24,8 +24,9 @@ import sys
|
||||
assert sys.version_info >= (3, 6), "This module requires Python 3.6+"
|
||||
|
||||
|
||||
TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
HOMEDIR = os.path.expanduser("~")
|
||||
THIS_FILE = Path(__file__).resolve()
|
||||
TOPDIR = THIS_FILE.parent.parent
|
||||
HOMEDIR = Path("~").expanduser()
|
||||
|
||||
|
||||
# These are the release keys we sign with.
|
||||
@@ -54,7 +55,7 @@ def run(opts, cmd, check=True, **kwargs):
|
||||
def import_release_key(opts):
|
||||
"""Import the public key of the official release repo signing key."""
|
||||
# Extract the key from our repo launcher.
|
||||
launcher = getattr(opts, "launcher", os.path.join(TOPDIR, "repo"))
|
||||
launcher = getattr(opts, "launcher", TOPDIR / "repo")
|
||||
print(f'Importing keys from "{launcher}" launcher script')
|
||||
with open(launcher, encoding="utf-8") as fp:
|
||||
data = fp.read()
|
||||
|
||||
repo (1 line changed)
@@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2008 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
||||
run_tests (52 lines changed)
@@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -17,6 +17,8 @@
|
||||
|
||||
import functools
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import List
|
||||
@@ -25,6 +27,11 @@ from typing import List
|
||||
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
|
||||
def log_cmd(cmd: str, argv: list[str]) -> None:
|
||||
"""Log a debug message to make history easier to track."""
|
||||
print("+", cmd, shlex.join(argv), file=sys.stderr)
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def is_ci() -> bool:
|
||||
"""Whether we're running in our CI system."""
|
||||
@@ -36,6 +43,7 @@ def run_pytest(argv: List[str]) -> int:
|
||||
if is_ci():
|
||||
argv = ["-m", "not skip_cq"] + argv
|
||||
|
||||
log_cmd("pytest", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "pytest"] + argv,
|
||||
check=False,
|
||||
@@ -48,6 +56,7 @@ def run_pytest_py38(argv: List[str]) -> int:
|
||||
if is_ci():
|
||||
argv = ["-m", "not skip_cq"] + argv
|
||||
|
||||
log_cmd("[vpython 3.8] pytest", argv)
|
||||
try:
|
||||
return subprocess.run(
|
||||
[
|
||||
@@ -76,8 +85,10 @@ def run_black():
|
||||
"release/update-hooks",
|
||||
"release/update-manpages",
|
||||
]
|
||||
argv = ["--diff", "--check", ROOT_DIR] + extra_programs
|
||||
log_cmd("black", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs,
|
||||
[sys.executable, "-m", "black"] + argv,
|
||||
check=False,
|
||||
cwd=ROOT_DIR,
|
||||
).returncode
|
||||
@@ -85,8 +96,10 @@ def run_black():
|
||||
|
||||
def run_flake8():
|
||||
"""Returns the exit code from flake8."""
|
||||
argv = [ROOT_DIR]
|
||||
log_cmd("flake8", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "flake8", ROOT_DIR],
|
||||
[sys.executable, "-m", "flake8"] + argv,
|
||||
check=False,
|
||||
cwd=ROOT_DIR,
|
||||
).returncode
|
||||
@@ -94,8 +107,37 @@ def run_flake8():
|
||||
|
||||
def run_isort():
|
||||
"""Returns the exit code from isort."""
|
||||
argv = ["--check", ROOT_DIR]
|
||||
log_cmd("isort", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "isort", "--check", ROOT_DIR],
|
||||
[sys.executable, "-m", "isort"] + argv,
|
||||
check=False,
|
||||
cwd=ROOT_DIR,
|
||||
).returncode
|
||||
|
||||
|
||||
def run_check_metadata():
|
||||
"""Returns the exit code from check-metadata."""
|
||||
argv = []
|
||||
log_cmd("release/check-metadata.py", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "release/check-metadata.py"] + argv,
|
||||
check=False,
|
||||
cwd=ROOT_DIR,
|
||||
).returncode
|
||||
|
||||
|
||||
def run_update_manpages() -> int:
|
||||
"""Returns the exit code from release/update-manpages."""
|
||||
# Allow this to fail on CI, but not local devs.
|
||||
if is_ci() and not shutil.which("help2man"):
|
||||
print("update-manpages: help2man not found; skipping test")
|
||||
return 0
|
||||
|
||||
argv = ["--check"]
|
||||
log_cmd("release/update-manpages", argv)
|
||||
return subprocess.run(
|
||||
[sys.executable, "release/update-manpages"] + argv,
|
||||
check=False,
|
||||
cwd=ROOT_DIR,
|
||||
).returncode
|
||||
@@ -109,6 +151,8 @@ def main(argv):
|
||||
run_black,
|
||||
run_flake8,
|
||||
run_isort,
|
||||
run_check_metadata,
|
||||
run_update_manpages,
|
||||
)
|
||||
# Run all the tests all the time to get full feedback. Don't exit on the
|
||||
# first error as that makes it more difficult to iterate in the CQ.
|
||||
|
||||
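run_tests now echoes every linter invocation through a small log_cmd helper before dispatching it, which keeps CI logs greppable. A minimal reproduction of that wrapper around one of the runners; the pairing shown here is illustrative:

import shlex
import subprocess
import sys


def log_cmd(cmd, argv):
    """Log a debug line so command history is easy to track in CI output."""
    print("+", cmd, shlex.join(argv), file=sys.stderr)


def run_flake8(root_dir):
    """Returns the exit code from flake8."""
    argv = [root_dir]
    log_cmd("flake8", argv)
    return subprocess.run(
        [sys.executable, "-m", "flake8"] + argv,
        check=False,
        cwd=root_dir,
    ).returncode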
setup.py (4 lines changed)
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright 2019 The Android Open Source Project
|
||||
# Copyright (C) 2019 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License");
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
|
||||
@@ -48,7 +48,6 @@ It is equivalent to "git branch -D <branchname>".
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"--all",
|
||||
dest="all",
|
||||
action="store_true",
|
||||
help="delete all branches in all projects",
|
||||
)
|
||||
|
||||
@@ -35,7 +35,6 @@ to the Unix 'patch' command.
|
||||
p.add_option(
|
||||
"-u",
|
||||
"--absolute",
|
||||
dest="absolute",
|
||||
action="store_true",
|
||||
help="paths are relative to the repository root",
|
||||
)
|
||||
|
||||
@@ -67,7 +67,9 @@ synced and their revisions won't be found.
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"--raw", dest="raw", action="store_true", help="display raw diff"
|
||||
"--raw",
|
||||
action="store_true",
|
||||
help="display raw diff",
|
||||
)
|
||||
p.add_option(
|
||||
"--no-color",
|
||||
@@ -78,7 +80,6 @@ synced and their revisions won't be found.
|
||||
)
|
||||
p.add_option(
|
||||
"--pretty-format",
|
||||
dest="pretty_format",
|
||||
action="store",
|
||||
metavar="<FORMAT>",
|
||||
help="print the log using a custom git pretty format string",
|
||||
|
||||
@@ -60,7 +60,6 @@ If no project is specified try to use current directory as a project.
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--revert",
|
||||
dest="revert",
|
||||
action="store_true",
|
||||
help="revert instead of checkout",
|
||||
)
|
||||
|
||||
@@ -141,7 +141,6 @@ without iterating through the remaining projects.
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--regex",
|
||||
dest="regex",
|
||||
action="store_true",
|
||||
help="execute the command only on projects matching regex or "
|
||||
"wildcard expression",
|
||||
@@ -149,7 +148,6 @@ without iterating through the remaining projects.
|
||||
p.add_option(
|
||||
"-i",
|
||||
"--inverse-regex",
|
||||
dest="inverse_regex",
|
||||
action="store_true",
|
||||
help="execute the command only on projects not matching regex or "
|
||||
"wildcard expression",
|
||||
@@ -157,22 +155,20 @@ without iterating through the remaining projects.
|
||||
p.add_option(
|
||||
"-g",
|
||||
"--groups",
|
||||
dest="groups",
|
||||
help="execute the command only on projects matching the specified "
|
||||
"groups",
|
||||
)
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--command",
|
||||
help="command (and arguments) to execute",
|
||||
dest="command",
|
||||
help="command (and arguments) to execute",
|
||||
action="callback",
|
||||
callback=self._cmd_option,
|
||||
)
|
||||
p.add_option(
|
||||
"-e",
|
||||
"--abort-on-errors",
|
||||
dest="abort_on_errors",
|
||||
action="store_true",
|
||||
help="abort if a command exits unsuccessfully",
|
||||
)
|
||||
|
||||
@@ -120,7 +120,6 @@ contain a line that matches both expressions:
|
||||
g.add_option(
|
||||
"-r",
|
||||
"--revision",
|
||||
dest="revision",
|
||||
action="append",
|
||||
metavar="TREEish",
|
||||
help="Search TREEish, instead of the work tree",
|
||||
|
||||
@@ -43,14 +43,12 @@ class Info(PagedCommand):
|
||||
p.add_option(
|
||||
"-o",
|
||||
"--overview",
|
||||
dest="overview",
|
||||
action="store_true",
|
||||
help="show overview of all local commits",
|
||||
)
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--current-branch",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help="consider only checked out branches",
|
||||
)
|
||||
@@ -90,7 +88,7 @@ class Info(PagedCommand):
|
||||
self.manifest = self.manifest.outer_client
|
||||
manifestConfig = self.manifest.manifestProject.config
|
||||
mergeBranch = manifestConfig.GetBranch("default").merge
|
||||
manifestGroups = self.manifest.GetGroupsStr()
|
||||
manifestGroups = self.manifest.GetManifestGroupsStr()
|
||||
|
||||
self.heading("Manifest branch: ")
|
||||
if self.manifest.default.revisionExpr:
|
||||
@@ -104,6 +102,11 @@ class Info(PagedCommand):
|
||||
self.heading("Manifest groups: ")
|
||||
self.headtext(manifestGroups)
|
||||
self.out.nl()
|
||||
sp = self.manifest.superproject
|
||||
srev = sp.commit_id if sp and sp.commit_id else "None"
|
||||
self.heading("Superproject revision: ")
|
||||
self.headtext(srev)
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
|
||||
@@ -127,6 +127,7 @@ to update the working directory files.
|
||||
return {
|
||||
"REPO_MANIFEST_URL": "manifest_url",
|
||||
"REPO_MIRROR_LOCATION": "reference",
|
||||
"REPO_GIT_LFS": "git_lfs",
|
||||
}
|
||||
|
||||
def _SyncManifest(self, opt):
|
||||
|
||||
@@ -40,7 +40,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--regex",
|
||||
dest="regex",
|
||||
action="store_true",
|
||||
help="filter the project list based on regex or wildcard matching "
|
||||
"of strings",
|
||||
@@ -48,7 +47,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
p.add_option(
|
||||
"-g",
|
||||
"--groups",
|
||||
dest="groups",
|
||||
help="filter the project list based on the groups the project is "
|
||||
"in",
|
||||
)
|
||||
@@ -61,21 +59,18 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
p.add_option(
|
||||
"-n",
|
||||
"--name-only",
|
||||
dest="name_only",
|
||||
action="store_true",
|
||||
help="display only the name of the repository",
|
||||
)
|
||||
p.add_option(
|
||||
"-p",
|
||||
"--path-only",
|
||||
dest="path_only",
|
||||
action="store_true",
|
||||
help="display only the path of the repository",
|
||||
)
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--fullpath",
|
||||
dest="fullpath",
|
||||
action="store_true",
|
||||
help="display the full work tree path instead of the relative path",
|
||||
)
|
||||
|
||||
@@ -12,7 +12,9 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import enum
|
||||
import json
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
@@ -23,6 +25,16 @@ from repo_logging import RepoLogger
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class OutputFormat(enum.Enum):
|
||||
"""Type for the requested output format."""
|
||||
|
||||
# Canonicalized manifest in XML format.
|
||||
XML = enum.auto()
|
||||
|
||||
# Canonicalized manifest in JSON format.
|
||||
JSON = enum.auto()
|
||||
|
||||
|
||||
class Manifest(PagedCommand):
|
||||
COMMON = False
|
||||
helpSummary = "Manifest inspection utility"
|
||||
@@ -42,6 +54,10 @@ revisions set to the current commit hash. These are known as
|
||||
In this case, the 'upstream' attribute is set to the ref we were on
|
||||
when the manifest was generated. The 'dest-branch' attribute is set
|
||||
to indicate the remote ref to push changes to via 'repo upload'.
|
||||
|
||||
Multiple output formats are supported via --format. The default output
|
||||
is XML, and formats are generally "condensed". Use --pretty for more
|
||||
human-readable variations.
|
||||
"""
|
||||
|
||||
@property
|
||||
@@ -86,11 +102,21 @@ to indicate the remote ref to push changes to via 'repo upload'.
|
||||
"(only of use if the branch names for a sha1 manifest are "
|
||||
"sensitive)",
|
||||
)
|
||||
# Replaced with --format=json. Kept for backwards compatibility.
|
||||
# Can delete in Jun 2026 or later.
|
||||
p.add_option(
|
||||
"--json",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="output manifest in JSON format (experimental)",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
const=OutputFormat.JSON.name.lower(),
|
||||
help=optparse.SUPPRESS_HELP,
|
||||
)
|
||||
formats = tuple(x.lower() for x in OutputFormat.__members__.keys())
|
||||
p.add_option(
|
||||
"--format",
|
||||
default=OutputFormat.XML.name.lower(),
|
||||
choices=formats,
|
||||
help=f"output format: {', '.join(formats)} (default: %default)",
|
||||
)
|
||||
p.add_option(
|
||||
"--pretty",
|
||||
@@ -108,7 +134,6 @@ to indicate the remote ref to push changes to via 'repo upload'.
|
||||
p.add_option(
|
||||
"-o",
|
||||
"--output-file",
|
||||
dest="output_file",
|
||||
default="-",
|
||||
help="file to save the manifest to. (Filename prefix for "
|
||||
"multi-tree.)",
|
||||
@@ -121,6 +146,8 @@ to indicate the remote ref to push changes to via 'repo upload'.
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name, False)
|
||||
|
||||
output_format = OutputFormat[opt.format.upper()]
|
||||
|
||||
for manifest in self.ManifestList(opt):
|
||||
output_file = opt.output_file
|
||||
if output_file == "-":
|
||||
@@ -135,8 +162,7 @@ to indicate the remote ref to push changes to via 'repo upload'.
|
||||
|
||||
manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
|
||||
|
||||
if opt.json:
|
||||
logger.warning("warning: --json is experimental!")
|
||||
if output_format == OutputFormat.JSON:
|
||||
doc = manifest.ToDict(
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
@@ -152,7 +178,7 @@ to indicate the remote ref to push changes to via 'repo upload'.
|
||||
"separators": (",", ": ") if opt.pretty else (",", ":"),
|
||||
"sort_keys": True,
|
||||
}
|
||||
fd.write(json.dumps(doc, **json_settings))
|
||||
fd.write(json.dumps(doc, **json_settings) + "\n")
|
||||
else:
|
||||
manifest.Save(
|
||||
fd,
|
||||
|
||||
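The manifest subcommand swaps the boolean --json flag for an enum-backed --format choice and keeps --json working as a hidden store_const alias. A standalone sketch of that option wiring with plain optparse; the option names follow the diff, the parser setup is illustrative:

import enum
import optparse


class OutputFormat(enum.Enum):
    XML = enum.auto()
    JSON = enum.auto()


parser = optparse.OptionParser()
formats = tuple(x.lower() for x in OutputFormat.__members__)
parser.add_option(
    "--format",
    default=OutputFormat.XML.name.lower(),
    choices=formats,
    help=f"output format: {', '.join(formats)} (default: %default)",
)
# Legacy spelling: --json silently maps onto --format=json.
parser.add_option(
    "--json",
    action="store_const",
    dest="format",
    const=OutputFormat.JSON.name.lower(),
    help=optparse.SUPPRESS_HELP,
)

opts, _ = parser.parse_args(["--json"])
assert OutputFormat[opts.format.upper()] is OutputFormat.JSON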
@@ -37,7 +37,6 @@ are displayed.
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--current-branch",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help="consider only checked out branches",
|
||||
)
|
||||
|
||||
@@ -47,21 +47,18 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
g.add_option(
|
||||
"-i",
|
||||
"--interactive",
|
||||
dest="interactive",
|
||||
action="store_true",
|
||||
help="interactive rebase (single project only)",
|
||||
)
|
||||
|
||||
p.add_option(
|
||||
"--fail-fast",
|
||||
dest="fail_fast",
|
||||
action="store_true",
|
||||
help="stop rebasing after first error is hit",
|
||||
)
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--force-rebase",
|
||||
dest="force_rebase",
|
||||
action="store_true",
|
||||
help="pass --force-rebase to git rebase",
|
||||
)
|
||||
@@ -74,27 +71,23 @@ branch but need to incorporate new upstream changes "underneath" them.
|
||||
)
|
||||
p.add_option(
|
||||
"--autosquash",
|
||||
dest="autosquash",
|
||||
action="store_true",
|
||||
help="pass --autosquash to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--whitespace",
|
||||
dest="whitespace",
|
||||
action="store",
|
||||
metavar="WS",
|
||||
help="pass --whitespace to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--auto-stash",
|
||||
dest="auto_stash",
|
||||
action="store_true",
|
||||
help="stash local modifications before starting",
|
||||
)
|
||||
p.add_option(
|
||||
"-m",
|
||||
"--onto-manifest",
|
||||
dest="onto_manifest",
|
||||
action="store_true",
|
||||
help="rebase onto the manifest version instead of upstream "
|
||||
"HEAD (this helps to make sure the local tree stays "
|
||||
|
||||
@@ -54,7 +54,6 @@ need to be performed by an end-user.
|
||||
)
|
||||
g.add_option(
|
||||
"--repo-upgraded",
|
||||
dest="repo_upgraded",
|
||||
action="store_true",
|
||||
help=optparse.SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
@@ -46,7 +46,6 @@ The '%prog' command stages files to prepare the next commit.
|
||||
g.add_option(
|
||||
"-i",
|
||||
"--interactive",
|
||||
dest="interactive",
|
||||
action="store_true",
|
||||
help="use interactive staging",
|
||||
)
|
||||
|
||||
@@ -51,7 +51,6 @@ revision specified in the manifest.
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"--all",
|
||||
dest="all",
|
||||
action="store_true",
|
||||
help="begin branch in all projects",
|
||||
)
|
||||
|
||||
@@ -82,7 +82,6 @@ the following meanings:
|
||||
p.add_option(
|
||||
"-o",
|
||||
"--orphans",
|
||||
dest="orphans",
|
||||
action="store_true",
|
||||
help="include objects in working directory outside of repo "
|
||||
"projects",
|
||||
|
||||
subcmds/sync.py (1110 lines changed; diff suppressed because it is too large)
@@ -267,7 +267,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
"--cc",
|
||||
type="string",
|
||||
action="append",
|
||||
dest="cc",
|
||||
help="also send email to these email addresses",
|
||||
)
|
||||
p.add_option(
|
||||
@@ -281,7 +280,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--current-branch",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help="upload current git branch",
|
||||
)
|
||||
@@ -310,7 +308,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
"-p",
|
||||
"--private",
|
||||
action="store_true",
|
||||
dest="private",
|
||||
default=False,
|
||||
help="upload as a private change (deprecated; use --wip)",
|
||||
)
|
||||
@@ -318,7 +315,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
"-w",
|
||||
"--wip",
|
||||
action="store_true",
|
||||
dest="wip",
|
||||
default=False,
|
||||
help="upload as a work-in-progress change",
|
||||
)
|
||||
@@ -628,6 +624,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
branch.uploaded = False
|
||||
return
|
||||
|
||||
# If using superproject, add the root repo as a push option.
|
||||
manifest = branch.project.manifest
|
||||
push_options = list(opt.push_options)
|
||||
if manifest.manifestProject.use_superproject:
|
||||
sp = manifest.superproject
|
||||
if sp:
|
||||
r_id = sp.repo_id
|
||||
if r_id:
|
||||
push_options.append(f"custom-keyed-value=rootRepo:{r_id}")
|
||||
|
||||
branch.UploadForReview(
|
||||
people,
|
||||
dryrun=opt.dryrun,
|
||||
@@ -640,7 +646,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
|
||||
ready=opt.ready,
|
||||
dest_branch=destination,
|
||||
validate_certs=opt.validate_certs,
|
||||
push_options=opt.push_options,
|
||||
push_options=push_options,
|
||||
patchset_description=opt.patchset_description,
|
||||
)
|
||||
|
||||
|
||||
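When superproject is enabled, upload appends a custom-keyed-value push option naming the root repo. A sketch of that option assembly; the SimpleNamespace stubs below stand in for the real manifest objects:

from types import SimpleNamespace


def build_push_options(base_options, manifest):
    """Copy the user's push options, adding rootRepo when superproject is on."""
    push_options = list(base_options)
    if manifest.manifestProject.use_superproject:
        sp = manifest.superproject
        if sp and sp.repo_id:
            push_options.append(f"custom-keyed-value=rootRepo:{sp.repo_id}")
    return push_options


# Hypothetical stand-ins for the real manifest objects.
manifest = SimpleNamespace(
    manifestProject=SimpleNamespace(use_superproject=True),
    superproject=SimpleNamespace(repo_id="platform/superproject"),
)
assert build_push_options(["topic=foo"], manifest) == [
    "topic=foo",
    "custom-keyed-value=rootRepo:platform/superproject",
]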
subcmds/wipe.py (new file, 184 lines)
@@ -0,0 +1,184 @@
|
||||
# Copyright (C) 2025 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
from command import Command
|
||||
from error import GitError
|
||||
from error import RepoExitError
|
||||
import platform_utils
|
||||
from project import DeleteWorktreeError
|
||||
|
||||
|
||||
class Error(RepoExitError):
|
||||
"""Exit error when wipe command fails."""
|
||||
|
||||
|
||||
class Wipe(Command):
|
||||
"""Delete projects from the worktree and .repo"""
|
||||
|
||||
COMMON = True
|
||||
helpSummary = "Wipe projects from the worktree"
|
||||
helpUsage = """
|
||||
%prog <project>...
|
||||
"""
|
||||
helpDescription = """
|
||||
The '%prog' command removes the specified projects from the worktree
|
||||
(the checked out source code) and deletes the project's git data from `.repo`.
|
||||
|
||||
This is a destructive operation and cannot be undone.
|
||||
|
||||
Projects can be specified either by name, or by a relative or absolute path
|
||||
to the project's local directory.
|
||||
|
||||
Examples:
|
||||
|
||||
# Wipe the project "platform/build" by name:
|
||||
$ repo wipe platform/build
|
||||
|
||||
# Wipe the project at the path "build/make":
|
||||
$ repo wipe build/make
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
# TODO(crbug.com/gerrit/393383056): Add --broken option to scan and
|
||||
# wipe broken projects.
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--force",
|
||||
action="store_true",
|
||||
help="force wipe shared projects and uncommitted changes",
|
||||
)
|
||||
p.add_option(
|
||||
"--force-uncommitted",
|
||||
action="store_true",
|
||||
help="force wipe even if there are uncommitted changes",
|
||||
)
|
||||
p.add_option(
|
||||
"--force-shared",
|
||||
action="store_true",
|
||||
help="force wipe even if the project shares an object directory",
|
||||
)
|
||||
|
||||
def ValidateOptions(self, opt, args: List[str]):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args: List[str]):
|
||||
# Get all projects to handle shared object directories.
|
||||
all_projects = self.GetProjects(None, all_manifests=True, groups="all")
|
||||
projects_to_wipe = self.GetProjects(args, all_manifests=True)
|
||||
relpaths_to_wipe = {p.relpath for p in projects_to_wipe}
|
||||
|
||||
# Build a map from objdir to the relpaths of projects that use it.
|
||||
objdir_map = {}
|
||||
for p in all_projects:
|
||||
objdir_map.setdefault(p.objdir, set()).add(p.relpath)
|
||||
|
||||
uncommitted_projects = []
|
||||
shared_objdirs = {}
|
||||
objdirs_to_delete = set()
|
||||
|
||||
for project in projects_to_wipe:
|
||||
if project == self.manifest.manifestProject:
|
||||
raise Error(
|
||||
f"error: cannot wipe the manifest project: {project.name}"
|
||||
)
|
||||
|
||||
try:
|
||||
if project.HasChanges():
|
||||
uncommitted_projects.append(project.name)
|
||||
except GitError:
|
||||
uncommitted_projects.append(f"{project.name} (corrupted)")
|
||||
|
||||
users = objdir_map.get(project.objdir, {project.relpath})
|
||||
is_shared = not users.issubset(relpaths_to_wipe)
|
||||
if is_shared:
|
||||
shared_objdirs.setdefault(project.objdir, set()).update(users)
|
||||
else:
|
||||
objdirs_to_delete.add(project.objdir)
|
||||
|
||||
block_uncommitted = uncommitted_projects and not (
|
||||
opt.force or opt.force_uncommitted
|
||||
)
|
||||
block_shared = shared_objdirs and not (opt.force or opt.force_shared)
|
||||
|
||||
if block_uncommitted or block_shared:
|
||||
error_messages = []
|
||||
if block_uncommitted:
|
||||
error_messages.append(
|
||||
"The following projects have uncommitted changes or are "
|
||||
"corrupted:\n"
|
||||
+ "\n".join(f" - {p}" for p in sorted(uncommitted_projects))
|
||||
)
|
||||
if block_shared:
|
||||
shared_dir_messages = []
|
||||
for objdir, users in sorted(shared_objdirs.items()):
|
||||
other_users = users - relpaths_to_wipe
|
||||
projects_to_wipe_in_dir = users & relpaths_to_wipe
|
||||
message = f"""Object directory {objdir} is shared by:
|
||||
Projects to be wiped: {', '.join(sorted(projects_to_wipe_in_dir))}
|
||||
Projects not to be wiped: {', '.join(sorted(other_users))}"""
|
||||
shared_dir_messages.append(message)
|
||||
error_messages.append(
|
||||
"The following projects have shared object directories:\n"
|
||||
+ "\n".join(sorted(shared_dir_messages))
|
||||
)
|
||||
|
||||
if block_uncommitted and block_shared:
|
||||
error_messages.append(
|
||||
"Use --force to wipe anyway, or --force-uncommitted and "
|
||||
"--force-shared to specify."
|
||||
)
|
||||
elif block_uncommitted:
|
||||
error_messages.append("Use --force-uncommitted to wipe anyway.")
|
||||
else:
|
||||
error_messages.append("Use --force-shared to wipe anyway.")
|
||||
|
||||
raise Error("\n\n".join(error_messages))
|
||||
|
||||
# If we are here, either there were no issues, or --force was used.
|
||||
# Proceed with wiping.
|
||||
successful_wipes = set()
|
||||
|
||||
for project in projects_to_wipe:
|
||||
try:
|
||||
# Force the delete here since we've already performed our
|
||||
# own safety checks above.
|
||||
project.DeleteWorktree(force=True, verbose=opt.verbose)
|
||||
successful_wipes.add(project.relpath)
|
||||
except DeleteWorktreeError as e:
|
||||
print(
|
||||
f"error: failed to wipe {project.name}: {e}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
# Clean up object directories only if all projects using them were
|
||||
# successfully wiped.
|
||||
for objdir in objdirs_to_delete:
|
||||
users = objdir_map.get(objdir, set())
|
||||
# Check if every project that uses this objdir has been
|
||||
# successfully processed. If a project failed to be wiped, don't
|
||||
# delete the object directory, or we'll corrupt the remaining
|
||||
# project.
|
||||
if users.issubset(successful_wipes):
|
||||
if os.path.exists(objdir):
|
||||
if opt.verbose:
|
||||
print(
|
||||
f"Deleting objects directory: {objdir}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
platform_utils.rmtree(objdir)
|
||||
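repo wipe only deletes an object directory when every project using it is part of the wipe. The safety check is essentially a reverse map from objdir to its users, roughly:

def shared_objdirs(all_projects, wipe_relpaths):
    """Map objdir -> users for objdirs also used by projects not being wiped.

    all_projects: iterable of (relpath, objdir) pairs.
    wipe_relpaths: set of project relpaths selected for wiping.
    """
    objdir_map = {}
    for relpath, objdir in all_projects:
        objdir_map.setdefault(objdir, set()).add(relpath)

    return {
        objdir: users
        for objdir, users in objdir_map.items()
        if (users & wipe_relpaths) and not users.issubset(wipe_relpaths)
    }


projects = [("a", "/objs/shared"), ("b", "/objs/shared"), ("c", "/objs/solo")]
assert shared_objdirs(projects, {"a"}) == {"/objs/shared": {"a", "b"}}
assert shared_objdirs(projects, {"a", "b", "c"}) == {}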
@@ -1,4 +1,4 @@
# Copyright 2022 The Android Open Source Project
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2021 The Android Open Source Project
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2019 The Android Open Source Project
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -15,6 +15,7 @@
"""Unittests for the manifest_xml.py module."""

import os
from pathlib import Path
import platform
import re
import tempfile
@@ -97,36 +98,34 @@ class ManifestParseTestCase(unittest.TestCase):

def setUp(self):
self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
self.tempdir = self.tempdirobj.name
self.repodir = os.path.join(self.tempdir, ".repo")
self.manifest_dir = os.path.join(self.repodir, "manifests")
self.manifest_file = os.path.join(
self.repodir, manifest_xml.MANIFEST_FILE_NAME
self.tempdir = Path(self.tempdirobj.name)
self.repodir = self.tempdir / ".repo"
self.manifest_dir = self.repodir / "manifests"
self.manifest_file = self.repodir / manifest_xml.MANIFEST_FILE_NAME
self.local_manifest_dir = (
self.repodir / manifest_xml.LOCAL_MANIFESTS_DIR_NAME
)
self.local_manifest_dir = os.path.join(
self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME
)
os.mkdir(self.repodir)
os.mkdir(self.manifest_dir)
self.repodir.mkdir()
self.manifest_dir.mkdir()

# The manifest parsing really wants a git repo currently.
gitdir = os.path.join(self.repodir, "manifests.git")
os.mkdir(gitdir)
with open(os.path.join(gitdir, "config"), "w") as fp:
fp.write(
"""[remote "origin"]
gitdir = self.repodir / "manifests.git"
gitdir.mkdir()
(gitdir / "config").write_text(
"""[remote "origin"]
url = https://localhost:0/manifest
"""
)
)

def tearDown(self):
self.tempdirobj.cleanup()

def getXmlManifest(self, data):
"""Helper to initialize a manifest for testing."""
with open(self.manifest_file, "w", encoding="utf-8") as fp:
fp.write(data)
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
self.manifest_file.write_text(data, encoding="utf-8")
return manifest_xml.XmlManifest(
str(self.repodir), str(self.manifest_file)
)

@staticmethod
def encodeXmlAttr(attr):
@@ -243,12 +242,14 @@ class XmlManifestTests(ManifestParseTestCase):

def test_link(self):
"""Verify Link handling with new names."""
manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
with open(os.path.join(self.manifest_dir, "foo.xml"), "w") as fp:
fp.write("<manifest></manifest>")
manifest = manifest_xml.XmlManifest(
str(self.repodir), str(self.manifest_file)
)
(self.manifest_dir / "foo.xml").write_text("<manifest></manifest>")
manifest.Link("foo.xml")
with open(self.manifest_file) as fp:
self.assertIn('<include name="foo.xml" />', fp.read())
self.assertIn(
'<include name="foo.xml" />', self.manifest_file.read_text()
)

def test_toxml_empty(self):
"""Verify the ToXml() helper."""
@@ -406,10 +407,9 @@ class IncludeElementTests(ManifestParseTestCase):

def test_revision_default(self):
"""Check handling of revision attribute."""
root_m = os.path.join(self.manifest_dir, "root.xml")
with open(root_m, "w") as fp:
fp.write(
"""
root_m = self.manifest_dir / "root.xml"
root_m.write_text(
"""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
@@ -418,17 +418,34 @@ class IncludeElementTests(ManifestParseTestCase):
<project name="root-name2" path="root-path2" />
</manifest>
"""
)
with open(os.path.join(self.manifest_dir, "stable.xml"), "w") as fp:
fp.write(
"""
)
(self.manifest_dir / "stable.xml").write_text(
"""
<manifest>
<include name="man1.xml" />
<include name="man2.xml" revision="stable-branch2" />
<project name="stable-name1" path="stable-path1" />
<project name="stable-name2" path="stable-path2" revision="stable-branch2" />
</manifest>
"""
)
include_m = manifest_xml.XmlManifest(self.repodir, root_m)
)
(self.manifest_dir / "man1.xml").write_text(
"""
<manifest>
<project name="man1-name1" />
<project name="man1-name2" revision="stable-branch3" />
</manifest>
"""
)
(self.manifest_dir / "man2.xml").write_text(
"""
<manifest>
<project name="man2-name1" />
<project name="man2-name2" revision="stable-branch3" />
</manifest>
"""
)
include_m = manifest_xml.XmlManifest(str(self.repodir), str(root_m))
for proj in include_m.projects:
if proj.name == "root-name1":
# Check include revision not set on root level proj.
@@ -442,12 +459,19 @@ class IncludeElementTests(ManifestParseTestCase):
if proj.name == "stable-name2":
# Check stable proj revision can override include node.
self.assertEqual("stable-branch2", proj.revisionExpr)
if proj.name == "man1-name1":
self.assertEqual("stable-branch", proj.revisionExpr)
if proj.name == "man1-name2":
self.assertEqual("stable-branch3", proj.revisionExpr)
if proj.name == "man2-name1":
self.assertEqual("stable-branch2", proj.revisionExpr)
if proj.name == "man2-name2":
self.assertEqual("stable-branch3", proj.revisionExpr)

def test_group_levels(self):
root_m = os.path.join(self.manifest_dir, "root.xml")
with open(root_m, "w") as fp:
fp.write(
"""
root_m = self.manifest_dir / "root.xml"
root_m.write_text(
"""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
@@ -456,25 +480,23 @@ class IncludeElementTests(ManifestParseTestCase):
<project name="root-name2" path="root-path2" groups="r2g1,r2g2" />
</manifest>
"""
)
with open(os.path.join(self.manifest_dir, "level1.xml"), "w") as fp:
fp.write(
"""
)
(self.manifest_dir / "level1.xml").write_text(
"""
<manifest>
<include name="level2.xml" groups="level2-group" />
<project name="level1-name1" path="level1-path1" />
</manifest>
"""
)
with open(os.path.join(self.manifest_dir, "level2.xml"), "w") as fp:
fp.write(
"""
)
(self.manifest_dir / "level2.xml").write_text(
"""
<manifest>
<project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" />
</manifest>
"""
)
include_m = manifest_xml.XmlManifest(self.repodir, root_m)
)
include_m = manifest_xml.XmlManifest(str(self.repodir), str(root_m))
for proj in include_m.projects:
if proj.name == "root-name1":
# Check include group not set on root level proj.
@@ -492,6 +514,41 @@ class IncludeElementTests(ManifestParseTestCase):
# Check level2 proj group not removed.
self.assertIn("l2g1", proj.groups)

def test_group_levels_with_extend_project(self):
root_m = self.manifest_dir / "root.xml"
root_m.write_text(
"""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<include name="man1.xml" groups="top-group1" />
<include name="man2.xml" groups="top-group2" />
</manifest>
"""
)
(self.manifest_dir / "man1.xml").write_text(
"""
<manifest>
<project name="project1" path="project1" />
</manifest>
"""
)
(self.manifest_dir / "man2.xml").write_text(
"""
<manifest>
<extend-project name="project1" groups="eg1" />
</manifest>
"""
)
include_m = manifest_xml.XmlManifest(str(self.repodir), str(root_m))
proj = include_m.projects[0]
# Check project has inherited group via project element.
self.assertIn("top-group1", proj.groups)
# Check project has inherited group via extend-project element.
self.assertIn("top-group2", proj.groups)
# Check project has set group via extend-project element.
self.assertIn("eg1", proj.groups)

def test_allow_bad_name_from_user(self):
"""Check handling of bad name attribute from the user's input."""

@@ -510,9 +567,8 @@ class IncludeElementTests(ManifestParseTestCase):
manifest.ToXml()

# Setup target of the include.
target = os.path.join(self.tempdir, "target.xml")
with open(target, "w") as fp:
fp.write("<manifest></manifest>")
target = self.tempdir / "target.xml"
target.write_text("<manifest></manifest>")

# Include with absolute path.
parse(os.path.abspath(target))
@@ -526,12 +582,9 @@ class IncludeElementTests(ManifestParseTestCase):
def parse(name):
name = self.encodeXmlAttr(name)
# Setup target of the include.
with open(
os.path.join(self.manifest_dir, "target.xml"),
"w",
encoding="utf-8",
) as fp:
fp.write(f'<manifest><include name="{name}"/></manifest>')
(self.manifest_dir / "target.xml").write_text(
f'<manifest><include name="{name}"/></manifest>'
)

manifest = self.getXmlManifest(
"""
@@ -578,18 +631,18 @@ class ProjectElementTests(ManifestParseTestCase):
manifest.projects[0].name: manifest.projects[0].groups,
manifest.projects[1].name: manifest.projects[1].groups,
}
self.assertCountEqual(
result["test-name"], ["name:test-name", "all", "path:test-path"]
self.assertEqual(
result["test-name"], {"name:test-name", "all", "path:test-path"}
)
self.assertCountEqual(
self.assertEqual(
result["extras"],
["g1", "g2", "g1", "name:extras", "all", "path:path"],
{"g1", "g2", "name:extras", "all", "path:path"},
)
groupstr = "default,platform-" + platform.system().lower()
self.assertEqual(groupstr, manifest.GetGroupsStr())
self.assertEqual(groupstr, manifest.GetManifestGroupsStr())
groupstr = "g1,g2,g1"
manifest.manifestProject.config.SetString("manifest.groups", groupstr)
self.assertEqual(groupstr, manifest.GetGroupsStr())
self.assertEqual(groupstr, manifest.GetManifestGroupsStr())

def test_set_revision_id(self):
"""Check setting of project's revisionId."""
@@ -1214,6 +1267,166 @@ class ExtendProjectElementTests(ManifestParseTestCase):
self.assertEqual(len(manifest.projects), 1)
self.assertEqual(manifest.projects[0].upstream, "bar")

def test_extend_project_copyfiles(self):
manifest = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<extend-project name="myproject">
<copyfile src="foo" dest="bar" />
</extend-project>
</manifest>
"""
)
self.assertEqual(list(manifest.projects[0].copyfiles)[0].src, "foo")
self.assertEqual(list(manifest.projects[0].copyfiles)[0].dest, "bar")
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project name="myproject">'
'<copyfile dest="bar" src="foo"/>'
"</project>"
"</manifest>",
)

def test_extend_project_duplicate_copyfiles(self):
root_m = self.manifest_dir / "root.xml"
root_m.write_text(
"""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="myproject" />
<include name="man1.xml" />
<include name="man2.xml" />
</manifest>
"""
)
(self.manifest_dir / "man1.xml").write_text(
"""
<manifest>
<include name="common.xml" />
</manifest>
"""
)
(self.manifest_dir / "man2.xml").write_text(
"""
<manifest>
<include name="common.xml" />
</manifest>
"""
)
(self.manifest_dir / "common.xml").write_text(
"""
<manifest>
<extend-project name="myproject">
<copyfile dest="bar" src="foo"/>
</extend-project>
</manifest>
"""
)
manifest = manifest_xml.XmlManifest(str(self.repodir), str(root_m))
self.assertEqual(len(manifest.projects[0].copyfiles), 1)
self.assertEqual(list(manifest.projects[0].copyfiles)[0].src, "foo")
self.assertEqual(list(manifest.projects[0].copyfiles)[0].dest, "bar")

def test_extend_project_linkfiles(self):
manifest = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<extend-project name="myproject">
<linkfile src="foo" dest="bar" />
</extend-project>
</manifest>
"""
)
self.assertEqual(list(manifest.projects[0].linkfiles)[0].src, "foo")
self.assertEqual(list(manifest.projects[0].linkfiles)[0].dest, "bar")
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project name="myproject">'
'<linkfile dest="bar" src="foo"/>'
"</project>"
"</manifest>",
)

def test_extend_project_duplicate_linkfiles(self):
root_m = self.manifest_dir / "root.xml"
root_m.write_text(
"""
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="myproject" />
<include name="man1.xml" />
<include name="man2.xml" />
</manifest>
"""
)
(self.manifest_dir / "man1.xml").write_text(
"""
<manifest>
<include name="common.xml" />
</manifest>
"""
)
(self.manifest_dir / "man2.xml").write_text(
"""
<manifest>
<include name="common.xml" />
</manifest>
"""
)
(self.manifest_dir / "common.xml").write_text(
"""
<manifest>
<extend-project name="myproject">
<linkfile dest="bar" src="foo"/>
</extend-project>
</manifest>
"""
)
manifest = manifest_xml.XmlManifest(str(self.repodir), str(root_m))
self.assertEqual(len(manifest.projects[0].linkfiles), 1)
self.assertEqual(list(manifest.projects[0].linkfiles)[0].src, "foo")
self.assertEqual(list(manifest.projects[0].linkfiles)[0].dest, "bar")

def test_extend_project_annotations(self):
manifest = self.getXmlManifest(
"""
<manifest>
<remote name="default-remote" fetch="http://localhost" />
<default remote="default-remote" revision="refs/heads/main" />
<project name="myproject" />
<extend-project name="myproject">
<annotation name="foo" value="bar" />
</extend-project>
</manifest>
"""
)
self.assertEqual(manifest.projects[0].annotations[0].name, "foo")
self.assertEqual(manifest.projects[0].annotations[0].value, "bar")
self.assertEqual(
sort_attributes(manifest.ToXml().toxml()),
'<?xml version="1.0" ?><manifest>'
'<remote fetch="http://localhost" name="default-remote"/>'
'<default remote="default-remote" revision="refs/heads/main"/>'
'<project name="myproject">'
'<annotation name="foo" value="bar"/>'
"</project>"
"</manifest>",
)


class NormalizeUrlTests(ManifestParseTestCase):
"""Tests for normalize_url() in manifest_xml.py"""

@@ -1,4 +1,4 @@
# Copyright 2021 The Android Open Source Project
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2022 The Android Open Source Project
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2019 The Android Open Source Project
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -89,3 +89,49 @@ class AllCommands(unittest.TestCase):
msg=f"subcmds/{name}.py: {opt}: only use dashes in "
"options, not underscores",
)

def test_cli_option_dest(self):
"""Block redundant dest= arguments."""

def _check_dest(opt):
"""Check the dest= setting."""
# If the destination is not set, nothing to check.
# If long options are not set, then there's no implicit destination.
# If callback is used, then a destination might be needed because
# optparse cannot assume a value is always stored.
if opt.dest is None or not opt._long_opts or opt.callback:
return

long = opt._long_opts[0]
assert long.startswith("--")
# This matches optparse's behavior.
implicit_dest = long[2:].replace("-", "_")
if implicit_dest == opt.dest:
bad_opts.append((str(opt), opt.dest))

# Hook the option check list.
optparse.Option.CHECK_METHODS.insert(0, _check_dest)

# Gather all the bad options up front so people can see all bad options
# instead of failing at the first one.
all_bad_opts = {}
for name, cls in subcmds.all_commands.items():
bad_opts = all_bad_opts[name] = []
cmd = cls()
# Trigger construction of parser.
cmd.OptionParser

errmsg = None
for name, bad_opts in sorted(all_bad_opts.items()):
if bad_opts:
if not errmsg:
errmsg = "Omit redundant dest= when defining options.\n"
errmsg += f"\nSubcommand {name} (subcmds/{name}.py):\n"
errmsg += "".join(
f" {opt}: dest='{dest}'\n" for opt, dest in bad_opts
)
if errmsg:
self.fail(errmsg)

# Make sure we aren't popping the wrong stuff.
assert optparse.Option.CHECK_METHODS.pop(0) is _check_dest

156
tests/test_subcmds_manifest.py
Normal file
156
tests/test_subcmds_manifest.py
Normal file
@@ -0,0 +1,156 @@
# Copyright (C) 2025 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for the subcmds/manifest.py module."""

import json
from pathlib import Path
from unittest import mock

import manifest_xml
from subcmds import manifest


_EXAMPLE_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8"?>
<manifest>
<remote name="test-remote" fetch="http://localhost" />
<default remote="test-remote" revision="refs/heads/main" />
<project name="repohooks" path="src/repohooks"/>
<repo-hooks in-project="repohooks" enabled-list="a, b"/>
</manifest>
"""


def _get_cmd(repodir: Path) -> manifest.Manifest:
"""Instantiate a manifest command object to test."""
manifests_git = repodir / "manifests.git"
manifests_git.mkdir()
(manifests_git / "config").write_text(
"""
[remote "origin"]
\turl = http://localhost/manifest
"""
)
client = manifest_xml.RepoClient(repodir=str(repodir))
git_event_log = mock.MagicMock(ErrorEvent=mock.Mock(return_value=None))
return manifest.Manifest(
repodir=client.repodir,
client=client,
manifest=client.manifest,
outer_client=client,
outer_manifest=client.manifest,
git_event_log=git_event_log,
)


def test_output_format_xml_file(tmp_path):
"""Test writing XML to a file."""
path = tmp_path / "manifest.xml"
path.write_text(_EXAMPLE_MANIFEST)
outpath = tmp_path / "output.xml"
cmd = _get_cmd(tmp_path)
opt, args = cmd.OptionParser.parse_args(["--output-file", str(outpath)])
cmd.Execute(opt, args)
# Normalize the output a bit as we don't exactly care.
normalize = lambda data: "\n".join(
x.strip() for x in data.splitlines() if x.strip()
)
assert (
normalize(outpath.read_text())
== """<?xml version="1.0" encoding="UTF-8"?>
<manifest>
<remote name="test-remote" fetch="http://localhost"/>
<default remote="test-remote" revision="refs/heads/main"/>
<project name="repohooks" path="src/repohooks"/>
<repo-hooks in-project="repohooks" enabled-list="a b"/>
</manifest>"""
)


def test_output_format_xml_stdout(tmp_path, capsys):
"""Test writing XML to stdout."""
path = tmp_path / "manifest.xml"
path.write_text(_EXAMPLE_MANIFEST)
cmd = _get_cmd(tmp_path)
opt, args = cmd.OptionParser.parse_args(["--format", "xml"])
cmd.Execute(opt, args)
# Normalize the output a bit as we don't exactly care.
normalize = lambda data: "\n".join(
x.strip() for x in data.splitlines() if x.strip()
)
stdout = capsys.readouterr().out
assert (
normalize(stdout)
== """<?xml version="1.0" encoding="UTF-8"?>
<manifest>
<remote name="test-remote" fetch="http://localhost"/>
<default remote="test-remote" revision="refs/heads/main"/>
<project name="repohooks" path="src/repohooks"/>
<repo-hooks in-project="repohooks" enabled-list="a b"/>
</manifest>"""
)


def test_output_format_json(tmp_path, capsys):
"""Test writing JSON."""
path = tmp_path / "manifest.xml"
path.write_text(_EXAMPLE_MANIFEST)
cmd = _get_cmd(tmp_path)
opt, args = cmd.OptionParser.parse_args(["--format", "json"])
cmd.Execute(opt, args)
obj = json.loads(capsys.readouterr().out)
assert obj == {
"default": {"remote": "test-remote", "revision": "refs/heads/main"},
"project": [{"name": "repohooks", "path": "src/repohooks"}],
"remote": [{"fetch": "http://localhost", "name": "test-remote"}],
"repo-hooks": {"enabled-list": "a b", "in-project": "repohooks"},
}


def test_output_format_json_pretty(tmp_path, capsys):
"""Test writing pretty JSON."""
path = tmp_path / "manifest.xml"
path.write_text(_EXAMPLE_MANIFEST)
cmd = _get_cmd(tmp_path)
opt, args = cmd.OptionParser.parse_args(["--format", "json", "--pretty"])
cmd.Execute(opt, args)
stdout = capsys.readouterr().out
assert (
stdout
== """\
{
"default": {
"remote": "test-remote",
"revision": "refs/heads/main"
},
"project": [
{
"name": "repohooks",
"path": "src/repohooks"
}
],
"remote": [
{
"fetch": "http://localhost",
"name": "test-remote"
}
],
"repo-hooks": {
"enabled-list": "a b",
"in-project": "repohooks"
}
}
"""
)
@@ -305,8 +305,21 @@ class LocalSyncState(unittest.TestCase):


class FakeProject:
def __init__(self, relpath):
def __init__(self, relpath, name=None, objdir=None):
self.relpath = relpath
self.name = name or relpath
self.objdir = objdir or relpath
self.worktree = relpath

self.use_git_worktrees = False
self.UseAlternates = False
self.manifest = mock.MagicMock()
self.manifest.GetProjectsWithName.return_value = [self]
self.config = mock.MagicMock()
self.EnableRepositoryExtension = mock.MagicMock()

def RelPath(self, local=None):
return self.relpath

def __str__(self):
return f"project: {self.relpath}"
@@ -513,3 +526,418 @@ class SyncCommand(unittest.TestCase):
self.cmd.Execute(self.opt, [])
self.assertIn(self.sync_local_half_error, e.aggregate_errors)
self.assertIn(self.sync_network_half_error, e.aggregate_errors)


class SyncUpdateRepoProject(unittest.TestCase):
"""Tests for Sync._UpdateRepoProject."""

def setUp(self):
"""Common setup."""
self.repodir = tempfile.mkdtemp(".repo")
self.manifest = manifest = mock.MagicMock(repodir=self.repodir)
# Create a repoProject with a mock Sync_NetworkHalf.
repoProject = mock.MagicMock(name="repo")
repoProject.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(True, None)
)
manifest.repoProject = repoProject
manifest.IsArchive = False
manifest.CloneFilter = None
manifest.PartialCloneExclude = None
manifest.CloneFilterForDepth = None

git_event_log = mock.MagicMock(ErrorEvent=mock.Mock(return_value=None))
self.cmd = sync.Sync(manifest=manifest, git_event_log=git_event_log)

opt, _ = self.cmd.OptionParser.parse_args([])
opt.local_only = False
opt.repo_verify = False
opt.verbose = False
opt.quiet = True
opt.force_sync = False
opt.clone_bundle = False
opt.tags = False
opt.optimized_fetch = False
opt.retry_fetches = 0
opt.prune = False
self.opt = opt
self.errors = []

mock.patch.object(sync.Sync, "_GetCurrentBranchOnly").start()

def tearDown(self):
shutil.rmtree(self.repodir)
mock.patch.stopall()

def test_fetches_when_stale(self):
"""Test it fetches when the repo project is stale."""
self.manifest.repoProject.LastFetch = time.time() - (
sync._ONE_DAY_S + 1
)

with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
self.manifest.repoProject.Sync_NetworkHalf.assert_called_once()
mock_post_fetch.assert_called_once()
self.assertEqual(self.errors, [])

def test_skips_when_fresh(self):
"""Test it skips fetch when repo project is fresh."""
self.manifest.repoProject.LastFetch = time.time()

with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
self.manifest.repoProject.Sync_NetworkHalf.assert_not_called()
mock_post_fetch.assert_not_called()

def test_skips_local_only(self):
"""Test it does nothing with --local-only."""
self.opt.local_only = True
self.manifest.repoProject.LastFetch = time.time() - (
sync._ONE_DAY_S + 1
)

with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
self.manifest.repoProject.Sync_NetworkHalf.assert_not_called()
mock_post_fetch.assert_not_called()

def test_post_repo_fetch_skipped_on_env_var(self):
"""Test _PostRepoFetch is skipped when REPO_SKIP_SELF_UPDATE is set."""
self.manifest.repoProject.LastFetch = time.time()

with mock.patch.dict(os.environ, {"REPO_SKIP_SELF_UPDATE": "1"}):
with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
self.cmd._UpdateRepoProject(
self.opt, self.manifest, self.errors
)
mock_post_fetch.assert_not_called()

def test_fetch_failure_is_handled(self):
"""Test that a fetch failure is recorded and doesn't crash."""
self.manifest.repoProject.LastFetch = time.time() - (
sync._ONE_DAY_S + 1
)
fetch_error = GitError("Fetch failed")
self.manifest.repoProject.Sync_NetworkHalf.return_value = (
SyncNetworkHalfResult(False, fetch_error)
)

with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
self.manifest.repoProject.Sync_NetworkHalf.assert_called_once()
mock_post_fetch.assert_not_called()
self.assertEqual(self.errors, [fetch_error])

class InterleavedSyncTest(unittest.TestCase):
"""Tests for interleaved sync."""

def setUp(self):
"""Set up a sync command with mocks."""
self.repodir = tempfile.mkdtemp(".repo")
self.manifest = mock.MagicMock(repodir=self.repodir)
self.manifest.repoProject.LastFetch = time.time()
self.manifest.repoProject.worktree = self.repodir
self.manifest.manifestProject.worktree = self.repodir
self.manifest.IsArchive = False
self.manifest.CloneBundle = False
self.manifest.default.sync_j = 1

self.outer_client = mock.MagicMock()
self.outer_client.manifest.IsArchive = False
self.cmd = sync.Sync(
manifest=self.manifest, outer_client=self.outer_client
)
self.cmd.outer_manifest = self.manifest

# Mock projects.
self.projA = FakeProject("projA", objdir="objA")
self.projB = FakeProject("projB", objdir="objB")
self.projA_sub = FakeProject(
"projA/sub", name="projA_sub", objdir="objA_sub"
)
self.projC = FakeProject("projC", objdir="objC")

# Mock methods that are not part of the core interleaved sync logic.
mock.patch.object(self.cmd, "_UpdateAllManifestProjects").start()
mock.patch.object(self.cmd, "_UpdateProjectsRevisionId").start()
mock.patch.object(self.cmd, "_ValidateOptionsWithManifest").start()
mock.patch.object(sync, "_PostRepoUpgrade").start()
mock.patch.object(sync, "_PostRepoFetch").start()

# Mock parallel context for worker tests.
self.parallel_context_patcher = mock.patch(
"subcmds.sync.Sync.get_parallel_context"
)
self.mock_get_parallel_context = self.parallel_context_patcher.start()
self.sync_dict = {}
self.mock_context = {
"projects": [],
"sync_dict": self.sync_dict,
}
self.mock_get_parallel_context.return_value = self.mock_context

# Mock _GetCurrentBranchOnly for worker tests.
mock.patch.object(sync.Sync, "_GetCurrentBranchOnly").start()

self.cmd._fetch_times = mock.Mock()
self.cmd._local_sync_state = mock.Mock()

def tearDown(self):
"""Clean up resources."""
shutil.rmtree(self.repodir)
mock.patch.stopall()

def test_interleaved_fail_fast(self):
"""Test that --fail-fast is respected in interleaved mode."""
opt, args = self.cmd.OptionParser.parse_args(
["--interleaved", "--fail-fast", "-j2"]
)
opt.quiet = True

# With projA/sub, _SafeCheckoutOrder creates two batches:
# 1. [projA, projB]
# 2. [projA/sub]
# We want to fail on the first batch and ensure the second isn't run.
all_projects = [self.projA, self.projB, self.projA_sub]
mock.patch.object(
self.cmd, "GetProjects", return_value=all_projects
).start()

# Mock ExecuteInParallel to simulate a failed run on the first batch of
# projects.
execute_mock = mock.patch.object(
self.cmd, "ExecuteInParallel", return_value=False
).start()

with self.assertRaises(sync.SyncFailFastError):
self.cmd._SyncInterleaved(
opt,
args,
[],
self.manifest,
self.manifest.manifestProject,
all_projects,
{},
)

execute_mock.assert_called_once()

def test_interleaved_shared_objdir_serial(self):
"""Test that projects with shared objdir are processed serially."""
opt, args = self.cmd.OptionParser.parse_args(["--interleaved", "-j4"])
opt.quiet = True

# Setup projects with a shared objdir.
self.projA.objdir = "common_objdir"
self.projC.objdir = "common_objdir"

all_projects = [self.projA, self.projB, self.projC]
mock.patch.object(
self.cmd, "GetProjects", return_value=all_projects
).start()

def execute_side_effect(jobs, target, work_items, **kwargs):
# The callback is a partial object. The first arg is the set we
# need to update to avoid the stall detection.
synced_relpaths_set = kwargs["callback"].args[0]
projects_in_pass = self.cmd.get_parallel_context()["projects"]
for item in work_items:
for project_idx in item:
synced_relpaths_set.add(
projects_in_pass[project_idx].relpath
)
return True

execute_mock = mock.patch.object(
self.cmd, "ExecuteInParallel", side_effect=execute_side_effect
).start()

self.cmd._SyncInterleaved(
opt,
args,
[],
self.manifest,
self.manifest.manifestProject,
all_projects,
{},
)

execute_mock.assert_called_once()
jobs_arg, _, work_items = execute_mock.call_args.args
self.assertEqual(jobs_arg, 2)
work_items_sets = {frozenset(item) for item in work_items}
expected_sets = {frozenset([0, 2]), frozenset([1])}
self.assertEqual(work_items_sets, expected_sets)

def _get_opts(self, args=None):
"""Helper to get default options for worker tests."""
if args is None:
args = ["--interleaved"]
opt, _ = self.cmd.OptionParser.parse_args(args)
# Set defaults for options used by the worker.
opt.quiet = True
opt.verbose = False
opt.force_sync = False
opt.clone_bundle = False
opt.tags = False
opt.optimized_fetch = False
opt.retry_fetches = 0
opt.prune = False
opt.detach_head = False
opt.force_checkout = False
opt.rebase = False
return opt

def test_worker_successful_sync(self):
"""Test _SyncProjectList with a successful fetch and checkout."""
opt = self._get_opts()
project = self.projA
project.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
)
project.Sync_LocalHalf = mock.Mock()
project.manifest.manifestProject.config = mock.MagicMock()
self.mock_context["projects"] = [project]

with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
mock_sync_buf_instance = mock.MagicMock()
mock_sync_buf_instance.Finish.return_value = True
mock_sync_buf_instance.errors = []
mock_sync_buffer.return_value = mock_sync_buf_instance

result_obj = self.cmd._SyncProjectList(opt, [0])

self.assertEqual(len(result_obj.results), 1)
result = result_obj.results[0]
self.assertTrue(result.fetch_success)
self.assertTrue(result.checkout_success)
self.assertEqual(result.fetch_errors, [])
self.assertEqual(result.checkout_errors, [])
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_called_once()

def test_worker_fetch_fails(self):
"""Test _SyncProjectList with a failed fetch."""
opt = self._get_opts()
project = self.projA
fetch_error = GitError("Fetch failed")
project.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(
error=fetch_error, remote_fetched=False
)
)
project.Sync_LocalHalf = mock.Mock()
self.mock_context["projects"] = [project]

result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertFalse(result.fetch_success)
self.assertFalse(result.checkout_success)
self.assertEqual(result.fetch_errors, [fetch_error])
self.assertEqual(result.checkout_errors, [])
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_not_called()

def test_worker_no_worktree(self):
"""Test interleaved sync does not checkout with no worktree."""
opt = self._get_opts()
project = self.projA
project.worktree = None
project.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
)
project.Sync_LocalHalf = mock.Mock()
self.mock_context["projects"] = [project]

result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertTrue(result.fetch_success)
self.assertTrue(result.checkout_success)
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_not_called()

def test_worker_fetch_fails_exception(self):
"""Test _SyncProjectList with an exception during fetch."""
opt = self._get_opts()
project = self.projA
fetch_error = GitError("Fetch failed")
project.Sync_NetworkHalf = mock.Mock(side_effect=fetch_error)
project.Sync_LocalHalf = mock.Mock()
self.mock_context["projects"] = [project]

result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertFalse(result.fetch_success)
self.assertFalse(result.checkout_success)
self.assertEqual(result.fetch_errors, [fetch_error])
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_not_called()

def test_worker_checkout_fails(self):
"""Test _SyncProjectList with an exception during checkout."""
opt = self._get_opts()
project = self.projA
project.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
)
checkout_error = GitError("Checkout failed")
project.Sync_LocalHalf = mock.Mock(side_effect=checkout_error)
project.manifest.manifestProject.config = mock.MagicMock()
self.mock_context["projects"] = [project]

with mock.patch("subcmds.sync.SyncBuffer"):
result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertTrue(result.fetch_success)
self.assertFalse(result.checkout_success)
self.assertEqual(result.fetch_errors, [])
self.assertEqual(result.checkout_errors, [checkout_error])
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_called_once()

def test_worker_local_only(self):
"""Test _SyncProjectList with --local-only."""
opt = self._get_opts(["--interleaved", "--local-only"])
project = self.projA
project.Sync_NetworkHalf = mock.Mock()
project.Sync_LocalHalf = mock.Mock()
project.manifest.manifestProject.config = mock.MagicMock()
self.mock_context["projects"] = [project]

with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
mock_sync_buf_instance = mock.MagicMock()
mock_sync_buf_instance.Finish.return_value = True
mock_sync_buf_instance.errors = []
mock_sync_buffer.return_value = mock_sync_buf_instance

result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertTrue(result.fetch_success)
self.assertTrue(result.checkout_success)
project.Sync_NetworkHalf.assert_not_called()
project.Sync_LocalHalf.assert_called_once()

def test_worker_network_only(self):
"""Test _SyncProjectList with --network-only."""
opt = self._get_opts(["--interleaved", "--network-only"])
project = self.projA
project.Sync_NetworkHalf = mock.Mock(
return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
)
project.Sync_LocalHalf = mock.Mock()
self.mock_context["projects"] = [project]

result_obj = self.cmd._SyncProjectList(opt, [0])
result = result_obj.results[0]

self.assertTrue(result.fetch_success)
self.assertTrue(result.checkout_success)
project.Sync_NetworkHalf.assert_called_once()
project.Sync_LocalHalf.assert_not_called()

263
tests/test_subcmds_wipe.py
Normal file
263
tests/test_subcmds_wipe.py
Normal file
@@ -0,0 +1,263 @@
# Copyright (C) 2025 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
from unittest import mock

import pytest

import project
from subcmds import wipe


def _create_mock_project(tempdir, name, objdir_path=None, has_changes=False):
"""Creates a mock project with necessary attributes and directories."""
worktree = os.path.join(tempdir, name)
gitdir = os.path.join(tempdir, ".repo/projects", f"{name}.git")
if objdir_path:
objdir = objdir_path
else:
objdir = os.path.join(tempdir, ".repo/project-objects", f"{name}.git")

os.makedirs(worktree, exist_ok=True)
os.makedirs(gitdir, exist_ok=True)
os.makedirs(objdir, exist_ok=True)

proj = project.Project(
manifest=mock.MagicMock(),
name=name,
remote=mock.MagicMock(),
gitdir=gitdir,
objdir=objdir,
worktree=worktree,
relpath=name,
revisionExpr="main",
revisionId="abcd",
)

proj.HasChanges = mock.MagicMock(return_value=has_changes)

def side_effect_delete_worktree(force=False, verbose=False):
if os.path.exists(proj.worktree):
shutil.rmtree(proj.worktree)
if os.path.exists(proj.gitdir):
shutil.rmtree(proj.gitdir)
return True

proj.DeleteWorktree = mock.MagicMock(
side_effect=side_effect_delete_worktree
)

return proj


def _run_wipe(all_projects, projects_to_wipe_names, options=None):
"""Helper to run the Wipe command with mocked projects."""
cmd = wipe.Wipe()
cmd.manifest = mock.MagicMock()

def get_projects_mock(projects, all_manifests=False, **kwargs):
if projects is None:
return all_projects
names_to_find = set(projects)
return [p for p in all_projects if p.name in names_to_find]

cmd.GetProjects = mock.MagicMock(side_effect=get_projects_mock)

if options is None:
options = []

opts = cmd.OptionParser.parse_args(options + projects_to_wipe_names)[0]
cmd.CommonValidateOptions(opts, projects_to_wipe_names)
cmd.ValidateOptions(opts, projects_to_wipe_names)
cmd.Execute(opts, projects_to_wipe_names)


def test_wipe_single_unshared_project(tmp_path):
"""Test wiping a single project that is not shared."""
p1 = _create_mock_project(str(tmp_path), "project/one")
_run_wipe([p1], ["project/one"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert not os.path.exists(p1.objdir)


def test_wipe_multiple_unshared_projects(tmp_path):
"""Test wiping multiple projects that are not shared."""
p1 = _create_mock_project(str(tmp_path), "project/one")
p2 = _create_mock_project(str(tmp_path), "project/two")
_run_wipe([p1, p2], ["project/one", "project/two"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert not os.path.exists(p1.objdir)
assert not os.path.exists(p2.worktree)
assert not os.path.exists(p2.gitdir)
assert not os.path.exists(p2.objdir)


def test_wipe_shared_project_no_force_raises_error(tmp_path):
"""Test that wiping a shared project without --force raises an error."""
shared_objdir = os.path.join(
str(tmp_path), ".repo/project-objects", "shared.git"
)
p1 = _create_mock_project(
str(tmp_path), "project/one", objdir_path=shared_objdir
)
p2 = _create_mock_project(
str(tmp_path), "project/two", objdir_path=shared_objdir
)

with pytest.raises(wipe.Error) as e:
_run_wipe([p1, p2], ["project/one"])

assert "shared object directories" in str(e.value)
assert "project/one" in str(e.value)
assert "project/two" in str(e.value)

assert os.path.exists(p1.worktree)
assert os.path.exists(p1.gitdir)
assert os.path.exists(p2.worktree)
assert os.path.exists(p2.gitdir)
assert os.path.exists(shared_objdir)


def test_wipe_shared_project_with_force(tmp_path):
"""Test wiping a shared project with --force."""
shared_objdir = os.path.join(
str(tmp_path), ".repo/project-objects", "shared.git"
)
p1 = _create_mock_project(
str(tmp_path), "project/one", objdir_path=shared_objdir
)
p2 = _create_mock_project(
str(tmp_path), "project/two", objdir_path=shared_objdir
)

_run_wipe([p1, p2], ["project/one"], options=["--force"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert os.path.exists(shared_objdir)
assert os.path.exists(p2.worktree)
assert os.path.exists(p2.gitdir)


def test_wipe_all_sharing_projects(tmp_path):
"""Test wiping all projects that share an object directory."""
shared_objdir = os.path.join(
str(tmp_path), ".repo/project-objects", "shared.git"
)
p1 = _create_mock_project(
str(tmp_path), "project/one", objdir_path=shared_objdir
)
p2 = _create_mock_project(
str(tmp_path), "project/two", objdir_path=shared_objdir
)

_run_wipe([p1, p2], ["project/one", "project/two"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert not os.path.exists(p2.worktree)
assert not os.path.exists(p2.gitdir)
assert not os.path.exists(shared_objdir)


def test_wipe_with_uncommitted_changes_raises_error(tmp_path):
"""Test wiping a project with uncommitted changes raises an error."""
p1 = _create_mock_project(str(tmp_path), "project/one", has_changes=True)

with pytest.raises(wipe.Error) as e:
_run_wipe([p1], ["project/one"])

assert "uncommitted changes" in str(e.value)
assert "project/one" in str(e.value)

assert os.path.exists(p1.worktree)
assert os.path.exists(p1.gitdir)
assert os.path.exists(p1.objdir)


def test_wipe_with_uncommitted_changes_with_force(tmp_path):
"""Test wiping a project with uncommitted changes with --force."""
p1 = _create_mock_project(str(tmp_path), "project/one", has_changes=True)
_run_wipe([p1], ["project/one"], options=["--force"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert not os.path.exists(p1.objdir)


def test_wipe_uncommitted_and_shared_raises_combined_error(tmp_path):
"""Test that uncommitted and shared projects raise a combined error."""
shared_objdir = os.path.join(
str(tmp_path), ".repo/project-objects", "shared.git"
)
p1 = _create_mock_project(
str(tmp_path),
"project/one",
objdir_path=shared_objdir,
has_changes=True,
)
p2 = _create_mock_project(
str(tmp_path), "project/two", objdir_path=shared_objdir
)

with pytest.raises(wipe.Error) as e:
_run_wipe([p1, p2], ["project/one"])

assert "uncommitted changes" in str(e.value)
assert "shared object directories" in str(e.value)
assert "project/one" in str(e.value)
assert "project/two" in str(e.value)

assert os.path.exists(p1.worktree)
assert os.path.exists(p1.gitdir)
assert os.path.exists(p2.worktree)
assert os.path.exists(p2.gitdir)
assert os.path.exists(shared_objdir)


def test_wipe_shared_project_with_force_shared(tmp_path):
"""Test wiping a shared project with --force-shared."""
shared_objdir = os.path.join(
str(tmp_path), ".repo/project-objects", "shared.git"
)
p1 = _create_mock_project(
str(tmp_path), "project/one", objdir_path=shared_objdir
)
p2 = _create_mock_project(
str(tmp_path), "project/two", objdir_path=shared_objdir
)

_run_wipe([p1, p2], ["project/one"], options=["--force-shared"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert os.path.exists(shared_objdir)
assert os.path.exists(p2.worktree)
assert os.path.exists(p2.gitdir)


def test_wipe_with_uncommitted_changes_with_force_uncommitted(tmp_path):
"""Test wiping uncommitted changes with --force-uncommitted."""
p1 = _create_mock_project(str(tmp_path), "project/one", has_changes=True)
_run_wipe([p1], ["project/one"], options=["--force-uncommitted"])

assert not os.path.exists(p1.worktree)
assert not os.path.exists(p1.gitdir)
assert not os.path.exists(p1.objdir)
@@ -1,4 +1,4 @@
# Copyright 2022 The Android Open Source Project
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.