mirror of https://gerrit.googlesource.com/git-repo
synced 2026-01-12 17:40:52 +00:00
Compare commits
286 Commits
.flake8 (21 changed lines)
@@ -1,15 +1,16 @@
[flake8]
max-line-length=100
ignore=
    # E111: Indentation is not a multiple of four
    E111,
    # E114: Indentation is not a multiple of four (comment)
    E114,
max-line-length = 80
per-file-ignores =
    # E501: line too long
    tests/test_git_superproject.py: E501
extend-ignore =
    # E203: Whitespace before ':'
    # See https://github.com/PyCQA/pycodestyle/issues/373
    E203,
    # E402: Module level import not at top of file
    E402,
    # E731: do not assign a lambda expression, use a def
    E731,
    # W503: Line break before binary operator
    W503,
    # W504: Line break after binary operator
    W504
exclude =
    venv,
    .tox,
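The E731 entry above, for example, lets flake8 accept lambda assignments such as the sort key repo's own command.py uses. A minimal illustrative snippet (not code from the repo):

```python
# Illustrative only: with E731 ignored (see extend-ignore above), flake8
# accepts binding a lambda to a name instead of requiring a def.
sort_key = lambda project: project.relpath

# The def form that flake8 would otherwise insist on:
def sort_key_def(project):
    return project.relpath
```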
.github/workflows/close-pull-request.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
# GitHub actions workflow.
# https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions

# https://github.com/superbrothers/close-pull-request
name: Close Pull Request

on:
  pull_request_target:
    types: [opened]

jobs:
  run:
    runs-on: ubuntu-latest
    steps:
      - uses: superbrothers/close-pull-request@v3
        with:
          comment: >
            Thanks for your contribution!
            Unfortunately, we don't use GitHub pull requests to manage code
            contributions to this repository.
            Instead, please see [README.md](../blob/HEAD/SUBMITTING_PATCHES.md)
            which provides full instructions on how to get involved.
.github/workflows/flake8-postsubmit.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
# GitHub actions workflow.
# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
# https://github.com/marketplace/actions/python-flake8

name: Flake8

on:
  push:
    branches: [main]

jobs:
  lint:
    name: Python Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: Run flake8
        uses: julianwachholz/flake8-action@v2
        with:
          checkName: "Python Lint"
.github/workflows/test-ci.yml (vendored, 11 changed lines)
@@ -13,19 +13,20 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [3.6, 3.7, 3.8, 3.9]
        # ubuntu-20.04 is the last version that supports python 3.6
        os: [ubuntu-20.04, macos-latest, windows-latest]
        python-version: ['3.6', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
    runs-on: ${{ matrix.os }}

    steps:
    - uses: actions/checkout@v2
    - uses: actions/checkout@v3
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v1
      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install tox tox-gh-actions
        python -m pip install tox tox-gh-actions
    - name: Test with tox
      run: tox
.isort.cfg (new file, 41 lines)
@@ -0,0 +1,41 @@
# Copyright 2023 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Config file for the isort python module.
# This is used to enforce import sorting standards.
#
# https://pycqa.github.io/isort/docs/configuration/options.html

[settings]
# Be compatible with `black` since it also matches what we want.
profile = black

line_length = 80
length_sort = false
force_single_line = true
lines_after_imports = 2
from_first = false
case_sensitive = false
force_sort_within_sections = true
order_by_type = false

# Ignore generated files.
extend_skip_glob = *_pb2.py

# Allow importing multiple classes on a single line from these modules.
# https://google.github.io/styleguide/pyguide#s2.2-imports
single_line_exclusions =
    abc,
    collections.abc,
    typing,
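As a rough illustration of these settings (the module names are chosen for the example, not taken from the repo), a compliant import block would look like:

```python
# Imports arranged as isort with the config above would: straight and
# from-imports sorted together (force_sort_within_sections), one name per
# from-import (force_single_line), except the excluded typing and
# collections.abc modules, which may keep several names on one line.
from collections.abc import Iterable, Mapping
from functools import partial
from functools import reduce
import os
import sys
from typing import Dict, Optional


def main() -> None:
    """Two blank lines separate imports from code (lines_after_imports = 2)."""
```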
README.md
@@ -8,7 +8,7 @@ that you can put anywhere in your path.

* Homepage: <https://gerrit.googlesource.com/git-repo/>
* Mailing list: [repo-discuss on Google Groups][repo-discuss]
* Bug reports: <https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo>
* Bug reports: <https://issues.gerritcodereview.com/issues?q=is:open%20componentid:1370071>
* Source: <https://gerrit.googlesource.com/git-repo/>
* Overview: <https://source.android.com/source/developing.html>
* Docs: <https://source.android.com/source/using-repo.html>

@@ -50,6 +50,6 @@ $ chmod a+rx ~/.bin/repo
```

[new-bug]: https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue
[issue tracker]: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
[new-bug]: https://issues.gerritcodereview.com/issues/new?component=1370071
[issue tracker]: https://issues.gerritcodereview.com/issues?q=is:open%20componentid:1370071
[repo-discuss]: https://groups.google.com/forum/#!forum/repo-discuss
SUBMITTING_PATCHES.md
@@ -1,19 +1,19 @@
# Submitting Changes

Here's a short overview of the process.

* Make small logical changes.
* [Provide a meaningful commit message][commit-message-style].
* Make sure all code is under the Apache License, 2.0.
* Publish your changes for review.
  * `git push origin HEAD:refs/for/main`
* Make corrections if requested.
* [Verify your changes on Gerrit.](#verify)
* [Send to the commit queue for testing & merging.](#cq)

[TOC]

# Short Version

- Make small logical changes.
- [Provide a meaningful commit message][commit-message-style].
- Check for coding errors and style nits with flake8.
- Make sure all code is under the Apache License, 2.0.
- Publish your changes for review.
- Make corrections if requested.
- Verify your changes on gerrit so they can be submitted.

`git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/main`

# Long Version
## Long Version

I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution

@@ -39,17 +39,26 @@ If your description starts to get too long, that's a sign that you
probably need to split up your commit to finer grained pieces.

## Check for coding errors and style violations with flake8
## Linting and formatting code

Run `flake8` on changed modules:
Lint any changes by running:
```sh
$ tox -e lint -- file.py
```

    flake8 file.py
And format with:
```sh
$ tox -e format -- file.py
```

Note that repo generally follows [Google's Python Style Guide] rather than
[PEP 8], with a couple of notable exceptions:
Or format everything:
```sh
$ tox -e format
```

* Indentation is at 2 columns rather than 4
* The maximum line length is 100 columns rather than 80
Repo uses [black](https://black.readthedocs.io/) with line length of 80 as its
formatter and flake8 as its linter. Repo also follows
[Google's Python Style Guide].

There should be no new errors or warnings introduced.

@@ -166,12 +175,16 @@ commit. If you make the requested changes you will need to amend your commit
and push it to the review server again.

## Verify your changes on gerrit
## Verify your changes on Gerrit {#verify}

After you receive a Code-Review+2 from the maintainer, select the Verified
button on the gerrit page for the change. This verifies that you have tested
button on the Gerrit page for the change. This verifies that you have tested
your changes and notifies the maintainer that they are ready to be submitted.
The maintainer will then submit your changes to the repository.

## Merge your changes via the commit queue {#cq}

Once a change is ready to be merged, select the Commit-Queue+2 setting on the
Gerrit page for it. This tells the CI system to test the change, and if it
passes all the checks, automatically merges it.

[commit-message-style]: https://chris.beams.io/posts/git-commit/
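Since black wraps at 80 columns, any call that does not fit on one line is exploded one argument per line with a trailing comma, which is the shape most of the reformatted hunks later in this diff take. A small illustrative example (names are hypothetical, not from the repo):

```python
def describe(name, branch, remote, revision, dest_branch, upstream):
    """Join the fields into a one-line summary (illustrative helper)."""
    return f"{name} {branch} {remote} {revision} {dest_branch} {upstream}"


# The call below would exceed 80 columns on one line, so black formats it
# with one keyword argument per line and a trailing comma.
summary = describe(
    name="platform/build",
    branch="main",
    remote="aosp",
    revision="refs/heads/main",
    dest_branch="main",
    upstream="main",
)
print(summary)
```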
color.py (327 lines changed)
@@ -17,196 +17,201 @@ import sys

This hunk restyles color.py to the project's new black/flake8 conventions; every change visible in it is cosmetic. The `COLORS` and `ATTRS` dictionaries are rewritten with double-quoted keys (`ATTRS` collapses onto a single line), indentation moves from two spaces to four, single-quoted string literals become double-quoted, `class Coloring(object)` becomes `class Coloring`, and `_parse()` builds its config key with an f-string (`f"{self._section}.{opt}"`) instead of `'%s.%s' % (self._section, opt)`. The logic of `is_color()`, `is_attr()`, `_Color()`, `SetDefaultColoring()`, and the `Coloring` methods (`redirect`, `write`, `printer`, `nofmt_printer`, `colorer`, `nofmt_colorer`, `_parse`) is unchanged.
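For readers unfamiliar with the escape-sequence construction that `_Color()` performs, here is a minimal standalone sketch of the same idea. It is a simplification, not the repo's module: the real helper also handles text attributes (bold, dim, ...) and 256-color values via `38;5;N` / `48;5;N`.

```python
# Minimal sketch of ANSI color-code construction in the spirit of
# color.py's _Color() helper (simplified; see the note above).
COLORS = {"black": 0, "red": 1, "green": 2, "yellow": 3,
          "blue": 4, "magenta": 5, "cyan": 6, "white": 7}
RESET = "\033[m"


def color_code(fg=None, bg=None):
    """Build an ANSI escape selecting a foreground/background color."""
    parts = []
    if fg is not None:
        parts.append("3%d" % COLORS[fg])
    if bg is not None:
        parts.append("4%d" % COLORS[bg])
    return "\033[%sm" % ";".join(parts) if parts else ""


if __name__ == "__main__":
    print(color_code(fg="red") + "error text" + RESET)
```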
command.py (680 lines changed)

Two hunks: @@ -13,19 +13,19 @@ (imports) and @@ -42,332 +42,466 @@ (the `Command` class and its helpers). Besides the same black-style restyling seen in color.py, the visible changes are functional:

* The imports are re-sorted into isort order (`optparse` before `os`, the `error` imports alphabetized) and `from error import RepoExitError` is added.
* A new `UsageError(RepoExitError)` exception is defined, and `Command.Usage()` now raises it instead of calling `sys.exit(1)`.
* `class Command(object)` becomes `class Command` and gains a `MULTI_MANIFEST_SUPPORT = True` class attribute, checked only after `ValidateOptions` so that partially migrated subcommands can set it to False.
* `__init__` drops the `gitc_manifest` parameter and accepts `outer_client` and `outer_manifest`.
* `_CommonOptions()` adds a "Multi-manifest options" group: `--outer-manifest` / `--no-outer-manifest`, `--this-manifest-only`, and `--no-this-manifest-only` / `--all-manifests`.
* `CommonValidateOptions()` defaults `opt.outer_manifest` to True, so multi-manifest checkouts behave as a single manifest from the user's perspective by default.
* `GetProjects()` gains an `all_manifests` argument and a full docstring; when set, it starts from `self.manifest.outer_client` and `manifest.all_projects`, resolves path arguments against the deepest matching submanifest, and reports missing projects via `project.RelPath(local=not all_manifests)`.
* `FindProjects()` likewise gains `all_manifests`, matches patterns against both `project.name` and `project.RelPath(local=not all_manifests)`, and sorts results by `(project.manifest.path_prefix, project.relpath)`.
* A new `ManifestList(opt)` generator yields the outermost manifest (or just `self.manifest` under `--no-outer-manifest` / `--this-manifest-only`) followed by all child submanifests unless `--this-manifest-only` is given.
* `ExecuteInParallel()` keeps its behavior (run `func` over `inputs` with a `multiprocessing.Pool`, skipping the pool when there is only one job); only its layout and docstring change.
* `InteractiveCommand`, `PagedCommand`, and `MirrorSafeCommand` have their docstrings reflowed; `MirrorSafeCommand` and `GitcClientCommand` drop the explicit `object` base.
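The `ExecuteInParallel()` pattern summarized above can be sketched standalone. This is an illustration with hypothetical worker and callback names, not the repo's actual helper, which also threads through an output manager and uses `WORKER_BATCH_SIZE` as the chunk size:

```python
# Sketch of the ExecuteInParallel() pattern: run `func` over `inputs`,
# feeding results to `callback` as they become available.
import functools
import multiprocessing


def _check_one(prefix, name):
    # Stands in for the per-project work a subcommand would do.
    return f"{prefix}{name}: ok"


def execute_in_parallel(jobs, func, inputs, callback, ordered=False):
    # NB: multiprocessing is heavy, so don't spin it up for one job.
    if len(inputs) == 1 or jobs == 1:
        return callback(None, (func(x) for x in inputs))
    with multiprocessing.Pool(jobs) as pool:
        submit = pool.imap if ordered else pool.imap_unordered
        return callback(pool, submit(func, inputs, chunksize=4))


if __name__ == "__main__":
    func = functools.partial(_check_one, "project/")  # must be picklable
    results = execute_in_parallel(
        jobs=4,
        func=func,
        inputs=["a", "b", "c"],
        callback=lambda pool, it: list(it),
    )
    print(results)
```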
@@ -42,14 +42,22 @@ For example, if you want to change the manifest branch, you can simply run
|
||||
change the git URL/branch that this tracks, re-run `repo init` with the new
|
||||
settings.
|
||||
|
||||
* `.repo_fetchtimes.json`: Used by `repo sync` to record stats when syncing
|
||||
the various projects.
|
||||
* `.repo_fetchtimes.json`: Used by `repo sync` to record fetch times when
|
||||
syncing the various projects.
|
||||
|
||||
* `.repo_localsyncstate.json`: Used by `repo sync` to detect and warn on
|
||||
on partial tree syncs. Partial syncs are allowed by `repo` itself, but are
|
||||
unsupported by many projects where `repo` is used.
|
||||
|
||||
### Manifests
|
||||
|
||||
For more documentation on the manifest format, including the local_manifests
|
||||
support, see the [manifest-format.md] file.
|
||||
|
||||
* `submanifests/{submanifest.path}/`: The path prefix to the manifest state of
|
||||
a submanifest included in a multi-manifest checkout. The outermost manifest
|
||||
manifest state is found adjacent to `submanifests/`.
|
||||
|
||||
* `manifests/`: A git checkout of the manifest project. Its `.git/` state
|
||||
points to the `manifest.git` bare checkout (see below). It tracks the git
|
||||
branch specified at `repo init` time via `--manifest-branch`.
|
||||
@@ -163,6 +171,7 @@ User controlled settings are initialized when running `repo init`.
|
||||
| repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
|
||||
| repo.depth | `--depth` | Create shallow checkouts when cloning |
|
||||
| repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
|
||||
| repo.git-lfs | `--git-lfs` | Enable [Git LFS] support |
|
||||
| repo.mirror | `--mirror` | Checkout is a repo mirror |
|
||||
| repo.partialclone | `--partial-clone` | Create [partial git clones] |
|
||||
| repo.partialcloneexclude | `--partial-clone-exclude` | Comma-delimited list of project names (not paths) to exclude while using [partial git clones] |
|
||||
@@ -217,27 +226,30 @@ The `[remote]` settings are automatically populated/updated from the manifest.
|
||||
|
||||
The `[branch]` settings are updated by `repo start` and `git branch`.
|
||||
|
||||
| Setting                                | Subcommands   | Use/Meaning |
|----------------------------------------|---------------|-------------|
| review.\<url\>.autocopy                | upload        | Automatically add to `--cc=<value>` |
| review.\<url\>.autoreviewer            | upload        | Automatically add to `--reviewers=<value>` |
| review.\<url\>.autoupload              | upload        | Automatically answer "yes" or "no" to all prompts |
| review.\<url\>.uploadhashtags          | upload        | Automatically add to `--hashtag=<value>` |
| review.\<url\>.uploadlabels            | upload        | Automatically add to `--label=<value>` |
| review.\<url\>.uploadnotify            | upload        | [Notify setting][upload-notify] to use |
| review.\<url\>.uploadtopic             | upload        | Default [topic] to use |
| review.\<url\>.uploadwarningthreshold  | upload        | Warn when attempting to upload more than this many CLs |
| review.\<url\>.username                | upload        | Override username with `ssh://` review URIs |
| remote.\<remote\>.fetch                | sync          | Set of refs to fetch |
| remote.\<remote\>.projectname          | \<network\>   | The name of the project as it exists in Gerrit review |
| remote.\<remote\>.pushurl              | upload        | The base URI for pushing CLs |
| remote.\<remote\>.review               | upload        | The URI of the Gerrit review server |
| remote.\<remote\>.url                  | sync & upload | The URI of the git project to fetch |
| branch.\<branch\>.merge                | sync & upload | The branch to merge & upload & track |
| branch.\<branch\>.remote               | sync & upload | The remote to track |
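
These values are regular per-project git config settings, so they can also be
managed with plain `git config`. A minimal sketch, assuming a hypothetical
review host and checkout path (neither comes from this document):

```python
#!/usr/bin/env python3
"""Sketch: pre-answer `repo upload` prompts for one project."""
import subprocess

project_path = "path/to/project"                          # hypothetical checkout
review_url = "https://example-review.googlesource.com/"   # hypothetical host

# Equivalent to: git -C <project> config review.<url>.autoupload true
subprocess.run(
    ["git", "-C", project_path, "config",
     f"review.{review_url}.autoupload", "true"],
    check=True,
)
```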
|
||||
|
||||
## ~/ dotconfig layout
|
||||
|
||||
Repo will create & maintain a few files under the `.repoconfig/` directory.
|
||||
This is placed in the user's home directory by default but can be changed by
|
||||
setting `REPO_CONFIG_DIR`.
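
For example, a wrapper could point repo at a different config directory before
invoking it; a minimal sketch (the directory path and subcommand are arbitrary):

```python
import os
import subprocess

# Run a repo command with the per-user config kept outside $HOME.
env = dict(os.environ, REPO_CONFIG_DIR="/tmp/repo-config")  # arbitrary location
subprocess.run(["repo", "version"], env=env, check=True)
```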
|
||||
|
||||
* `.repoconfig/`: Repo's per-user directory for all random config files/state.
|
||||
* `.repoconfig/config`: Per-user settings using [git-config] file format.
|
||||
@@ -254,6 +266,7 @@ Repo will create & maintain a few files in the user's home directory.
|
||||
|
||||
|
||||
[git-config]: https://git-scm.com/docs/git-config
|
||||
[Git LFS]: https://git-lfs.github.com/
|
||||
[git worktree]: https://git-scm.com/docs/git-worktree
|
||||
[gitsubmodules]: https://git-scm.com/docs/gitsubmodules
|
||||
[manifest-format.md]: ./manifest-format.md
|
||||
|
||||
@@ -26,6 +26,7 @@ following DTD:
|
||||
remote*,
|
||||
default?,
|
||||
manifest-server?,
|
||||
submanifest*?,
|
||||
remove-project*,
|
||||
project*,
|
||||
extend-project*,
|
||||
@@ -57,6 +58,16 @@ following DTD:
|
||||
<!ELEMENT manifest-server EMPTY>
|
||||
<!ATTLIST manifest-server url CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT submanifest EMPTY>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest remote IDREF #IMPLIED>
|
||||
<!ATTLIST submanifest project CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest manifest-name CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest revision CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest path CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest groups CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest default-groups CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT project (annotation*,
|
||||
project*,
|
||||
copyfile*,
|
||||
@@ -94,10 +105,13 @@ following DTD:
|
||||
<!ATTLIST extend-project groups CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project revision CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project remote CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project dest-branch CDATA #IMPLIED>
|
||||
<!ATTLIST extend-project upstream CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT remove-project EMPTY>
|
||||
<!ATTLIST remove-project name CDATA #IMPLIED>
<!ATTLIST remove-project path CDATA #IMPLIED>
<!ATTLIST remove-project optional CDATA #IMPLIED>
|
||||
|
||||
<!ELEMENT repo-hooks EMPTY>
|
||||
<!ATTLIST repo-hooks in-project CDATA #REQUIRED>
|
||||
@@ -112,8 +126,9 @@ following DTD:
|
||||
<!ATTLIST contactinfo bugurl CDATA #REQUIRED>
|
||||
|
||||
<!ELEMENT include EMPTY>
|
||||
<!ATTLIST include name CDATA #REQUIRED>
<!ATTLIST include groups CDATA #IMPLIED>
<!ATTLIST include revision CDATA #IMPLIED>
|
||||
]>
|
||||
```
|
||||
|
||||
@@ -236,6 +251,66 @@ the specified tag. This is used by repo sync when the --smart-tag option
|
||||
is given.
|
||||
|
||||
|
||||
### Element submanifest
|
||||
|
||||
One or more submanifest elements may be specified. Each element describes a
|
||||
single manifest to be checked out as a child.
|
||||
|
||||
Attribute `name`: A unique name (within the current (sub)manifest) for this
|
||||
submanifest. It acts as a default for `revision` below. The same name can be
|
||||
used for submanifests with different parent (sub)manifests.
|
||||
|
||||
Attribute `remote`: Name of a previously defined remote element.
|
||||
If not supplied the remote given by the default element is used.
|
||||
|
||||
Attribute `project`: The manifest project name. The project's name is appended
|
||||
onto its remote's fetch URL to generate the actual URL to configure the Git
|
||||
remote with. The URL gets formed as:
|
||||
|
||||
${remote_fetch}/${project_name}.git
|
||||
|
||||
where ${remote_fetch} is the remote's fetch attribute and
|
||||
${project_name} is the project's name attribute. The suffix ".git"
|
||||
is always appended as repo assumes the upstream is a forest of
|
||||
bare Git repositories. If the project has a parent element, its
|
||||
name will be prefixed by the parent's.
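
A worked example of that composition, using purely illustrative values:

```python
remote_fetch = "https://example.googlesource.com"  # remote's fetch attribute (illustrative)
project_name = "manifests/child"                   # submanifest project attribute (illustrative)

url = f"{remote_fetch}/{project_name}.git"
assert url == "https://example.googlesource.com/manifests/child.git"
```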
|
||||
|
||||
The project name must match the name Gerrit knows, if Gerrit is
|
||||
being used for code reviews.
|
||||
|
||||
`project` must not be empty, and may not be an absolute path or use "." or ".."
|
||||
path components. It is always interpreted relative to the remote's fetch
|
||||
settings, so if a different base path is needed, declare a different remote
|
||||
with the new settings needed.
|
||||
|
||||
If not supplied the remote and project for this manifest will be used: `remote`
|
||||
cannot be supplied.
|
||||
|
||||
Projects from a submanifest and its submanifests are added to the
|
||||
submanifest::path:<path_prefix> group.
|
||||
|
||||
Attribute `manifest-name`: The manifest filename in the manifest project. If
|
||||
not supplied, `default.xml` is used.
|
||||
|
||||
Attribute `revision`: Name of a Git branch (e.g. "main" or "refs/heads/main"),
|
||||
tag (e.g. "refs/tags/stable"), or a commit hash. If not supplied, `name` is
|
||||
used.
|
||||
|
||||
Attribute `path`: An optional path relative to the top directory
|
||||
of the repo client where the submanifest repo client top directory
|
||||
should be placed. If not supplied, `revision` is used.
|
||||
|
||||
`path` may not be an absolute path or use "." or ".." path components.
|
||||
|
||||
Attribute `groups`: List of additional groups to which all projects
|
||||
in the included submanifest belong. This appends and recurses, meaning
|
||||
all projects in submanifests carry all parent submanifest groups.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `default-groups`: The list of manifest groups to sync if no
|
||||
`--groups=` parameter was specified at init. When that list is empty, use this
|
||||
list instead of "default" as the list of groups to sync.
|
||||
|
||||
### Element project
|
||||
|
||||
One or more project elements may be specified. Each element
|
||||
@@ -352,6 +427,12 @@ project. Same syntax as the corresponding element of `project`.
|
||||
Attribute `remote`: If specified, overrides the remote of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `dest-branch`: If specified, overrides the dest-branch of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `upstream`: If specified, overrides the upstream of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
|
||||
### Element annotation
|
||||
|
||||
Zero or more annotation elements may be specified as children of a
|
||||
@@ -393,7 +474,7 @@ of the repo client.
|
||||
|
||||
### Element remove-project
|
||||
|
||||
Deletes a project from the internal manifest table, possibly
|
||||
allowing a subsequent project element in the same manifest file to
|
||||
replace the project with a different source.
|
||||
|
||||
@@ -401,6 +482,17 @@ This element is mostly useful in a local manifest file, where
|
||||
the user can remove a project, and possibly replace it with their
|
||||
own definition.
|
||||
|
||||
The project `name` or project `path` can be used to specify the remove target
|
||||
meaning one of them is required. If only name is specified, all
|
||||
projects with that name are removed.
|
||||
|
||||
If both name and path are specified, only projects with the same name and
|
||||
path are removed, meaning projects with the same name but in other
|
||||
locations are kept.
|
||||
|
||||
If only path is specified, a matching project is removed regardless of its
|
||||
name. Logic otherwise behaves like both are specified.
|
||||
|
||||
Attribute `optional`: Set to true to ignore remove-project elements with no
|
||||
matching `project` element.
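
As a sketch, a local manifest that removes a project could be written like
this; the project name is illustrative, and the `.repo/local_manifests/`
location is an assumption rather than something stated in this section:

```python
#!/usr/bin/env python3
"""Sketch: write a local manifest that removes a project by name."""
import os
import textwrap

local_dir = os.path.join(".repo", "local_manifests")  # assumed location
os.makedirs(local_dir, exist_ok=True)

xml = textwrap.dedent("""\
    <?xml version="1.0" encoding="UTF-8"?>
    <manifest>
      <!-- Remove every project named "platform/example" (illustrative);
           optional="true" keeps this from failing when no project matches. -->
      <remove-project name="platform/example" optional="true" />
    </manifest>
""")

with open(os.path.join(local_dir, "remove-example.xml"), "w") as f:
    f.write(xml)
```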
|
||||
|
||||
@@ -471,9 +563,12 @@ These restrictions are not enforced for [Local Manifests].
|
||||
|
||||
Attribute `groups`: List of additional groups to which all projects
|
||||
in the included manifest belong. This appends and recurses, meaning
|
||||
all projects in included manifests carry all parent include groups.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
|
||||
Attribute `revision`: Name of a Git branch (e.g. `main` or `refs/heads/main`) to
use as the default revision for all projects in the included manifest.
|
||||
|
||||
## Local Manifests {#local-manifests}
|
||||
|
||||
Additional remotes and projects may be added through local manifest
|
||||
|
||||
@@ -1,47 +1,92 @@
|
||||
# Supported Python Versions
|
||||
|
||||
With Python 2.7 officially going EOL on [01 Jan 2020](https://pythonclock.org/),
|
||||
we need a support plan for the repo project itself.
|
||||
Inevitably, there will be a long tail of users who still want to use Python 2 on
|
||||
their old LTS/corp systems and have little power to change the system.
|
||||
This documents the current supported Python versions, and tries to provide
|
||||
guidance for when we decide to drop support for older versions.
|
||||
|
||||
## Summary
|
||||
|
||||
* Python 3.6 (released Dec 2016) is required starting with repo-2.0.
* Older versions of Python (e.g. v2.7) may use old releases via the repo-1.x
  branch, but no support is provided.
|
||||
|
||||
## Overview
|
||||
|
||||
We provide a branch for Python 2 users that is feature-frozen.
|
||||
Bugfixes may be added on a best-effort basis or from the community, but largely
|
||||
no new features will be added, nor is support guaranteed.
|
||||
|
||||
Users can select this during `repo init` time via the [repo launcher].
|
||||
Otherwise the default branches (e.g. stable & main) will be used which will
|
||||
require Python 3.
|
||||
|
||||
This means the [repo launcher] needs to support both Python 2 & Python 3, but
|
||||
since it doesn't import any other repo code, this shouldn't be too problematic.
|
||||
|
||||
The main branch will require Python 3.6 at a minimum.
|
||||
If the system has an older version of Python 3, then users will have to select
|
||||
the legacy Python 2 branch instead.
|
||||
|
||||
## repo hooks
|
||||
|
||||
Projects that use [repo hooks] run on independent schedules.
Since it's not possible to detect what version of Python the hooks were written
or tested against, we always import & exec them with the active Python version.
|
||||
|
||||
If the user's Python is too new for the [repo hooks], then it is up to the hooks
maintainer to update.
|
||||
|
||||
For more details, consult the [repo hooks] documentation.
|
||||
## Repo launcher
|
||||
|
||||
The [repo launcher] is an independent script that can support older versions of
|
||||
Python without holding back the rest of the codebase.
|
||||
If it detects the current version of Python is too old, it will try to reexec
|
||||
via a newer version of Python via standard `pythonX.Y` interpreter names.
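
The probing described above can be sketched as follows; this is illustrative
only, and the exact interpreter names and version bounds the launcher tries are
assumptions, not taken from this document:

```python
import shutil
import sys


def find_newer_python(minimum=(3, 6)):
    """Illustrative only: look for `pythonX.Y` names on PATH, newest first."""
    if sys.version_info[:2] >= minimum:
        return None  # current interpreter is already new enough
    for minor in range(12, minimum[1] - 1, -1):
        exe = shutil.which(f"python3.{minor}")
        if exe:
            return exe
    return None
```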
|
||||
|
||||
However, this is provided as a nicety when it is not onerous, and there is no
|
||||
official support for older versions of Python than the rest of the codebase.
|
||||
|
||||
If your default python interpreters are too old to run the launcher even though
|
||||
you have newer versions installed, your choices are:
|
||||
|
||||
* Modify the [repo launcher]'s shebang to suit your environment.
|
||||
* Download an older version of the [repo launcher] and don't upgrade it.
|
||||
Be aware that we do not guarantee old repo launchers will work with current
|
||||
versions of repo. Bug reports using old launchers will not be accepted.
|
||||
|
||||
## When to drop support
|
||||
|
||||
So far, Python 3.6 has provided most of the interesting features that we want
|
||||
(e.g. typing & f-strings), and there haven't been features in newer versions
|
||||
that are critical to us.
|
||||
|
||||
That said, let's assume we need functionality that only exists in Python 3.7.
|
||||
How do we decide when it's acceptable to drop Python 3.6?
|
||||
|
||||
1. Review the [Project References](./release-process.md#project-references) to
|
||||
see what major distros are using the previous version of Python, and when
|
||||
they go EOL. Generally we care about Ubuntu LTS & current/previous Debian
|
||||
stable versions.
|
||||
* If they're all EOL already, then go for it, drop support.
|
||||
* If they aren't EOL, start a thread on [repo-discuss] to see how the user
|
||||
base feels about the proposal.
|
||||
1. Update the "soft" versions in the codebase. This will start warning users
|
||||
that the older version is deprecated.
|
||||
* Update [repo](/repo) if the launcher needs updating.
|
||||
This only helps with people who download newer launchers.
|
||||
* Update [main.py](/main.py) for the main codebase.
|
||||
This warns for everyone regardless of [repo launcher] version.
|
||||
* Update [requirements.json](/requirements.json).
|
||||
This allows [repo launcher] to display warnings/errors without having
|
||||
to execute the new codebase. This helps in case of syntax or module
|
||||
changes where older versions won't even be able to import the new code.
|
||||
1. After some grace period (ideally at least 2 quarters after the first release
|
||||
with the updated soft requirements), update the "hard" versions, and then
|
||||
start using the new functionality.
|
||||
|
||||
## Python 2.7 & 3.0-3.5
|
||||
|
||||
> **There is no support for these versions.**
|
||||
> **Do not file bugs if you are using old Python versions.**
|
||||
> **Any such reports will be marked invalid and ignored.**
|
||||
> **Upgrade your distro and/or runtime instead.**
|
||||
|
||||
Fetch an old version of the [repo launcher]:
|
||||
|
||||
```sh
$ curl https://storage.googleapis.com/git-repo-downloads/repo-2.32 > ~/.bin/repo-2.32
$ chmod a+rx ~/.bin/repo-2.32
```
|
||||
|
||||
Then initialize an old version of repo:
|
||||
|
||||
```sh
$ repo-2.32 init --repo-rev=repo-1 ...
```
|
||||
|
||||
|
||||
[repo-discuss]: https://groups.google.com/forum/#!forum/repo-discuss
|
||||
[repo hooks]: ./repo-hooks.md
|
||||
[repo launcher]: ../repo
|
||||
|
||||
@@ -143,23 +143,14 @@ internal processes for accessing the restricted keys.
|
||||
***
|
||||
|
||||
```sh
# Set the gpg key directory.
$ export GNUPGHOME=~/.gnupg/repo/

# Verify the listed key is “Repo Maintainer”.
$ gpg -K

# Pick whatever branch or commit you want to tag.
$ r=main

# Pick the new version.
$ t=v2.30

# Create a new signed tag with the current HEAD.
$ ./release/sign-tag.py $t

# Verify the signed tag.
$ git show $t
```
|
||||
|
||||
### Push the new release
|
||||
@@ -168,11 +159,11 @@ Once you're ready to make the release available to everyone, push it to the
|
||||
`stable` branch.
|
||||
|
||||
Make sure you never push the tag itself to the stable branch!
|
||||
Only push the commit -- note the use of `^0` below.
|
||||
|
||||
```sh
$ git push https://gerrit-review.googlesource.com/git-repo $t
$ git push https://gerrit-review.googlesource.com/git-repo $t^0:stable
```
|
||||
|
||||
If something goes horribly wrong, you can force push the previous version to the
|
||||
@@ -195,7 +186,9 @@ You can create a short changelog using the command:
|
||||
```sh
# If you haven't pushed to the stable branch yet, you can use origin/stable.
# If you have pushed, change origin/stable to the previous release tag.
# This assumes "main" is the current tagged release. If it's newer, change it
# to the current release tag too.
$ git log --format="%h (%aN) %s" --no-merges origin/stable..main
```
|
||||
|
||||
## Project References
|
||||
@@ -291,7 +284,7 @@ Things in italics are things we used to care about but probably don't anymore.
|
||||
| Apr 2018 | | | | 7.7 | 18.10 Cosmic |
|
||||
| Apr 2018 | **Apr 2028** | | | | **18.04 Bionic** | 2.17.0 | 2.7.15 3.6.5 | 7.6 |
|
||||
| Jun 2018 | *Mar 2021* | 2.18.0 |
|
||||
| Jun 2018 | **Jun 2023** | | 3.7.0 | | 19.04 Disco - **Buster** |
|
||||
| Aug 2018 | | | | 7.8 |
|
||||
| Sep 2018 | *Mar 2021* | 2.19.0 | | | 18.10 Cosmic |
|
||||
| Oct 2018 | | | | 7.9 | 19.04 Disco / **Buster** |
|
||||
|
||||
editor.py
@@ -14,102 +14,106 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from error import EditorError
|
||||
import platform_utils
|
||||
|
||||
|
||||
class Editor(object):
|
||||
"""Manages the user's preferred text editor."""
|
||||
class Editor:
|
||||
"""Manages the user's preferred text editor."""
|
||||
|
||||
_editor = None
|
||||
globalConfig = None
|
||||
_editor = None
|
||||
globalConfig = None
|
||||
|
||||
@classmethod
|
||||
def _GetEditor(cls):
|
||||
if cls._editor is None:
|
||||
cls._editor = cls._SelectEditor()
|
||||
return cls._editor
|
||||
@classmethod
|
||||
def _GetEditor(cls):
|
||||
if cls._editor is None:
|
||||
cls._editor = cls._SelectEditor()
|
||||
return cls._editor
|
||||
|
||||
@classmethod
|
||||
def _SelectEditor(cls):
|
||||
e = os.getenv('GIT_EDITOR')
|
||||
if e:
|
||||
return e
|
||||
@classmethod
|
||||
def _SelectEditor(cls):
|
||||
e = os.getenv("GIT_EDITOR")
|
||||
if e:
|
||||
return e
|
||||
|
||||
if cls.globalConfig:
|
||||
e = cls.globalConfig.GetString('core.editor')
|
||||
if e:
|
||||
return e
|
||||
if cls.globalConfig:
|
||||
e = cls.globalConfig.GetString("core.editor")
|
||||
if e:
|
||||
return e
|
||||
|
||||
e = os.getenv('VISUAL')
|
||||
if e:
|
||||
return e
|
||||
e = os.getenv("VISUAL")
|
||||
if e:
|
||||
return e
|
||||
|
||||
e = os.getenv('EDITOR')
|
||||
if e:
|
||||
return e
|
||||
e = os.getenv("EDITOR")
|
||||
if e:
|
||||
return e
|
||||
|
||||
if os.getenv('TERM') == 'dumb':
|
||||
print(
|
||||
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
|
||||
if os.getenv("TERM") == "dumb":
|
||||
print(
|
||||
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
|
||||
Tried to fall back to vi but terminal is dumb. Please configure at
|
||||
least one of these before using this command.""", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
least one of these before using this command.""", # noqa: E501
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
return 'vi'
|
||||
return "vi"
|
||||
|
||||
@classmethod
|
||||
def EditString(cls, data):
|
||||
"""Opens an editor to edit the given content.
|
||||
@classmethod
|
||||
def EditString(cls, data):
|
||||
"""Opens an editor to edit the given content.
|
||||
|
||||
Args:
|
||||
data: The text to edit.
|
||||
Args:
|
||||
data: The text to edit.
|
||||
|
||||
Returns:
|
||||
New value of edited text.
|
||||
Returns:
|
||||
New value of edited text.
|
||||
|
||||
Raises:
|
||||
EditorError: The editor failed to run.
|
||||
"""
|
||||
editor = cls._GetEditor()
|
||||
if editor == ':':
|
||||
return data
|
||||
Raises:
|
||||
EditorError: The editor failed to run.
|
||||
"""
|
||||
editor = cls._GetEditor()
|
||||
if editor == ":":
|
||||
return data
|
||||
|
||||
fd, path = tempfile.mkstemp()
|
||||
try:
|
||||
os.write(fd, data.encode('utf-8'))
|
||||
os.close(fd)
|
||||
fd = None
|
||||
fd, path = tempfile.mkstemp()
|
||||
try:
|
||||
os.write(fd, data.encode("utf-8"))
|
||||
os.close(fd)
|
||||
fd = None
|
||||
|
||||
if platform_utils.isWindows():
|
||||
# Split on spaces, respecting quoted strings
|
||||
import shlex
|
||||
args = shlex.split(editor)
|
||||
shell = False
|
||||
elif re.compile("^.*[$ \t'].*$").match(editor):
|
||||
args = [editor + ' "$@"', 'sh']
|
||||
shell = True
|
||||
else:
|
||||
args = [editor]
|
||||
shell = False
|
||||
args.append(path)
|
||||
if platform_utils.isWindows():
|
||||
# Split on spaces, respecting quoted strings
|
||||
import shlex
|
||||
|
||||
try:
|
||||
rc = subprocess.Popen(args, shell=shell).wait()
|
||||
except OSError as e:
|
||||
raise EditorError('editor failed, %s: %s %s'
|
||||
% (str(e), editor, path))
|
||||
if rc != 0:
|
||||
raise EditorError('editor failed with exit status %d: %s %s'
|
||||
% (rc, editor, path))
|
||||
args = shlex.split(editor)
|
||||
shell = False
|
||||
elif re.compile("^.*[$ \t'].*$").match(editor):
|
||||
args = [editor + ' "$@"', "sh"]
|
||||
shell = True
|
||||
else:
|
||||
args = [editor]
|
||||
shell = False
|
||||
args.append(path)
|
||||
|
||||
with open(path, mode='rb') as fd2:
|
||||
return fd2.read().decode('utf-8')
|
||||
finally:
|
||||
if fd:
|
||||
os.close(fd)
|
||||
platform_utils.remove(path)
|
||||
try:
|
||||
rc = subprocess.Popen(args, shell=shell).wait()
|
||||
except OSError as e:
|
||||
raise EditorError(f"editor failed, {str(e)}: {editor} {path}")
|
||||
if rc != 0:
|
||||
raise EditorError(
|
||||
"editor failed with exit status %d: %s %s"
|
||||
% (rc, editor, path)
|
||||
)
|
||||
|
||||
with open(path, mode="rb") as fd2:
|
||||
return fd2.read().decode("utf-8")
|
||||
finally:
|
||||
if fd:
|
||||
os.close(fd)
|
||||
platform_utils.remove(path)
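
A short usage sketch of the class above (runnable only inside a repo source
checkout; the sample text is arbitrary):

```python
from editor import Editor

# Editor looks at GIT_EDITOR, then core.editor, then VISUAL, then EDITOR,
# and finally falls back to `vi`; an editor of ":" returns the text as-is.
new_text = Editor.EditString("Change-Id: placeholder text\n")
print(new_text)
```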
|
||||
|
||||
error.py
@@ -12,124 +12,178 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import List
|
||||
|
||||
class ManifestParseError(Exception):
|
||||
"""Failed to parse the manifest file.
|
||||
"""
|
||||
|
||||
class BaseRepoError(Exception):
|
||||
"""All repo specific exceptions derive from BaseRepoError."""
|
||||
|
||||
|
||||
class RepoError(BaseRepoError):
|
||||
"""Exceptions thrown inside repo that can be handled."""
|
||||
|
||||
def __init__(self, *args, project: str = None) -> None:
|
||||
super().__init__(*args)
|
||||
self.project = project
|
||||
|
||||
|
||||
class RepoExitError(BaseRepoError):
|
||||
"""Exception thrown that result in termination of repo program.
|
||||
- Should only be handled in main.py
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
exit_code: int = 1,
|
||||
aggregate_errors: List[Exception] = None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.exit_code = exit_code
|
||||
self.aggregate_errors = aggregate_errors
|
||||
|
||||
|
||||
class RepoUnhandledExceptionError(RepoExitError):
|
||||
"""Exception that maintains error as reason for program exit."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
error: BaseException,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
super().__init__(error, **kwargs)
|
||||
self.error = error
|
||||
|
||||
|
||||
class SilentRepoExitError(RepoExitError):
|
||||
"""RepoExitError that should no include CLI logging of issue/issues."""
|
||||
|
||||
|
||||
class ManifestParseError(RepoExitError):
|
||||
"""Failed to parse the manifest file."""
|
||||
|
||||
|
||||
class ManifestInvalidRevisionError(ManifestParseError):
|
||||
"""The revision value in a project is incorrect.
|
||||
"""
|
||||
"""The revision value in a project is incorrect."""
|
||||
|
||||
|
||||
class ManifestInvalidPathError(ManifestParseError):
|
||||
"""A path used in <copyfile> or <linkfile> is incorrect.
|
||||
"""
|
||||
"""A path used in <copyfile> or <linkfile> is incorrect."""
|
||||
|
||||
|
||||
class NoManifestException(Exception):
|
||||
"""The required manifest does not exist.
|
||||
"""
|
||||
class NoManifestException(RepoExitError):
|
||||
"""The required manifest does not exist."""
|
||||
|
||||
def __init__(self, path, reason):
|
||||
super().__init__(path, reason)
|
||||
self.path = path
|
||||
self.reason = reason
|
||||
def __init__(self, path, reason, **kwargs):
|
||||
super().__init__(path, reason, **kwargs)
|
||||
self.path = path
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
|
||||
class EditorError(Exception):
|
||||
"""Unspecified error from the user's text editor.
|
||||
"""
|
||||
class EditorError(RepoError):
|
||||
"""Unspecified error from the user's text editor."""
|
||||
|
||||
def __init__(self, reason):
|
||||
super().__init__(reason)
|
||||
self.reason = reason
|
||||
def __init__(self, reason, **kwargs):
|
||||
super().__init__(reason, **kwargs)
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
|
||||
class GitError(Exception):
|
||||
"""Unspecified internal error from git.
|
||||
"""
|
||||
class GitError(RepoError):
|
||||
"""Unspecified git related error."""
|
||||
|
||||
def __init__(self, command):
|
||||
super().__init__(command)
|
||||
self.command = command
|
||||
def __init__(self, message, command_args=None, **kwargs):
|
||||
super().__init__(message, **kwargs)
|
||||
self.message = message
|
||||
self.command_args = command_args
|
||||
|
||||
def __str__(self):
|
||||
return self.command
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
|
||||
class UploadError(Exception):
|
||||
"""A bundle upload to Gerrit did not succeed.
|
||||
"""
|
||||
|
||||
def __init__(self, reason):
|
||||
super().__init__(reason)
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
class GitcUnsupportedError(RepoExitError):
|
||||
"""Gitc no longer supported."""
|
||||
|
||||
|
||||
class DownloadError(Exception):
|
||||
"""Cannot download a repository.
|
||||
"""
|
||||
class UploadError(RepoError):
|
||||
"""A bundle upload to Gerrit did not succeed."""
|
||||
|
||||
def __init__(self, reason):
|
||||
super().__init__(reason)
|
||||
self.reason = reason
|
||||
def __init__(self, reason, **kwargs):
|
||||
super().__init__(reason, **kwargs)
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
|
||||
class NoSuchProjectError(Exception):
|
||||
"""A specified project does not exist in the work tree.
|
||||
"""
|
||||
class DownloadError(RepoExitError):
|
||||
"""Cannot download a repository."""
|
||||
|
||||
def __init__(self, name=None):
|
||||
super().__init__(name)
|
||||
self.name = name
|
||||
def __init__(self, reason, **kwargs):
|
||||
super().__init__(reason, **kwargs)
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
if self.name is None:
|
||||
return 'in current directory'
|
||||
return self.name
|
||||
def __str__(self):
|
||||
return self.reason
|
||||
|
||||
|
||||
class InvalidProjectGroupsError(Exception):
|
||||
"""A specified project is not suitable for the specified groups
|
||||
"""
|
||||
|
||||
def __init__(self, name=None):
|
||||
super().__init__(name)
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
if self.name is None:
|
||||
return 'in current directory'
|
||||
return self.name
|
||||
class InvalidArgumentsError(RepoExitError):
|
||||
"""Invalid command Arguments."""
|
||||
|
||||
|
||||
class RepoChangedException(Exception):
|
||||
"""Thrown if 'repo sync' results in repo updating its internal
|
||||
repo or manifest repositories. In this special case we must
|
||||
use exec to re-execute repo with the new code and manifest.
|
||||
"""
|
||||
|
||||
def __init__(self, extra_args=None):
|
||||
super().__init__(extra_args)
|
||||
self.extra_args = extra_args or []
|
||||
class SyncError(RepoExitError):
|
||||
"""Cannot sync repo."""
|
||||
|
||||
|
||||
class HookError(Exception):
|
||||
"""Thrown if a 'repo-hook' could not be run.
|
||||
class UpdateManifestError(RepoExitError):
|
||||
"""Cannot update manifest."""
|
||||
|
||||
The common case is that the file wasn't present when we tried to run it.
|
||||
"""
|
||||
|
||||
class NoSuchProjectError(RepoExitError):
|
||||
"""A specified project does not exist in the work tree."""
|
||||
|
||||
def __init__(self, name=None, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
if self.name is None:
|
||||
return "in current directory"
|
||||
return self.name
|
||||
|
||||
|
||||
class InvalidProjectGroupsError(RepoExitError):
|
||||
"""A specified project is not suitable for the specified groups"""
|
||||
|
||||
def __init__(self, name=None, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
if self.name is None:
|
||||
return "in current directory"
|
||||
return self.name
|
||||
|
||||
|
||||
class RepoChangedException(BaseRepoError):
|
||||
"""Thrown if 'repo sync' results in repo updating its internal
|
||||
repo or manifest repositories. In this special case we must
|
||||
use exec to re-execute repo with the new code and manifest.
|
||||
"""
|
||||
|
||||
def __init__(self, extra_args=None):
|
||||
super().__init__(extra_args)
|
||||
self.extra_args = extra_args or []
|
||||
|
||||
|
||||
class HookError(RepoError):
|
||||
"""Thrown if a 'repo-hook' could not be run.
|
||||
|
||||
The common case is that the file wasn't present when we tried to run it.
|
||||
"""
|
||||
|
||||
event_log.py
@@ -15,161 +15,170 @@
|
||||
import json
|
||||
import multiprocessing
|
||||
|
||||
TASK_COMMAND = 'command'
|
||||
TASK_SYNC_NETWORK = 'sync-network'
|
||||
TASK_SYNC_LOCAL = 'sync-local'
|
||||
|
||||
TASK_COMMAND = "command"
|
||||
TASK_SYNC_NETWORK = "sync-network"
|
||||
TASK_SYNC_LOCAL = "sync-local"
|
||||
|
||||
|
||||
class EventLog(object):
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
class EventLog:
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
|
||||
Events are written to the log as a consecutive JSON entries, one per line.
|
||||
Each entry contains the following keys:
|
||||
- id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
|
||||
The ID is only unique for the invocation of the repo command.
|
||||
- name: Name of the object being operated upon.
|
||||
- task_name: The task that was performed.
|
||||
- start: Timestamp of when the operation started.
|
||||
- finish: Timestamp of when the operation finished.
|
||||
- success: Boolean indicating if the operation was successful.
|
||||
- try_count: A counter indicating the try count of this task.
|
||||
Events are written to the log as a consecutive JSON entries, one per line.
|
||||
Each entry contains the following keys:
|
||||
- id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
|
||||
The ID is only unique for the invocation of the repo command.
|
||||
- name: Name of the object being operated upon.
|
||||
- task_name: The task that was performed.
|
||||
- start: Timestamp of when the operation started.
|
||||
- finish: Timestamp of when the operation finished.
|
||||
- success: Boolean indicating if the operation was successful.
|
||||
- try_count: A counter indicating the try count of this task.
|
||||
|
||||
Optionally:
|
||||
- parent: A ('RepoOp', ID) tuple indicating the parent event for nested
|
||||
events.
|
||||
Optionally:
|
||||
- parent: A ('RepoOp', ID) tuple indicating the parent event for nested
|
||||
events.
|
||||
|
||||
Valid task_names include:
|
||||
- command: The invocation of a subcommand.
|
||||
- sync-network: The network component of a sync command.
|
||||
- sync-local: The local component of a sync command.
|
||||
Valid task_names include:
|
||||
- command: The invocation of a subcommand.
|
||||
- sync-network: The network component of a sync command.
|
||||
- sync-local: The local component of a sync command.
|
||||
|
||||
Specific tasks may include additional informational properties.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initializes the event log."""
|
||||
self._log = []
|
||||
self._parent = None
|
||||
|
||||
def Add(self, name, task_name, start, finish=None, success=None,
|
||||
try_count=1, kind='RepoOp'):
|
||||
"""Add an event to the log.
|
||||
|
||||
Args:
|
||||
name: Name of the object being operated upon.
|
||||
task_name: A sub-task that was performed for name.
|
||||
start: Timestamp of when the operation started.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
try_count: A counter indicating the try count of this task.
|
||||
kind: The kind of the object for the unique identifier.
|
||||
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
Specific tasks may include additional informational properties.
|
||||
"""
|
||||
event = {
|
||||
'id': (kind, _NextEventId()),
|
||||
'name': name,
|
||||
'task_name': task_name,
|
||||
'start_time': start,
|
||||
'try': try_count,
|
||||
}
|
||||
|
||||
if self._parent:
|
||||
event['parent'] = self._parent['id']
|
||||
def __init__(self):
|
||||
"""Initializes the event log."""
|
||||
self._log = []
|
||||
self._parent = None
|
||||
|
||||
if success is not None or finish is not None:
|
||||
self.FinishEvent(event, finish, success)
|
||||
def Add(
|
||||
self,
|
||||
name,
|
||||
task_name,
|
||||
start,
|
||||
finish=None,
|
||||
success=None,
|
||||
try_count=1,
|
||||
kind="RepoOp",
|
||||
):
|
||||
"""Add an event to the log.
|
||||
|
||||
self._log.append(event)
|
||||
return event
|
||||
Args:
|
||||
name: Name of the object being operated upon.
|
||||
task_name: A sub-task that was performed for name.
|
||||
start: Timestamp of when the operation started.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
try_count: A counter indicating the try count of this task.
|
||||
kind: The kind of the object for the unique identifier.
|
||||
|
||||
def AddSync(self, project, task_name, start, finish, success):
|
||||
"""Add a event to the log for a sync command.
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
"""
|
||||
event = {
|
||||
"id": (kind, _NextEventId()),
|
||||
"name": name,
|
||||
"task_name": task_name,
|
||||
"start_time": start,
|
||||
"try": try_count,
|
||||
}
|
||||
|
||||
Args:
|
||||
project: Project being synced.
|
||||
task_name: A sub-task that was performed for name.
|
||||
One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
|
||||
start: Timestamp of when the operation started.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
if self._parent:
|
||||
event["parent"] = self._parent["id"]
|
||||
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
"""
|
||||
event = self.Add(project.relpath, task_name, start, finish, success)
|
||||
if event is not None:
|
||||
event['project'] = project.name
|
||||
if project.revisionExpr:
|
||||
event['revision'] = project.revisionExpr
|
||||
if project.remote.url:
|
||||
event['project_url'] = project.remote.url
|
||||
if project.remote.fetchUrl:
|
||||
event['remote_url'] = project.remote.fetchUrl
|
||||
try:
|
||||
event['git_hash'] = project.GetCommitRevisionId()
|
||||
except Exception:
|
||||
pass
|
||||
return event
|
||||
if success is not None or finish is not None:
|
||||
self.FinishEvent(event, finish, success)
|
||||
|
||||
def GetStatusString(self, success):
|
||||
"""Converst a boolean success to a status string.
|
||||
self._log.append(event)
|
||||
return event
|
||||
|
||||
Args:
|
||||
success: Boolean indicating if the operation was successful.
|
||||
def AddSync(self, project, task_name, start, finish, success):
|
||||
"""Add a event to the log for a sync command.
|
||||
|
||||
Returns:
|
||||
status string.
|
||||
"""
|
||||
return 'pass' if success else 'fail'
|
||||
Args:
|
||||
project: Project being synced.
|
||||
task_name: A sub-task that was performed for name.
|
||||
One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
|
||||
start: Timestamp of when the operation started.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
|
||||
def FinishEvent(self, event, finish, success):
|
||||
"""Finishes an incomplete event.
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
"""
|
||||
event = self.Add(project.relpath, task_name, start, finish, success)
|
||||
if event is not None:
|
||||
event["project"] = project.name
|
||||
if project.revisionExpr:
|
||||
event["revision"] = project.revisionExpr
|
||||
if project.remote.url:
|
||||
event["project_url"] = project.remote.url
|
||||
if project.remote.fetchUrl:
|
||||
event["remote_url"] = project.remote.fetchUrl
|
||||
try:
|
||||
event["git_hash"] = project.GetCommitRevisionId()
|
||||
except Exception:
|
||||
pass
|
||||
return event
|
||||
|
||||
Args:
|
||||
event: An event that has been added to the log.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
def GetStatusString(self, success):
|
||||
"""Converst a boolean success to a status string.
|
||||
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
"""
|
||||
event['status'] = self.GetStatusString(success)
|
||||
event['finish_time'] = finish
|
||||
return event
|
||||
Args:
|
||||
success: Boolean indicating if the operation was successful.
|
||||
|
||||
def SetParent(self, event):
|
||||
"""Set a parent event for all new entities.
|
||||
Returns:
|
||||
status string.
|
||||
"""
|
||||
return "pass" if success else "fail"
|
||||
|
||||
Args:
|
||||
event: The event to use as a parent.
|
||||
"""
|
||||
self._parent = event
|
||||
def FinishEvent(self, event, finish, success):
|
||||
"""Finishes an incomplete event.
|
||||
|
||||
def Write(self, filename):
|
||||
"""Writes the log out to a file.
|
||||
Args:
|
||||
event: An event that has been added to the log.
|
||||
finish: Timestamp of when the operation finished.
|
||||
success: Boolean indicating if the operation was successful.
|
||||
|
||||
Args:
|
||||
filename: The file to write the log to.
|
||||
"""
|
||||
with open(filename, 'w+') as f:
|
||||
for e in self._log:
|
||||
json.dump(e, f, sort_keys=True)
|
||||
f.write('\n')
|
||||
Returns:
|
||||
A dictionary of the event added to the log.
|
||||
"""
|
||||
event["status"] = self.GetStatusString(success)
|
||||
event["finish_time"] = finish
|
||||
return event
|
||||
|
||||
def SetParent(self, event):
|
||||
"""Set a parent event for all new entities.
|
||||
|
||||
Args:
|
||||
event: The event to use as a parent.
|
||||
"""
|
||||
self._parent = event
|
||||
|
||||
def Write(self, filename):
|
||||
"""Writes the log out to a file.
|
||||
|
||||
Args:
|
||||
filename: The file to write the log to.
|
||||
"""
|
||||
with open(filename, "w+") as f:
|
||||
for e in self._log:
|
||||
json.dump(e, f, sort_keys=True)
|
||||
f.write("\n")
|
||||
|
||||
|
||||
# An integer id that is unique across this invocation of the program.
|
||||
_EVENT_ID = multiprocessing.Value('i', 1)
|
||||
_EVENT_ID = multiprocessing.Value("i", 1)
|
||||
|
||||
|
||||
def _NextEventId():
|
||||
"""Helper function for grabbing the next unique id.
|
||||
"""Helper function for grabbing the next unique id.
|
||||
|
||||
Returns:
|
||||
A unique, to this invocation of the program, integer id.
|
||||
"""
|
||||
with _EVENT_ID.get_lock():
|
||||
val = _EVENT_ID.value
|
||||
_EVENT_ID.value += 1
|
||||
return val
|
||||
Returns:
|
||||
A unique, to this invocation of the program, integer id.
|
||||
"""
|
||||
with _EVENT_ID.get_lock():
|
||||
val = _EVENT_ID.value
|
||||
_EVENT_ID.value += 1
|
||||
return val
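
A usage sketch of the class above (runnable only inside a repo source checkout;
the object name and output path are arbitrary):

```python
import time

from event_log import EventLog, TASK_COMMAND

log = EventLog()
start = time.time()
event = log.Add("manifests", TASK_COMMAND, start)  # name is arbitrary
log.FinishEvent(event, time.time(), success=True)
log.Write("/tmp/repo-events.json")                  # one JSON object per line
```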
|
||||
|
||||
fetch.py
@@ -19,27 +19,39 @@ import sys
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
|
||||
from error import RepoExitError
|
||||
|
||||
|
||||
class FetchFileError(RepoExitError):
|
||||
"""Exit error when fetch_file fails."""
|
||||
|
||||
|
||||
def fetch_file(url, verbose=False):
|
||||
"""Fetch a file from the specified source using the appropriate protocol.
|
||||
"""Fetch a file from the specified source using the appropriate protocol.
|
||||
|
||||
Returns:
|
||||
The contents of the file as bytes.
|
||||
"""
|
||||
scheme = urlparse(url).scheme
|
||||
if scheme == 'gs':
|
||||
cmd = ['gsutil', 'cat', url]
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
||||
check=True)
|
||||
if result.stderr and verbose:
|
||||
print('warning: non-fatal error running "gsutil": %s' % result.stderr,
|
||||
file=sys.stderr)
|
||||
return result.stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('fatal: error running "gsutil": %s' % e.stderr,
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
with urlopen(url) as f:
|
||||
return f.read()
|
||||
Returns:
|
||||
The contents of the file as bytes.
|
||||
"""
|
||||
scheme = urlparse(url).scheme
|
||||
if scheme == "gs":
|
||||
cmd = ["gsutil", "cat", url]
|
||||
errors = []
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
|
||||
)
|
||||
if result.stderr and verbose:
|
||||
print(
|
||||
'warning: non-fatal error running "gsutil": %s'
|
||||
% result.stderr,
|
||||
file=sys.stderr,
|
||||
)
|
||||
return result.stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
errors.append(e)
|
||||
print(
|
||||
'fatal: error running "gsutil": %s' % e.stderr, file=sys.stderr
|
||||
)
|
||||
raise FetchFileError(aggregate_errors=errors)
|
||||
with urlopen(url) as f:
|
||||
return f.read()
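
A usage sketch of `fetch_file()` (runnable only inside a repo source checkout;
the URL is a placeholder):

```python
from fetch import fetch_file

# gs:// URLs are read via `gsutil cat`; other schemes go through urllib.
data = fetch_file("https://example.com/default.xml", verbose=True)
print(f"fetched {len(data)} bytes")
```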
|
||||
|
||||
git_command.py
@@ -13,17 +13,26 @@
|
||||
# limitations under the License.
|
||||
|
||||
import functools
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any, Optional
|
||||
|
||||
from error import GitError
|
||||
from error import RepoExitError
|
||||
from git_refs import HEAD
|
||||
from git_trace2_event_log_base import BaseEventLog
|
||||
import platform_utils
|
||||
from repo_trace import REPO_TRACE, IsTrace, Trace
|
||||
from repo_logging import RepoLogger
|
||||
from repo_trace import IsTrace
|
||||
from repo_trace import REPO_TRACE
|
||||
from repo_trace import Trace
|
||||
from wrapper import Wrapper
|
||||
|
||||
GIT = 'git'
|
||||
|
||||
GIT = "git"
|
||||
# NB: These do not need to be kept in sync with the repo launcher script.
|
||||
# These may be much newer as it allows the repo launcher to roll between
|
||||
# different repo releases while source versions might require a newer git.
|
||||
@@ -35,262 +44,615 @@ GIT = 'git'
|
||||
# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
|
||||
MIN_GIT_VERSION_SOFT = (1, 9, 1)
|
||||
MIN_GIT_VERSION_HARD = (1, 7, 2)
|
||||
GIT_DIR = 'GIT_DIR'
|
||||
GIT_DIR = "GIT_DIR"
|
||||
|
||||
LAST_GITDIR = None
|
||||
LAST_CWD = None
|
||||
DEFAULT_GIT_FAIL_MESSAGE = "git command failure"
|
||||
ERROR_EVENT_LOGGING_PREFIX = "RepoGitCommandError"
|
||||
# Common line length limit
|
||||
GIT_ERROR_STDOUT_LINES = 1
|
||||
GIT_ERROR_STDERR_LINES = 10
|
||||
INVALID_GIT_EXIT_CODE = 126
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class _GitCall(object):
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def version_tuple(self):
|
||||
ret = Wrapper().ParseGitVersion()
|
||||
if ret is None:
|
||||
print('fatal: unable to detect git version', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return ret
|
||||
class _GitCall:
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def version_tuple(self):
|
||||
ret = Wrapper().ParseGitVersion()
|
||||
if ret is None:
|
||||
msg = "fatal: unable to detect git version"
|
||||
logger.error(msg)
|
||||
raise GitRequireError(msg)
|
||||
return ret
|
||||
|
||||
def __getattr__(self, name):
|
||||
name = name.replace('_', '-')
|
||||
def __getattr__(self, name):
|
||||
name = name.replace("_", "-")
|
||||
|
||||
def fun(*cmdv):
|
||||
command = [name]
|
||||
command.extend(cmdv)
|
||||
return GitCommand(None, command).Wait() == 0
|
||||
return fun
|
||||
def fun(*cmdv):
|
||||
command = [name]
|
||||
command.extend(cmdv)
|
||||
return GitCommand(None, command, add_event_log=False).Wait() == 0
|
||||
|
||||
return fun
|
||||
|
||||
|
||||
git = _GitCall()
|
||||
|
||||
|
||||
def RepoSourceVersion():
|
||||
"""Return the version of the repo.git tree."""
|
||||
ver = getattr(RepoSourceVersion, 'version', None)
|
||||
"""Return the version of the repo.git tree."""
|
||||
ver = getattr(RepoSourceVersion, "version", None)
|
||||
|
||||
# We avoid GitCommand so we don't run into circular deps -- GitCommand needs
|
||||
# to initialize version info we provide.
|
||||
if ver is None:
|
||||
env = GitCommand._GetBasicEnv()
|
||||
# We avoid GitCommand so we don't run into circular deps -- GitCommand needs
|
||||
# to initialize version info we provide.
|
||||
if ver is None:
|
||||
env = GitCommand._GetBasicEnv()
|
||||
|
||||
proj = os.path.dirname(os.path.abspath(__file__))
|
||||
env[GIT_DIR] = os.path.join(proj, '.git')
|
||||
result = subprocess.run([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.DEVNULL, encoding='utf-8',
|
||||
env=env, check=False)
|
||||
if result.returncode == 0:
|
||||
ver = result.stdout.strip()
|
||||
if ver.startswith('v'):
|
||||
ver = ver[1:]
|
||||
else:
|
||||
ver = 'unknown'
|
||||
setattr(RepoSourceVersion, 'version', ver)
|
||||
proj = os.path.dirname(os.path.abspath(__file__))
|
||||
env[GIT_DIR] = os.path.join(proj, ".git")
|
||||
result = subprocess.run(
|
||||
[GIT, "describe", HEAD],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.DEVNULL,
|
||||
encoding="utf-8",
|
||||
env=env,
|
||||
check=False,
|
||||
)
|
||||
if result.returncode == 0:
|
||||
ver = result.stdout.strip()
|
||||
if ver.startswith("v"):
|
||||
ver = ver[1:]
|
||||
else:
|
||||
ver = "unknown"
|
||||
setattr(RepoSourceVersion, "version", ver)
|
||||
|
||||
return ver
|
||||
return ver
|
||||
|
||||
|
||||
class UserAgent(object):
|
||||
"""Mange User-Agent settings when talking to external services
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def GetEventTargetPath():
|
||||
"""Get the 'trace2.eventtarget' path from git configuration.
|
||||
|
||||
We follow the style as documented here:
|
||||
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
|
||||
"""
|
||||
Returns:
|
||||
path: git config's 'trace2.eventtarget' path if it exists, or None
|
||||
"""
|
||||
path = None
|
||||
cmd = ["config", "--get", "trace2.eventtarget"]
|
||||
# TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
|
||||
# system git config variables.
|
||||
p = GitCommand(
|
||||
None,
|
||||
cmd,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
bare=True,
|
||||
add_event_log=False,
|
||||
)
|
||||
retval = p.Wait()
|
||||
if retval == 0:
|
||||
# Strip trailing carriage-return in path.
|
||||
path = p.stdout.rstrip("\n")
|
||||
if path == "":
|
||||
return None
|
||||
elif retval != 1:
|
||||
# `git config --get` is documented to produce an exit status of `1`
|
||||
# if the requested variable is not present in the configuration.
|
||||
# Report any other return value as an error.
|
||||
logger.error(
|
||||
"repo: error: 'git config --get' call failed with return code: "
|
||||
"%r, stderr: %r",
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return path
|
||||
|
||||
_os = None
|
||||
_repo_ua = None
|
||||
_git_ua = None
|
||||
|
||||
@property
|
||||
def os(self):
|
||||
"""The operating system name."""
|
||||
if self._os is None:
|
||||
os_name = sys.platform
|
||||
if os_name.lower().startswith('linux'):
|
||||
os_name = 'Linux'
|
||||
elif os_name == 'win32':
|
||||
os_name = 'Win32'
|
||||
elif os_name == 'cygwin':
|
||||
os_name = 'Cygwin'
|
||||
elif os_name == 'darwin':
|
||||
os_name = 'Darwin'
|
||||
self._os = os_name
|
||||
class UserAgent:
|
||||
"""Mange User-Agent settings when talking to external services
|
||||
|
||||
return self._os
|
||||
We follow the style as documented here:
|
||||
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
|
||||
"""
|
||||
|
||||
@property
|
||||
def repo(self):
|
||||
"""The UA when connecting directly from repo."""
|
||||
if self._repo_ua is None:
|
||||
py_version = sys.version_info
|
||||
self._repo_ua = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||
RepoSourceVersion(),
|
||||
self.os,
|
||||
git.version_tuple().full,
|
||||
py_version.major, py_version.minor, py_version.micro)
|
||||
_os = None
|
||||
_repo_ua = None
|
||||
_git_ua = None
|
||||
|
||||
return self._repo_ua
|
||||
@property
|
||||
def os(self):
|
||||
"""The operating system name."""
|
||||
if self._os is None:
|
||||
os_name = sys.platform
|
||||
if os_name.lower().startswith("linux"):
|
||||
os_name = "Linux"
|
||||
elif os_name == "win32":
|
||||
os_name = "Win32"
|
||||
elif os_name == "cygwin":
|
||||
os_name = "Cygwin"
|
||||
elif os_name == "darwin":
|
||||
os_name = "Darwin"
|
||||
self._os = os_name
|
||||
|
||||
@property
|
||||
def git(self):
|
||||
"""The UA when running git."""
|
||||
if self._git_ua is None:
|
||||
self._git_ua = 'git/%s (%s) git-repo/%s' % (
|
||||
git.version_tuple().full,
|
||||
self.os,
|
||||
RepoSourceVersion())
|
||||
return self._os
|
||||
|
||||
return self._git_ua
|
||||
@property
|
||||
def repo(self):
|
||||
"""The UA when connecting directly from repo."""
|
||||
if self._repo_ua is None:
|
||||
py_version = sys.version_info
|
||||
self._repo_ua = "git-repo/%s (%s) git/%s Python/%d.%d.%d" % (
|
||||
RepoSourceVersion(),
|
||||
self.os,
|
||||
git.version_tuple().full,
|
||||
py_version.major,
|
||||
py_version.minor,
|
||||
py_version.micro,
|
||||
)
|
||||
|
||||
return self._repo_ua
|
||||
|
||||
@property
|
||||
def git(self):
|
||||
"""The UA when running git."""
|
||||
if self._git_ua is None:
|
||||
self._git_ua = (
|
||||
f"git/{git.version_tuple().full} ({self.os}) "
|
||||
f"git-repo/{RepoSourceVersion()}"
|
||||
)
|
||||
return self._git_ua
|
||||
|
||||
|
||||
user_agent = UserAgent()
|
||||
|
||||
|
||||
def git_require(min_version, fail=False, msg=''):
|
||||
git_version = git.version_tuple()
|
||||
if min_version <= git_version:
|
||||
return True
|
||||
if fail:
|
||||
need = '.'.join(map(str, min_version))
|
||||
if msg:
|
||||
msg = ' for ' + msg
|
||||
print('fatal: git %s or later required%s' % (need, msg), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
return False
|
||||
def git_require(min_version, fail=False, msg=""):
|
||||
git_version = git.version_tuple()
|
||||
if min_version <= git_version:
|
||||
return True
|
||||
if fail:
|
||||
need = ".".join(map(str, min_version))
|
||||
if msg:
|
||||
msg = " for " + msg
|
||||
error_msg = f"fatal: git {need} or later required{msg}"
|
||||
logger.error(error_msg)
|
||||
raise GitRequireError(error_msg)
|
||||
return False
|
||||
|
||||
|
||||
class GitCommand(object):
|
||||
def __init__(self,
|
||||
project,
|
||||
cmdv,
|
||||
bare=False,
|
||||
input=None,
|
||||
capture_stdout=False,
|
||||
capture_stderr=False,
|
||||
merge_output=False,
|
||||
disable_editor=False,
|
||||
ssh_proxy=None,
|
||||
cwd=None,
|
||||
gitdir=None):
|
||||
env = self._GetBasicEnv()
|
||||
def _build_env(
|
||||
_kwargs_only=(),
|
||||
bare: Optional[bool] = False,
|
||||
disable_editor: Optional[bool] = False,
|
||||
ssh_proxy: Optional[Any] = None,
|
||||
gitdir: Optional[str] = None,
|
||||
objdir: Optional[str] = None,
|
||||
):
|
||||
"""Constucts an env dict for command execution."""
|
||||
|
||||
assert _kwargs_only == (), "_build_env only accepts keyword arguments."
|
||||
|
||||
env = GitCommand._GetBasicEnv()
|
||||
|
||||
if disable_editor:
|
||||
env['GIT_EDITOR'] = ':'
|
||||
env["GIT_EDITOR"] = ":"
|
||||
if ssh_proxy:
|
||||
env['REPO_SSH_SOCK'] = ssh_proxy.sock()
|
||||
env['GIT_SSH'] = ssh_proxy.proxy
|
||||
env['GIT_SSH_VARIANT'] = 'ssh'
|
||||
if 'http_proxy' in env and 'darwin' == sys.platform:
|
||||
s = "'http.proxy=%s'" % (env['http_proxy'],)
|
||||
p = env.get('GIT_CONFIG_PARAMETERS')
|
||||
if p is not None:
|
||||
s = p + ' ' + s
|
||||
env['GIT_CONFIG_PARAMETERS'] = s
|
||||
if 'GIT_ALLOW_PROTOCOL' not in env:
|
||||
env['GIT_ALLOW_PROTOCOL'] = (
|
||||
'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
|
||||
env['GIT_HTTP_USER_AGENT'] = user_agent.git
|
||||
env["REPO_SSH_SOCK"] = ssh_proxy.sock()
|
||||
env["GIT_SSH"] = ssh_proxy.proxy
|
||||
env["GIT_SSH_VARIANT"] = "ssh"
|
||||
if "http_proxy" in env and "darwin" == sys.platform:
|
||||
s = f"'http.proxy={env['http_proxy']}'"
|
||||
p = env.get("GIT_CONFIG_PARAMETERS")
|
||||
if p is not None:
|
||||
s = p + " " + s
|
||||
env["GIT_CONFIG_PARAMETERS"] = s
|
||||
if "GIT_ALLOW_PROTOCOL" not in env:
|
||||
env[
|
||||
"GIT_ALLOW_PROTOCOL"
|
||||
] = "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
|
||||
env["GIT_HTTP_USER_AGENT"] = user_agent.git
|
||||
|
||||
if project:
|
||||
if not cwd:
|
||||
cwd = project.worktree
|
||||
if not gitdir:
|
||||
gitdir = project.gitdir
|
||||
if objdir:
|
||||
# Set to the place we want to save the objects.
|
||||
env["GIT_OBJECT_DIRECTORY"] = objdir
|
||||
|
||||
command = [GIT]
|
||||
if bare:
|
||||
if gitdir:
|
||||
# Git on Windows wants its paths only using / for reliability.
|
||||
if platform_utils.isWindows():
|
||||
gitdir = gitdir.replace('\\', '/')
|
||||
alt_objects = os.path.join(gitdir, "objects") if gitdir else None
|
||||
if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(
|
||||
objdir
|
||||
):
|
||||
# Allow git to search the original place in case of local or unique
|
||||
# refs that git will attempt to resolve even if we aren't fetching
|
||||
# them.
|
||||
env["GIT_ALTERNATE_OBJECT_DIRECTORIES"] = alt_objects
|
||||
if bare and gitdir is not None:
|
||||
env[GIT_DIR] = gitdir
|
||||
cwd = None
|
||||
command.append(cmdv[0])
|
||||
# Need to use the --progress flag for fetch/clone so output will be
|
||||
# displayed as by default git only does progress output if stderr is a TTY.
|
||||
if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
|
||||
if '--progress' not in cmdv and '--quiet' not in cmdv:
|
||||
command.append('--progress')
|
||||
command.extend(cmdv[1:])
|
||||
|
||||
stdin = subprocess.PIPE if input else None
|
||||
stdout = subprocess.PIPE if capture_stdout else None
|
||||
stderr = (subprocess.STDOUT if merge_output else
|
||||
(subprocess.PIPE if capture_stderr else None))
|
||||
|
||||
if IsTrace():
|
||||
global LAST_CWD
|
||||
global LAST_GITDIR
|
||||
|
||||
dbg = ''
|
||||
|
||||
if cwd and LAST_CWD != cwd:
|
||||
if LAST_GITDIR or LAST_CWD:
|
||||
dbg += '\n'
|
||||
dbg += ': cd %s\n' % cwd
|
||||
LAST_CWD = cwd
|
||||
|
||||
if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
|
||||
if LAST_GITDIR or LAST_CWD:
|
||||
dbg += '\n'
|
||||
dbg += ': export GIT_DIR=%s\n' % env[GIT_DIR]
|
||||
LAST_GITDIR = env[GIT_DIR]
|
||||
|
||||
dbg += ': '
|
||||
dbg += ' '.join(command)
|
||||
if stdin == subprocess.PIPE:
|
||||
dbg += ' 0<|'
|
||||
if stdout == subprocess.PIPE:
|
||||
dbg += ' 1>|'
|
||||
if stderr == subprocess.PIPE:
|
||||
dbg += ' 2>|'
|
||||
elif stderr == subprocess.STDOUT:
|
||||
dbg += ' 2>&1'
|
||||
Trace('%s', dbg)
|
||||
|
||||
try:
|
||||
p = subprocess.Popen(command,
|
||||
cwd=cwd,
|
||||
env=env,
|
||||
encoding='utf-8',
|
||||
errors='backslashreplace',
|
||||
stdin=stdin,
|
||||
stdout=stdout,
|
||||
stderr=stderr)
|
||||
except Exception as e:
|
||||
raise GitError('%s: %s' % (command[1], e))
|
||||
|
||||
if ssh_proxy:
|
||||
ssh_proxy.add_client(p)
|
||||
|
||||
self.process = p
|
||||
if input:
|
||||
if isinstance(input, str):
|
||||
input = input.encode('utf-8')
|
||||
p.stdin.write(input)
|
||||
p.stdin.close()
|
||||
|
||||
try:
|
||||
self.stdout, self.stderr = p.communicate()
|
||||
finally:
|
||||
if ssh_proxy:
|
||||
ssh_proxy.remove_client(p)
|
||||
self.rc = p.wait()
|
||||
|
||||
@staticmethod
|
||||
def _GetBasicEnv():
|
||||
"""Return a basic env for running git under.
|
||||
|
||||
This is guaranteed to be side-effect free.
|
||||
"""
|
||||
env = os.environ.copy()
|
||||
for key in (REPO_TRACE,
|
||||
GIT_DIR,
|
||||
'GIT_ALTERNATE_OBJECT_DIRECTORIES',
|
||||
'GIT_OBJECT_DIRECTORY',
|
||||
'GIT_WORK_TREE',
|
||||
'GIT_GRAFT_FILE',
|
||||
'GIT_INDEX_FILE'):
|
||||
env.pop(key, None)
|
||||
return env
|
||||
|
||||
def Wait(self):
|
||||
return self.rc
|
||||
|
||||
class GitCommand:
|
||||
"""Wrapper around a single git invocation."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
project,
|
||||
cmdv,
|
||||
bare=False,
|
||||
input=None,
|
||||
capture_stdout=False,
|
||||
capture_stderr=False,
|
||||
merge_output=False,
|
||||
disable_editor=False,
|
||||
ssh_proxy=None,
|
||||
cwd=None,
|
||||
gitdir=None,
|
||||
objdir=None,
|
||||
verify_command=False,
|
||||
add_event_log=True,
|
||||
log_as_error=True,
|
||||
):
|
||||
if project:
|
||||
if not cwd:
|
||||
cwd = project.worktree
|
||||
if not gitdir:
|
||||
gitdir = project.gitdir
|
||||
|
||||
self.project = project
|
||||
self.cmdv = cmdv
|
||||
self.verify_command = verify_command
|
||||
self.stdout, self.stderr = None, None
|
||||
|
||||
# Git on Windows wants its paths only using / for reliability.
|
||||
if platform_utils.isWindows():
|
||||
if objdir:
|
||||
objdir = objdir.replace("\\", "/")
|
||||
if gitdir:
|
||||
gitdir = gitdir.replace("\\", "/")
|
||||
|
||||
env = _build_env(
|
||||
disable_editor=disable_editor,
|
||||
ssh_proxy=ssh_proxy,
|
||||
objdir=objdir,
|
||||
gitdir=gitdir,
|
||||
bare=bare,
|
||||
)
|
||||
|
||||
command = [GIT]
|
||||
if bare:
|
||||
cwd = None
|
||||
command_name = cmdv[0]
|
||||
command.append(command_name)
|
||||
# Need to use the --progress flag for fetch/clone so output will be
|
||||
# displayed as by default git only does progress output if stderr is a
|
||||
# TTY.
|
||||
if sys.stderr.isatty() and command_name in ("fetch", "clone"):
|
||||
if "--progress" not in cmdv and "--quiet" not in cmdv:
|
||||
command.append("--progress")
|
||||
command.extend(cmdv[1:])
|
||||
|
||||
event_log = (
|
||||
BaseEventLog(env=env, add_init_count=True)
|
||||
if add_event_log
|
||||
else None
|
||||
)
|
||||
|
||||
try:
|
||||
self._RunCommand(
|
||||
command,
|
||||
env,
|
||||
capture_stdout=capture_stdout,
|
||||
capture_stderr=capture_stderr,
|
||||
merge_output=merge_output,
|
||||
ssh_proxy=ssh_proxy,
|
||||
cwd=cwd,
|
||||
input=input,
|
||||
)
|
||||
self.VerifyCommand()
|
||||
except GitCommandError as e:
|
||||
if event_log is not None:
|
||||
error_info = json.dumps(
|
||||
{
|
||||
"ErrorType": type(e).__name__,
|
||||
"Project": e.project,
|
||||
"CommandName": command_name,
|
||||
"Message": str(e),
|
||||
"ReturnCode": str(e.git_rc)
|
||||
if e.git_rc is not None
|
||||
else None,
|
||||
"IsError": log_as_error,
|
||||
}
|
||||
)
|
||||
event_log.ErrorEvent(
|
||||
f"{ERROR_EVENT_LOGGING_PREFIX}:{error_info}"
|
||||
)
|
||||
event_log.Write(GetEventTargetPath())
|
||||
if isinstance(e, GitPopenCommandError):
|
||||
raise
|
||||
|
||||
def _RunCommand(
|
||||
self,
|
||||
command,
|
||||
env,
|
||||
capture_stdout=False,
|
||||
capture_stderr=False,
|
||||
merge_output=False,
|
||||
ssh_proxy=None,
|
||||
cwd=None,
|
||||
input=None,
|
||||
):
|
||||
# Set subprocess.PIPE for streams that need to be captured.
|
||||
stdin = subprocess.PIPE if input else None
|
||||
stdout = subprocess.PIPE if capture_stdout else None
|
||||
stderr = (
|
||||
subprocess.STDOUT
|
||||
if merge_output
|
||||
else (subprocess.PIPE if capture_stderr else None)
|
||||
)
|
||||
|
||||
# tee_stderr acts like a tee command for stderr, in that, it captures
|
||||
# stderr from the subprocess and streams it back to sys.stderr, while
|
||||
# keeping a copy in-memory.
|
||||
# This allows us to store stderr logs from the subprocess into
|
||||
# GitCommandError.
|
||||
# Certain git operations, such as `git push`, writes diagnostic logs,
|
||||
# such as, progress bar for pushing, into stderr. To ensure we don't
|
||||
# break git's UX, we need to write to sys.stderr as we read from the
|
||||
# subprocess. Setting encoding or errors makes subprocess return
|
||||
# io.TextIOWrapper, which is line buffered. To avoid line-buffering
|
||||
# while tee-ing stderr, we unset these kwargs. See GitCommand._Tee
|
||||
# for tee-ing between the streams.
|
||||
# We tee stderr iff the caller doesn't want to capture any stream to
|
||||
# not disrupt the existing flow.
|
||||
# See go/tee-repo-stderr for more context.
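# Illustrative sketch of the tee path described above (hypothetical call,
# not part of this change): with encoding/errors left unset, p.stderr stays
# a byte stream, and
#   p = subprocess.Popen(command, stderr=subprocess.PIPE)
#   self.stderr = self._Tee(p.stderr, sys.stderr)
# mirrors git's diagnostic output to the terminal while keeping the copy
# that VerifyCommand() later attaches to a GitCommandError.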
|
||||
tee_stderr = False
|
||||
kwargs = {"encoding": "utf-8", "errors": "backslashreplace"}
|
||||
if not (stdin or stdout or stderr):
|
||||
tee_stderr = True
|
||||
# stderr will be written back to sys.stderr even though it is
|
||||
# piped here.
|
||||
stderr = subprocess.PIPE
|
||||
kwargs = {}
|
||||
|
||||
dbg = ""
|
||||
if IsTrace():
|
||||
global LAST_CWD
|
||||
global LAST_GITDIR
|
||||
|
||||
if cwd and LAST_CWD != cwd:
|
||||
if LAST_GITDIR or LAST_CWD:
|
||||
dbg += "\n"
|
||||
dbg += ": cd %s\n" % cwd
|
||||
LAST_CWD = cwd
|
||||
|
||||
if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
|
||||
if LAST_GITDIR or LAST_CWD:
|
||||
dbg += "\n"
|
||||
dbg += ": export GIT_DIR=%s\n" % env[GIT_DIR]
|
||||
LAST_GITDIR = env[GIT_DIR]
|
||||
|
||||
if "GIT_OBJECT_DIRECTORY" in env:
|
||||
dbg += (
|
||||
": export GIT_OBJECT_DIRECTORY=%s\n"
|
||||
% env["GIT_OBJECT_DIRECTORY"]
|
||||
)
|
||||
if "GIT_ALTERNATE_OBJECT_DIRECTORIES" in env:
|
||||
dbg += ": export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n" % (
|
||||
env["GIT_ALTERNATE_OBJECT_DIRECTORIES"]
|
||||
)
|
||||
|
||||
dbg += ": "
|
||||
dbg += " ".join(command)
|
||||
if stdin == subprocess.PIPE:
|
||||
dbg += " 0<|"
|
||||
if stdout == subprocess.PIPE:
|
||||
dbg += " 1>|"
|
||||
if stderr == subprocess.PIPE:
|
||||
dbg += " 2>|"
|
||||
elif stderr == subprocess.STDOUT:
|
||||
dbg += " 2>&1"
|
||||
|
||||
with Trace(
|
||||
"git command %s %s with debug: %s", LAST_GITDIR, command, dbg
|
||||
):
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
command,
|
||||
cwd=cwd,
|
||||
env=env,
|
||||
stdin=stdin,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
**kwargs,
|
||||
)
|
||||
except Exception as e:
|
||||
raise GitPopenCommandError(
|
||||
message=f"{command[1]}: {e}",
|
||||
project=self.project.name if self.project else None,
|
||||
command_args=self.cmdv,
|
||||
)
|
||||
|
||||
if ssh_proxy:
|
||||
ssh_proxy.add_client(p)
|
||||
|
||||
self.process = p
|
||||
|
||||
try:
|
||||
if tee_stderr:
|
||||
# tee_stderr streams stderr to sys.stderr while capturing
|
||||
# a copy within self.stderr. tee_stderr is only enabled
|
||||
# when the caller wants to pipe no stream.
|
||||
self.stderr = self._Tee(p.stderr, sys.stderr)
|
||||
else:
|
||||
self.stdout, self.stderr = p.communicate(input=input)
|
||||
finally:
|
||||
if ssh_proxy:
|
||||
ssh_proxy.remove_client(p)
|
||||
self.rc = p.wait()
|
||||
|
||||
@staticmethod
|
||||
def _Tee(in_stream, out_stream):
|
||||
"""Writes text from in_stream to out_stream while recording in buffer.
|
||||
|
||||
Args:
|
||||
in_stream: I/O stream to be read from.
|
||||
out_stream: I/O stream to write to.
|
||||
|
||||
Returns:
|
||||
A str containing everything read from the in_stream.
|
||||
"""
|
||||
buffer = ""
|
||||
read_size = 1024 if sys.version_info < (3, 7) else -1
|
||||
chunk = in_stream.read1(read_size)
|
||||
while chunk:
|
||||
# Convert to str.
|
||||
if not hasattr(chunk, "encode"):
|
||||
chunk = chunk.decode("utf-8", "backslashreplace")
|
||||
|
||||
buffer += chunk
|
||||
out_stream.write(chunk)
|
||||
out_stream.flush()
|
||||
|
||||
chunk = in_stream.read1(read_size)
|
||||
|
||||
return buffer
@staticmethod
|
||||
def _GetBasicEnv():
|
||||
"""Return a basic env for running git under.
|
||||
|
||||
This is guaranteed to be side-effect free.
|
||||
"""
|
||||
env = os.environ.copy()
|
||||
for key in (
|
||||
REPO_TRACE,
|
||||
GIT_DIR,
|
||||
"GIT_ALTERNATE_OBJECT_DIRECTORIES",
|
||||
"GIT_OBJECT_DIRECTORY",
|
||||
"GIT_WORK_TREE",
|
||||
"GIT_GRAFT_FILE",
|
||||
"GIT_INDEX_FILE",
|
||||
):
|
||||
env.pop(key, None)
|
||||
return env
|
||||
|
||||
def VerifyCommand(self):
|
||||
if self.rc == 0:
|
||||
return None
|
||||
stdout = (
|
||||
"\n".join(self.stdout.split("\n")[:GIT_ERROR_STDOUT_LINES])
|
||||
if self.stdout
|
||||
else None
|
||||
)
|
||||
stderr = (
|
||||
"\n".join(self.stderr.split("\n")[:GIT_ERROR_STDERR_LINES])
|
||||
if self.stderr
|
||||
else None
|
||||
)
|
||||
project = self.project.name if self.project else None
|
||||
raise GitCommandError(
|
||||
project=project,
|
||||
command_args=self.cmdv,
|
||||
git_rc=self.rc,
|
||||
git_stdout=stdout,
|
||||
git_stderr=stderr,
|
||||
)
|
||||
|
||||
def Wait(self):
|
||||
if self.verify_command:
|
||||
self.VerifyCommand()
|
||||
return self.rc
|
||||
|
||||
|
||||
class GitRequireError(RepoExitError):
|
||||
"""Error raised when git version is unavailable or invalid."""
|
||||
|
||||
def __init__(self, message, exit_code: int = INVALID_GIT_EXIT_CODE):
|
||||
super().__init__(message, exit_code=exit_code)
|
||||
|
||||
|
||||
class GitCommandError(GitError):
|
||||
"""
|
||||
Error raised from a failed git command.
|
||||
Note that GitError can refer to any Git related error (e.g. branch not
|
||||
specified for project.py 'UploadForReview'), while GitCommandError is
|
||||
raised exclusively from non-zero exit codes returned from git commands.
|
||||
"""
|
||||
|
||||
# Tuples with error formats and suggestions for those errors.
|
||||
_ERROR_TO_SUGGESTION = [
|
||||
(
|
||||
re.compile("couldn't find remote ref .*"),
|
||||
"Check if the provided ref exists in the remote.",
|
||||
),
|
||||
(
|
||||
re.compile("unable to access '.*': .*"),
|
||||
(
|
||||
"Please make sure you have the correct access rights and the "
|
||||
"repository exists."
|
||||
),
|
||||
),
|
||||
(
|
||||
re.compile("'.*' does not appear to be a git repository"),
|
||||
"Are you running this repo command outside of a repo workspace?",
|
||||
),
|
||||
(
|
||||
re.compile("not a git repository"),
|
||||
"Are you running this repo command outside of a repo workspace?",
|
||||
),
|
||||
]
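# For example (assumed stderr text): a fetch that fails with
#   fatal: couldn't find remote ref refs/heads/nonexistent
# matches the first pattern above, so GitCommandError.suggestion returns
# "Check if the provided ref exists in the remote."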
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str = DEFAULT_GIT_FAIL_MESSAGE,
|
||||
git_rc: int = None,
|
||||
git_stdout: str = None,
|
||||
git_stderr: str = None,
|
||||
**kwargs,
|
||||
):
|
||||
super().__init__(
|
||||
message,
|
||||
**kwargs,
|
||||
)
|
||||
self.git_rc = git_rc
|
||||
self.git_stdout = git_stdout
|
||||
self.git_stderr = git_stderr
|
||||
|
||||
@property
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def suggestion(self):
|
||||
"""Returns helpful next steps for the given stderr."""
|
||||
if not self.git_stderr:
|
||||
return self.git_stderr
|
||||
|
||||
for err, suggestion in self._ERROR_TO_SUGGESTION:
|
||||
if err.search(self.git_stderr):
|
||||
return suggestion
|
||||
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
args = "[]" if not self.command_args else " ".join(self.command_args)
|
||||
error_type = type(self).__name__
|
||||
string = f"{error_type}: '{args}' on {self.project} failed"
|
||||
|
||||
if self.message != DEFAULT_GIT_FAIL_MESSAGE:
|
||||
string += f": {self.message}"
|
||||
|
||||
if self.git_stdout:
|
||||
string += f"\nstdout: {self.git_stdout}"
|
||||
|
||||
if self.git_stderr:
|
||||
string += f"\nstderr: {self.git_stderr}"
|
||||
|
||||
if self.suggestion:
|
||||
string += f"\nsuggestion: {self.suggestion}"
|
||||
|
||||
return string
|
||||
|
||||
|
||||
class GitPopenCommandError(GitError):
|
||||
"""
|
||||
Error raised when subprocess.Popen fails for a GitCommand
|
||||
"""
|
||||
|
||||
git_config.py (1387 changed lines): file diff suppressed because it is too large

git_refs.py (266 changed lines):
@@ -13,153 +13,153 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from repo_trace import Trace
|
||||
|
||||
import platform_utils
|
||||
|
||||
HEAD = 'HEAD'
|
||||
R_CHANGES = 'refs/changes/'
|
||||
R_HEADS = 'refs/heads/'
|
||||
R_TAGS = 'refs/tags/'
|
||||
R_PUB = 'refs/published/'
|
||||
R_WORKTREE = 'refs/worktree/'
|
||||
R_WORKTREE_M = R_WORKTREE + 'm/'
|
||||
R_M = 'refs/remotes/m/'
|
||||
from repo_trace import Trace
|
||||
|
||||
|
||||
class GitRefs(object):
|
||||
def __init__(self, gitdir):
|
||||
self._gitdir = gitdir
|
||||
self._phyref = None
|
||||
self._symref = None
|
||||
self._mtime = {}
|
||||
HEAD = "HEAD"
|
||||
R_CHANGES = "refs/changes/"
|
||||
R_HEADS = "refs/heads/"
|
||||
R_TAGS = "refs/tags/"
|
||||
R_PUB = "refs/published/"
|
||||
R_WORKTREE = "refs/worktree/"
|
||||
R_WORKTREE_M = R_WORKTREE + "m/"
|
||||
R_M = "refs/remotes/m/"
|
||||
|
||||
@property
|
||||
def all(self):
|
||||
self._EnsureLoaded()
|
||||
return self._phyref
|
||||
|
||||
def get(self, name):
|
||||
try:
|
||||
return self.all[name]
|
||||
except KeyError:
|
||||
return ''
|
||||
class GitRefs:
|
||||
def __init__(self, gitdir):
|
||||
self._gitdir = gitdir
|
||||
self._phyref = None
|
||||
self._symref = None
|
||||
self._mtime = {}
|
||||
|
||||
def deleted(self, name):
|
||||
if self._phyref is not None:
|
||||
if name in self._phyref:
|
||||
del self._phyref[name]
|
||||
@property
|
||||
def all(self):
|
||||
self._EnsureLoaded()
|
||||
return self._phyref
|
||||
|
||||
if name in self._symref:
|
||||
del self._symref[name]
|
||||
def get(self, name):
|
||||
try:
|
||||
return self.all[name]
|
||||
except KeyError:
|
||||
return ""
|
||||
|
||||
if name in self._mtime:
|
||||
del self._mtime[name]
|
||||
def deleted(self, name):
|
||||
if self._phyref is not None:
|
||||
if name in self._phyref:
|
||||
del self._phyref[name]
|
||||
|
||||
def symref(self, name):
|
||||
try:
|
||||
self._EnsureLoaded()
|
||||
return self._symref[name]
|
||||
except KeyError:
|
||||
return ''
|
||||
if name in self._symref:
|
||||
del self._symref[name]
|
||||
|
||||
def _EnsureLoaded(self):
|
||||
if self._phyref is None or self._NeedUpdate():
|
||||
self._LoadAll()
|
||||
if name in self._mtime:
|
||||
del self._mtime[name]
|
||||
|
||||
def _NeedUpdate(self):
|
||||
Trace(': scan refs %s', self._gitdir)
|
||||
def symref(self, name):
|
||||
try:
|
||||
self._EnsureLoaded()
|
||||
return self._symref[name]
|
||||
except KeyError:
|
||||
return ""
|
||||
|
||||
for name, mtime in self._mtime.items():
|
||||
try:
|
||||
if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
|
||||
return True
|
||||
except OSError:
|
||||
return True
|
||||
return False
|
||||
def _EnsureLoaded(self):
|
||||
if self._phyref is None or self._NeedUpdate():
|
||||
self._LoadAll()
|
||||
|
||||
def _LoadAll(self):
|
||||
Trace(': load refs %s', self._gitdir)
|
||||
def _NeedUpdate(self):
|
||||
with Trace(": scan refs %s", self._gitdir):
|
||||
for name, mtime in self._mtime.items():
|
||||
try:
|
||||
if mtime != os.path.getmtime(
|
||||
os.path.join(self._gitdir, name)
|
||||
):
|
||||
return True
|
||||
except OSError:
|
||||
return True
|
||||
return False
|
||||
|
||||
self._phyref = {}
|
||||
self._symref = {}
|
||||
self._mtime = {}
|
||||
def _LoadAll(self):
|
||||
with Trace(": load refs %s", self._gitdir):
|
||||
self._phyref = {}
|
||||
self._symref = {}
|
||||
self._mtime = {}
|
||||
|
||||
self._ReadPackedRefs()
|
||||
self._ReadLoose('refs/')
|
||||
self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
|
||||
self._ReadPackedRefs()
|
||||
self._ReadLoose("refs/")
|
||||
self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
|
||||
|
||||
scan = self._symref
|
||||
attempts = 0
|
||||
while scan and attempts < 5:
|
||||
scan_next = {}
|
||||
for name, dest in scan.items():
|
||||
if dest in self._phyref:
|
||||
self._phyref[name] = self._phyref[dest]
|
||||
scan = self._symref
|
||||
attempts = 0
|
||||
while scan and attempts < 5:
|
||||
scan_next = {}
|
||||
for name, dest in scan.items():
|
||||
if dest in self._phyref:
|
||||
self._phyref[name] = self._phyref[dest]
|
||||
else:
|
||||
scan_next[name] = dest
|
||||
scan = scan_next
|
||||
attempts += 1
|
||||
|
||||
def _ReadPackedRefs(self):
|
||||
path = os.path.join(self._gitdir, "packed-refs")
|
||||
try:
|
||||
fd = open(path)
|
||||
mtime = os.path.getmtime(path)
|
||||
except OSError:
|
||||
return
|
||||
try:
|
||||
for line in fd:
|
||||
line = str(line)
|
||||
if line[0] == "#":
|
||||
continue
|
||||
if line[0] == "^":
|
||||
continue
|
||||
|
||||
line = line[:-1]
|
||||
p = line.split(" ")
|
||||
ref_id = p[0]
|
||||
name = p[1]
|
||||
|
||||
self._phyref[name] = ref_id
|
||||
finally:
|
||||
fd.close()
|
||||
self._mtime["packed-refs"] = mtime
|
||||
|
||||
def _ReadLoose(self, prefix):
|
||||
base = os.path.join(self._gitdir, prefix)
|
||||
for name in platform_utils.listdir(base):
|
||||
p = os.path.join(base, name)
|
||||
# We don't implement the full ref validation algorithm, just the
|
||||
# simple rules that would show up in local filesystems.
|
||||
# https://git-scm.com/docs/git-check-ref-format
|
||||
if name.startswith(".") or name.endswith(".lock"):
|
||||
pass
|
||||
elif platform_utils.isdir(p):
|
||||
self._mtime[prefix] = os.path.getmtime(base)
|
||||
self._ReadLoose(prefix + name + "/")
|
||||
else:
|
||||
self._ReadLoose1(p, prefix + name)
|
||||
|
||||
def _ReadLoose1(self, path, name):
|
||||
try:
|
||||
with open(path) as fd:
|
||||
mtime = os.path.getmtime(path)
|
||||
ref_id = fd.readline()
|
||||
except (OSError, UnicodeError):
|
||||
return
|
||||
|
||||
try:
|
||||
ref_id = ref_id.decode()
|
||||
except AttributeError:
|
||||
pass
|
||||
if not ref_id:
|
||||
return
|
||||
ref_id = ref_id[:-1]
|
||||
|
||||
if ref_id.startswith("ref: "):
|
||||
self._symref[name] = ref_id[5:]
|
||||
else:
|
||||
scan_next[name] = dest
|
||||
scan = scan_next
|
||||
attempts += 1
|
||||
|
||||
def _ReadPackedRefs(self):
|
||||
path = os.path.join(self._gitdir, 'packed-refs')
|
||||
try:
|
||||
fd = open(path, 'r')
|
||||
mtime = os.path.getmtime(path)
|
||||
except IOError:
|
||||
return
|
||||
except OSError:
|
||||
return
|
||||
try:
|
||||
for line in fd:
|
||||
line = str(line)
|
||||
if line[0] == '#':
|
||||
continue
|
||||
if line[0] == '^':
|
||||
continue
|
||||
|
||||
line = line[:-1]
|
||||
p = line.split(' ')
|
||||
ref_id = p[0]
|
||||
name = p[1]
|
||||
|
||||
self._phyref[name] = ref_id
|
||||
finally:
|
||||
fd.close()
|
||||
self._mtime['packed-refs'] = mtime
|
||||
|
||||
def _ReadLoose(self, prefix):
|
||||
base = os.path.join(self._gitdir, prefix)
|
||||
for name in platform_utils.listdir(base):
|
||||
p = os.path.join(base, name)
|
||||
# We don't implement the full ref validation algorithm, just the simple
|
||||
# rules that would show up in local filesystems.
|
||||
# https://git-scm.com/docs/git-check-ref-format
|
||||
if name.startswith('.') or name.endswith('.lock'):
|
||||
pass
|
||||
elif platform_utils.isdir(p):
|
||||
self._mtime[prefix] = os.path.getmtime(base)
|
||||
self._ReadLoose(prefix + name + '/')
|
||||
else:
|
||||
self._ReadLoose1(p, prefix + name)
|
||||
|
||||
def _ReadLoose1(self, path, name):
|
||||
try:
|
||||
with open(path) as fd:
|
||||
mtime = os.path.getmtime(path)
|
||||
ref_id = fd.readline()
|
||||
except (OSError, UnicodeError):
|
||||
return
|
||||
|
||||
try:
|
||||
ref_id = ref_id.decode()
|
||||
except AttributeError:
|
||||
pass
|
||||
if not ref_id:
|
||||
return
|
||||
ref_id = ref_id[:-1]
|
||||
|
||||
if ref_id.startswith('ref: '):
|
||||
self._symref[name] = ref_id[5:]
|
||||
else:
|
||||
self._phyref[name] = ref_id
|
||||
self._mtime[name] = mtime
|
||||
self._phyref[name] = ref_id
|
||||
self._mtime[name] = mtime
git_superproject.py
@@ -12,404 +12,549 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Provide functionality to get all projects and their commit ids from Superproject.
|
||||
"""Provide functionality to get projects and their commit ids from Superproject.
|
||||
|
||||
For more information on superproject, check out:
|
||||
https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
|
||||
|
||||
Examples:
|
||||
superproject = Superproject()
|
||||
superproject = Superproject(manifest, name, remote, revision)
|
||||
UpdateProjectsResult = superproject.UpdateProjectsRevisionId(projects)
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import functools
|
||||
import hashlib
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from typing import NamedTuple
|
||||
|
||||
from git_command import git_require, GitCommand
|
||||
from git_command import git_require
|
||||
from git_command import GitCommand
|
||||
from git_config import RepoConfig
|
||||
from git_refs import R_HEADS
|
||||
from manifest_xml import LOCAL_MANIFEST_GROUP_PREFIX
|
||||
from git_refs import GitRefs
|
||||
|
||||
_SUPERPROJECT_GIT_NAME = 'superproject.git'
|
||||
_SUPERPROJECT_MANIFEST_NAME = 'superproject_override.xml'
|
||||
|
||||
_SUPERPROJECT_GIT_NAME = "superproject.git"
|
||||
_SUPERPROJECT_MANIFEST_NAME = "superproject_override.xml"
|
||||
|
||||
|
||||
class SyncResult(NamedTuple):
|
||||
"""Return the status of sync and whether caller should exit."""
|
||||
"""Return the status of sync and whether caller should exit."""
|
||||
|
||||
# Whether the superproject sync was successful.
|
||||
success: bool
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
# Whether the superproject sync was successful.
|
||||
success: bool
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
|
||||
|
||||
class CommitIdsResult(NamedTuple):
|
||||
"""Return the commit ids and whether caller should exit."""
|
||||
"""Return the commit ids and whether caller should exit."""
|
||||
|
||||
# A dictionary with the projects/commit ids on success, otherwise None.
|
||||
commit_ids: dict
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
# A dictionary with the projects/commit ids on success, otherwise None.
|
||||
commit_ids: dict
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
|
||||
|
||||
class UpdateProjectsResult(NamedTuple):
|
||||
"""Return the overriding manifest file and whether caller should exit."""
|
||||
"""Return the overriding manifest file and whether caller should exit."""
|
||||
|
||||
# Path name of the overriding manifest file if successful, otherwise None.
|
||||
manifest_path: str
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
# Path name of the overriding manifest file if successful, otherwise None.
|
||||
manifest_path: str
|
||||
# Whether the caller should exit.
|
||||
fatal: bool
|
||||
|
||||
|
||||
class Superproject(object):
|
||||
"""Get commit ids from superproject.
|
||||
class Superproject:
|
||||
"""Get commit ids from superproject.
|
||||
|
||||
Initializes a local copy of a superproject for the manifest. This allows
|
||||
lookup of commit ids for all projects. It contains _project_commit_ids which
|
||||
is a dictionary with project/commit id entries.
|
||||
"""
|
||||
def __init__(self, manifest, repodir, git_event_log,
|
||||
superproject_dir='exp-superproject', quiet=False, print_messages=False):
|
||||
"""Initializes superproject.
|
||||
|
||||
Args:
|
||||
manifest: A Manifest object that is to be written to a file.
|
||||
repodir: Path to the .repo/ dir for holding all internal checkout state.
|
||||
It must be in the top directory of the repo client checkout.
|
||||
git_event_log: A git trace2 event log to log events.
|
||||
superproject_dir: Relative path under |repodir| to checkout superproject.
|
||||
quiet: If True then only print the progress messages.
|
||||
print_messages: if True then print error/warning messages.
|
||||
Initializes a bare local copy of a superproject for the manifest. This
|
||||
allows lookup of commit ids for all projects. It contains
|
||||
_project_commit_ids which is a dictionary with project/commit id entries.
|
||||
"""
|
||||
self._project_commit_ids = None
|
||||
self._manifest = manifest
|
||||
self._git_event_log = git_event_log
|
||||
self._quiet = quiet
|
||||
self._print_messages = print_messages
|
||||
self._branch = manifest.branch
|
||||
self._repodir = os.path.abspath(repodir)
|
||||
self._superproject_dir = superproject_dir
|
||||
self._superproject_path = os.path.join(self._repodir, superproject_dir)
|
||||
self._manifest_path = os.path.join(self._superproject_path,
|
||||
_SUPERPROJECT_MANIFEST_NAME)
|
||||
git_name = ''
|
||||
if self._manifest.superproject:
|
||||
remote = self._manifest.superproject['remote']
|
||||
git_name = hashlib.md5(remote.name.encode('utf8')).hexdigest() + '-'
|
||||
self._branch = self._manifest.superproject['revision']
|
||||
self._remote_url = remote.url
|
||||
else:
|
||||
self._remote_url = None
|
||||
self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
|
||||
self._work_git = os.path.join(self._superproject_path, self._work_git_name)
|
||||
|
||||
@property
|
||||
def project_commit_ids(self):
|
||||
"""Returns a dictionary of projects and their commit ids."""
|
||||
return self._project_commit_ids
|
||||
def __init__(
|
||||
self,
|
||||
manifest,
|
||||
name,
|
||||
remote,
|
||||
revision,
|
||||
superproject_dir="exp-superproject",
|
||||
):
|
||||
"""Initializes superproject.
|
||||
|
||||
@property
|
||||
def manifest_path(self):
|
||||
"""Returns the manifest path if the path exists or None."""
|
||||
return self._manifest_path if os.path.exists(self._manifest_path) else None
|
||||
Args:
|
||||
manifest: A Manifest object that is to be written to a file.
|
||||
name: The unique name of the superproject
|
||||
remote: The RemoteSpec for the remote.
|
||||
revision: The name of the git branch to track.
|
||||
superproject_dir: Relative path under |manifest.subdir| to checkout
|
||||
superproject.
|
||||
"""
|
||||
self._project_commit_ids = None
|
||||
self._manifest = manifest
|
||||
self.name = name
|
||||
self.remote = remote
|
||||
self.revision = self._branch = revision
|
||||
self._repodir = manifest.repodir
|
||||
self._superproject_dir = superproject_dir
|
||||
self._superproject_path = manifest.SubmanifestInfoDir(
|
||||
manifest.path_prefix, superproject_dir
|
||||
)
|
||||
self._manifest_path = os.path.join(
|
||||
self._superproject_path, _SUPERPROJECT_MANIFEST_NAME
|
||||
)
|
||||
git_name = hashlib.md5(remote.name.encode("utf8")).hexdigest() + "-"
|
||||
self._remote_url = remote.url
|
||||
self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
|
||||
self._work_git = os.path.join(
|
||||
self._superproject_path, self._work_git_name
|
||||
)
|
||||
|
||||
def _LogMessage(self, message):
|
||||
"""Logs message to stderr and _git_event_log."""
|
||||
if self._print_messages:
|
||||
print(message, file=sys.stderr)
|
||||
self._git_event_log.ErrorEvent(message, f'{message}')
|
||||
# The following are command arguments, rather than superproject
|
||||
# attributes, and were included here originally. They should eventually
|
||||
# become arguments that are passed down from the public methods, instead
|
||||
# of being treated as attributes.
|
||||
self._git_event_log = None
|
||||
self._quiet = False
|
||||
self._print_messages = False
|
||||
|
||||
def _LogMessagePrefix(self):
|
||||
"""Returns the prefix string to be logged in each log message"""
|
||||
return f'repo superproject branch: {self._branch} url: {self._remote_url}'
|
||||
def SetQuiet(self, value):
|
||||
"""Set the _quiet attribute."""
|
||||
self._quiet = value
|
||||
|
||||
def _LogError(self, message):
|
||||
"""Logs error message to stderr and _git_event_log."""
|
||||
self._LogMessage(f'{self._LogMessagePrefix()} error: {message}')
|
||||
def SetPrintMessages(self, value):
|
||||
"""Set the _print_messages attribute."""
|
||||
self._print_messages = value
|
||||
|
||||
def _LogWarning(self, message):
|
||||
"""Logs warning message to stderr and _git_event_log."""
|
||||
self._LogMessage(f'{self._LogMessagePrefix()} warning: {message}')
|
||||
@property
|
||||
def project_commit_ids(self):
|
||||
"""Returns a dictionary of projects and their commit ids."""
|
||||
return self._project_commit_ids
|
||||
|
||||
def _Init(self):
|
||||
"""Sets up a local Git repository to get a copy of a superproject.
|
||||
@property
|
||||
def manifest_path(self):
|
||||
"""Returns the manifest path if the path exists or None."""
|
||||
return (
|
||||
self._manifest_path if os.path.exists(self._manifest_path) else None
|
||||
)
|
||||
|
||||
Returns:
|
||||
True if initialization is successful, or False.
|
||||
"""
|
||||
if not os.path.exists(self._superproject_path):
|
||||
os.mkdir(self._superproject_path)
|
||||
if not self._quiet and not os.path.exists(self._work_git):
|
||||
print('%s: Performing initial setup for superproject; this might take '
|
||||
'several minutes.' % self._work_git)
|
||||
cmd = ['init', '--bare', self._work_git_name]
|
||||
p = GitCommand(None,
|
||||
cmd,
|
||||
cwd=self._superproject_path,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
retval = p.Wait()
|
||||
if retval:
|
||||
self._LogWarning(f'git init call failed, command: git {cmd}, '
|
||||
f'return code: {retval}, stderr: {p.stderr}')
|
||||
return False
|
||||
return True
|
||||
def _LogMessage(self, fmt, *inputs):
|
||||
"""Logs message to stderr and _git_event_log."""
|
||||
message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
|
||||
if self._print_messages:
|
||||
print(message, file=sys.stderr)
|
||||
self._git_event_log.ErrorEvent(message, fmt)
|
||||
|
||||
def _Fetch(self):
|
||||
"""Fetches a local copy of a superproject for the manifest based on |_remote_url|.
|
||||
def _LogMessagePrefix(self):
|
||||
"""Returns the prefix string to be logged in each log message"""
|
||||
return (
|
||||
f"repo superproject branch: {self._branch} url: {self._remote_url}"
|
||||
)
|
||||
|
||||
Returns:
|
||||
True if fetch is successful, or False.
|
||||
"""
|
||||
if not os.path.exists(self._work_git):
|
||||
self._LogWarning(f'git fetch missing directory: {self._work_git}')
|
||||
return False
|
||||
if not git_require((2, 28, 0)):
|
||||
self._LogWarning('superproject requires a git version 2.28 or later')
|
||||
return False
|
||||
cmd = ['fetch', self._remote_url, '--depth', '1', '--force', '--no-tags',
|
||||
'--filter', 'blob:none']
|
||||
if self._branch:
|
||||
cmd += [self._branch + ':' + self._branch]
|
||||
p = GitCommand(None,
|
||||
cmd,
|
||||
cwd=self._work_git,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
retval = p.Wait()
|
||||
if retval:
|
||||
self._LogWarning(f'git fetch call failed, command: git {cmd}, '
|
||||
f'return code: {retval}, stderr: {p.stderr}')
|
||||
return False
|
||||
return True
|
||||
def _LogError(self, fmt, *inputs):
|
||||
"""Logs error message to stderr and _git_event_log."""
|
||||
self._LogMessage(f"error: {fmt}", *inputs)
|
||||
|
||||
def _LsTree(self):
|
||||
"""Gets the commit ids for all projects.
|
||||
def _LogWarning(self, fmt, *inputs):
|
||||
"""Logs warning message to stderr and _git_event_log."""
|
||||
self._LogMessage(f"warning: {fmt}", *inputs)
|
||||
|
||||
Works only in git repositories.
|
||||
def _Init(self):
|
||||
"""Sets up a local Git repository to get a copy of a superproject.
|
||||
|
||||
Returns:
|
||||
data: data returned from 'git ls-tree ...' instead of None.
|
||||
"""
|
||||
if not os.path.exists(self._work_git):
|
||||
self._LogWarning(f'git ls-tree missing directory: {self._work_git}')
|
||||
return None
|
||||
data = None
|
||||
branch = 'HEAD' if not self._branch else self._branch
|
||||
cmd = ['ls-tree', '-z', '-r', branch]
|
||||
Returns:
|
||||
True if initialization is successful, or False.
|
||||
"""
|
||||
if not os.path.exists(self._superproject_path):
|
||||
os.mkdir(self._superproject_path)
|
||||
if not self._quiet and not os.path.exists(self._work_git):
|
||||
print(
|
||||
"%s: Performing initial setup for superproject; this might "
|
||||
"take several minutes." % self._work_git
|
||||
)
|
||||
cmd = ["init", "--bare", self._work_git_name]
|
||||
p = GitCommand(
|
||||
None,
|
||||
cmd,
|
||||
cwd=self._superproject_path,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
)
|
||||
retval = p.Wait()
|
||||
if retval:
|
||||
self._LogWarning(
|
||||
"git init call failed, command: git {}, "
|
||||
"return code: {}, stderr: {}",
|
||||
cmd,
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
p = GitCommand(None,
|
||||
cmd,
|
||||
cwd=self._work_git,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
retval = p.Wait()
|
||||
if retval == 0:
|
||||
data = p.stdout
|
||||
else:
|
||||
self._LogWarning(f'git ls-tree call failed, command: git {cmd}, '
|
||||
f'return code: {retval}, stderr: {p.stderr}')
|
||||
return data
|
||||
def _Fetch(self):
|
||||
"""Fetches a superproject for the manifest based on |_remote_url|.
|
||||
|
||||
def Sync(self):
|
||||
"""Gets a local copy of a superproject for the manifest.
|
||||
This runs git fetch which stores a local copy the superproject.
|
||||
|
||||
Returns:
|
||||
SyncResult
|
||||
"""
|
||||
if not self._manifest.superproject:
|
||||
self._LogWarning(f'superproject tag is not defined in manifest: '
|
||||
f'{self._manifest.manifestFile}')
|
||||
return SyncResult(False, False)
|
||||
Returns:
|
||||
True if fetch is successful, or False.
|
||||
"""
|
||||
if not os.path.exists(self._work_git):
|
||||
self._LogWarning("git fetch missing directory: {}", self._work_git)
|
||||
return False
|
||||
if not git_require((2, 28, 0)):
|
||||
self._LogWarning(
|
||||
"superproject requires a git version 2.28 or later"
|
||||
)
|
||||
return False
|
||||
cmd = [
|
||||
"fetch",
|
||||
self._remote_url,
|
||||
"--depth",
|
||||
"1",
|
||||
"--force",
|
||||
"--no-tags",
|
||||
"--filter",
|
||||
"blob:none",
|
||||
]
|
||||
|
||||
print('NOTICE: --use-superproject is in beta; report any issues to the '
|
||||
'address described in `repo version`', file=sys.stderr)
|
||||
should_exit = True
|
||||
if not self._remote_url:
|
||||
self._LogWarning(f'superproject URL is not defined in manifest: '
|
||||
f'{self._manifest.manifestFile}')
|
||||
return SyncResult(False, should_exit)
|
||||
# Check if there is a local ref that we can pass to --negotiation-tip.
|
||||
# If this is the first fetch, it does not exist yet.
|
||||
# We use --negotiation-tip to speed up the fetch. Superproject branches
|
||||
# do not share commits. So this lets git know it only needs to send
|
||||
# commits reachable from the specified local refs.
|
||||
rev_commit = GitRefs(self._work_git).get(f"refs/heads/{self.revision}")
|
||||
if rev_commit:
|
||||
cmd.extend(["--negotiation-tip", rev_commit])
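# Illustrative (hypothetical sha): on a later sync, once the previously
# fetched revision exists locally, the command built above is roughly
#   git fetch <url> --depth 1 --force --no-tags --filter blob:none \
#       --negotiation-tip <sha of refs/heads/<revision>> <revision>:<revision>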
|
||||
|
||||
if not self._Init():
|
||||
return SyncResult(False, should_exit)
|
||||
if not self._Fetch():
|
||||
return SyncResult(False, should_exit)
|
||||
if not self._quiet:
|
||||
print('%s: Initial setup for superproject completed.' % self._work_git)
|
||||
return SyncResult(True, False)
|
||||
if self._branch:
|
||||
cmd += [self._branch + ":" + self._branch]
|
||||
p = GitCommand(
|
||||
None,
|
||||
cmd,
|
||||
gitdir=self._work_git,
|
||||
bare=True,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
)
|
||||
retval = p.Wait()
|
||||
if retval:
|
||||
self._LogWarning(
|
||||
"git fetch call failed, command: git {}, "
|
||||
"return code: {}, stderr: {}",
|
||||
cmd,
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
def _GetAllProjectsCommitIds(self):
|
||||
"""Get commit ids for all projects from superproject and save them in _project_commit_ids.
|
||||
def _LsTree(self):
|
||||
"""Gets the commit ids for all projects.
|
||||
|
||||
Returns:
|
||||
CommitIdsResult
|
||||
"""
|
||||
sync_result = self.Sync()
|
||||
if not sync_result.success:
|
||||
return CommitIdsResult(None, sync_result.fatal)
|
||||
Works only in git repositories.
|
||||
|
||||
data = self._LsTree()
|
||||
if not data:
|
||||
self._LogWarning(f'git ls-tree failed to return data for manifest: '
|
||||
f'{self._manifest.manifestFile}')
|
||||
return CommitIdsResult(None, True)
|
||||
Returns:
|
||||
data: data returned from 'git ls-tree ...' instead of None.
|
||||
"""
|
||||
if not os.path.exists(self._work_git):
|
||||
self._LogWarning(
|
||||
"git ls-tree missing directory: {}", self._work_git
|
||||
)
|
||||
return None
|
||||
data = None
|
||||
branch = "HEAD" if not self._branch else self._branch
|
||||
cmd = ["ls-tree", "-z", "-r", branch]
|
||||
|
||||
# Parse lines like the following to select lines starting with '160000' and
|
||||
# build a dictionary with project path (last element) and its commit id (3rd element).
|
||||
#
|
||||
# 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
|
||||
# 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00
|
||||
commit_ids = {}
|
||||
for line in data.split('\x00'):
|
||||
ls_data = line.split(None, 3)
|
||||
if not ls_data:
|
||||
break
|
||||
if ls_data[0] == '160000':
|
||||
commit_ids[ls_data[3]] = ls_data[2]
|
||||
p = GitCommand(
|
||||
None,
|
||||
cmd,
|
||||
gitdir=self._work_git,
|
||||
bare=True,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
)
|
||||
retval = p.Wait()
|
||||
if retval == 0:
|
||||
data = p.stdout
|
||||
else:
|
||||
self._LogWarning(
|
||||
"git ls-tree call failed, command: git {}, "
|
||||
"return code: {}, stderr: {}",
|
||||
cmd,
|
||||
retval,
|
||||
p.stderr,
|
||||
)
|
||||
return data
|
||||
|
||||
self._project_commit_ids = commit_ids
|
||||
return CommitIdsResult(commit_ids, False)
|
||||
def Sync(self, git_event_log):
|
||||
"""Gets a local copy of a superproject for the manifest.
|
||||
|
||||
def _WriteManifestFile(self):
|
||||
"""Writes manifest to a file.
|
||||
Args:
|
||||
git_event_log: an EventLog, for git tracing.
|
||||
|
||||
Returns:
|
||||
manifest_path: Path name of the file into which manifest is written instead of None.
|
||||
"""
|
||||
if not os.path.exists(self._superproject_path):
|
||||
self._LogWarning(f'missing superproject directory: {self._superproject_path}')
|
||||
return None
|
||||
manifest_str = self._manifest.ToXml(groups=self._manifest.GetGroupsStr()).toxml()
|
||||
manifest_path = self._manifest_path
|
||||
try:
|
||||
with open(manifest_path, 'w', encoding='utf-8') as fp:
|
||||
fp.write(manifest_str)
|
||||
except IOError as e:
|
||||
self._LogError(f'cannot write manifest to : {manifest_path} {e}')
|
||||
return None
|
||||
return manifest_path
|
||||
Returns:
|
||||
SyncResult
|
||||
"""
|
||||
self._git_event_log = git_event_log
|
||||
if not self._manifest.superproject:
|
||||
self._LogWarning(
|
||||
"superproject tag is not defined in manifest: {}",
|
||||
self._manifest.manifestFile,
|
||||
)
|
||||
return SyncResult(False, False)
|
||||
|
||||
def _SkipUpdatingProjectRevisionId(self, project):
|
||||
"""Checks if a project's revision id needs to be updated or not.
|
||||
_PrintBetaNotice()
|
||||
|
||||
Revision id for projects from local manifest will not be updated.
|
||||
should_exit = True
|
||||
if not self._remote_url:
|
||||
self._LogWarning(
|
||||
"superproject URL is not defined in manifest: {}",
|
||||
self._manifest.manifestFile,
|
||||
)
|
||||
return SyncResult(False, should_exit)
|
||||
|
||||
Args:
|
||||
project: project whose revision id is being updated.
|
||||
if not self._Init():
|
||||
return SyncResult(False, should_exit)
|
||||
if not self._Fetch():
|
||||
return SyncResult(False, should_exit)
|
||||
if not self._quiet:
|
||||
print(
|
||||
"%s: Initial setup for superproject completed." % self._work_git
|
||||
)
|
||||
return SyncResult(True, False)
|
||||
|
||||
Returns:
|
||||
True if a project's revision id should not be updated, or False,
|
||||
"""
|
||||
path = project.relpath
|
||||
if not path:
|
||||
return True
|
||||
# Skip the project with revisionId.
|
||||
if project.revisionId:
|
||||
return True
|
||||
# Skip the project if it comes from the local manifest.
|
||||
return any(s.startswith(LOCAL_MANIFEST_GROUP_PREFIX) for s in project.groups)
|
||||
def _GetAllProjectsCommitIds(self):
|
||||
"""Get commit ids for all projects from superproject and save them.
|
||||
|
||||
def UpdateProjectsRevisionId(self, projects):
|
||||
"""Update revisionId of every project in projects with the commit id.
|
||||
Commit ids are saved in _project_commit_ids.
|
||||
|
||||
Args:
|
||||
projects: List of projects whose revisionId needs to be updated.
|
||||
Returns:
|
||||
CommitIdsResult
|
||||
"""
|
||||
sync_result = self.Sync(self._git_event_log)
|
||||
if not sync_result.success:
|
||||
return CommitIdsResult(None, sync_result.fatal)
|
||||
|
||||
Returns:
|
||||
UpdateProjectsResult
|
||||
"""
|
||||
commit_ids_result = self._GetAllProjectsCommitIds()
|
||||
commit_ids = commit_ids_result.commit_ids
|
||||
if not commit_ids:
|
||||
return UpdateProjectsResult(None, commit_ids_result.fatal)
|
||||
data = self._LsTree()
|
||||
if not data:
|
||||
self._LogWarning(
|
||||
"git ls-tree failed to return data for manifest: {}",
|
||||
self._manifest.manifestFile,
|
||||
)
|
||||
return CommitIdsResult(None, True)
|
||||
|
||||
projects_missing_commit_ids = []
|
||||
for project in projects:
|
||||
if self._SkipUpdatingProjectRevisionId(project):
|
||||
continue
|
||||
path = project.relpath
|
||||
commit_id = commit_ids.get(path)
|
||||
if not commit_id:
|
||||
projects_missing_commit_ids.append(path)
|
||||
# Parse lines like the following to select lines starting with '160000'
|
||||
# and build a dictionary with project path (last element) and its commit
|
||||
# id (3rd element).
|
||||
#
|
||||
# 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
|
||||
# 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00 # noqa: E501
|
||||
commit_ids = {}
|
||||
for line in data.split("\x00"):
|
||||
ls_data = line.split(None, 3)
|
||||
if not ls_data:
|
||||
break
|
||||
if ls_data[0] == "160000":
|
||||
commit_ids[ls_data[3]] = ls_data[2]
|
||||
|
||||
# If superproject doesn't have a commit id for a project, then report an
|
||||
# error event and continue as if --no-use-superproject were specified.
|
||||
if projects_missing_commit_ids:
|
||||
self._LogWarning(f'please file a bug using {self._manifest.contactinfo.bugurl} '
|
||||
f'to report missing commit_ids for: {projects_missing_commit_ids}')
|
||||
return UpdateProjectsResult(None, False)
|
||||
self._project_commit_ids = commit_ids
|
||||
return CommitIdsResult(commit_ids, False)
|
||||
|
||||
for project in projects:
|
||||
if not self._SkipUpdatingProjectRevisionId(project):
|
||||
project.SetRevisionId(commit_ids.get(project.relpath))
|
||||
def _WriteManifestFile(self):
|
||||
"""Writes manifest to a file.
|
||||
|
||||
manifest_path = self._WriteManifestFile()
|
||||
return UpdateProjectsResult(manifest_path, False)
|
||||
Returns:
|
||||
manifest_path: Path name of the file into which manifest is written
|
||||
instead of None.
|
||||
"""
|
||||
if not os.path.exists(self._superproject_path):
|
||||
self._LogWarning(
|
||||
"missing superproject directory: {}", self._superproject_path
|
||||
)
|
||||
return None
|
||||
manifest_str = self._manifest.ToXml(
|
||||
groups=self._manifest.GetGroupsStr(), omit_local=True
|
||||
).toxml()
|
||||
manifest_path = self._manifest_path
|
||||
try:
|
||||
with open(manifest_path, "w", encoding="utf-8") as fp:
|
||||
fp.write(manifest_str)
|
||||
except OSError as e:
|
||||
self._LogError("cannot write manifest to : {} {}", manifest_path, e)
|
||||
return None
|
||||
return manifest_path
|
||||
|
||||
def _SkipUpdatingProjectRevisionId(self, project):
|
||||
"""Checks if a project's revision id needs to be updated or not.
|
||||
|
||||
Revision id for projects from local manifest will not be updated.
|
||||
|
||||
Args:
|
||||
project: project whose revision id is being updated.
|
||||
|
||||
Returns:
|
||||
True if a project's revision id should not be updated, or False,
|
||||
"""
|
||||
path = project.relpath
|
||||
if not path:
|
||||
return True
|
||||
# Skip the project with revisionId.
|
||||
if project.revisionId:
|
||||
return True
|
||||
# Skip the project if it comes from the local manifest.
|
||||
return project.manifest.IsFromLocalManifest(project)
|
||||
|
||||
def UpdateProjectsRevisionId(self, projects, git_event_log):
|
||||
"""Update revisionId of every project in projects with the commit id.
|
||||
|
||||
Args:
|
||||
projects: a list of projects whose revisionId needs to be updated.
|
||||
git_event_log: an EventLog, for git tracing.
|
||||
|
||||
Returns:
|
||||
UpdateProjectsResult
|
||||
"""
|
||||
self._git_event_log = git_event_log
|
||||
commit_ids_result = self._GetAllProjectsCommitIds()
|
||||
commit_ids = commit_ids_result.commit_ids
|
||||
if not commit_ids:
|
||||
return UpdateProjectsResult(None, commit_ids_result.fatal)
|
||||
|
||||
projects_missing_commit_ids = []
|
||||
for project in projects:
|
||||
if self._SkipUpdatingProjectRevisionId(project):
|
||||
continue
|
||||
path = project.relpath
|
||||
commit_id = commit_ids.get(path)
|
||||
if not commit_id:
|
||||
projects_missing_commit_ids.append(path)
|
||||
|
||||
# If superproject doesn't have a commit id for a project, then report an
|
||||
# error event and continue as if --no-use-superproject were specified.
|
||||
if projects_missing_commit_ids:
|
||||
self._LogWarning(
|
||||
"please file a bug using {} to report missing "
|
||||
"commit_ids for: {}",
|
||||
self._manifest.contactinfo.bugurl,
|
||||
projects_missing_commit_ids,
|
||||
)
|
||||
return UpdateProjectsResult(None, False)
|
||||
|
||||
for project in projects:
|
||||
if not self._SkipUpdatingProjectRevisionId(project):
|
||||
project.SetRevisionId(commit_ids.get(project.relpath))
|
||||
|
||||
manifest_path = self._WriteManifestFile()
|
||||
return UpdateProjectsResult(manifest_path, False)
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=10)
|
||||
def _PrintBetaNotice():
|
||||
"""Print the notice of beta status."""
|
||||
print(
|
||||
"NOTICE: --use-superproject is in beta; report any issues to the "
|
||||
"address described in `repo version`",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def _UseSuperprojectFromConfiguration():
|
||||
"""Returns the user choice of whether to use superproject."""
|
||||
user_cfg = RepoConfig.ForUser()
|
||||
time_now = int(time.time())
|
||||
"""Returns the user choice of whether to use superproject."""
|
||||
user_cfg = RepoConfig.ForUser()
|
||||
time_now = int(time.time())
|
||||
|
||||
user_value = user_cfg.GetBoolean('repo.superprojectChoice')
|
||||
if user_value is not None:
|
||||
user_expiration = user_cfg.GetInt('repo.superprojectChoiceExpire')
|
||||
if user_expiration is None or user_expiration <= 0 or user_expiration >= time_now:
|
||||
# TODO(b/190688390) - Remove prompt when we are comfortable with the new
|
||||
# default value.
|
||||
if user_value:
|
||||
print(('You are currently enrolled in Git submodules experiment '
|
||||
'(go/android-submodules-quickstart). Use --no-use-superproject '
|
||||
'to override.\n'), file=sys.stderr)
|
||||
else:
|
||||
print(('You are not currently enrolled in Git submodules experiment '
|
||||
'(go/android-submodules-quickstart). Use --use-superproject '
|
||||
'to override.\n'), file=sys.stderr)
|
||||
return user_value
|
||||
user_value = user_cfg.GetBoolean("repo.superprojectChoice")
|
||||
if user_value is not None:
|
||||
user_expiration = user_cfg.GetInt("repo.superprojectChoiceExpire")
|
||||
if (
|
||||
user_expiration is None
|
||||
or user_expiration <= 0
|
||||
or user_expiration >= time_now
|
||||
):
|
||||
# TODO(b/190688390) - Remove prompt when we are comfortable with the
|
||||
# new default value.
|
||||
if user_value:
|
||||
print(
|
||||
(
|
||||
"You are currently enrolled in Git submodules "
|
||||
"experiment (go/android-submodules-quickstart). Use "
|
||||
"--no-use-superproject to override.\n"
|
||||
),
|
||||
file=sys.stderr,
|
||||
)
|
||||
else:
|
||||
print(
|
||||
(
|
||||
"You are not currently enrolled in Git submodules "
|
||||
"experiment (go/android-submodules-quickstart). Use "
|
||||
"--use-superproject to override.\n"
|
||||
),
|
||||
file=sys.stderr,
|
||||
)
|
||||
return user_value
|
||||
|
||||
# We don't have an unexpired choice, ask for one.
|
||||
system_cfg = RepoConfig.ForSystem()
|
||||
system_value = system_cfg.GetBoolean('repo.superprojectChoice')
|
||||
if system_value:
|
||||
# The system configuration is proposing that we should enable the
|
||||
# use of superproject. Treat the user as enrolled for two weeks.
|
||||
#
|
||||
# TODO(b/190688390) - Remove prompt when we are comfortable with the new
|
||||
# default value.
|
||||
userchoice = True
|
||||
time_choiceexpire = time_now + (86400 * 14)
|
||||
user_cfg.SetString('repo.superprojectChoiceExpire', str(time_choiceexpire))
|
||||
user_cfg.SetBoolean('repo.superprojectChoice', userchoice)
|
||||
print('You are automatically enrolled in Git submodules experiment '
|
||||
'(go/android-submodules-quickstart) for another two weeks.\n',
|
||||
file=sys.stderr)
|
||||
return True
|
||||
# We don't have an unexpired choice, ask for one.
|
||||
system_cfg = RepoConfig.ForSystem()
|
||||
system_value = system_cfg.GetBoolean("repo.superprojectChoice")
|
||||
if system_value:
|
||||
# The system configuration is proposing that we should enable the
|
||||
# use of superproject. Treat the user as enrolled for two weeks.
|
||||
#
|
||||
# TODO(b/190688390) - Remove prompt when we are comfortable with the new
|
||||
# default value.
|
||||
userchoice = True
|
||||
time_choiceexpire = time_now + (86400 * 14)
|
||||
user_cfg.SetString(
|
||||
"repo.superprojectChoiceExpire", str(time_choiceexpire)
|
||||
)
|
||||
user_cfg.SetBoolean("repo.superprojectChoice", userchoice)
|
||||
print(
|
||||
"You are automatically enrolled in Git submodules experiment "
|
||||
"(go/android-submodules-quickstart) for another two weeks.\n",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return True
|
||||
|
||||
# For all other cases, we would not use superproject by default.
|
||||
return False
|
||||
# For all other cases, we would not use superproject by default.
|
||||
return False
|
||||
|
||||
|
||||
def PrintMessages(opt, manifest):
|
||||
"""Returns a boolean if error/warning messages are to be printed."""
|
||||
return opt.use_superproject is not None or manifest.superproject
|
||||
def PrintMessages(use_superproject, manifest):
|
||||
"""Returns a boolean if error/warning messages are to be printed.
|
||||
|
||||
Args:
|
||||
use_superproject: option value from optparse.
|
||||
manifest: manifest to use.
|
||||
"""
|
||||
return use_superproject is not None or bool(manifest.superproject)
|
||||
|
||||
|
||||
def UseSuperproject(opt, manifest):
|
||||
"""Returns a boolean if use-superproject option is enabled."""
|
||||
def UseSuperproject(use_superproject, manifest):
|
||||
"""Returns a boolean if use-superproject option is enabled.
|
||||
|
||||
if opt.use_superproject is not None:
|
||||
return opt.use_superproject
|
||||
else:
|
||||
client_value = manifest.manifestProject.config.GetBoolean('repo.superproject')
|
||||
if client_value is not None:
|
||||
return client_value
|
||||
else:
|
||||
if not manifest.superproject:
|
||||
Args:
|
||||
use_superproject: option value from optparse.
|
||||
manifest: manifest to use.
|
||||
|
||||
Returns:
|
||||
Whether the superproject should be used.
|
||||
"""
|
||||
|
||||
if not manifest.superproject:
|
||||
# This (sub) manifest does not have a superproject definition.
|
||||
return False
|
||||
return _UseSuperprojectFromConfiguration()
|
||||
elif use_superproject is not None:
|
||||
return use_superproject
|
||||
else:
|
||||
client_value = manifest.manifestProject.use_superproject
|
||||
if client_value is not None:
|
||||
return client_value
|
||||
elif manifest.superproject:
|
||||
return _UseSuperprojectFromConfiguration()
|
||||
else:
|
||||
return False
|
||||
git_trace2_event_log.py
@@ -1,273 +1,32 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Provide event logging in the git trace2 EVENT format.
|
||||
|
||||
The git trace2 EVENT format is defined at:
|
||||
https://www.kernel.org/pub/software/scm/git/docs/technical/api-trace2.html#_event_format
|
||||
https://git-scm.com/docs/api-trace2#_the_event_format_target
|
||||
|
||||
Usage:
|
||||
|
||||
git_trace_log = EventLog()
|
||||
git_trace_log.StartEvent()
|
||||
...
|
||||
git_trace_log.ExitEvent()
|
||||
git_trace_log.Write()
|
||||
"""
|
||||
from git_command import GetEventTargetPath
|
||||
from git_command import RepoSourceVersion
|
||||
from git_trace2_event_log_base import BaseEventLog
|
||||
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
class EventLog(BaseEventLog):
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
|
||||
from git_command import GitCommand, RepoSourceVersion
|
||||
Events are written to the log as a consecutive JSON entries, one per line.
|
||||
Entries follow the git trace2 EVENT format.
|
||||
|
||||
Each entry contains the following common keys:
|
||||
- event: The event name
|
||||
- sid: session-id - Unique string to allow process instance to be
|
||||
identified.
|
||||
- thread: The thread name.
|
||||
- time: is the UTC time of the event.
|
||||
|
||||
class EventLog(object):
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
|
||||
Events are written to the log as a consecutive JSON entries, one per line.
|
||||
Entries follow the git trace2 EVENT format.
|
||||
|
||||
Each entry contains the following common keys:
|
||||
- event: The event name
|
||||
- sid: session-id - Unique string to allow process instance to be identified.
|
||||
- thread: The thread name.
|
||||
- time: is the UTC time of the event.
|
||||
|
||||
Valid 'event' names and event specific fields are documented here:
|
||||
https://git-scm.com/docs/api-trace2#_event_format
|
||||
"""
|
||||
|
||||
def __init__(self, env=None):
|
||||
"""Initializes the event log."""
|
||||
self._log = []
|
||||
# Try to get session-id (sid) from environment (setup in repo launcher).
|
||||
KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
if env is None:
|
||||
env = os.environ
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
|
||||
# Save both our sid component and the complete sid.
|
||||
# We use our sid component (self._sid) as the unique filename prefix and
|
||||
# the full sid (self._full_sid) in the log itself.
|
||||
self._sid = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
|
||||
parent_sid = env.get(KEY)
|
||||
# Append our sid component to the parent sid (if it exists).
|
||||
if parent_sid is not None:
|
||||
self._full_sid = parent_sid + '/' + self._sid
|
||||
else:
|
||||
self._full_sid = self._sid
|
||||
|
||||
# Set/update the environment variable.
|
||||
# Environment handling across systems is messy.
|
||||
try:
|
||||
env[KEY] = self._full_sid
|
||||
except UnicodeEncodeError:
|
||||
env[KEY] = self._full_sid.encode()
|
||||
|
||||
# Add a version event to front of the log.
|
||||
self._AddVersionEvent()
|
||||
|
||||
@property
|
||||
def full_sid(self):
|
||||
return self._full_sid
|
||||
|
||||
def _AddVersionEvent(self):
|
||||
"""Adds a 'version' event at the beginning of current log."""
|
||||
version_event = self._CreateEventDict('version')
|
||||
version_event['evt'] = "2"
|
||||
version_event['exe'] = RepoSourceVersion()
|
||||
self._log.insert(0, version_event)
|
||||
|
||||
def _CreateEventDict(self, event_name):
|
||||
"""Returns a dictionary with the common keys/values for git trace2 events.
|
||||
|
||||
Args:
|
||||
event_name: The event name.
|
||||
|
||||
Returns:
|
||||
Dictionary with the common event fields populated.
|
||||
Valid 'event' names and event specific fields are documented here:
|
||||
https://git-scm.com/docs/api-trace2#_event_format
|
||||
"""
|
||||
return {
|
||||
'event': event_name,
|
||||
'sid': self._full_sid,
|
||||
'thread': threading.currentThread().getName(),
|
||||
'time': datetime.datetime.utcnow().isoformat() + 'Z',
|
||||
}
|
||||
|
||||
def StartEvent(self):
|
||||
"""Append a 'start' event to the current log."""
|
||||
start_event = self._CreateEventDict('start')
|
||||
start_event['argv'] = sys.argv
|
||||
self._log.append(start_event)
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(repo_source_version=RepoSourceVersion(), **kwargs)
|
||||
|
||||
def ExitEvent(self, result):
|
||||
"""Append an 'exit' event to the current log.
|
||||
def Write(self, path=None, **kwargs):
|
||||
if path is None:
|
||||
path = self._GetEventTargetPath()
|
||||
return super().Write(path=path, **kwargs)
|
||||
|
||||
Args:
|
||||
result: Exit code of the event
|
||||
"""
|
||||
exit_event = self._CreateEventDict('exit')
|
||||
|
||||
# Consider 'None' success (consistent with event_log result handling).
|
||||
if result is None:
|
||||
result = 0
|
||||
exit_event['code'] = result
|
||||
self._log.append(exit_event)
|
||||
|
||||
def CommandEvent(self, name, subcommands):
|
||||
"""Append a 'command' event to the current log.
|
||||
|
||||
Args:
|
||||
name: Name of the primary command (ex: repo, git)
|
||||
subcommands: List of the sub-commands (ex: version, init, sync)
|
||||
"""
|
||||
command_event = self._CreateEventDict('command')
|
||||
command_event['name'] = name
|
||||
command_event['subcommands'] = subcommands
|
||||
self._log.append(command_event)
|
||||
|
||||
def LogConfigEvents(self, config, event_dict_name):
|
||||
"""Append a |event_dict_name| event for each config key in |config|.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary.
|
||||
event_dict_name: Name of the event dictionary for items to be logged under.
|
||||
"""
|
||||
for param, value in config.items():
|
||||
event = self._CreateEventDict(event_dict_name)
|
||||
event['param'] = param
|
||||
event['value'] = value
|
||||
self._log.append(event)
|
||||
|
||||
def DefParamRepoEvents(self, config):
|
||||
"""Append a 'def_param' event for each repo.* config key to the current log.
|
||||
|
||||
Args:
|
||||
config: Repo configuration dictionary
|
||||
"""
|
||||
# Only output the repo.* config parameters.
|
||||
repo_config = {k: v for k, v in config.items() if k.startswith('repo.')}
|
||||
self.LogConfigEvents(repo_config, 'def_param')
|
||||
|
||||
def GetDataEventName(self, value):
|
||||
"""Returns 'data-json' if the value is an array else returns 'data'."""
|
||||
return 'data-json' if value[0] == '[' and value[-1] == ']' else 'data'
|
||||
|
||||
def LogDataConfigEvents(self, config, prefix):
|
||||
"""Append a 'data' event for each config key/value in |config| to the current log.
|
||||
|
||||
For each keyX and valueX of the config, "key" field of the event is '|prefix|/keyX'
|
||||
and the "value" of the "key" field is valueX.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary.
|
||||
prefix: Prefix for each key that is logged.
|
||||
"""
|
||||
for key, value in config.items():
|
||||
event = self._CreateEventDict(self.GetDataEventName(value))
|
||||
event['key'] = f'{prefix}/{key}'
|
||||
event['value'] = value
|
||||
self._log.append(event)
|
||||
|
||||
def ErrorEvent(self, msg, fmt):
|
||||
"""Append a 'error' event to the current log."""
|
||||
error_event = self._CreateEventDict('error')
|
||||
error_event['msg'] = msg
|
||||
error_event['fmt'] = fmt
|
||||
self._log.append(error_event)
|
||||
|
||||
def _GetEventTargetPath(self):
|
||||
"""Get the 'trace2.eventtarget' path from git configuration.
|
||||
|
||||
Returns:
|
||||
path: git config's 'trace2.eventtarget' path if it exists, or None
|
||||
"""
|
||||
path = None
|
||||
cmd = ['config', '--get', 'trace2.eventtarget']
|
||||
# TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
|
||||
# system git config variables.
|
||||
p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True,
|
||||
bare=True)
|
||||
retval = p.Wait()
|
||||
if retval == 0:
|
||||
# Strip the trailing newline from the path.
|
||||
path = p.stdout.rstrip('\n')
|
||||
elif retval != 1:
|
||||
# `git config --get` is documented to produce an exit status of `1` if
|
||||
# the requested variable is not present in the configuration. Report any
|
||||
# other return value as an error.
|
||||
print("repo: error: 'git config --get' call failed with return code: %r, stderr: %r" % (
|
||||
retval, p.stderr), file=sys.stderr)
|
||||
return path
|
||||
|
||||
def Write(self, path=None):
|
||||
"""Writes the log out to a file.
|
||||
|
||||
Log is only written if 'path' or 'git config --get trace2.eventtarget'
|
||||
provide a valid path to write logs to.
|
||||
|
||||
Logging filename format follows the git trace2 style of being a unique
|
||||
(exclusive writable) file.
|
||||
|
||||
Args:
|
||||
path: Path to where logs should be written.
|
||||
|
||||
Returns:
|
||||
log_path: Path to the log file if log is written, otherwise None
|
||||
"""
|
||||
log_path = None
|
||||
# If no logging path is specified, get the path from 'trace2.eventtarget'.
|
||||
if path is None:
|
||||
path = self._GetEventTargetPath()
|
||||
|
||||
# If no logging path is specified, exit.
|
||||
if path is None:
|
||||
return None
|
||||
|
||||
if isinstance(path, str):
|
||||
# Get absolute path.
|
||||
path = os.path.abspath(os.path.expanduser(path))
|
||||
else:
|
||||
raise TypeError('path: str required but got %s.' % type(path))
|
||||
|
||||
# Git trace2 requires a directory to write log to.
|
||||
|
||||
# TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
|
||||
if not os.path.isdir(path):
|
||||
return None
|
||||
# Use NamedTemporaryFile to generate a unique filename as required by git trace2.
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(mode='x', prefix=self._sid, dir=path,
|
||||
delete=False) as f:
|
||||
# TODO(https://crbug.com/gerrit/13706): Support writing events as they
|
||||
# occur.
|
||||
for e in self._log:
|
||||
# Dump in compact encoding mode.
|
||||
# See 'Compact encoding' in Python docs:
|
||||
# https://docs.python.org/3/library/json.html#module-json
|
||||
json.dump(e, f, indent=None, separators=(',', ':'))
|
||||
f.write('\n')
|
||||
log_path = f.name
|
||||
except FileExistsError as err:
|
||||
print('repo: warning: git trace2 logging failed: %r' % err,
|
||||
file=sys.stderr)
|
||||
return None
|
||||
return log_path
|
||||
def _GetEventTargetPath(self):
|
||||
return GetEventTargetPath()
|
||||
|
||||
354
git_trace2_event_log_base.py
Normal file
@@ -0,0 +1,354 @@
|
||||
# Copyright (C) 2020 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Provide event logging in the git trace2 EVENT format.
|
||||
|
||||
The git trace2 EVENT format is defined at:
|
||||
https://www.kernel.org/pub/software/scm/git/docs/technical/api-trace2.html#_event_format
|
||||
https://git-scm.com/docs/api-trace2#_the_event_format_target
|
||||
|
||||
Usage:
|
||||
|
||||
git_trace_log = EventLog()
|
||||
git_trace_log.StartEvent()
|
||||
...
|
||||
git_trace_log.ExitEvent()
|
||||
git_trace_log.Write()
|
||||
"""
|
||||
|
||||
|
||||
import datetime
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
|
||||
|
||||
# Timeout when sending events via socket (applies to connect, send)
|
||||
SOCK_TIMEOUT = 0.5 # in seconds
|
||||
# Counter of BaseEventLog __init__ calls; consistent within the same process
|
||||
p_init_count = 0
|
||||
|
||||
|
||||
class BaseEventLog:
|
||||
"""Event log that records events that occurred during a repo invocation.
|
||||
|
||||
Events are written to the log as consecutive JSON entries, one per line.
|
||||
Entries follow the git trace2 EVENT format.
|
||||
|
||||
Each entry contains the following common keys:
|
||||
- event: The event name
|
||||
- sid: session-id - Unique string to allow process instance to be
|
||||
identified.
|
||||
- thread: The thread name.
|
||||
- time: is the UTC time of the event.
|
||||
|
||||
Valid 'event' names and event specific fields are documented here:
|
||||
https://git-scm.com/docs/api-trace2#_event_format
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, env=None, repo_source_version=None, add_init_count=False
|
||||
):
|
||||
"""Initializes the event log."""
|
||||
global p_init_count
|
||||
p_init_count += 1
|
||||
self._log = []
|
||||
# Try to get session-id (sid) from environment (setup in repo launcher).
|
||||
KEY = "GIT_TRACE2_PARENT_SID"
|
||||
if env is None:
|
||||
env = os.environ
|
||||
|
||||
self.start = datetime.datetime.now(datetime.timezone.utc)
|
||||
|
||||
# Save both our sid component and the complete sid.
|
||||
# We use our sid component (self._sid) as the unique filename prefix and
|
||||
# the full sid (self._full_sid) in the log itself.
|
||||
self._sid = (
|
||||
f"repo-{self.start.strftime('%Y%m%dT%H%M%SZ')}-P{os.getpid():08x}"
|
||||
)
|
||||
|
||||
if add_init_count:
|
||||
self._sid = f"{self._sid}-{p_init_count}"
|
||||
|
||||
parent_sid = env.get(KEY)
|
||||
# Append our sid component to the parent sid (if it exists).
|
||||
if parent_sid is not None:
|
||||
self._full_sid = parent_sid + "/" + self._sid
|
||||
else:
|
||||
self._full_sid = self._sid
|
||||
|
||||
# Set/update the environment variable.
|
||||
# Environment handling across systems is messy.
|
||||
try:
|
||||
env[KEY] = self._full_sid
|
||||
except UnicodeEncodeError:
|
||||
env[KEY] = self._full_sid.encode()
|
||||
|
||||
if repo_source_version is not None:
|
||||
# Add a version event to front of the log.
|
||||
self._AddVersionEvent(repo_source_version)
|
||||
|
||||
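As a quick illustration of the session-id handling above (an editor's sketch, not part of the diff; it assumes the module is importable as git_trace2_event_log_base, as in a repo checkout):

# Sketch: BaseEventLog builds "repo-<UTC timestamp>-P<pid in hex>" and chains it
# onto any parent sid found in the environment, then re-exports the result.
from git_trace2_event_log_base import BaseEventLog

env = {"GIT_TRACE2_PARENT_SID": "repo-20240101T000000Z-P00001234"}
log = BaseEventLog(env=env)
print(log.full_sid)                                  # "<parent sid>/repo-...-P..."
print(env["GIT_TRACE2_PARENT_SID"] == log.full_sid)  # True: child git processes inherit it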
@property
|
||||
def full_sid(self):
|
||||
return self._full_sid
|
||||
|
||||
def _AddVersionEvent(self, repo_source_version):
|
||||
"""Adds a 'version' event at the beginning of current log."""
|
||||
version_event = self._CreateEventDict("version")
|
||||
version_event["evt"] = "2"
|
||||
version_event["exe"] = repo_source_version
|
||||
self._log.insert(0, version_event)
|
||||
|
||||
def _CreateEventDict(self, event_name):
|
||||
"""Returns a dictionary with common keys/values for git trace2 events.
|
||||
|
||||
Args:
|
||||
event_name: The event name.
|
||||
|
||||
Returns:
|
||||
Dictionary with the common event fields populated.
|
||||
"""
|
||||
return {
|
||||
"event": event_name,
|
||||
"sid": self._full_sid,
|
||||
"thread": threading.current_thread().name,
|
||||
"time": datetime.datetime.now(datetime.timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
def StartEvent(self):
|
||||
"""Append a 'start' event to the current log."""
|
||||
start_event = self._CreateEventDict("start")
|
||||
start_event["argv"] = sys.argv
|
||||
self._log.append(start_event)
|
||||
|
||||
def ExitEvent(self, result):
|
||||
"""Append an 'exit' event to the current log.
|
||||
|
||||
Args:
|
||||
result: Exit code of the event
|
||||
"""
|
||||
exit_event = self._CreateEventDict("exit")
|
||||
|
||||
# Consider 'None' success (consistent with event_log result handling).
|
||||
if result is None:
|
||||
result = 0
|
||||
exit_event["code"] = result
|
||||
time_delta = datetime.datetime.now(datetime.timezone.utc) - self.start
|
||||
exit_event["t_abs"] = time_delta.total_seconds()
|
||||
self._log.append(exit_event)
|
||||
|
||||
def CommandEvent(self, name, subcommands):
|
||||
"""Append a 'command' event to the current log.
|
||||
|
||||
Args:
|
||||
name: Name of the primary command (ex: repo, git)
|
||||
subcommands: List of the sub-commands (ex: version, init, sync)
|
||||
"""
|
||||
command_event = self._CreateEventDict("command")
|
||||
command_event["name"] = name
|
||||
command_event["subcommands"] = subcommands
|
||||
self._log.append(command_event)
|
||||
|
||||
def LogConfigEvents(self, config, event_dict_name):
|
||||
"""Append a |event_dict_name| event for each config key in |config|.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary.
|
||||
event_dict_name: Name of the event dictionary for items to be logged
|
||||
under.
|
||||
"""
|
||||
for param, value in config.items():
|
||||
event = self._CreateEventDict(event_dict_name)
|
||||
event["param"] = param
|
||||
event["value"] = value
|
||||
self._log.append(event)
|
||||
|
||||
def DefParamRepoEvents(self, config):
|
||||
"""Append 'def_param' events for repo config keys to the current log.
|
||||
|
||||
This appends one event for each repo.* config key.
|
||||
|
||||
Args:
|
||||
config: Repo configuration dictionary
|
||||
"""
|
||||
# Only output the repo.* config parameters.
|
||||
repo_config = {k: v for k, v in config.items() if k.startswith("repo.")}
|
||||
self.LogConfigEvents(repo_config, "def_param")
|
||||
|
||||
def GetDataEventName(self, value):
|
||||
"""Returns 'data-json' if the value is an array else returns 'data'."""
|
||||
return "data-json" if value[0] == "[" and value[-1] == "]" else "data"
|
||||
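A tiny sketch (editor's illustration, assuming the same import path) of how the event name is chosen from the shape of the value:

from git_trace2_event_log_base import BaseEventLog

log = BaseEventLog(env={})
print(log.GetDataEventName('["a", "b"]'))  # 'data-json': value looks like a JSON array
print(log.GetDataEventName("42"))          # 'data': plain scalar value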
|
||||
def LogDataConfigEvents(self, config, prefix):
|
||||
"""Append a 'data' event for each entry in |config| to the current log.
|
||||
|
||||
For each keyX and valueX of the config, "key" field of the event is
|
||||
'|prefix|/keyX' and the "value" field of the event is valueX.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary.
|
||||
prefix: Prefix for each key that is logged.
|
||||
"""
|
||||
for key, value in config.items():
|
||||
event = self._CreateEventDict(self.GetDataEventName(value))
|
||||
event["key"] = f"{prefix}/{key}"
|
||||
event["value"] = value
|
||||
self._log.append(event)
|
||||
|
||||
def ErrorEvent(self, msg, fmt=None):
|
||||
"""Append a 'error' event to the current log."""
|
||||
error_event = self._CreateEventDict("error")
|
||||
if fmt is None:
|
||||
fmt = msg
|
||||
error_event["msg"] = f"RepoErrorEvent:{msg}"
|
||||
error_event["fmt"] = f"RepoErrorEvent:{fmt}"
|
||||
self._log.append(error_event)
|
||||
|
||||
def _WriteLog(self, write_fn):
|
||||
"""Writes the log out using a provided writer function.
|
||||
|
||||
Generate compact JSON output for each item in the log, and write it
|
||||
using write_fn.
|
||||
|
||||
Args:
|
||||
write_fn: A function that accepts bytes and writes them to a
|
||||
destination.
|
||||
"""
|
||||
|
||||
for e in self._log:
|
||||
# Dump in compact encoding mode.
|
||||
# See 'Compact encoding' in Python docs:
|
||||
# https://docs.python.org/3/library/json.html#module-json
|
||||
write_fn(
|
||||
json.dumps(e, indent=None, separators=(",", ":")).encode(
|
||||
"utf-8"
|
||||
)
|
||||
+ b"\n"
|
||||
)
|
||||
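Since _WriteLog only needs a callable that accepts bytes, any sink works; a minimal sketch (editor's illustration) using an in-memory buffer:

import io

from git_trace2_event_log_base import BaseEventLog

log = BaseEventLog(env={})
log.StartEvent()
buf = io.BytesIO()
log._WriteLog(buf.write)               # one compact JSON object per line
print(buf.getvalue().decode("utf-8"))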
|
||||
def Write(self, path=None):
|
||||
"""Writes the log out to a file or socket.
|
||||
|
||||
Log is only written if 'path' or 'git config --get trace2.eventtarget'
|
||||
provide a valid path (or socket) to write logs to.
|
||||
|
||||
Logging filename format follows the git trace2 style of being a unique
|
||||
(exclusive writable) file.
|
||||
|
||||
Args:
|
||||
path: Path to where logs should be written. The path may have a
|
||||
prefix of the form "af_unix:[{stream|dgram}:]", in which case
|
||||
the path is treated as a Unix domain socket. See
|
||||
https://git-scm.com/docs/api-trace2#_enabling_a_target for
|
||||
details.
|
||||
|
||||
Returns:
|
||||
log_path: Path to the log file or socket if log is written,
|
||||
otherwise None
|
||||
"""
|
||||
log_path = None
|
||||
# If no logging path is specified, exit.
|
||||
if path is None:
|
||||
return None
|
||||
|
||||
path_is_socket = False
|
||||
socket_type = None
|
||||
if isinstance(path, str):
|
||||
parts = path.split(":", 1)
|
||||
if parts[0] == "af_unix" and len(parts) == 2:
|
||||
path_is_socket = True
|
||||
path = parts[1]
|
||||
parts = path.split(":", 1)
|
||||
if parts[0] == "stream" and len(parts) == 2:
|
||||
socket_type = socket.SOCK_STREAM
|
||||
path = parts[1]
|
||||
elif parts[0] == "dgram" and len(parts) == 2:
|
||||
socket_type = socket.SOCK_DGRAM
|
||||
path = parts[1]
|
||||
else:
|
||||
# Get absolute path.
|
||||
path = os.path.abspath(os.path.expanduser(path))
|
||||
else:
|
||||
raise TypeError("path: str required but got %s." % type(path))
|
||||
|
||||
# Git trace2 requires a directory to write log to.
|
||||
|
||||
# TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
|
||||
if not (path_is_socket or os.path.isdir(path)):
|
||||
return None
|
||||
|
||||
if path_is_socket:
|
||||
if socket_type == socket.SOCK_STREAM or socket_type is None:
|
||||
try:
|
||||
with socket.socket(
|
||||
socket.AF_UNIX, socket.SOCK_STREAM
|
||||
) as sock:
|
||||
sock.settimeout(SOCK_TIMEOUT)
|
||||
sock.connect(path)
|
||||
self._WriteLog(sock.sendall)
|
||||
return f"af_unix:stream:{path}"
|
||||
except OSError as err:
|
||||
# If we tried to connect to a DGRAM socket using STREAM,
|
||||
# ignore the attempt and continue to DGRAM below. Otherwise,
|
||||
# issue a warning.
|
||||
if err.errno != errno.EPROTOTYPE:
|
||||
print(
|
||||
f"repo: warning: git trace2 logging failed: {err}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
if socket_type == socket.SOCK_DGRAM or socket_type is None:
|
||||
try:
|
||||
with socket.socket(
|
||||
socket.AF_UNIX, socket.SOCK_DGRAM
|
||||
) as sock:
|
||||
self._WriteLog(lambda bs: sock.sendto(bs, path))
|
||||
return f"af_unix:dgram:{path}"
|
||||
except OSError as err:
|
||||
print(
|
||||
f"repo: warning: git trace2 logging failed: {err}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
# Tried to open a socket but couldn't connect (SOCK_STREAM) or write
|
||||
# (SOCK_DGRAM).
|
||||
print(
|
||||
"repo: warning: git trace2 logging failed: could not write to "
|
||||
"socket",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
|
||||
# Path is an absolute path
|
||||
# Use NamedTemporaryFile to generate a unique filename as required by
|
||||
# git trace2.
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode="xb", prefix=self._sid, dir=path, delete=False
|
||||
) as f:
|
||||
# TODO(https://crbug.com/gerrit/13706): Support writing events
|
||||
# as they occur.
|
||||
self._WriteLog(f.write)
|
||||
log_path = f.name
|
||||
except FileExistsError as err:
|
||||
print(
|
||||
"repo: warning: git trace2 logging failed: %r" % err,
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
return log_path
|
||||
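A minimal end-to-end sketch of Write() against a directory target (editor's illustration; a socket target would instead be passed as e.g. "af_unix:stream:/tmp/trace2.sock" and needs a listening socket on the other end):

import tempfile

from git_trace2_event_log_base import BaseEventLog

log = BaseEventLog(env={})
log.StartEvent()
log.ExitEvent(0)
with tempfile.TemporaryDirectory() as d:
    # Returns the unique per-invocation log file path, or None on failure.
    print(log.Write(path=d))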
156
gitc_utils.py
@@ -1,156 +0,0 @@
|
||||
# Copyright (C) 2015 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import multiprocessing
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
import git_command
|
||||
import git_config
|
||||
import wrapper
|
||||
|
||||
from error import ManifestParseError
|
||||
|
||||
NUM_BATCH_RETRIEVE_REVISIONID = 32
|
||||
|
||||
|
||||
def get_gitc_manifest_dir():
|
||||
return wrapper.Wrapper().get_gitc_manifest_dir()
|
||||
|
||||
|
||||
def parse_clientdir(gitc_fs_path):
|
||||
return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
|
||||
|
||||
|
||||
def _get_project_revision(args):
|
||||
"""Worker for _set_project_revisions to lookup one project remote."""
|
||||
(i, url, expr) = args
|
||||
gitcmd = git_command.GitCommand(
|
||||
None, ['ls-remote', url, expr], capture_stdout=True, cwd='/tmp')
|
||||
rc = gitcmd.Wait()
|
||||
return (i, rc, gitcmd.stdout.split('\t', 1)[0])
|
||||
|
||||
|
||||
def _set_project_revisions(projects):
|
||||
"""Sets the revisionExpr for a list of projects.
|
||||
|
||||
Because of the limit on open file descriptors, the list of projects
|
||||
should not be overly large. Recommend calling this function multiple times
|
||||
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
|
||||
|
||||
Args:
|
||||
projects: List of project objects to set the revisionExpr for.
|
||||
"""
|
||||
# Retrieve the commit id for each project based off of its current
# revisionExpr, if it is not already a commit id.
|
||||
with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool:
|
||||
results_iter = pool.imap_unordered(
|
||||
_get_project_revision,
|
||||
((i, project.remote.url, project.revisionExpr)
|
||||
for i, project in enumerate(projects)
|
||||
if not git_config.IsId(project.revisionExpr)),
|
||||
chunksize=8)
|
||||
for (i, rc, revisionExpr) in results_iter:
|
||||
project = projects[i]
|
||||
if rc:
|
||||
print('FATAL: Failed to retrieve revisionExpr for %s' % project.name)
|
||||
pool.terminate()
|
||||
sys.exit(1)
|
||||
if not revisionExpr:
|
||||
pool.terminate()
|
||||
raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
|
||||
(project.remote.url, project.revisionExpr))
|
||||
project.revisionExpr = revisionExpr
|
||||
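An editor's sketch of the batching that the docstring above recommends; `projects` is assumed to be a list of project objects taken from a parsed manifest, and the other names are in scope within this module:

# Hypothetical caller: process projects in fd-friendly batches.
for start in range(0, len(projects), NUM_BATCH_RETRIEVE_REVISIONID):
    _set_project_revisions(projects[start:start + NUM_BATCH_RETRIEVE_REVISIONID])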
|
||||
|
||||
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||
"""Generate a manifest for shafsd to use for this GITC client.
|
||||
|
||||
Args:
|
||||
gitc_manifest: Current gitc manifest, or None if there isn't one yet.
|
||||
manifest: A GitcManifest object loaded with the current repo manifest.
|
||||
paths: List of project paths we want to update.
|
||||
"""
|
||||
|
||||
print('Generating GITC Manifest by fetching revision SHAs for each '
|
||||
'project.')
|
||||
if paths is None:
|
||||
paths = list(manifest.paths.keys())
|
||||
|
||||
groups = [x for x in re.split(r'[,\s]+', manifest.GetGroupsStr()) if x]
|
||||
|
||||
# Convert the paths to projects, and filter them to the matched groups.
|
||||
projects = [manifest.paths[p] for p in paths]
|
||||
projects = [p for p in projects if p.MatchesGroups(groups)]
|
||||
|
||||
if gitc_manifest is not None:
|
||||
for path, proj in manifest.paths.items():
|
||||
if not proj.MatchesGroups(groups):
|
||||
continue
|
||||
|
||||
if not proj.upstream and not git_config.IsId(proj.revisionExpr):
|
||||
proj.upstream = proj.revisionExpr
|
||||
|
||||
if path not in gitc_manifest.paths:
|
||||
# Any new projects need their first revision, even if we weren't asked
|
||||
# for them.
|
||||
projects.append(proj)
|
||||
elif path not in paths:
|
||||
# And copy revisions from the previous manifest if we're not updating
|
||||
# them now.
|
||||
gitc_proj = gitc_manifest.paths[path]
|
||||
if gitc_proj.old_revision:
|
||||
proj.revisionExpr = None
|
||||
proj.old_revision = gitc_proj.old_revision
|
||||
else:
|
||||
proj.revisionExpr = gitc_proj.revisionExpr
|
||||
|
||||
_set_project_revisions(projects)
|
||||
|
||||
if gitc_manifest is not None:
|
||||
for path, proj in gitc_manifest.paths.items():
|
||||
if proj.old_revision and path in paths:
|
||||
# If we updated a project that has been started, keep the old-revision
|
||||
# updated.
|
||||
repo_proj = manifest.paths[path]
|
||||
repo_proj.old_revision = repo_proj.revisionExpr
|
||||
repo_proj.revisionExpr = None
|
||||
|
||||
# Convert URLs from relative to absolute.
|
||||
for _name, remote in manifest.remotes.items():
|
||||
remote.fetchUrl = remote.resolvedFetchUrl
|
||||
|
||||
# Save the manifest.
|
||||
save_manifest(manifest)
|
||||
|
||||
|
||||
def save_manifest(manifest, client_dir=None):
|
||||
"""Save the manifest file in the client_dir.
|
||||
|
||||
Args:
|
||||
manifest: Manifest object to save.
|
||||
client_dir: Client directory to save the manifest in.
|
||||
"""
|
||||
if not client_dir:
|
||||
manifest_file = manifest.manifestFile
|
||||
else:
|
||||
manifest_file = os.path.join(client_dir, '.manifest')
|
||||
with open(manifest_file, 'w') as f:
|
||||
manifest.Save(f, groups=manifest.GetGroupsStr())
|
||||
# TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
|
||||
# Give the GITC filesystem time to register the manifest changes.
|
||||
time.sleep(3)
|
||||
846
hooks.py
@@ -12,11 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
import urllib.parse
|
||||
@@ -25,485 +22,482 @@ from error import HookError
|
||||
from git_refs import HEAD
|
||||
|
||||
|
||||
class RepoHook(object):
|
||||
"""A RepoHook contains information about a script to run as a hook.
|
||||
class RepoHook:
|
||||
"""A RepoHook contains information about a script to run as a hook.
|
||||
|
||||
Hooks are used to run a python script before running an upload (for instance,
|
||||
to run presubmit checks). Eventually, we may have hooks for other actions.
|
||||
Hooks are used to run a python script before running an upload (for
|
||||
instance, to run presubmit checks). Eventually, we may have hooks for other
|
||||
actions.
|
||||
|
||||
This shouldn't be confused with files in the 'repo/hooks' directory. Those
|
||||
files are copied into each '.git/hooks' folder for each project. Repo-level
|
||||
hooks are associated instead with repo actions.
|
||||
This shouldn't be confused with files in the 'repo/hooks' directory. Those
|
||||
files are copied into each '.git/hooks' folder for each project. Repo-level
|
||||
hooks are associated instead with repo actions.
|
||||
|
||||
Hooks are always python. When a hook is run, we will load the hook into the
|
||||
interpreter and execute its main() function.
|
||||
Hooks are always python. When a hook is run, we will load the hook into the
|
||||
interpreter and execute its main() function.
|
||||
|
||||
Combinations of hook option flags:
|
||||
- no-verify=False, verify=False (DEFAULT):
|
||||
If stdout is a tty, can prompt about running hooks if needed.
|
||||
If user denies running hooks, the action is cancelled. If stdout is
|
||||
not a tty and we would need to prompt about hooks, action is
|
||||
cancelled.
|
||||
- no-verify=False, verify=True:
|
||||
Always run hooks with no prompt.
|
||||
- no-verify=True, verify=False:
|
||||
Never run hooks, but run action anyway (AKA bypass hooks).
|
||||
- no-verify=True, verify=True:
|
||||
Invalid
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
hook_type,
|
||||
hooks_project,
|
||||
repo_topdir,
|
||||
manifest_url,
|
||||
bypass_hooks=False,
|
||||
allow_all_hooks=False,
|
||||
ignore_hooks=False,
|
||||
abort_if_user_denies=False):
|
||||
"""RepoHook constructor.
|
||||
|
||||
Params:
|
||||
hook_type: A string representing the type of hook. This is also used
|
||||
to figure out the name of the file containing the hook. For
|
||||
example: 'pre-upload'.
|
||||
hooks_project: The project containing the repo hooks.
|
||||
If you have a manifest, this is manifest.repo_hooks_project.
|
||||
OK if this is None, which will make the hook a no-op.
|
||||
repo_topdir: The top directory of the repo client checkout.
|
||||
This is the one containing the .repo directory. Scripts will
|
||||
run with CWD as this directory.
|
||||
If you have a manifest, this is manifest.topdir.
|
||||
manifest_url: The URL to the manifest git repo.
|
||||
bypass_hooks: If True, then 'Do not run the hook'.
|
||||
allow_all_hooks: If True, then 'Run the hook without prompting'.
|
||||
ignore_hooks: If True, then 'Do not abort action if hooks fail'.
|
||||
abort_if_user_denies: If True, we'll abort running the hook if the user
|
||||
doesn't allow us to run the hook.
|
||||
Combinations of hook option flags:
|
||||
- no-verify=False, verify=False (DEFAULT):
|
||||
If stdout is a tty, can prompt about running hooks if needed.
|
||||
If user denies running hooks, the action is cancelled. If stdout is
|
||||
not a tty and we would need to prompt about hooks, action is
|
||||
cancelled.
|
||||
- no-verify=False, verify=True:
|
||||
Always run hooks with no prompt.
|
||||
- no-verify=True, verify=False:
|
||||
Never run hooks, but run action anyway (AKA bypass hooks).
|
||||
- no-verify=True, verify=True:
|
||||
Invalid
|
||||
"""
|
||||
self._hook_type = hook_type
|
||||
self._hooks_project = hooks_project
|
||||
self._repo_topdir = repo_topdir
|
||||
self._manifest_url = manifest_url
|
||||
self._bypass_hooks = bypass_hooks
|
||||
self._allow_all_hooks = allow_all_hooks
|
||||
self._ignore_hooks = ignore_hooks
|
||||
self._abort_if_user_denies = abort_if_user_denies
|
||||
|
||||
# Store the full path to the script for convenience.
|
||||
if self._hooks_project:
|
||||
self._script_fullpath = os.path.join(self._hooks_project.worktree,
|
||||
self._hook_type + '.py')
|
||||
else:
|
||||
self._script_fullpath = None
|
||||
def __init__(
|
||||
self,
|
||||
hook_type,
|
||||
hooks_project,
|
||||
repo_topdir,
|
||||
manifest_url,
|
||||
bypass_hooks=False,
|
||||
allow_all_hooks=False,
|
||||
ignore_hooks=False,
|
||||
abort_if_user_denies=False,
|
||||
):
|
||||
"""RepoHook constructor.
|
||||
|
||||
def _GetHash(self):
|
||||
"""Return a hash of the contents of the hooks directory.
|
||||
Params:
|
||||
hook_type: A string representing the type of hook. This is also used
|
||||
to figure out the name of the file containing the hook. For
|
||||
example: 'pre-upload'.
|
||||
hooks_project: The project containing the repo hooks.
|
||||
If you have a manifest, this is manifest.repo_hooks_project.
|
||||
OK if this is None, which will make the hook a no-op.
|
||||
repo_topdir: The top directory of the repo client checkout.
|
||||
This is the one containing the .repo directory. Scripts will
|
||||
run with CWD as this directory.
|
||||
If you have a manifest, this is manifest.topdir.
|
||||
manifest_url: The URL to the manifest git repo.
|
||||
bypass_hooks: If True, then 'Do not run the hook'.
|
||||
allow_all_hooks: If True, then 'Run the hook without prompting'.
|
||||
ignore_hooks: If True, then 'Do not abort action if hooks fail'.
|
||||
abort_if_user_denies: If True, we'll abort running the hook if the
|
||||
user doesn't allow us to run the hook.
|
||||
"""
|
||||
self._hook_type = hook_type
|
||||
self._hooks_project = hooks_project
|
||||
self._repo_topdir = repo_topdir
|
||||
self._manifest_url = manifest_url
|
||||
self._bypass_hooks = bypass_hooks
|
||||
self._allow_all_hooks = allow_all_hooks
|
||||
self._ignore_hooks = ignore_hooks
|
||||
self._abort_if_user_denies = abort_if_user_denies
|
||||
|
||||
We'll just use git to do this. This hash has the property that if anything
|
||||
changes in the directory we will return a different hash.
|
||||
# Store the full path to the script for convenience.
|
||||
if self._hooks_project:
|
||||
self._script_fullpath = os.path.join(
|
||||
self._hooks_project.worktree, self._hook_type + ".py"
|
||||
)
|
||||
else:
|
||||
self._script_fullpath = None
|
||||
|
||||
SECURITY CONSIDERATION:
|
||||
This hash only represents the contents of files in the hook directory, not
|
||||
any other files imported or called by hooks. Changes to imported files
|
||||
can change the script behavior without affecting the hash.
|
||||
def _GetHash(self):
|
||||
"""Return a hash of the contents of the hooks directory.
|
||||
|
||||
Returns:
|
||||
A string representing the hash. This will always be ASCII so that it can
|
||||
be printed to the user easily.
|
||||
"""
|
||||
assert self._hooks_project, "Must have hooks to calculate their hash."
|
||||
We'll just use git to do this. This hash has the property that if
|
||||
anything changes in the directory we will return a different hash.
|
||||
|
||||
# We will use the work_git object rather than just calling GetRevisionId().
|
||||
# That gives us a hash of the latest checked in version of the files that
|
||||
# the user will actually be executing. Specifically, GetRevisionId()
|
||||
# doesn't appear to change even if a user checks out a different version
|
||||
# of the hooks repo (via git checkout) nor if a user commits their own revs.
|
||||
#
|
||||
# NOTE: Local (non-committed) changes will not be factored into this hash.
|
||||
# I think this is OK, since we're really only worried about warning the user
|
||||
# about upstream changes.
|
||||
return self._hooks_project.work_git.rev_parse(HEAD)
|
||||
SECURITY CONSIDERATION:
|
||||
This hash only represents the contents of files in the hook
|
||||
directory, not any other files imported or called by hooks. Changes
|
||||
to imported files can change the script behavior without affecting
|
||||
the hash.
|
||||
|
||||
def _GetMustVerb(self):
|
||||
"""Return 'must' if the hook is required; 'should' if not."""
|
||||
if self._abort_if_user_denies:
|
||||
return 'must'
|
||||
else:
|
||||
return 'should'
|
||||
Returns:
|
||||
A string representing the hash. This will always be ASCII so that
|
||||
it can be printed to the user easily.
|
||||
"""
|
||||
assert self._hooks_project, "Must have hooks to calculate their hash."
|
||||
|
||||
def _CheckForHookApproval(self):
|
||||
"""Check to see whether this hook has been approved.
|
||||
# We will use the work_git object rather than just calling
|
||||
# GetRevisionId(). That gives us a hash of the latest checked in version
|
||||
# of the files that the user will actually be executing. Specifically,
|
||||
# GetRevisionId() doesn't appear to change even if a user checks out a
|
||||
# different version of the hooks repo (via git checkout) nor if a user
|
||||
# commits their own revs.
|
||||
#
|
||||
# NOTE: Local (non-committed) changes will not be factored into this
|
||||
# hash. I think this is OK, since we're really only worried about
|
||||
# warning the user about upstream changes.
|
||||
return self._hooks_project.work_git.rev_parse(HEAD)
|
||||
|
||||
We'll accept approval of manifest URLs if they're using secure transports.
|
||||
This way the user can say they trust the manifest hoster. For insecure
|
||||
hosts, we fall back to checking the hash of the hooks repo.
|
||||
def _GetMustVerb(self):
|
||||
"""Return 'must' if the hook is required; 'should' if not."""
|
||||
if self._abort_if_user_denies:
|
||||
return "must"
|
||||
else:
|
||||
return "should"
|
||||
|
||||
Note that we ask permission for each individual hook even though we use
|
||||
the hash of all hooks when detecting changes. We'd like the user to be
|
||||
able to approve / deny each hook individually. We only use the hash of all
|
||||
hooks because there is no other easy way to detect changes to local imports.
|
||||
def _CheckForHookApproval(self):
|
||||
"""Check to see whether this hook has been approved.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
We'll accept approval of manifest URLs if they're using secure
|
||||
transports. This way the user can say they trust the manifest hoster.
|
||||
For insecure hosts, we fall back to checking the hash of the hooks repo.
|
||||
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the constructor.
|
||||
"""
|
||||
if self._ManifestUrlHasSecureScheme():
|
||||
return self._CheckForHookApprovalManifest()
|
||||
else:
|
||||
return self._CheckForHookApprovalHash()
|
||||
Note that we ask permission for each individual hook even though we use
|
||||
the hash of all hooks when detecting changes. We'd like the user to be
|
||||
able to approve / deny each hook individually. We only use the hash of
|
||||
all hooks because there is no other easy way to detect changes to local
|
||||
imports.
|
||||
|
||||
def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
|
||||
changed_prompt):
|
||||
"""Check for approval for a particular attribute and hook.
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
|
||||
Args:
|
||||
subkey: The git config key under [repo.hooks.<hook_type>] to store the
|
||||
last approved string.
|
||||
new_val: The new value to compare against the last approved one.
|
||||
main_prompt: Message to display to the user to ask for approval.
|
||||
changed_prompt: Message explaining why we're re-asking for approval.
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and
|
||||
abort_if_user_denies was passed to the constructor.
|
||||
"""
|
||||
if self._ManifestUrlHasSecureScheme():
|
||||
return self._CheckForHookApprovalManifest()
|
||||
else:
|
||||
return self._CheckForHookApprovalHash()
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
def _CheckForHookApprovalHelper(
|
||||
self, subkey, new_val, main_prompt, changed_prompt
|
||||
):
|
||||
"""Check for approval for a particular attribute and hook.
|
||||
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and abort_if_user_denies
|
||||
was passed to the constructor.
|
||||
"""
|
||||
hooks_config = self._hooks_project.config
|
||||
git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
|
||||
Args:
|
||||
subkey: The git config key under [repo.hooks.<hook_type>] to store
|
||||
the last approved string.
|
||||
new_val: The new value to compare against the last approved one.
|
||||
main_prompt: Message to display to the user to ask for approval.
|
||||
changed_prompt: Message explaining why we're re-asking for approval.
|
||||
|
||||
# Get the last value that the user approved for this hook; may be None.
|
||||
old_val = hooks_config.GetString(git_approval_key)
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
|
||||
if old_val is not None:
|
||||
# User previously approved hook and asked not to be prompted again.
|
||||
if new_val == old_val:
|
||||
# Approval matched. We're done.
|
||||
return True
|
||||
else:
|
||||
# Give the user a reason why we're prompting, since they last told
|
||||
# us to "never ask again".
|
||||
prompt = 'WARNING: %s\n\n' % (changed_prompt,)
|
||||
else:
|
||||
prompt = ''
|
||||
Raises:
|
||||
HookError: Raised if the user doesn't approve and
|
||||
abort_if_user_denies was passed to the constructor.
|
||||
"""
|
||||
hooks_config = self._hooks_project.config
|
||||
git_approval_key = f"repo.hooks.{self._hook_type}.{subkey}"
|
||||
|
||||
# Prompt the user if we're on a tty; otherwise, assume "no".
|
||||
if sys.stdout.isatty():
|
||||
prompt += main_prompt + ' (yes/always/NO)? '
|
||||
response = input(prompt).lower()
|
||||
print()
|
||||
# Get the last value that the user approved for this hook; may be None.
|
||||
old_val = hooks_config.GetString(git_approval_key)
|
||||
|
||||
# User is doing a one-time approval.
|
||||
if response in ('y', 'yes'):
|
||||
return True
|
||||
elif response == 'always':
|
||||
hooks_config.SetString(git_approval_key, new_val)
|
||||
return True
|
||||
if old_val is not None:
|
||||
# User previously approved hook and asked not to be prompted again.
|
||||
if new_val == old_val:
|
||||
# Approval matched. We're done.
|
||||
return True
|
||||
else:
|
||||
# Give the user a reason why we're prompting, since they last
|
||||
# told us to "never ask again".
|
||||
prompt = f"WARNING: {changed_prompt}\n\n"
|
||||
else:
|
||||
prompt = ""
|
||||
|
||||
# For anything else, we'll assume no approval.
|
||||
if self._abort_if_user_denies:
|
||||
raise HookError('You must allow the %s hook or use --no-verify.' %
|
||||
self._hook_type)
|
||||
# Prompt the user if we're on a tty; otherwise, assume "no".
|
||||
if sys.stdout.isatty():
|
||||
prompt += main_prompt + " (yes/always/NO)? "
|
||||
response = input(prompt).lower()
|
||||
print()
|
||||
|
||||
return False
|
||||
# User is doing a one-time approval.
|
||||
if response in ("y", "yes"):
|
||||
return True
|
||||
elif response == "always":
|
||||
hooks_config.SetString(git_approval_key, new_val)
|
||||
return True
|
||||
|
||||
def _ManifestUrlHasSecureScheme(self):
|
||||
"""Check if the URI for the manifest is a secure transport."""
|
||||
secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
|
||||
parse_results = urllib.parse.urlparse(self._manifest_url)
|
||||
return parse_results.scheme in secure_schemes
|
||||
# For anything else, we'll assume no approval.
|
||||
if self._abort_if_user_denies:
|
||||
raise HookError(
|
||||
"You must allow the %s hook or use --no-verify."
|
||||
% self._hook_type
|
||||
)
|
||||
|
||||
def _CheckForHookApprovalManifest(self):
|
||||
"""Check whether the user has approved this manifest host.
|
||||
return False
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
return self._CheckForHookApprovalHelper(
|
||||
'approvedmanifest',
|
||||
self._manifest_url,
|
||||
'Run hook scripts from %s' % (self._manifest_url,),
|
||||
'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
|
||||
def _ManifestUrlHasSecureScheme(self):
|
||||
"""Check if the URI for the manifest is a secure transport."""
|
||||
secure_schemes = (
|
||||
"file",
|
||||
"https",
|
||||
"ssh",
|
||||
"persistent-https",
|
||||
"sso",
|
||||
"rpc",
|
||||
)
|
||||
parse_results = urllib.parse.urlparse(self._manifest_url)
|
||||
return parse_results.scheme in secure_schemes
|
||||
|
||||
def _CheckForHookApprovalHash(self):
|
||||
"""Check whether the user has approved the hooks repo.
|
||||
def _CheckForHookApprovalManifest(self):
|
||||
"""Check whether the user has approved this manifest host.
|
||||
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
prompt = ('Repo %s run the script:\n'
|
||||
' %s\n'
|
||||
'\n'
|
||||
'Do you want to allow this script to run')
|
||||
return self._CheckForHookApprovalHelper(
|
||||
'approvedhash',
|
||||
self._GetHash(),
|
||||
prompt % (self._GetMustVerb(), self._script_fullpath),
|
||||
'Scripts have changed since %s was allowed.' % (self._hook_type,))
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
return self._CheckForHookApprovalHelper(
|
||||
"approvedmanifest",
|
||||
self._manifest_url,
|
||||
f"Run hook scripts from {self._manifest_url}",
|
||||
f"Manifest URL has changed since {self._hook_type} was allowed.",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _ExtractInterpFromShebang(data):
|
||||
"""Extract the interpreter used in the shebang.
|
||||
def _CheckForHookApprovalHash(self):
|
||||
"""Check whether the user has approved the hooks repo.
|
||||
|
||||
Try to locate the interpreter the script is using (ignoring `env`).
|
||||
Returns:
|
||||
True if this hook is approved to run; False otherwise.
|
||||
"""
|
||||
prompt = (
|
||||
"Repo %s run the script:\n"
|
||||
" %s\n"
|
||||
"\n"
|
||||
"Do you want to allow this script to run"
|
||||
)
|
||||
return self._CheckForHookApprovalHelper(
|
||||
"approvedhash",
|
||||
self._GetHash(),
|
||||
prompt % (self._GetMustVerb(), self._script_fullpath),
|
||||
f"Scripts have changed since {self._hook_type} was allowed.",
|
||||
)
|
||||
|
||||
Args:
|
||||
data: The file content of the script.
|
||||
@staticmethod
|
||||
def _ExtractInterpFromShebang(data):
|
||||
"""Extract the interpreter used in the shebang.
|
||||
|
||||
Returns:
|
||||
The basename of the main script interpreter, or None if a shebang is not
|
||||
used or could not be parsed out.
|
||||
"""
|
||||
firstline = data.splitlines()[:1]
|
||||
if not firstline:
|
||||
return None
|
||||
Try to locate the interpreter the script is using (ignoring `env`).
|
||||
|
||||
# The format here can be tricky.
|
||||
shebang = firstline[0].strip()
|
||||
m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang)
|
||||
if not m:
|
||||
return None
|
||||
Args:
|
||||
data: The file content of the script.
|
||||
|
||||
# If using `env`, find the target program.
|
||||
interp = m.group(1)
|
||||
if os.path.basename(interp) == 'env':
|
||||
interp = m.group(2)
|
||||
Returns:
|
||||
The basename of the main script interpreter, or None if a shebang is
|
||||
not used or could not be parsed out.
|
||||
"""
|
||||
firstline = data.splitlines()[:1]
|
||||
if not firstline:
|
||||
return None
|
||||
|
||||
return interp
|
||||
# The format here can be tricky.
|
||||
shebang = firstline[0].strip()
|
||||
m = re.match(r"^#!\s*([^\s]+)(?:\s+([^\s]+))?", shebang)
|
||||
if not m:
|
||||
return None
|
||||
|
||||
def _ExecuteHookViaReexec(self, interp, context, **kwargs):
|
||||
"""Execute the hook script through |interp|.
|
||||
# If using `env`, find the target program.
|
||||
interp = m.group(1)
|
||||
if os.path.basename(interp) == "env":
|
||||
interp = m.group(2)
|
||||
|
||||
Note: Support for this feature should be dropped ~Jun 2021.
|
||||
return interp
|
||||
|
||||
Args:
|
||||
interp: The Python program to run.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
def _ExecuteHookViaImport(self, data, context, **kwargs):
|
||||
"""Execute the hook code in |data| directly.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# This logic needs to be kept in sync with _ExecuteHookViaImport below.
|
||||
script = """
|
||||
import json, os, sys
|
||||
path = '''%(path)s'''
|
||||
kwargs = json.loads('''%(kwargs)s''')
|
||||
context = json.loads('''%(context)s''')
|
||||
sys.path.insert(0, os.path.dirname(path))
|
||||
data = open(path).read()
|
||||
exec(compile(data, path, 'exec'), context)
|
||||
context['main'](**kwargs)
|
||||
""" % {
|
||||
'path': self._script_fullpath,
|
||||
'kwargs': json.dumps(kwargs),
|
||||
'context': json.dumps(context),
|
||||
}
|
||||
Args:
|
||||
data: The code of the hook to execute.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
# We pass the script via stdin to avoid OS argv limits. It also makes
|
||||
# unhandled exception tracebacks less verbose/confusing for users.
|
||||
cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())']
|
||||
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
|
||||
proc.communicate(input=script.encode('utf-8'))
|
||||
if proc.returncode:
|
||||
raise HookError('Failed to run %s hook.' % (self._hook_type,))
|
||||
|
||||
def _ExecuteHookViaImport(self, data, context, **kwargs):
|
||||
"""Execute the hook code in |data| directly.
|
||||
|
||||
Args:
|
||||
data: The code of the hook to execute.
|
||||
context: Basic Python context to execute the hook inside.
|
||||
kwargs: Arbitrary arguments to pass to the hook script.
|
||||
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# Exec, storing global context in the context dict. We catch exceptions
|
||||
# and convert to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
exec(compile(data, self._script_fullpath, 'exec'), context)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to import %s hook; see traceback above.' %
|
||||
(traceback.format_exc(), self._hook_type))
|
||||
|
||||
# Running the script should have defined a main() function.
|
||||
if 'main' not in context:
|
||||
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
|
||||
|
||||
# Call the main function in the hook. If the hook should cause the
|
||||
# build to fail, it will raise an Exception. We'll catch that and convert
|
||||
# to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
context['main'](**kwargs)
|
||||
except Exception:
|
||||
raise HookError('%s\nFailed to run main() for %s hook; see traceback '
|
||||
'above.' % (traceback.format_exc(), self._hook_type))
|
||||
|
||||
def _ExecuteHook(self, **kwargs):
|
||||
"""Actually execute the given hook.
|
||||
|
||||
This will run the hook's 'main' function in our python interpreter.
|
||||
|
||||
Args:
|
||||
kwargs: Keyword arguments to pass to the hook. These are often specific
|
||||
to the hook type. For instance, pre-upload hooks will contain
|
||||
a project_list.
|
||||
"""
|
||||
# Keep sys.path and CWD stashed away so that we can always restore them
|
||||
# upon function exit.
|
||||
orig_path = os.getcwd()
|
||||
orig_syspath = sys.path
|
||||
|
||||
try:
|
||||
# Always run hooks with CWD as topdir.
|
||||
os.chdir(self._repo_topdir)
|
||||
|
||||
# Put the hook dir as the first item of sys.path so hooks can do
|
||||
# relative imports. We want to replace the repo dir as [0] so
|
||||
# hooks can't import repo files.
|
||||
sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
|
||||
|
||||
# Initial global context for the hook to run within.
|
||||
context = {'__file__': self._script_fullpath}
|
||||
|
||||
# Add 'hook_should_take_kwargs' to the arguments to be passed to main.
|
||||
# We don't actually want hooks to define their main with this argument--
|
||||
# it's there to remind them that their hook should always take **kwargs.
|
||||
# For instance, a pre-upload hook should be defined like:
|
||||
# def main(project_list, **kwargs):
|
||||
#
|
||||
# This allows us to later expand the API without breaking old hooks.
|
||||
kwargs = kwargs.copy()
|
||||
kwargs['hook_should_take_kwargs'] = True
|
||||
|
||||
# See what version of python the hook has been written against.
|
||||
data = open(self._script_fullpath).read()
|
||||
interp = self._ExtractInterpFromShebang(data)
|
||||
reexec = False
|
||||
if interp:
|
||||
prog = os.path.basename(interp)
|
||||
if prog.startswith('python2') and sys.version_info.major != 2:
|
||||
reexec = True
|
||||
elif prog.startswith('python3') and sys.version_info.major == 2:
|
||||
reexec = True
|
||||
|
||||
# Attempt to execute the hooks through the requested version of Python.
|
||||
if reexec:
|
||||
Raises:
|
||||
HookError: When the hooks failed for any reason.
|
||||
"""
|
||||
# Exec, storing global context in the context dict. We catch exceptions
|
||||
# and convert to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
self._ExecuteHookViaReexec(interp, context, **kwargs)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
# We couldn't find the interpreter, so fallback to importing.
|
||||
reexec = False
|
||||
else:
|
||||
raise
|
||||
exec(compile(data, self._script_fullpath, "exec"), context)
|
||||
except Exception:
|
||||
raise HookError(
|
||||
"%s\nFailed to import %s hook; see traceback above."
|
||||
% (traceback.format_exc(), self._hook_type)
|
||||
)
|
||||
|
||||
# Run the hook by importing directly.
|
||||
if not reexec:
|
||||
self._ExecuteHookViaImport(data, context, **kwargs)
|
||||
finally:
|
||||
# Restore sys.path and CWD.
|
||||
sys.path = orig_syspath
|
||||
os.chdir(orig_path)
|
||||
# Running the script should have defined a main() function.
|
||||
if "main" not in context:
|
||||
raise HookError('Missing main() in: "%s"' % self._script_fullpath)
|
||||
|
||||
def _CheckHook(self):
|
||||
# Bail with a nice error if we can't find the hook.
|
||||
if not os.path.isfile(self._script_fullpath):
|
||||
raise HookError('Couldn\'t find repo hook: %s' % self._script_fullpath)
|
||||
# Call the main function in the hook. If the hook should cause the
|
||||
# build to fail, it will raise an Exception. We'll catch that and convert
|
||||
# to a HookError w/ just the failing traceback.
|
||||
try:
|
||||
context["main"](**kwargs)
|
||||
except Exception:
|
||||
raise HookError(
|
||||
"%s\nFailed to run main() for %s hook; see traceback "
|
||||
"above." % (traceback.format_exc(), self._hook_type)
|
||||
)
|
||||
|
||||
def Run(self, **kwargs):
|
||||
"""Run the hook.
|
||||
def _ExecuteHook(self, **kwargs):
|
||||
"""Actually execute the given hook.
|
||||
|
||||
If the hook doesn't exist (because there is no hooks project or because
|
||||
this particular hook is not enabled), this is a no-op.
|
||||
This will run the hook's 'main' function in our python interpreter.
|
||||
|
||||
Args:
|
||||
user_allows_all_hooks: If True, we will never prompt about running the
|
||||
hook--we'll just assume it's OK to run it.
|
||||
kwargs: Keyword arguments to pass to the hook. These are often specific
|
||||
to the hook type. For instance, pre-upload hooks will contain
|
||||
a project_list.
|
||||
Args:
|
||||
kwargs: Keyword arguments to pass to the hook. These are often
|
||||
specific to the hook type. For instance, pre-upload hooks will
|
||||
contain a project_list.
|
||||
"""
|
||||
# Keep sys.path and CWD stashed away so that we can always restore them
|
||||
# upon function exit.
|
||||
orig_path = os.getcwd()
|
||||
orig_syspath = sys.path
|
||||
|
||||
Returns:
|
||||
True: On success, or when hooks are ignored by user request.
False: The hook failed. The caller should respond by aborting the action.
|
||||
Some examples in which False is returned:
|
||||
* Finding the hook failed while it was enabled, or
|
||||
* the user declined to run a required hook (from _CheckForHookApproval)
|
||||
In all these cases the user did not pass the proper arguments to
|
||||
ignore the result through the option combinations as listed in
|
||||
AddHookOptionGroup().
|
||||
"""
|
||||
# Do not do anything in case bypass_hooks is set, or
|
||||
# no-op if there is no hooks project or if hook is disabled.
|
||||
if (self._bypass_hooks or
|
||||
not self._hooks_project or
|
||||
self._hook_type not in self._hooks_project.enabled_repo_hooks):
|
||||
return True
|
||||
try:
|
||||
# Always run hooks with CWD as topdir.
|
||||
os.chdir(self._repo_topdir)
|
||||
|
||||
passed = True
|
||||
try:
|
||||
self._CheckHook()
|
||||
# Put the hook dir as the first item of sys.path so hooks can do
|
||||
# relative imports. We want to replace the repo dir as [0] so
|
||||
# hooks can't import repo files.
|
||||
sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
|
||||
|
||||
# Make sure the user is OK with running the hook.
|
||||
if self._allow_all_hooks or self._CheckForHookApproval():
|
||||
# Run the hook with the same version of python we're using.
|
||||
self._ExecuteHook(**kwargs)
|
||||
except SystemExit as e:
|
||||
passed = False
|
||||
print('ERROR: %s hooks exited with exit code: %s' % (self._hook_type, str(e)),
|
||||
file=sys.stderr)
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print('ERROR: %s' % str(e), file=sys.stderr)
|
||||
# Initial global context for the hook to run within.
|
||||
context = {"__file__": self._script_fullpath}
|
||||
|
||||
if not passed and self._ignore_hooks:
|
||||
print('\nWARNING: %s hooks failed, but continuing anyway.' % self._hook_type,
|
||||
file=sys.stderr)
|
||||
passed = True
|
||||
# Add 'hook_should_take_kwargs' to the arguments to be passed to
|
||||
# main. We don't actually want hooks to define their main with this
|
||||
# argument--it's there to remind them that their hook should always
|
||||
# take **kwargs.
|
||||
# For instance, a pre-upload hook should be defined like:
|
||||
# def main(project_list, **kwargs):
|
||||
#
|
||||
# This allows us to later expand the API without breaking old hooks.
|
||||
kwargs = kwargs.copy()
|
||||
kwargs["hook_should_take_kwargs"] = True
|
||||
|
||||
return passed
|
||||
# See what version of python the hook has been written against.
|
||||
data = open(self._script_fullpath).read()
|
||||
interp = self._ExtractInterpFromShebang(data)
|
||||
if interp:
|
||||
prog = os.path.basename(interp)
|
||||
if prog.startswith("python2"):
|
||||
raise HookError("Python 2 is not supported")
|
||||
|
||||
@classmethod
|
||||
def FromSubcmd(cls, manifest, opt, *args, **kwargs):
|
||||
"""Method to construct the repo hook class
|
||||
# Run the hook by importing directly.
|
||||
self._ExecuteHookViaImport(data, context, **kwargs)
|
||||
finally:
|
||||
# Restore sys.path and CWD.
|
||||
sys.path = orig_syspath
|
||||
os.chdir(orig_path)
|
||||
|
||||
Args:
|
||||
manifest: The current active manifest for this command from which we
|
||||
extract a couple of fields.
|
||||
opt: Contains the commandline options for the action of this hook.
|
||||
It should contain the options added by AddHookOptionGroup() in which
|
||||
we are interested in RepoHook execution.
|
||||
"""
|
||||
for key in ('bypass_hooks', 'allow_all_hooks', 'ignore_hooks'):
|
||||
kwargs.setdefault(key, getattr(opt, key))
|
||||
kwargs.update({
|
||||
'hooks_project': manifest.repo_hooks_project,
|
||||
'repo_topdir': manifest.topdir,
|
||||
'manifest_url': manifest.manifestProject.GetRemote('origin').url,
|
||||
})
|
||||
return cls(*args, **kwargs)
|
||||
def _CheckHook(self):
|
||||
# Bail with a nice error if we can't find the hook.
|
||||
if not os.path.isfile(self._script_fullpath):
|
||||
raise HookError(
|
||||
"Couldn't find repo hook: %s" % self._script_fullpath
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def AddOptionGroup(parser, name):
|
||||
"""Help options relating to the various hooks."""
|
||||
def Run(self, **kwargs):
|
||||
"""Run the hook.
|
||||
|
||||
# Note that verify and no-verify are NOT opposites of each other, which
|
||||
# is why they store to different locations. We are using them to match
|
||||
# 'git commit' syntax.
|
||||
group = parser.add_option_group(name + ' hooks')
|
||||
group.add_option('--no-verify',
|
||||
dest='bypass_hooks', action='store_true',
|
||||
help='Do not run the %s hook.' % name)
|
||||
group.add_option('--verify',
|
||||
dest='allow_all_hooks', action='store_true',
|
||||
help='Run the %s hook without prompting.' % name)
|
||||
group.add_option('--ignore-hooks',
|
||||
action='store_true',
|
||||
help='Do not abort if %s hooks fail.' % name)
|
||||
If the hook doesn't exist (because there is no hooks project or because
|
||||
this particular hook is not enabled), this is a no-op.
|
||||
|
||||
Args:
|
||||
user_allows_all_hooks: If True, we will never prompt about running
|
||||
the hook--we'll just assume it's OK to run it.
|
||||
kwargs: Keyword arguments to pass to the hook. These are often
|
||||
specific to the hook type. For instance, pre-upload hooks will
|
||||
contain a project_list.
|
||||
|
||||
Returns:
|
||||
True: On success or ignore hooks by user-request
|
||||
False: The hook failed. The caller should respond with aborting the
|
||||
action. Some examples in which False is returned:
|
||||
* Finding the hook failed while it was enabled, or
|
||||
* the user declined to run a required hook (from
|
||||
_CheckForHookApproval)
|
||||
In all these cases the user did not pass the proper arguments to
|
||||
ignore the result through the option combinations as listed in
|
||||
AddHookOptionGroup().
|
||||
"""
|
||||
# Do not do anything in case bypass_hooks is set, or
|
||||
# no-op if there is no hooks project or if hook is disabled.
|
||||
if (
|
||||
self._bypass_hooks
|
||||
or not self._hooks_project
|
||||
or self._hook_type not in self._hooks_project.enabled_repo_hooks
|
||||
):
|
||||
return True
|
||||
|
||||
passed = True
|
||||
try:
|
||||
self._CheckHook()
|
||||
|
||||
# Make sure the user is OK with running the hook.
|
||||
if self._allow_all_hooks or self._CheckForHookApproval():
|
||||
# Run the hook with the same version of python we're using.
|
||||
self._ExecuteHook(**kwargs)
|
||||
except SystemExit as e:
|
||||
passed = False
|
||||
print(
|
||||
"ERROR: %s hooks exited with exit code: %s"
|
||||
% (self._hook_type, str(e)),
|
||||
file=sys.stderr,
|
||||
)
|
||||
except HookError as e:
|
||||
passed = False
|
||||
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||
|
||||
if not passed and self._ignore_hooks:
|
||||
print(
|
||||
"\nWARNING: %s hooks failed, but continuing anyways."
|
||||
% self._hook_type,
|
||||
file=sys.stderr,
|
||||
)
|
||||
passed = True
|
||||
|
||||
return passed
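
The hook_should_take_kwargs comments earlier spell out the contract a hook script is expected to follow: repo imports the script and calls its main() with hook-specific data plus extra keyword arguments. Below is a minimal sketch of a pre-upload hook written against that contract; the project_list and worktree_list parameter names are assumptions taken from the pre-upload convention mentioned above, not a guaranteed API.

# pre-upload.py -- minimal sketch of a repo hook entry point.
# repo imports this file and calls main(); always accept **kwargs so the
# hook keeps working when repo later passes additional arguments.
import sys


def main(project_list, worktree_list=None, **kwargs):
    # project_list: projects being uploaded (assumed shape); other hook
    # types pass different data.
    for project in project_list:
        print('pre-upload check for %s' % project, file=sys.stderr)
    # Returning normally means the hook passed; calling sys.exit(1) raises
    # SystemExit, which Run() above reports as a hook failure.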
|
||||
|
||||
@classmethod
|
||||
def FromSubcmd(cls, manifest, opt, *args, **kwargs):
|
||||
"""Method to construct the repo hook class
|
||||
|
||||
Args:
|
||||
manifest: The current active manifest for this command from which we
|
||||
extract a couple of fields.
|
||||
opt: Contains the commandline options for the action of this hook.
|
||||
It should contain the options added by AddHookOptionGroup() in
|
||||
which we are interested in RepoHook execution.
|
||||
"""
|
||||
for key in ("bypass_hooks", "allow_all_hooks", "ignore_hooks"):
|
||||
kwargs.setdefault(key, getattr(opt, key))
|
||||
kwargs.update(
|
||||
{
|
||||
"hooks_project": manifest.repo_hooks_project,
|
||||
"repo_topdir": manifest.topdir,
|
||||
"manifest_url": manifest.manifestProject.GetRemote(
|
||||
"origin"
|
||||
).url,
|
||||
}
|
||||
)
|
||||
return cls(*args, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def AddOptionGroup(parser, name):
|
||||
"""Help options relating to the various hooks."""
|
||||
|
||||
# Note that verify and no-verify are NOT opposites of each other, which
|
||||
# is why they store to different locations. We are using them to match
|
||||
# 'git commit' syntax.
|
||||
group = parser.add_option_group(name + " hooks")
|
||||
group.add_option(
|
||||
"--no-verify",
|
||||
dest="bypass_hooks",
|
||||
action="store_true",
|
||||
help="Do not run the %s hook." % name,
|
||||
)
|
||||
group.add_option(
|
||||
"--verify",
|
||||
dest="allow_all_hooks",
|
||||
action="store_true",
|
||||
help="Run the %s hook without prompting." % name,
|
||||
)
|
||||
group.add_option(
|
||||
"--ignore-hooks",
|
||||
action="store_true",
|
||||
help="Do not abort if %s hooks fail." % name,
|
||||
)
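
Taken together, AddOptionGroup(), FromSubcmd() and Run() are the surface a subcommand needs: register the --verify/--no-verify/--ignore-hooks flags, build the hook from the parsed options, then run it. The sketch below is hedged: the 'pre-upload' hook type, the hook_type keyword and the project_list argument mirror the upload command but are assumptions, and the real call sites in repo may differ.

import sys


def run_pre_upload_hook(parser, manifest, opt, projects):
    # parser, manifest, opt and projects are supplied by the subcommand; in
    # repo itself the option group is added during option setup, not here.
    RepoHook.AddOptionGroup(parser, 'pre-upload')
    hook = RepoHook.FromSubcmd(hook_type='pre-upload', manifest=manifest, opt=opt)
    if not hook.Run(project_list=projects):
        # The hook failed and --ignore-hooks was not given.
        sys.exit(1)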
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# From Gerrit Code Review 3.1.3
|
||||
# From Gerrit Code Review 3.10.0 d5403dbf335ba7d48977fc95170c3f7027c34659
|
||||
#
|
||||
# Part of Gerrit Code Review (https://www.gerritcodereview.com/)
|
||||
#
|
||||
@@ -17,6 +17,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
set -u
|
||||
|
||||
# avoid [[ which is not POSIX sh.
|
||||
if test "$#" != 1 ; then
|
||||
echo "$0 requires an argument."
|
||||
@@ -29,17 +31,34 @@ if test ! -f "$1" ; then
|
||||
fi
|
||||
|
||||
# Do not create a change id if requested
|
||||
if test "false" = "`git config --bool --get gerrit.createChangeId`" ; then
|
||||
exit 0
|
||||
create_setting=$(git config --get gerrit.createChangeId)
|
||||
case "$create_setting" in
|
||||
false)
|
||||
exit 0
|
||||
;;
|
||||
always)
|
||||
;;
|
||||
*)
|
||||
# Do not create a change id for squash/fixup commits.
|
||||
if head -n1 "$1" | LC_ALL=C grep -q '^[a-z][a-z]*! '; then
|
||||
exit 0
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
if git rev-parse --verify HEAD >/dev/null 2>&1; then
|
||||
refhash="$(git rev-parse HEAD)"
|
||||
else
|
||||
refhash="$(git hash-object -t tree /dev/null)"
|
||||
fi
|
||||
|
||||
# $RANDOM will be undefined if not using bash, so don't use set -u
|
||||
random=$( (whoami ; hostname ; date; cat $1 ; echo $RANDOM) | git hash-object --stdin)
|
||||
random=$({ git var GIT_COMMITTER_IDENT ; echo "$refhash" ; cat "$1"; } | git hash-object --stdin)
|
||||
dest="$1.tmp.${random}"
|
||||
|
||||
trap 'rm -f "${dest}"' EXIT
|
||||
trap 'rm -f "$dest" "$dest-2"' EXIT
|
||||
|
||||
if ! git stripspace --strip-comments < "$1" > "${dest}" ; then
|
||||
if ! cat "$1" | sed -e '/>8/q' | git stripspace --strip-comments > "${dest}" ; then
|
||||
echo "cannot strip comments from $1"
|
||||
exit 1
|
||||
fi
|
||||
@@ -49,11 +68,40 @@ if test ! -s "${dest}" ; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
reviewurl="$(git config --get gerrit.reviewUrl)"
|
||||
if test -n "${reviewurl}" ; then
|
||||
token="Link"
|
||||
value="${reviewurl%/}/id/I$random"
|
||||
pattern=".*/id/I[0-9a-f]\{40\}"
|
||||
else
|
||||
token="Change-Id"
|
||||
value="I$random"
|
||||
pattern=".*"
|
||||
fi
|
||||
|
||||
if git interpret-trailers --parse < "$1" | grep -q "^$token: $pattern$" ; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# There must be a Signed-off-by trailer for the code below to work. Insert a
|
||||
# sentinel at the end to make sure there is one.
|
||||
# Avoid the --in-place option which only appeared in Git 2.8
|
||||
# Avoid the --if-exists option which only appeared in Git 2.15
|
||||
if ! git -c trailer.ifexists=doNothing interpret-trailers \
|
||||
--trailer "Change-Id: I${random}" < "$1" > "${dest}" ; then
|
||||
echo "cannot insert change-id line in $1"
|
||||
if ! git interpret-trailers \
|
||||
--trailer "Signed-off-by: SENTINEL" < "$1" > "$dest-2" ; then
|
||||
echo "cannot insert Signed-off-by sentinel line in $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Make sure the trailer appears before any Signed-off-by trailers by inserting
|
||||
# it as if it was a Signed-off-by trailer and then use sed to remove the
|
||||
# Signed-off-by prefix and the Signed-off-by sentinel line.
|
||||
# Avoid the --in-place option which only appeared in Git 2.8
|
||||
# Avoid the --where option which only appeared in Git 2.15
|
||||
if ! git -c trailer.where=before interpret-trailers \
|
||||
--trailer "Signed-off-by: $token: $value" < "$dest-2" |
|
||||
sed -e "s/^Signed-off-by: \($token: \)/\1/" \
|
||||
-e "/^Signed-off-by: SENTINEL/d" > "$dest" ; then
|
||||
echo "cannot insert $token line in $1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo abandon" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo abandon" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo abandon - manual page for repo abandon
|
||||
.SH SYNOPSIS
|
||||
@@ -32,5 +32,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help abandon` to view the detailed manual.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo branches" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo branches" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo branches - manual page for repo branches
|
||||
.SH SYNOPSIS
|
||||
@@ -55,5 +55,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help branches` to view the detailed manual.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo checkout" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo checkout" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo checkout - manual page for repo checkout
|
||||
.SH SYNOPSIS
|
||||
@@ -24,6 +24,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help checkout` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo cherry-pick" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo cherry-pick" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo cherry-pick - manual page for repo cherry-pick
|
||||
.SH SYNOPSIS
|
||||
@@ -20,6 +20,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help cherry\-pick` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo diff" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo diff" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo diff - manual page for repo diff
|
||||
.SH SYNOPSIS
|
||||
@@ -31,5 +31,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help diff` to view the detailed manual.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo diffmanifests" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo diffmanifests" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo diffmanifests - manual page for repo diffmanifests
|
||||
.SH SYNOPSIS
|
||||
@@ -29,6 +29,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help diffmanifests` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo download" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo download" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo download - manual page for repo download
|
||||
.SH SYNOPSIS
|
||||
@@ -35,6 +35,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help download` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo forall" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo forall" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo forall - manual page for repo forall
|
||||
.SH SYNOPSIS
|
||||
@@ -54,6 +54,19 @@ only show errors
|
||||
.TP
|
||||
\fB\-p\fR
|
||||
show project headers before output
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help forall` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
@@ -93,6 +106,11 @@ REPO_PROJECT is set to the unique name of the project.
|
||||
.PP
|
||||
REPO_PATH is the path relative to the root of the client.
|
||||
.PP
|
||||
REPO_OUTERPATH is the path of the sub manifest's root relative to the root of
|
||||
the client.
|
||||
.PP
|
||||
REPO_INNERPATH is the path relative to the root of the sub manifest.
|
||||
.PP
|
||||
REPO_REMOTE is the name of the remote system from the manifest.
|
||||
.PP
|
||||
REPO_LREV is the name of the revision from the manifest, translated to a local
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo gitc-delete" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo gitc-delete" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo gitc-delete - manual page for repo gitc-delete
|
||||
.SH SYNOPSIS
|
||||
@@ -23,6 +23,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help gitc\-delete` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo gitc-init" "Repo Manual"
|
||||
.TH REPO "1" "October 2022" "repo gitc-init" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo gitc-init - manual page for repo gitc-init
|
||||
.SH SYNOPSIS
|
||||
@@ -45,10 +45,15 @@ sync any submodules associated with the manifest repo
|
||||
\fB\-\-standalone\-manifest\fR
|
||||
download the manifest as a static file rather than
|
||||
create a git checkout of the manifest repo
|
||||
.TP
|
||||
\fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
|
||||
create a shallow clone of the manifest repo with given
|
||||
depth (0 for full clone); see git clone (default: 0)
|
||||
.SS Manifest (only) checkout options:
|
||||
.TP
|
||||
\fB\-\-current\-branch\fR
|
||||
fetch only current manifest branch from server
|
||||
(default)
|
||||
.TP
|
||||
\fB\-\-no\-current\-branch\fR
|
||||
fetch all manifest branches from server
|
||||
@@ -109,6 +114,12 @@ not \fB\-\-partial\-clone\fR)
|
||||
\fB\-\-no\-clone\-bundle\fR
|
||||
disable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
|
||||
\fB\-\-partial\-clone\fR)
|
||||
.TP
|
||||
\fB\-\-git\-lfs\fR
|
||||
enable Git LFS support
|
||||
.TP
|
||||
\fB\-\-no\-git\-lfs\fR
|
||||
disable Git LFS support
|
||||
.SS repo Version options:
|
||||
.TP
|
||||
\fB\-\-repo\-url\fR=\fI\,URL\/\fR
|
||||
@@ -130,6 +141,19 @@ Optional manifest file to use for this GITC client.
|
||||
.TP
|
||||
\fB\-c\fR GITC_CLIENT, \fB\-\-gitc\-client\fR=\fI\,GITC_CLIENT\/\fR
|
||||
Name of the gitc_client instance to create or modify.
|
||||
.SS Multi\-manifest:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help gitc\-init` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo grep" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo grep" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo grep - manual page for repo grep
|
||||
.SH SYNOPSIS
|
||||
@@ -24,6 +24,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.SS Sources:
|
||||
.TP
|
||||
\fB\-\-cached\fR
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo help" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo help" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo help - manual page for repo help
|
||||
.SH SYNOPSIS
|
||||
@@ -26,6 +26,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help help` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo info" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo info" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo info - manual page for repo info
|
||||
.SH SYNOPSIS
|
||||
@@ -36,5 +36,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help info` to view the detailed manual.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo init" "Repo Manual"
|
||||
.TH REPO "1" "October 2022" "repo init" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo init - manual page for repo init
|
||||
.SH SYNOPSIS
|
||||
@@ -45,10 +45,15 @@ sync any submodules associated with the manifest repo
|
||||
\fB\-\-standalone\-manifest\fR
|
||||
download the manifest as a static file rather than
|
||||
create a git checkout of the manifest repo
|
||||
.TP
|
||||
\fB\-\-manifest\-depth\fR=\fI\,DEPTH\/\fR
|
||||
create a shallow clone of the manifest repo with given
|
||||
depth (0 for full clone); see git clone (default: 0)
|
||||
.SS Manifest (only) checkout options:
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-current\-branch\fR
|
||||
fetch only current manifest branch from server
|
||||
(default)
|
||||
.TP
|
||||
\fB\-\-no\-current\-branch\fR
|
||||
fetch all manifest branches from server
|
||||
@@ -109,6 +114,12 @@ not \fB\-\-partial\-clone\fR)
|
||||
\fB\-\-no\-clone\-bundle\fR
|
||||
disable use of \fI\,/clone.bundle\/\fP on HTTP/HTTPS (default if
|
||||
\fB\-\-partial\-clone\fR)
|
||||
.TP
|
||||
\fB\-\-git\-lfs\fR
|
||||
enable Git LFS support
|
||||
.TP
|
||||
\fB\-\-no\-git\-lfs\fR
|
||||
disable Git LFS support
|
||||
.SS repo Version options:
|
||||
.TP
|
||||
\fB\-\-repo\-url\fR=\fI\,URL\/\fR
|
||||
@@ -123,6 +134,19 @@ do not verify repo source code
|
||||
.TP
|
||||
\fB\-\-config\-name\fR
|
||||
Always prompt for name/e\-mail
|
||||
.SS Multi\-manifest:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help init` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo list" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo list" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo list - manual page for repo list
|
||||
.SH SYNOPSIS
|
||||
@@ -47,6 +47,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help list` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo manifest" "Repo Manual"
|
||||
.TH REPO "1" "October 2022" "repo manifest" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo manifest - manual page for repo manifest
|
||||
.SH SYNOPSIS
|
||||
@@ -40,7 +40,8 @@ format output for humans to read
|
||||
ignore local manifests
|
||||
.TP
|
||||
\fB\-o\fR \-|NAME.xml, \fB\-\-output\-file\fR=\fI\,\-\/\fR|NAME.xml
|
||||
file to save the manifest to
|
||||
file to save the manifest to. (Filename prefix for
|
||||
multi\-tree.)
|
||||
.SS Logging options:
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
@@ -48,6 +49,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help manifest` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
@@ -88,6 +102,7 @@ A manifest XML file (e.g. `default.xml`) roughly conforms to the following DTD:
|
||||
remote*,
|
||||
default?,
|
||||
manifest\-server?,
|
||||
submanifest*?,
|
||||
remove\-project*,
|
||||
project*,
|
||||
extend\-project*,
|
||||
@@ -118,6 +133,16 @@ include*)>
|
||||
.IP
|
||||
<!ELEMENT manifest\-server EMPTY>
|
||||
<!ATTLIST manifest\-server url CDATA #REQUIRED>
|
||||
.IP
|
||||
<!ELEMENT submanifest EMPTY>
|
||||
<!ATTLIST submanifest name ID #REQUIRED>
|
||||
<!ATTLIST submanifest remote IDREF #IMPLIED>
|
||||
<!ATTLIST submanifest project CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest manifest\-name CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest revision CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest path CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest groups CDATA #IMPLIED>
|
||||
<!ATTLIST submanifest default\-groups CDATA #IMPLIED>
|
||||
.TP
|
||||
<!ELEMENT project (annotation*,
|
||||
project*,
|
||||
@@ -165,6 +190,8 @@ CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project groups CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project revision CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project remote CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project dest\-branch CDATA #IMPLIED>
|
||||
<!ATTLIST extend\-project upstream CDATA #IMPLIED>
|
||||
.IP
|
||||
<!ELEMENT remove\-project EMPTY>
|
||||
<!ATTLIST remove\-project name CDATA #REQUIRED>
|
||||
@@ -295,6 +322,65 @@ GetManifest(tag)
|
||||
Return a manifest in which each project is pegged to the revision at the
|
||||
specified tag. This is used by repo sync when the \fB\-\-smart\-tag\fR option is given.
|
||||
.PP
|
||||
Element submanifest
|
||||
.PP
|
||||
One or more submanifest elements may be specified. Each element describes a
|
||||
single manifest to be checked out as a child.
|
||||
.PP
|
||||
Attribute `name`: A unique name (within the current (sub)manifest) for this
|
||||
submanifest. It acts as a default for `revision` below. The same name can be
|
||||
used for submanifests with different parent (sub)manifests.
|
||||
.PP
|
||||
Attribute `remote`: Name of a previously defined remote element. If not supplied
|
||||
the remote given by the default element is used.
|
||||
.PP
|
||||
Attribute `project`: The manifest project name. The project's name is appended
|
||||
onto its remote's fetch URL to generate the actual URL to configure the Git
|
||||
remote with. The URL gets formed as:
|
||||
.IP
|
||||
${remote_fetch}/${project_name}.git
|
||||
.PP
|
||||
where ${remote_fetch} is the remote's fetch attribute and ${project_name} is the
|
||||
project's name attribute. The suffix ".git" is always appended as repo assumes
|
||||
the upstream is a forest of bare Git repositories. If the project has a parent
|
||||
element, its name will be prefixed by the parent's.
|
||||
.PP
|
||||
The project name must match the name Gerrit knows, if Gerrit is being used for
|
||||
code reviews.
|
||||
.PP
|
||||
`project` must not be empty, and may not be an absolute path or use "." or ".."
|
||||
path components. It is always interpreted relative to the remote's fetch
|
||||
settings, so if a different base path is needed, declare a different remote with
|
||||
the new settings needed.
|
||||
.PP
|
||||
If not supplied the remote and project for this manifest will be used: `remote`
|
||||
cannot be supplied.
|
||||
.PP
|
||||
Projects from a submanifest and its submanifests are added to the
|
||||
submanifest::path:<path_prefix> group.
|
||||
.PP
|
||||
Attribute `manifest\-name`: The manifest filename in the manifest project. If not
|
||||
supplied, `default.xml` is used.
|
||||
.PP
|
||||
Attribute `revision`: Name of a Git branch (e.g. "main" or "refs/heads/main"),
|
||||
tag (e.g. "refs/tags/stable"), or a commit hash. If not supplied, `name` is
|
||||
used.
|
||||
.PP
|
||||
Attribute `path`: An optional path relative to the top directory of the repo
|
||||
client where the submanifest repo client top directory should be placed. If not
|
||||
supplied, `revision` is used.
|
||||
.PP
|
||||
`path` may not be an absolute path or use "." or ".." path components.
|
||||
.PP
|
||||
Attribute `groups`: List of additional groups to which all projects in the
|
||||
included submanifest belong. This appends and recurses, meaning all projects in
|
||||
submanifests carry all parent submanifest groups. Same syntax as the
|
||||
corresponding element of `project`.
|
||||
.PP
|
||||
Attribute `default\-groups`: The list of manifest groups to sync if no
|
||||
`\-\-groups=` parameter was specified at init. When that list is empty, use this
|
||||
list instead of "default" as the list of groups to sync.
|
||||
.PP
|
||||
Element project
|
||||
.PP
|
||||
One or more project elements may be specified. Each element describes a single
|
||||
@@ -401,6 +487,12 @@ project. Same syntax as the corresponding element of `project`.
|
||||
Attribute `remote`: If specified, overrides the remote of the original project.
|
||||
Same syntax as the corresponding element of `project`.
|
||||
.PP
|
||||
Attribute `dest\-branch`: If specified, overrides the dest\-branch of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
.PP
|
||||
Attribute `upstream`: If specified, overrides the upstream of the original
|
||||
project. Same syntax as the corresponding element of `project`.
|
||||
.PP
|
||||
Element annotation
|
||||
.PP
|
||||
Zero or more annotation elements may be specified as children of a project or
|
||||
@@ -513,10 +605,10 @@ restrictions are not enforced for [Local Manifests].
|
||||
.PP
|
||||
Attribute `groups`: List of additional groups to which all projects in the
|
||||
included manifest belong. This appends and recurses, meaning all projects in
|
||||
sub\-manifests carry all parent include groups. Same syntax as the corresponding
|
||||
element of `project`.
|
||||
included manifests carry all parent include groups. Same syntax as the
|
||||
corresponding element of `project`.
|
||||
.PP
|
||||
Local Manifests
|
||||
Local Manifests
|
||||
.PP
|
||||
Additional remotes and projects may be added through local manifest files stored
|
||||
in `$TOP_DIR/.repo/local_manifests/*.xml`.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo overview" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo overview" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo overview - manual page for repo overview
|
||||
.SH SYNOPSIS
|
||||
@@ -26,6 +26,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help overview` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo prune" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo prune" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo prune - manual page for repo prune
|
||||
.SH SYNOPSIS
|
||||
@@ -24,5 +24,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help prune` to view the detailed manual.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo rebase" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo rebase" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo rebase - manual page for repo rebase
|
||||
.SH SYNOPSIS
|
||||
@@ -46,6 +46,19 @@ only show errors
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-interactive\fR
|
||||
interactive rebase (single project only)
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help rebase` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo selfupdate" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo selfupdate" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo selfupdate - manual page for repo selfupdate
|
||||
.SH SYNOPSIS
|
||||
@@ -20,6 +20,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.SS repo Version options:
|
||||
.TP
|
||||
\fB\-\-no\-repo\-verify\fR
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo smartsync" "Repo Manual"
|
||||
.TH REPO "1" "November 2022" "repo smartsync" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo smartsync - manual page for repo smartsync
|
||||
.SH SYNOPSIS
|
||||
@@ -20,11 +20,11 @@ number of CPU cores)
|
||||
.TP
|
||||
\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
|
||||
number of network jobs to run in parallel (defaults to
|
||||
\fB\-\-jobs\fR)
|
||||
\fB\-\-jobs\fR or 1)
|
||||
.TP
|
||||
\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
|
||||
number of local checkout jobs to run in parallel
|
||||
(defaults to \fB\-\-jobs\fR)
|
||||
(defaults to \fB\-\-jobs\fR or 8)
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\-broken\fR
|
||||
obsolete option (to be deleted in the future)
|
||||
@@ -105,6 +105,13 @@ delete refs that no longer exist on the remote
|
||||
.TP
|
||||
\fB\-\-no\-prune\fR
|
||||
do not delete refs that no longer exist on the remote
|
||||
.TP
|
||||
\fB\-\-auto\-gc\fR
|
||||
run garbage collection on all synced projects
|
||||
.TP
|
||||
\fB\-\-no\-auto\-gc\fR
|
||||
do not run garbage collection on any projects
|
||||
(default)
|
||||
.SS Logging options:
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
@@ -112,6 +119,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.SS repo Version options:
|
||||
.TP
|
||||
\fB\-\-no\-repo\-verify\fR
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo stage" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo stage" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo stage - manual page for repo stage
|
||||
.SH SYNOPSIS
|
||||
@@ -23,6 +23,19 @@ only show errors
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-interactive\fR
|
||||
use interactive staging
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help stage` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo start" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo start" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo start - manual page for repo start
|
||||
.SH SYNOPSIS
|
||||
@@ -33,6 +33,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help start` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo status" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo status" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo status - manual page for repo status
|
||||
.SH SYNOPSIS
|
||||
@@ -28,6 +28,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help status` to view the detailed manual.
|
||||
.SH DETAILS
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo sync" "Repo Manual"
|
||||
.TH REPO "1" "November 2022" "repo sync" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo sync - manual page for repo sync
|
||||
.SH SYNOPSIS
|
||||
@@ -20,11 +20,11 @@ number of CPU cores)
|
||||
.TP
|
||||
\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
|
||||
number of network jobs to run in parallel (defaults to
|
||||
\fB\-\-jobs\fR)
|
||||
\fB\-\-jobs\fR or 1)
|
||||
.TP
|
||||
\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
|
||||
number of local checkout jobs to run in parallel
|
||||
(defaults to \fB\-\-jobs\fR)
|
||||
(defaults to \fB\-\-jobs\fR or 8)
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\-broken\fR
|
||||
obsolete option (to be deleted in the future)
|
||||
@@ -106,6 +106,13 @@ delete refs that no longer exist on the remote
|
||||
\fB\-\-no\-prune\fR
|
||||
do not delete refs that no longer exist on the remote
|
||||
.TP
|
||||
\fB\-\-auto\-gc\fR
|
||||
run garbage collection on all synced projects
|
||||
.TP
|
||||
\fB\-\-no\-auto\-gc\fR
|
||||
do not run garbage collection on any projects
|
||||
(default)
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-smart\-sync\fR
|
||||
smart sync using manifest from the latest known good
|
||||
build
|
||||
@@ -119,6 +126,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.SS repo Version options:
|
||||
.TP
|
||||
\fB\-\-no\-repo\-verify\fR
|
||||
@@ -187,6 +207,9 @@ to a sha1 revision if the sha1 revision does not already exist locally.
|
||||
The \fB\-\-prune\fR option can be used to remove any refs that no longer exist on the
|
||||
remote.
|
||||
.PP
|
||||
The \fB\-\-auto\-gc\fR option can be used to trigger garbage collection on all projects.
|
||||
By default, repo does not run garbage collection.
|
||||
.PP
|
||||
SSH Connections
|
||||
.PP
|
||||
If at least one project remote URL uses an SSH connection (ssh://, git+ssh://,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo upload" "Repo Manual"
|
||||
.TH REPO "1" "August 2022" "repo upload" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo upload - manual page for repo upload
|
||||
.SH SYNOPSIS
|
||||
@@ -54,6 +54,9 @@ upload as a private change (deprecated; use \fB\-\-wip\fR)
|
||||
\fB\-w\fR, \fB\-\-wip\fR
|
||||
upload as a work\-in\-progress change
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-ready\fR
|
||||
mark change as ready (clears work\-in\-progress setting)
|
||||
.TP
|
||||
\fB\-o\fR PUSH_OPTIONS, \fB\-\-push\-option\fR=\fI\,PUSH_OPTIONS\/\fR
|
||||
additional push options to transmit
|
||||
.TP
|
||||
@@ -66,6 +69,12 @@ do everything except actually upload the CL
|
||||
\fB\-y\fR, \fB\-\-yes\fR
|
||||
answer yes to all safe prompts
|
||||
.TP
|
||||
\fB\-\-ignore\-untracked\-files\fR
|
||||
ignore untracked files in the working copy
|
||||
.TP
|
||||
\fB\-\-no\-ignore\-untracked\-files\fR
|
||||
always ask about untracked files in the working copy
|
||||
.TP
|
||||
\fB\-\-no\-cert\-checks\fR
|
||||
disable verifying ssl certs (unsafe)
|
||||
.SS Logging options:
|
||||
@@ -75,6 +84,19 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.SS pre\-upload hooks:
|
||||
.TP
|
||||
\fB\-\-no\-verify\fR
|
||||
@@ -105,6 +127,12 @@ respective list of users, and emails are sent to any new users. Users passed as
|
||||
\fB\-\-reviewers\fR must already be registered with the code review system, or the
|
||||
upload will fail.
|
||||
.PP
|
||||
While most normal Gerrit options have dedicated command line options, direct
|
||||
access to the Gerrit options is available via \fB\-\-push\-options\fR. This is useful when
|
||||
Gerrit has newer functionality that repo upload doesn't yet support, or doesn't
|
||||
have plans to support. See the Push Options documentation for more details:
|
||||
https://gerrit\-review.googlesource.com/Documentation/user\-upload.html#push_options
|
||||
.PP
|
||||
Configuration
|
||||
.PP
|
||||
review.URL.autoupload:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "July 2021" "repo version" "Repo Manual"
|
||||
.TH REPO "1" "July 2022" "repo version" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repo version - manual page for repo version
|
||||
.SH SYNOPSIS
|
||||
@@ -20,5 +20,18 @@ show all output
|
||||
.TP
|
||||
\fB\-q\fR, \fB\-\-quiet\fR
|
||||
only show errors
|
||||
.SS Multi\-manifest options:
|
||||
.TP
|
||||
\fB\-\-outer\-manifest\fR
|
||||
operate starting at the outermost manifest
|
||||
.TP
|
||||
\fB\-\-no\-outer\-manifest\fR
|
||||
do not operate on outer manifests
|
||||
.TP
|
||||
\fB\-\-this\-manifest\-only\fR
|
||||
only operate on this (sub)manifest
|
||||
.TP
|
||||
\fB\-\-no\-this\-manifest\-only\fR, \fB\-\-all\-manifests\fR
|
||||
operate on this manifest and its submanifests
|
||||
.PP
|
||||
Run `repo help version` to view the detailed manual.
|
||||
|
||||
man/repo.1
@@ -1,5 +1,5 @@
|
||||
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
|
||||
.TH REPO "1" "November 2021" "repo" "Repo Manual"
|
||||
.TH REPO "1" "June 2023" "repo" "Repo Manual"
|
||||
.SH NAME
|
||||
repo \- repository management tool built on top of git
|
||||
.SH SYNOPSIS
|
||||
@@ -25,6 +25,10 @@ control color usage: auto, always, never
|
||||
\fB\-\-trace\fR
|
||||
trace git command execution (REPO_TRACE=1)
|
||||
.TP
|
||||
\fB\-\-trace\-to\-stderr\fR
|
||||
trace outputs go to stderr in addition to
|
||||
\&.repo/TRACE_FILE
|
||||
.TP
|
||||
\fB\-\-trace\-python\fR
|
||||
trace python command execution
|
||||
.TP
|
||||
@@ -43,6 +47,9 @@ filename of event log to append timeline to
|
||||
.TP
|
||||
\fB\-\-git\-trace2\-event\-log\fR=\fI\,GIT_TRACE2_EVENT_LOG\/\fR
|
||||
directory to write git trace2 event log to
|
||||
.TP
|
||||
\fB\-\-submanifest\-path\fR=\fI\,REL_PATH\/\fR
|
||||
submanifest path
|
||||
.SS "The complete list of recognized repo commands is:"
|
||||
.TP
|
||||
abandon
|
||||
@@ -130,4 +137,4 @@ version
|
||||
Display the version of repo
|
||||
.PP
|
||||
See 'repo help <command>' for more information on a specific command.
|
||||
Bug reports: https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue
|
||||
Bug reports: https://issues.gerritcodereview.com/issues/new?component=1370071
|
||||
|
||||
manifest_xml.py: diff suppressed because it is too large (3536 lines changed)

pager.py
@@ -19,6 +19,7 @@ import sys
|
||||
|
||||
import platform_utils
|
||||
|
||||
|
||||
active = False
|
||||
pager_process = None
|
||||
old_stdout = None
|
||||
@@ -26,99 +27,104 @@ old_stderr = None
|
||||
|
||||
|
||||
def RunPager(globalConfig):
|
||||
if not os.isatty(0) or not os.isatty(1):
|
||||
return
|
||||
pager = _SelectPager(globalConfig)
|
||||
if pager == '' or pager == 'cat':
|
||||
return
|
||||
if not os.isatty(0) or not os.isatty(1):
|
||||
return
|
||||
pager = _SelectPager(globalConfig)
|
||||
if pager == "" or pager == "cat":
|
||||
return
|
||||
|
||||
if platform_utils.isWindows():
|
||||
_PipePager(pager)
|
||||
else:
|
||||
_ForkPager(pager)
|
||||
if platform_utils.isWindows():
|
||||
_PipePager(pager)
|
||||
else:
|
||||
_ForkPager(pager)
|
||||
|
||||
|
||||
def TerminatePager():
|
||||
global pager_process, old_stdout, old_stderr
|
||||
if pager_process:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
pager_process.stdin.close()
|
||||
pager_process.wait()
|
||||
pager_process = None
|
||||
# Restore initial stdout/err in case there is more output in this process
|
||||
# after shutting down the pager process
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
global pager_process, old_stdout, old_stderr
|
||||
if pager_process:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
pager_process.stdin.close()
|
||||
pager_process.wait()
|
||||
pager_process = None
|
||||
# Restore initial stdout/err in case there is more output in this
|
||||
# process after shutting down the pager process.
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
|
||||
|
||||
def _PipePager(pager):
|
||||
global pager_process, old_stdout, old_stderr
|
||||
assert pager_process is None, "Only one active pager process at a time"
|
||||
# Create pager process, piping stdout/err into its stdin
|
||||
pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout,
|
||||
stderr=sys.stderr)
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
sys.stdout = pager_process.stdin
|
||||
sys.stderr = pager_process.stdin
|
||||
global pager_process, old_stdout, old_stderr
|
||||
assert pager_process is None, "Only one active pager process at a time"
|
||||
# Create pager process, piping stdout/err into its stdin.
|
||||
try:
|
||||
pager_process = subprocess.Popen(
|
||||
[pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr
|
||||
)
|
||||
except FileNotFoundError:
|
||||
sys.exit(f'fatal: cannot start pager "{pager}"')
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
sys.stdout = pager_process.stdin
|
||||
sys.stderr = pager_process.stdin
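
_PipePager() swaps sys.stdout and sys.stderr for the pager's stdin, so any command that starts a pager has to flush and shut it down before exiting. A hedged sketch of that lifecycle follows; global_config stands in for the git config object repo normally passes to RunPager().

import pager


def print_with_pager(global_config, lines):
    pager.RunPager(global_config)  # may swap sys.stdout/sys.stderr for a pipe
    for line in lines:
        print(line)
    pager.TerminatePager()  # flush, close the pipe, restore stdout/stderr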
|
||||
|
||||
|
||||
def _ForkPager(pager):
|
||||
global active
|
||||
# This process turns into the pager; a child it forks will
|
||||
# do the real processing and output back to the pager. This
|
||||
# is necessary to keep the pager in control of the tty.
|
||||
#
|
||||
try:
|
||||
r, w = os.pipe()
|
||||
pid = os.fork()
|
||||
if not pid:
|
||||
os.dup2(w, 1)
|
||||
os.dup2(w, 2)
|
||||
os.close(r)
|
||||
os.close(w)
|
||||
active = True
|
||||
return
|
||||
global active
|
||||
# This process turns into the pager; a child it forks will
|
||||
# do the real processing and output back to the pager. This
|
||||
# is necessary to keep the pager in control of the tty.
|
||||
try:
|
||||
r, w = os.pipe()
|
||||
pid = os.fork()
|
||||
if not pid:
|
||||
os.dup2(w, 1)
|
||||
os.dup2(w, 2)
|
||||
os.close(r)
|
||||
os.close(w)
|
||||
active = True
|
||||
return
|
||||
|
||||
os.dup2(r, 0)
|
||||
os.close(r)
|
||||
os.close(w)
|
||||
os.dup2(r, 0)
|
||||
os.close(r)
|
||||
os.close(w)
|
||||
|
||||
_BecomePager(pager)
|
||||
except Exception:
|
||||
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
|
||||
sys.exit(255)
|
||||
_BecomePager(pager)
|
||||
except Exception:
|
||||
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
|
||||
sys.exit(255)
|
||||
|
||||
|
||||
def _SelectPager(globalConfig):
|
||||
try:
|
||||
return os.environ['GIT_PAGER']
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
return os.environ["GIT_PAGER"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
pager = globalConfig.GetString('core.pager')
|
||||
if pager:
|
||||
return pager
|
||||
pager = globalConfig.GetString("core.pager")
|
||||
if pager:
|
||||
return pager
|
||||
|
||||
try:
|
||||
return os.environ['PAGER']
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
return os.environ["PAGER"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return 'less'
|
||||
return "less"
|
||||
|
||||
|
||||
def _BecomePager(pager):
|
||||
# Delaying execution of the pager until we have output
|
||||
# ready works around a long-standing bug in popularly
|
||||
# available versions of 'less', a better 'more'.
|
||||
#
|
||||
_a, _b, _c = select.select([0], [], [0])
|
||||
# Delaying execution of the pager until we have output
|
||||
# ready works around a long-standing bug in popularly
|
||||
# available versions of 'less', a better 'more'.
|
||||
_a, _b, _c = select.select([0], [], [0])
|
||||
|
||||
os.environ['LESS'] = 'FRSX'
|
||||
# This matches the behavior of git, which sets $LESS to `FRX` if it is not
|
||||
# set. See:
|
||||
# https://git-scm.com/docs/git-config#Documentation/git-config.txt-corepager
|
||||
os.environ.setdefault("LESS", "FRX")
|
||||
|
||||
try:
|
||||
os.execvp(pager, [pager])
|
||||
except OSError:
|
||||
os.execv('/bin/sh', ['sh', '-c', pager])
|
||||
try:
|
||||
os.execvp(pager, [pager])
|
||||
except OSError:
|
||||
os.execv("/bin/sh", ["sh", "-c", pager])
|
||||
|
||||
@@ -20,246 +20,263 @@ import stat


def isWindows():
    """Returns True when running with the native port of Python for Windows,
    False when running on any other platform (including the Cygwin port of
    Python).
    """
    # Note: The cygwin port of Python returns "CYGWIN_NT_xxx"
    return platform.system() == "Windows"


def symlink(source, link_name):
    """Creates a symbolic link pointing to source named link_name.

    Note: On Windows, source must exist on disk, as the implementation needs
    to know whether to create a "File" or a "Directory" symbolic link.
    """
    if isWindows():
        import platform_utils_win32

        source = _validate_winpath(source)
        link_name = _validate_winpath(link_name)
        target = os.path.join(os.path.dirname(link_name), source)
        if isdir(target):
            platform_utils_win32.create_dirsymlink(
                _makelongpath(source), link_name
            )
        else:
            platform_utils_win32.create_filesymlink(
                _makelongpath(source), link_name
            )
    else:
        return os.symlink(source, link_name)


def _validate_winpath(path):
    path = os.path.normpath(path)
    if _winpath_is_valid(path):
        return path
    raise ValueError(
        f'Path "{path}" must be a relative path or an absolute '
        "path starting with a drive letter"
    )


def _winpath_is_valid(path):
    """Windows only: returns True if path is relative (e.g. ".\\foo") or is
    absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
    is ambiguous (e.g. "x:foo" or "\\foo").
    """
    assert isWindows()
    path = os.path.normpath(path)
    drive, tail = os.path.splitdrive(path)
    if tail:
        if not drive:
            return tail[0] != os.sep  # "\\foo" is invalid
        else:
            return tail[0] == os.sep  # "x:foo" is invalid
    else:
        return not drive  # "x:" is invalid


def _makelongpath(path):
    """Return the input path normalized to support the Windows long path syntax
    ("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the
    MAX_PATH limit.
    """
    if isWindows():
        # Note: MAX_PATH is 260, but, for directories, the maximum value is
        # actually 246.
        if len(path) < 246:
            return path
        if path.startswith("\\\\?\\"):
            return path
        if not os.path.isabs(path):
            return path
        # Append prefix and ensure unicode so that the special longpath syntax
        # is supported by underlying Win32 API calls
        return "\\\\?\\" + os.path.normpath(path)
    else:
        return path


def rmtree(path, ignore_errors=False):
    """shutil.rmtree(path) wrapper with support for long paths on Windows.

    Availability: Unix, Windows.
    """
    onerror = None
    if isWindows():
        path = _makelongpath(path)
        onerror = handle_rmtree_error
    shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)


def handle_rmtree_error(function, path, excinfo):
    # Allow deleting read-only files.
    os.chmod(path, stat.S_IWRITE)
    function(path)


def rename(src, dst):
    """os.rename(src, dst) wrapper with support for long paths on Windows.

    Availability: Unix, Windows.
    """
    if isWindows():
        # On Windows, rename fails if destination exists, see
        # https://docs.python.org/2/library/os.html#os.rename
        try:
            os.rename(_makelongpath(src), _makelongpath(dst))
        except OSError as e:
            if e.errno == errno.EEXIST:
                os.remove(_makelongpath(dst))
                os.rename(_makelongpath(src), _makelongpath(dst))
            else:
                raise
    else:
        shutil.move(src, dst)


def remove(path, missing_ok=False):
    """Remove (delete) the file path. This is a replacement for os.remove that
    allows deleting read-only files on Windows, with support for long paths and
    for deleting directory symbolic links.

    Availability: Unix, Windows.
    """
    longpath = _makelongpath(path) if isWindows() else path
    try:
        os.remove(longpath)
    except OSError as e:
        if e.errno == errno.EACCES:
            os.chmod(longpath, stat.S_IWRITE)
            # Directory symbolic links must be deleted with 'rmdir'.
            if islink(longpath) and isdir(longpath):
                os.rmdir(longpath)
            else:
                os.remove(longpath)
        elif missing_ok and e.errno == errno.ENOENT:
            pass
        else:
            raise


def walk(top, topdown=True, onerror=None, followlinks=False):
    """os.walk(path) wrapper with support for long paths on Windows.

    Availability: Windows, Unix.
    """
    if isWindows():
        return _walk_windows_impl(top, topdown, onerror, followlinks)
    else:
        return os.walk(top, topdown, onerror, followlinks)


def _walk_windows_impl(top, topdown, onerror, followlinks):
    try:
        names = listdir(top)
    except Exception as err:
        if onerror is not None:
            onerror(err)
        return

    dirs, nondirs = [], []
    for name in names:
        if isdir(os.path.join(top, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        new_path = os.path.join(top, name)
        if followlinks or not islink(new_path):
            yield from _walk_windows_impl(
                new_path, topdown, onerror, followlinks
            )
    if not topdown:
        yield top, dirs, nondirs


def listdir(path):
    """os.listdir(path) wrapper with support for long paths on Windows.

    Availability: Windows, Unix.
    """
    return os.listdir(_makelongpath(path))


def rmdir(path):
    """os.rmdir(path) wrapper with support for long paths on Windows.

    Availability: Windows, Unix.
    """
    os.rmdir(_makelongpath(path))


def isdir(path):
    """os.path.isdir(path) wrapper with support for long paths on Windows.

    Availability: Windows, Unix.
    """
    return os.path.isdir(_makelongpath(path))


def islink(path):
    """os.path.islink(path) wrapper with support for long paths on Windows.

    Availability: Windows, Unix.
    """
    if isWindows():
        import platform_utils_win32

        return platform_utils_win32.islink(_makelongpath(path))
    else:
        return os.path.islink(path)


def readlink(path):
    """Return a string representing the path to which the symbolic link
    points. The result may be either an absolute or relative pathname;
    if it is relative, it may be converted to an absolute pathname using
    os.path.join(os.path.dirname(path), result).

    Availability: Windows, Unix.
    """
    if isWindows():
        import platform_utils_win32

        return platform_utils_win32.readlink(_makelongpath(path))
    else:
        return os.readlink(path)


def realpath(path):
    """Return the canonical path of the specified filename, eliminating
    any symbolic links encountered in the path.

    Availability: Windows, Unix.
    """
    if isWindows():
        current_path = os.path.abspath(path)
        path_tail = []
        for c in range(0, 100):  # Avoid cycles
            if islink(current_path):
                target = readlink(current_path)
                current_path = os.path.join(
                    os.path.dirname(current_path), target
                )
            else:
                basename = os.path.basename(current_path)
                if basename == "":
                    path_tail.append(current_path)
                    break
                path_tail.append(basename)
                current_path = os.path.dirname(current_path)
        path_tail.reverse()
        result = os.path.normpath(os.path.join(*path_tail))
        return result
    else:
        return os.path.realpath(path)
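A quick way to see these wrappers behave like their os counterparts is to exercise them on a throwaway directory. The sketch below is illustrative only and assumes it runs from a repo checkout so that platform_utils is importable; on Windows the same calls route through platform_utils_win32 and the long-path handling shown above.

import os
import tempfile

import platform_utils  # assumes the repo source tree is on sys.path

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "target.txt")
    link = os.path.join(tmp, "link.txt")
    with open(target, "w") as f:
        f.write("hello\n")

    # symlink()/islink()/realpath() behave like the os versions on Unix and
    # fall back to the Win32 reparse-point code on Windows.
    platform_utils.symlink(target, link)
    print(platform_utils.islink(link))  # True
    print(platform_utils.realpath(link) == os.path.realpath(target))  # True

    # remove() also clears the read-only bit first, something plain
    # os.remove() refuses to do on Windows.
    os.chmod(target, 0o444)
    platform_utils.remove(link)
    platform_utils.remove(target)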
@@ -12,14 +12,30 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ctypes import addressof
|
||||
from ctypes import byref
|
||||
from ctypes import c_buffer
|
||||
from ctypes import c_ubyte
|
||||
from ctypes import FormatError
|
||||
from ctypes import get_last_error
|
||||
from ctypes import Structure
|
||||
from ctypes import Union
|
||||
from ctypes import WinDLL
|
||||
from ctypes import WinError
|
||||
from ctypes.wintypes import BOOL
|
||||
from ctypes.wintypes import BOOLEAN
|
||||
from ctypes.wintypes import DWORD
|
||||
from ctypes.wintypes import HANDLE
|
||||
from ctypes.wintypes import LPCWSTR
|
||||
from ctypes.wintypes import LPDWORD
|
||||
from ctypes.wintypes import LPVOID
|
||||
from ctypes.wintypes import ULONG
|
||||
from ctypes.wintypes import USHORT
|
||||
from ctypes.wintypes import WCHAR
|
||||
import errno
|
||||
|
||||
from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
|
||||
from ctypes import c_buffer, c_ubyte, Structure, Union, byref
|
||||
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
|
||||
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG, LPDWORD
|
||||
|
||||
kernel32 = WinDLL('kernel32', use_last_error=True)
|
||||
kernel32 = WinDLL("kernel32", use_last_error=True)
|
||||
|
||||
UCHAR = c_ubyte
|
||||
|
||||
@@ -31,14 +47,17 @@ ERROR_PRIVILEGE_NOT_HELD = 1314
|
||||
# Win32 API entry points
|
||||
CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
|
||||
CreateSymbolicLinkW.restype = BOOLEAN
|
||||
CreateSymbolicLinkW.argtypes = (LPCWSTR, # lpSymlinkFileName In
|
||||
LPCWSTR, # lpTargetFileName In
|
||||
DWORD) # dwFlags In
|
||||
CreateSymbolicLinkW.argtypes = (
|
||||
LPCWSTR, # lpSymlinkFileName In
|
||||
LPCWSTR, # lpTargetFileName In
|
||||
DWORD, # dwFlags In
|
||||
)
|
||||
|
||||
# Symbolic link creation flags
|
||||
SYMBOLIC_LINK_FLAG_FILE = 0x00
|
||||
SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01
|
||||
# symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972)
|
||||
# symlink support for CreateSymbolicLink() starting with Windows 10 (1703,
|
||||
# v10.0.14972)
|
||||
SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02
|
||||
|
||||
GetFileAttributesW = kernel32.GetFileAttributesW
|
||||
@@ -50,13 +69,15 @@ FILE_ATTRIBUTE_REPARSE_POINT = 0x00400
|
||||
|
||||
CreateFileW = kernel32.CreateFileW
|
||||
CreateFileW.restype = HANDLE
|
||||
CreateFileW.argtypes = (LPCWSTR, # lpFileName In
|
||||
DWORD, # dwDesiredAccess In
|
||||
DWORD, # dwShareMode In
|
||||
LPVOID, # lpSecurityAttributes In_opt
|
||||
DWORD, # dwCreationDisposition In
|
||||
DWORD, # dwFlagsAndAttributes In
|
||||
HANDLE) # hTemplateFile In_opt
|
||||
CreateFileW.argtypes = (
|
||||
LPCWSTR, # lpFileName In
|
||||
DWORD, # dwDesiredAccess In
|
||||
DWORD, # dwShareMode In
|
||||
LPVOID, # lpSecurityAttributes In_opt
|
||||
DWORD, # dwCreationDisposition In
|
||||
DWORD, # dwFlagsAndAttributes In
|
||||
HANDLE, # hTemplateFile In_opt
|
||||
)
|
||||
|
||||
CloseHandle = kernel32.CloseHandle
|
||||
CloseHandle.restype = BOOL
|
||||
@@ -69,14 +90,16 @@ FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
|
||||
|
||||
DeviceIoControl = kernel32.DeviceIoControl
|
||||
DeviceIoControl.restype = BOOL
|
||||
DeviceIoControl.argtypes = (HANDLE, # hDevice In
|
||||
DWORD, # dwIoControlCode In
|
||||
LPVOID, # lpInBuffer In_opt
|
||||
DWORD, # nInBufferSize In
|
||||
LPVOID, # lpOutBuffer Out_opt
|
||||
DWORD, # nOutBufferSize In
|
||||
LPDWORD, # lpBytesReturned Out_opt
|
||||
LPVOID) # lpOverlapped Inout_opt
|
||||
DeviceIoControl.argtypes = (
|
||||
HANDLE, # hDevice In
|
||||
DWORD, # dwIoControlCode In
|
||||
LPVOID, # lpInBuffer In_opt
|
||||
DWORD, # nInBufferSize In
|
||||
LPVOID, # lpOutBuffer Out_opt
|
||||
DWORD, # nOutBufferSize In
|
||||
LPDWORD, # lpBytesReturned Out_opt
|
||||
LPVOID, # lpOverlapped Inout_opt
|
||||
)
|
||||
|
||||
# Device I/O control flags and options
|
||||
FSCTL_GET_REPARSE_POINT = 0x000900A8
|
||||
@@ -86,124 +109,136 @@ MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 0x4000
|
||||
|
||||
|
||||
class GENERIC_REPARSE_BUFFER(Structure):
|
||||
_fields_ = (('DataBuffer', UCHAR * 1),)
|
||||
_fields_ = (("DataBuffer", UCHAR * 1),)
|
||||
|
||||
|
||||
class SYMBOLIC_LINK_REPARSE_BUFFER(Structure):
|
||||
_fields_ = (('SubstituteNameOffset', USHORT),
|
||||
('SubstituteNameLength', USHORT),
|
||||
('PrintNameOffset', USHORT),
|
||||
('PrintNameLength', USHORT),
|
||||
('Flags', ULONG),
|
||||
('PathBuffer', WCHAR * 1))
|
||||
_fields_ = (
|
||||
("SubstituteNameOffset", USHORT),
|
||||
("SubstituteNameLength", USHORT),
|
||||
("PrintNameOffset", USHORT),
|
||||
("PrintNameLength", USHORT),
|
||||
("Flags", ULONG),
|
||||
("PathBuffer", WCHAR * 1),
|
||||
)
|
||||
|
||||
@property
|
||||
def PrintName(self):
|
||||
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||
return arrayt.from_address(addressof(self) + offset).value
|
||||
@property
|
||||
def PrintName(self):
|
||||
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||
return arrayt.from_address(addressof(self) + offset).value
|
||||
|
||||
|
||||
class MOUNT_POINT_REPARSE_BUFFER(Structure):
|
||||
_fields_ = (('SubstituteNameOffset', USHORT),
|
||||
('SubstituteNameLength', USHORT),
|
||||
('PrintNameOffset', USHORT),
|
||||
('PrintNameLength', USHORT),
|
||||
('PathBuffer', WCHAR * 1))
|
||||
_fields_ = (
|
||||
("SubstituteNameOffset", USHORT),
|
||||
("SubstituteNameLength", USHORT),
|
||||
("PrintNameOffset", USHORT),
|
||||
("PrintNameLength", USHORT),
|
||||
("PathBuffer", WCHAR * 1),
|
||||
)
|
||||
|
||||
@property
|
||||
def PrintName(self):
|
||||
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||
return arrayt.from_address(addressof(self) + offset).value
|
||||
@property
|
||||
def PrintName(self):
|
||||
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||
return arrayt.from_address(addressof(self) + offset).value
|
||||
|
||||
|
||||
class REPARSE_DATA_BUFFER(Structure):
|
||||
class REPARSE_BUFFER(Union):
|
||||
_fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER),
|
||||
('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER),
|
||||
('GenericReparseBuffer', GENERIC_REPARSE_BUFFER))
|
||||
_fields_ = (('ReparseTag', ULONG),
|
||||
('ReparseDataLength', USHORT),
|
||||
('Reserved', USHORT),
|
||||
('ReparseBuffer', REPARSE_BUFFER))
|
||||
_anonymous_ = ('ReparseBuffer',)
|
||||
class REPARSE_BUFFER(Union):
|
||||
_fields_ = (
|
||||
("SymbolicLinkReparseBuffer", SYMBOLIC_LINK_REPARSE_BUFFER),
|
||||
("MountPointReparseBuffer", MOUNT_POINT_REPARSE_BUFFER),
|
||||
("GenericReparseBuffer", GENERIC_REPARSE_BUFFER),
|
||||
)
|
||||
|
||||
_fields_ = (
|
||||
("ReparseTag", ULONG),
|
||||
("ReparseDataLength", USHORT),
|
||||
("Reserved", USHORT),
|
||||
("ReparseBuffer", REPARSE_BUFFER),
|
||||
)
|
||||
_anonymous_ = ("ReparseBuffer",)
|
||||
|
||||
|
||||
def create_filesymlink(source, link_name):
|
||||
"""Creates a Windows file symbolic link source pointing to link_name."""
|
||||
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
|
||||
"""Creates a Windows file symbolic link source pointing to link_name."""
|
||||
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
|
||||
|
||||
|
||||
def create_dirsymlink(source, link_name):
|
||||
"""Creates a Windows directory symbolic link source pointing to link_name.
|
||||
"""
|
||||
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
|
||||
"""Creates a Windows directory symbolic link source pointing to link_name.""" # noqa: E501
|
||||
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
|
||||
|
||||
|
||||
def _create_symlink(source, link_name, dwFlags):
|
||||
if not CreateSymbolicLinkW(link_name, source,
|
||||
dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
|
||||
# See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0
|
||||
# "the unprivileged create flag is unsupported below Windows 10 (1703, v10.0.14972).
|
||||
# retry without it."
|
||||
if not CreateSymbolicLinkW(link_name, source, dwFlags):
|
||||
code = get_last_error()
|
||||
error_desc = FormatError(code).strip()
|
||||
if code == ERROR_PRIVILEGE_NOT_HELD:
|
||||
raise OSError(errno.EPERM, error_desc, link_name)
|
||||
_raise_winerror(
|
||||
code,
|
||||
'Error creating symbolic link \"%s\"'.format(link_name))
|
||||
if not CreateSymbolicLinkW(
|
||||
link_name,
|
||||
source,
|
||||
dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE,
|
||||
):
|
||||
# See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0 # noqa: E501
|
||||
# "the unprivileged create flag is unsupported below Windows 10 (1703,
|
||||
# v10.0.14972). retry without it."
|
||||
if not CreateSymbolicLinkW(link_name, source, dwFlags):
|
||||
code = get_last_error()
|
||||
error_desc = FormatError(code).strip()
|
||||
if code == ERROR_PRIVILEGE_NOT_HELD:
|
||||
raise OSError(errno.EPERM, error_desc, link_name)
|
||||
_raise_winerror(code, f'Error creating symbolic link "{link_name}"')
|
||||
|
||||
|
||||
def islink(path):
|
||||
result = GetFileAttributesW(path)
|
||||
if result == INVALID_FILE_ATTRIBUTES:
|
||||
return False
|
||||
return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
|
||||
result = GetFileAttributesW(path)
|
||||
if result == INVALID_FILE_ATTRIBUTES:
|
||||
return False
|
||||
return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
|
||||
|
||||
|
||||
def readlink(path):
|
||||
reparse_point_handle = CreateFileW(path,
|
||||
0,
|
||||
0,
|
||||
None,
|
||||
OPEN_EXISTING,
|
||||
FILE_FLAG_OPEN_REPARSE_POINT |
|
||||
FILE_FLAG_BACKUP_SEMANTICS,
|
||||
None)
|
||||
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
||||
reparse_point_handle = CreateFileW(
|
||||
path,
|
||||
0,
|
||||
0,
|
||||
None,
|
||||
OPEN_EXISTING,
|
||||
FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
|
||||
None,
|
||||
)
|
||||
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
||||
_raise_winerror(
|
||||
get_last_error(), f'Error opening symbolic link "{path}"'
|
||||
)
|
||||
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
||||
n_bytes_returned = DWORD()
|
||||
io_result = DeviceIoControl(
|
||||
reparse_point_handle,
|
||||
FSCTL_GET_REPARSE_POINT,
|
||||
None,
|
||||
0,
|
||||
target_buffer,
|
||||
len(target_buffer),
|
||||
byref(n_bytes_returned),
|
||||
None,
|
||||
)
|
||||
CloseHandle(reparse_point_handle)
|
||||
if not io_result:
|
||||
_raise_winerror(
|
||||
get_last_error(), f'Error reading symbolic link "{path}"'
|
||||
)
|
||||
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
||||
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
||||
return rdb.SymbolicLinkReparseBuffer.PrintName
|
||||
elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
|
||||
return rdb.MountPointReparseBuffer.PrintName
|
||||
# Unsupported reparse point type.
|
||||
_raise_winerror(
|
||||
get_last_error(),
|
||||
'Error opening symbolic link \"%s\"'.format(path))
|
||||
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
||||
n_bytes_returned = DWORD()
|
||||
io_result = DeviceIoControl(reparse_point_handle,
|
||||
FSCTL_GET_REPARSE_POINT,
|
||||
None,
|
||||
0,
|
||||
target_buffer,
|
||||
len(target_buffer),
|
||||
byref(n_bytes_returned),
|
||||
None)
|
||||
CloseHandle(reparse_point_handle)
|
||||
if not io_result:
|
||||
_raise_winerror(
|
||||
get_last_error(),
|
||||
'Error reading symbolic link \"%s\"'.format(path))
|
||||
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
||||
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
||||
return rdb.SymbolicLinkReparseBuffer.PrintName
|
||||
elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
|
||||
return rdb.MountPointReparseBuffer.PrintName
|
||||
# Unsupported reparse point type
|
||||
_raise_winerror(
|
||||
ERROR_NOT_SUPPORTED,
|
||||
'Error reading symbolic link \"%s\"'.format(path))
|
||||
ERROR_NOT_SUPPORTED, f'Error reading symbolic link "{path}"'
|
||||
)
|
||||
|
||||
|
||||
def _raise_winerror(code, error_desc):
|
||||
win_error_desc = FormatError(code).strip()
|
||||
error_desc = "%s: %s".format(error_desc, win_error_desc)
|
||||
raise WinError(code, error_desc)
|
||||
win_error_desc = FormatError(code).strip()
|
||||
error_desc = f"{error_desc}: {win_error_desc}"
|
||||
raise WinError(code, error_desc)
|
||||
|
||||
progress.py (297 lines)
@@ -14,118 +14,215 @@

import os
import sys
import time

try:
    import threading as _threading
except ImportError:
    import dummy_threading as _threading

from repo_trace import IsTraceToStderr


_TTY = sys.stderr.isatty()

# This will erase all content in the current line (wherever the cursor is).
# It does not move the cursor, so this is usually followed by \r to move to
# column 0.
CSI_ERASE_LINE = "\x1b[2K"

# This will erase all content in the current line after the cursor. This is
# useful for partial updates & progress messages as the terminal can display
# it better.
CSI_ERASE_LINE_AFTER = "\x1b[K"


def convert_to_hms(total):
    """Converts a period of seconds to hours, minutes, and seconds."""
    hours, rem = divmod(total, 3600)
    mins, secs = divmod(rem, 60)
    return int(hours), int(mins), secs


def duration_str(total):
    """A less noisy timedelta.__str__.

    The default timedelta stringification contains a lot of leading zeros and
    uses microsecond resolution. This makes for noisy output.
    """
    hours, mins, secs = convert_to_hms(total)
    ret = f"{secs:.3f}s"
    if mins:
        ret = f"{mins}m{ret}"
    if hours:
        ret = f"{hours}h{ret}"
    return ret


def elapsed_str(total):
    """Returns seconds in the format [H:]MM:SS.

    Does not display a leading zero for minutes if under 10 minutes. This
    should be used when displaying elapsed time in a progress indicator.
    """
    hours, mins, secs = convert_to_hms(total)
    ret = f"{int(secs):>02d}"
    if total >= 3600:
        # Show leading zeroes if over an hour.
        ret = f"{mins:>02d}:{ret}"
    else:
        ret = f"{mins}:{ret}"
    if hours:
        ret = f"{hours}:{ret}"
    return ret


def jobs_str(total):
    return f"{total} job{'s' if total > 1 else ''}"


class Progress:
    def __init__(
        self,
        title,
        total=0,
        units="",
        delay=True,
        quiet=False,
        show_elapsed=False,
        elide=False,
    ):
        self._title = title
        self._total = total
        self._done = 0
        self._start = time.time()
        self._show = not delay
        self._units = units
        self._elide = elide and _TTY

        # Only show the active jobs section if we run more than one in
        # parallel.
        self._show_jobs = False
        self._active = 0

        # Save the last message for displaying on refresh.
        self._last_msg = None
        self._show_elapsed = show_elapsed
        self._update_event = _threading.Event()
        self._update_thread = _threading.Thread(
            target=self._update_loop,
        )
        self._update_thread.daemon = True

        # When quiet, never show any output. It's a bit hacky, but reusing the
        # existing logic that delays initial output keeps the rest of the class
        # clean. Basically we set the start time to years in the future.
        if quiet:
            self._show = False
            self._start += 2**32
        elif show_elapsed:
            self._update_thread.start()

    def _update_loop(self):
        while True:
            self.update(inc=0)
            if self._update_event.wait(timeout=1):
                return

    def _write(self, s):
        s = "\r" + s
        if self._elide:
            col = os.get_terminal_size(sys.stderr.fileno()).columns
            if len(s) > col:
                s = s[: col - 1] + ".."
        sys.stderr.write(s)
        sys.stderr.flush()

    def start(self, name):
        self._active += 1
        if not self._show_jobs:
            self._show_jobs = self._active > 1
        self.update(inc=0, msg="started " + name)

    def finish(self, name):
        self.update(msg="finished " + name)
        self._active -= 1

    def update(self, inc=1, msg=None):
        """Updates the progress indicator.

        Args:
            inc: The number of items completed.
            msg: The message to display. If None, use the last message.
        """
        self._done += inc
        if msg is None:
            msg = self._last_msg
        self._last_msg = msg

        if not _TTY or IsTraceToStderr():
            return

        elapsed_sec = time.time() - self._start
        if not self._show:
            if 0.5 <= elapsed_sec:
                self._show = True
            else:
                return

        if self._total <= 0:
            self._write(
                "%s: %d,%s" % (self._title, self._done, CSI_ERASE_LINE_AFTER)
            )
        else:
            p = (100 * self._done) / self._total
            if self._show_jobs:
                jobs = f"[{jobs_str(self._active)}] "
            else:
                jobs = ""
            if self._show_elapsed:
                elapsed = f" {elapsed_str(elapsed_sec)} |"
            else:
                elapsed = ""
            self._write(
                "%s: %2d%% %s(%d%s/%d%s)%s %s%s"
                % (
                    self._title,
                    p,
                    jobs,
                    self._done,
                    self._units,
                    self._total,
                    self._units,
                    elapsed,
                    msg,
                    CSI_ERASE_LINE_AFTER,
                )
            )

    def end(self):
        self._update_event.set()
        if not _TTY or IsTraceToStderr() or not self._show:
            return

        duration = duration_str(time.time() - self._start)
        if self._total <= 0:
            self._write(
                "%s: %d, done in %s%s\n"
                % (self._title, self._done, duration, CSI_ERASE_LINE_AFTER)
            )
        else:
            p = (100 * self._done) / self._total
            self._write(
                "%s: %3d%% (%d%s/%d%s), done in %s%s\n"
                % (
                    self._title,
                    p,
                    self._done,
                    self._units,
                    self._total,
                    self._units,
                    duration,
                    CSI_ERASE_LINE_AFTER,
                )
            )
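For reference, a short usage sketch of the reworked Progress class as it stands after this change; the title, units, and loop below are made up for illustration, and the import assumes a repo checkout on sys.path:

import time

from progress import Progress

# Progress only draws when stderr is a real terminal, and by default it stays
# silent for the first half second (pass delay=False to draw immediately).
p = Progress("Syncing projects", total=5, units=" projects", delay=False)
for name in ("foo", "bar", "baz", "qux", "quux"):
    p.start(name)
    time.sleep(0.2)  # stand-in for real work
    p.finish(name)   # finish() advances the counter by one
p.end()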
project.py (7570 lines): file diff suppressed because it is too large.
pyproject.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
# Copyright 2023 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

[tool.black]
line-length = 80
# NB: Keep in sync with tox.ini.
target-version = ['py36', 'py37', 'py38', 'py39', 'py310', 'py311'] #, 'py312'
@@ -28,43 +28,56 @@ import util
|
||||
|
||||
|
||||
def sign(opts):
|
||||
"""Sign the launcher!"""
|
||||
output = ''
|
||||
for key in opts.keys:
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses the
|
||||
# default signing key. i.e. It will see that KEYID_RSA is a subkey of
|
||||
# another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = ['gpg', '--homedir', opts.gpgdir, '-u', f'{key}!', '--batch', '--yes',
|
||||
'--armor', '--detach-sign', '--output', '-', opts.launcher]
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
output += ret.stdout
|
||||
"""Sign the launcher!"""
|
||||
output = ""
|
||||
for key in opts.keys:
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses
|
||||
# the default signing key. i.e. It will see that KEYID_RSA is a subkey
|
||||
# of another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = [
|
||||
"gpg",
|
||||
"--homedir",
|
||||
opts.gpgdir,
|
||||
"-u",
|
||||
f"{key}!",
|
||||
"--batch",
|
||||
"--yes",
|
||||
"--armor",
|
||||
"--detach-sign",
|
||||
"--output",
|
||||
"-",
|
||||
opts.launcher,
|
||||
]
|
||||
ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
|
||||
output += ret.stdout
|
||||
|
||||
# Save the combined signatures into one file.
|
||||
with open(f'{opts.launcher}.asc', 'w', encoding='utf-8') as fp:
|
||||
fp.write(output)
|
||||
# Save the combined signatures into one file.
|
||||
with open(f"{opts.launcher}.asc", "w", encoding="utf-8") as fp:
|
||||
fp.write(output)
|
||||
|
||||
|
||||
def check(opts):
|
||||
"""Check the signature."""
|
||||
util.run(opts, ['gpg', '--verify', f'{opts.launcher}.asc'])
|
||||
"""Check the signature."""
|
||||
util.run(opts, ["gpg", "--verify", f"{opts.launcher}.asc"])
|
||||
|
||||
|
||||
def get_version(opts):
|
||||
"""Get the version from |launcher|."""
|
||||
# Make sure we don't search $PATH when signing the "repo" file in the cwd.
|
||||
launcher = os.path.join('.', opts.launcher)
|
||||
cmd = [launcher, '--version']
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
m = re.search(r'repo launcher version ([0-9.]+)', ret.stdout)
|
||||
if not m:
|
||||
sys.exit(f'{opts.launcher}: unable to detect repo version')
|
||||
return m.group(1)
|
||||
"""Get the version from |launcher|."""
|
||||
# Make sure we don't search $PATH when signing the "repo" file in the cwd.
|
||||
launcher = os.path.join(".", opts.launcher)
|
||||
cmd = [launcher, "--version"]
|
||||
ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
|
||||
m = re.search(r"repo launcher version ([0-9.]+)", ret.stdout)
|
||||
if not m:
|
||||
sys.exit(f"{opts.launcher}: unable to detect repo version")
|
||||
return m.group(1)
|
||||
|
||||
|
||||
def postmsg(opts, version):
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
print(f"""
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
print(
|
||||
f"""
|
||||
Repo launcher bucket:
|
||||
gs://git-repo-downloads/
|
||||
|
||||
@@ -81,55 +94,72 @@ NB: If a rollback is necessary, the GS bucket archives old versions, and may be
|
||||
gsutil ls -la gs://git-repo-downloads/repo gs://git-repo-downloads/repo.asc
|
||||
gsutil cp -a public-read gs://git-repo-downloads/repo#<unique id> gs://git-repo-downloads/repo
|
||||
gsutil cp -a public-read gs://git-repo-downloads/repo.asc#<unique id> gs://git-repo-downloads/repo.asc
|
||||
""")
|
||||
""" # noqa: E501
|
||||
)
|
||||
|
||||
|
||||
def get_parser():
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
dest='dryrun', action='store_true',
|
||||
help='show everything that would be done')
|
||||
parser.add_argument('--gpgdir',
|
||||
default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
|
||||
help='path to dedicated gpg dir with release keys '
|
||||
'(default: ~/.gnupg/repo/)')
|
||||
parser.add_argument('--keyid', dest='keys', default=[], action='append',
|
||||
help='alternative signing keys to use')
|
||||
parser.add_argument('launcher',
|
||||
default=os.path.join(util.TOPDIR, 'repo'), nargs='?',
|
||||
help='the launcher script to sign')
|
||||
return parser
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--dry-run",
|
||||
dest="dryrun",
|
||||
action="store_true",
|
||||
help="show everything that would be done",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--gpgdir",
|
||||
default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
|
||||
help="path to dedicated gpg dir with release keys "
|
||||
"(default: ~/.gnupg/repo/)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--keyid",
|
||||
dest="keys",
|
||||
default=[],
|
||||
action="append",
|
||||
help="alternative signing keys to use",
|
||||
)
|
||||
parser.add_argument(
|
||||
"launcher",
|
||||
default=os.path.join(util.TOPDIR, "repo"),
|
||||
nargs="?",
|
||||
help="the launcher script to sign",
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
|
||||
if not os.path.exists(opts.launcher):
|
||||
parser.error(f'launcher does not exist: {opts.launcher}')
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
|
||||
if not os.path.exists(opts.launcher):
|
||||
parser.error(f"launcher does not exist: {opts.launcher}")
|
||||
|
||||
opts.launcher = os.path.relpath(opts.launcher)
|
||||
print(f'Signing "{opts.launcher}" launcher script and saving to '
|
||||
f'"{opts.launcher}.asc"')
|
||||
opts.launcher = os.path.relpath(opts.launcher)
|
||||
print(
|
||||
f'Signing "{opts.launcher}" launcher script and saving to '
|
||||
f'"{opts.launcher}.asc"'
|
||||
)
|
||||
|
||||
if opts.keys:
|
||||
print(f'Using custom keys to sign: {" ".join(opts.keys)}')
|
||||
else:
|
||||
print('Using official Repo release keys to sign')
|
||||
opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
|
||||
util.import_release_key(opts)
|
||||
if opts.keys:
|
||||
print(f'Using custom keys to sign: {" ".join(opts.keys)}')
|
||||
else:
|
||||
print("Using official Repo release keys to sign")
|
||||
opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
|
||||
util.import_release_key(opts)
|
||||
|
||||
version = get_version(opts)
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts, version)
|
||||
version = get_version(opts)
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts, version)
|
||||
|
||||
return 0
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
@@ -35,46 +35,61 @@ import util
|
||||
KEYID = util.KEYID_DSA
|
||||
|
||||
# Regular expression to validate tag names.
|
||||
RE_VALID_TAG = r'^v([0-9]+[.])+[0-9]+$'
|
||||
RE_VALID_TAG = r"^v([0-9]+[.])+[0-9]+$"
|
||||
|
||||
|
||||
def sign(opts):
|
||||
"""Tag the commit & sign it!"""
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses the
|
||||
# default signing key. i.e. It will see that KEYID_RSA is a subkey of
|
||||
# another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = ['git', 'tag', '-s', opts.tag, '-u', f'{opts.key}!',
|
||||
'-m', f'repo {opts.tag}', opts.commit]
|
||||
"""Tag the commit & sign it!"""
|
||||
# We use ! at the end of the key so that gpg uses this specific key.
|
||||
# Otherwise it uses the key as a lookup into the overall key and uses the
|
||||
# default signing key. i.e. It will see that KEYID_RSA is a subkey of
|
||||
# another key, and use the primary key to sign instead of the subkey.
|
||||
cmd = [
|
||||
"git",
|
||||
"tag",
|
||||
"-s",
|
||||
opts.tag,
|
||||
"-u",
|
||||
f"{opts.key}!",
|
||||
"-m",
|
||||
f"repo {opts.tag}",
|
||||
opts.commit,
|
||||
]
|
||||
|
||||
key = 'GNUPGHOME'
|
||||
print('+', f'export {key}="{opts.gpgdir}"')
|
||||
oldvalue = os.getenv(key)
|
||||
os.putenv(key, opts.gpgdir)
|
||||
util.run(opts, cmd)
|
||||
if oldvalue is None:
|
||||
os.unsetenv(key)
|
||||
else:
|
||||
os.putenv(key, oldvalue)
|
||||
key = "GNUPGHOME"
|
||||
print("+", f'export {key}="{opts.gpgdir}"')
|
||||
oldvalue = os.getenv(key)
|
||||
os.putenv(key, opts.gpgdir)
|
||||
util.run(opts, cmd)
|
||||
if oldvalue is None:
|
||||
os.unsetenv(key)
|
||||
else:
|
||||
os.putenv(key, oldvalue)
|
||||
|
||||
|
||||
def check(opts):
|
||||
"""Check the signature."""
|
||||
util.run(opts, ['git', 'tag', '--verify', opts.tag])
|
||||
"""Check the signature."""
|
||||
util.run(opts, ["git", "tag", "--verify", opts.tag])
|
||||
|
||||
|
||||
def postmsg(opts):
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
cmd = ['git', 'rev-parse', 'remotes/origin/stable']
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
current_release = ret.stdout.strip()
|
||||
"""Helpful info to show at the end for release manager."""
|
||||
cmd = ["git", "rev-parse", "remotes/origin/stable"]
|
||||
ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
|
||||
current_release = ret.stdout.strip()
|
||||
|
||||
cmd = ['git', 'log', '--format=%h (%aN) %s', '--no-merges',
|
||||
f'remotes/origin/stable..{opts.tag}']
|
||||
ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE)
|
||||
shortlog = ret.stdout.strip()
|
||||
cmd = [
|
||||
"git",
|
||||
"log",
|
||||
"--format=%h (%aN) %s",
|
||||
"--no-merges",
|
||||
f"remotes/origin/stable..{opts.tag}",
|
||||
]
|
||||
ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
|
||||
shortlog = ret.stdout.strip()
|
||||
|
||||
print(f"""
|
||||
print(
|
||||
f"""
|
||||
Here's the short log since the last release.
|
||||
{shortlog}
|
||||
|
||||
@@ -84,57 +99,69 @@ NB: People will start upgrading to this version immediately.
|
||||
|
||||
To roll back a release:
|
||||
git push origin --force {current_release}:stable -n
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def get_parser():
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter)
|
||||
parser.add_argument('-n', '--dry-run',
|
||||
dest='dryrun', action='store_true',
|
||||
help='show everything that would be done')
|
||||
parser.add_argument('--gpgdir',
|
||||
default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'),
|
||||
help='path to dedicated gpg dir with release keys '
|
||||
'(default: ~/.gnupg/repo/)')
|
||||
parser.add_argument('-f', '--force', action='store_true',
|
||||
help='force signing of any tag')
|
||||
parser.add_argument('--keyid', dest='key',
|
||||
help='alternative signing key to use')
|
||||
parser.add_argument('tag',
|
||||
help='the tag to create (e.g. "v2.0")')
|
||||
parser.add_argument('commit', default='HEAD', nargs='?',
|
||||
help='the commit to tag')
|
||||
return parser
|
||||
"""Get a CLI parser."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--dry-run",
|
||||
dest="dryrun",
|
||||
action="store_true",
|
||||
help="show everything that would be done",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--gpgdir",
|
||||
default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
|
||||
help="path to dedicated gpg dir with release keys "
|
||||
"(default: ~/.gnupg/repo/)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f", "--force", action="store_true", help="force signing of any tag"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--keyid", dest="key", help="alternative signing key to use"
|
||||
)
|
||||
parser.add_argument("tag", help='the tag to create (e.g. "v2.0")')
|
||||
parser.add_argument(
|
||||
"commit", default="HEAD", nargs="?", help="the commit to tag"
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
"""The main func!"""
|
||||
parser = get_parser()
|
||||
opts = parser.parse_args(argv)
|
||||
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f'--gpgdir does not exist: {opts.gpgdir}')
|
||||
if not os.path.exists(opts.gpgdir):
|
||||
parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
|
||||
|
||||
if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
|
||||
parser.error(f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
|
||||
'use --force to sign anyways')
|
||||
if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
|
||||
parser.error(
|
||||
f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
|
||||
"use --force to sign anyways"
|
||||
)
|
||||
|
||||
if opts.key:
|
||||
print(f'Using custom key to sign: {opts.key}')
|
||||
else:
|
||||
print('Using official Repo release key to sign')
|
||||
opts.key = KEYID
|
||||
util.import_release_key(opts)
|
||||
if opts.key:
|
||||
print(f"Using custom key to sign: {opts.key}")
|
||||
else:
|
||||
print("Using official Repo release key to sign")
|
||||
opts.key = KEYID
|
||||
util.import_release_key(opts)
|
||||
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts)
|
||||
sign(opts)
|
||||
check(opts)
|
||||
postmsg(opts)
|
||||
|
||||
return 0
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
|
||||
@@ -18,85 +18,9 @@
This is intended to be run before every official Repo release.
"""

import sys

import update_manpages


if __name__ == "__main__":
    sys.exit(update_manpages.main(sys.argv[1:]))
156
release/update_manpages.py
Normal file
156
release/update_manpages.py
Normal file
@@ -0,0 +1,156 @@
|
||||
# Copyright (C) 2021 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper tool for generating manual page for all repo commands.
|
||||
|
||||
Most code lives in this module so it can be unittested.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import functools
|
||||
import multiprocessing
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
TOPDIR = Path(__file__).resolve().parent.parent
|
||||
MANDIR = TOPDIR.joinpath("man")
|
||||
|
||||
# Load repo local modules.
|
||||
sys.path.insert(0, str(TOPDIR))
|
||||
from git_command import RepoSourceVersion
|
||||
import subcmds
|
||||
|
||||
|
||||
def worker(cmd, **kwargs):
|
||||
subprocess.run(cmd, **kwargs)
|
||||
|
||||
|
||||
def main(argv):
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.parse_args(argv)
|
||||
|
||||
if not shutil.which("help2man"):
|
||||
sys.exit("Please install help2man to continue.")
|
||||
|
||||
# Let repo know we're generating man pages so it can avoid some dynamic
|
||||
# behavior (like probing active number of CPUs). We use a weird name &
|
||||
# value to make it less likely for users to set this var themselves.
|
||||
os.environ["_REPO_GENERATE_MANPAGES_"] = " indeed! "
|
||||
|
||||
# "repo branch" is an alias for "repo branches".
|
||||
del subcmds.all_commands["branch"]
|
||||
(MANDIR / "repo-branch.1").write_text(".so man1/repo-branches.1")
|
||||
|
||||
version = RepoSourceVersion()
|
||||
cmdlist = [
|
||||
[
|
||||
"help2man",
|
||||
"-N",
|
||||
"-n",
|
||||
f"repo {cmd} - manual page for repo {cmd}",
|
||||
"-S",
|
||||
f"repo {cmd}",
|
||||
"-m",
|
||||
"Repo Manual",
|
||||
f"--version-string={version}",
|
||||
"-o",
|
||||
MANDIR.joinpath(f"repo-{cmd}.1.tmp"),
|
||||
"./repo",
|
||||
"-h",
|
||||
f"help {cmd}",
|
||||
]
|
||||
for cmd in subcmds.all_commands
|
||||
]
|
||||
cmdlist.append(
|
||||
[
|
||||
"help2man",
|
||||
"-N",
|
||||
"-n",
|
||||
"repository management tool built on top of git",
|
||||
"-S",
|
||||
"repo",
|
||||
"-m",
|
||||
"Repo Manual",
|
||||
f"--version-string={version}",
|
||||
"-o",
|
||||
MANDIR.joinpath("repo.1.tmp"),
|
||||
"./repo",
|
||||
"-h",
|
||||
"--help-all",
|
||||
]
|
||||
)
|
||||
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
tempdir = Path(tempdir)
|
||||
repo_dir = tempdir / ".repo"
|
||||
repo_dir.mkdir()
|
||||
(repo_dir / "repo").symlink_to(TOPDIR)
|
||||
|
||||
# Create a repo wrapper using the active Python executable. We can't
|
||||
# pass this directly to help2man as it's too simple, so insert it via
|
||||
# shebang.
|
||||
data = (TOPDIR / "repo").read_text(encoding="utf-8")
|
||||
tempbin = tempdir / "repo"
|
||||
tempbin.write_text(f"#!{sys.executable}\n" + data, encoding="utf-8")
|
||||
tempbin.chmod(0o755)
|
||||
|
||||
# Run all cmd in parallel, and wait for them to finish.
|
||||
with multiprocessing.Pool() as pool:
|
||||
pool.map(
|
||||
functools.partial(worker, cwd=tempdir, check=True), cmdlist
|
||||
)
|
||||
|
||||
for tmp_path in MANDIR.glob("*.1.tmp"):
|
||||
path = tmp_path.parent / tmp_path.stem
|
||||
old_data = path.read_text() if path.exists() else ""
|
||||
|
||||
data = tmp_path.read_text()
|
||||
tmp_path.unlink()
|
||||
|
||||
data = replace_regex(data)
|
||||
|
||||
# If the only thing that changed was the date, don't refresh. This
|
||||
# avoids a lot of noise when only one file actually updates.
|
||||
old_data = re.sub(
|
||||
r'^(\.TH REPO "1" ")([^"]+)', r"\1", old_data, flags=re.M
|
||||
)
|
||||
new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r"\1", data, flags=re.M)
|
||||
if old_data != new_data:
|
||||
path.write_text(data)
|
||||
|
||||
|
||||
def replace_regex(data):
|
||||
"""Replace semantically null regexes in the data.
|
||||
|
||||
Args:
|
||||
data: manpage text.
|
||||
|
||||
Returns:
|
||||
Updated manpage text.
|
||||
"""
|
||||
regex = (
|
||||
(r"(It was generated by help2man) [0-9.]+", r"\g<1>."),
|
||||
(r"^\033\[[0-9;]*m([^\033]*)\033\[m", r"\g<1>"),
|
||||
(r"^\.IP\n(.*:)\n", r".SS \g<1>\n"),
|
||||
(r"^\.PP\nDescription", r".SH DETAILS"),
|
||||
)
|
||||
for pattern, replacement in regex:
|
||||
data = re.sub(pattern, replacement, data, flags=re.M)
|
||||
return data
|
||||
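As an aside for readers tracing the new release/update_manpages.py above: the post-processing step is just a handful of regex rewrites over the help2man output. A self-contained sketch of those substitutions follows; the sample roff fragment is invented purely for illustration.

import re

# Invented help2man-style output, for illustration only.
sample = (
    ".IP\n"
    "Options:\n"
    ".PP\n"
    "Description\n"
    "It was generated by help2man 1.48.\n"
)

# The same rewrites replace_regex() applies above: drop the help2man
# version number, promote "Foo:" stanzas to .SS headings, and rename
# the description block to a DETAILS section.
for pattern, replacement in (
    (r"(It was generated by help2man) [0-9.]+", r"\g<1>."),
    (r"^\.IP\n(.*:)\n", r".SS \g<1>\n"),
    (r"^\.PP\nDescription", r".SH DETAILS"),
):
    sample = re.sub(pattern, replacement, sample, flags=re.M)

print(sample)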
@@ -20,54 +20,60 @@ import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
|
||||
assert sys.version_info >= (3, 6), "This module requires Python 3.6+"
|
||||
|
||||
|
||||
TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
HOMEDIR = os.path.expanduser('~')
|
||||
HOMEDIR = os.path.expanduser("~")
|
||||
|
||||
|
||||
# These are the release keys we sign with.
|
||||
KEYID_DSA = '8BB9AD793E8E6153AF0F9A4416530D5E920F5C65'
|
||||
KEYID_RSA = 'A34A13BE8E76BFF46A0C022DA2E75A824AAB9624'
|
||||
KEYID_ECC = 'E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39'
|
||||
KEYID_DSA = "8BB9AD793E8E6153AF0F9A4416530D5E920F5C65"
|
||||
KEYID_RSA = "A34A13BE8E76BFF46A0C022DA2E75A824AAB9624"
|
||||
KEYID_ECC = "E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39"
|
||||
|
||||
|
||||
def cmdstr(cmd):
|
||||
"""Get a nicely quoted shell command."""
|
||||
ret = []
|
||||
for arg in cmd:
|
||||
if not re.match(r'^[a-zA-Z0-9/_.=-]+$', arg):
|
||||
arg = f'"{arg}"'
|
||||
ret.append(arg)
|
||||
return ' '.join(ret)
|
||||
"""Get a nicely quoted shell command."""
|
||||
ret = []
|
||||
for arg in cmd:
|
||||
if not re.match(r"^[a-zA-Z0-9/_.=-]+$", arg):
|
||||
arg = f'"{arg}"'
|
||||
ret.append(arg)
|
||||
return " ".join(ret)
|
||||
|
||||
|
||||
def run(opts, cmd, check=True, **kwargs):
|
||||
"""Helper around subprocess.run to include logging."""
|
||||
print('+', cmdstr(cmd))
|
||||
if opts.dryrun:
|
||||
cmd = ['true', '--'] + cmd
|
||||
try:
|
||||
return subprocess.run(cmd, check=check, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f'aborting: {e}', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
"""Helper around subprocess.run to include logging."""
|
||||
print("+", cmdstr(cmd))
|
||||
if opts.dryrun:
|
||||
cmd = ["true", "--"] + cmd
|
||||
try:
|
||||
return subprocess.run(cmd, check=check, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"aborting: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def import_release_key(opts):
|
||||
"""Import the public key of the official release repo signing key."""
|
||||
# Extract the key from our repo launcher.
|
||||
launcher = getattr(opts, 'launcher', os.path.join(TOPDIR, 'repo'))
|
||||
print(f'Importing keys from "{launcher}" launcher script')
|
||||
with open(launcher, encoding='utf-8') as fp:
|
||||
data = fp.read()
|
||||
"""Import the public key of the official release repo signing key."""
|
||||
# Extract the key from our repo launcher.
|
||||
launcher = getattr(opts, "launcher", os.path.join(TOPDIR, "repo"))
|
||||
print(f'Importing keys from "{launcher}" launcher script')
|
||||
with open(launcher, encoding="utf-8") as fp:
|
||||
data = fp.read()
|
||||
|
||||
keys = re.findall(
|
||||
r'\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*'
|
||||
r'\n-----END PGP PUBLIC KEY BLOCK-----\n', data, flags=re.M)
|
||||
run(opts, ['gpg', '--import'], input='\n'.join(keys).encode('utf-8'))
|
||||
keys = re.findall(
|
||||
r"\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*"
|
||||
r"\n-----END PGP PUBLIC KEY BLOCK-----\n",
|
||||
data,
|
||||
flags=re.M,
|
||||
)
|
||||
run(opts, ["gpg", "--import"], input="\n".join(keys).encode("utf-8"))
|
||||
|
||||
print('Marking keys as fully trusted')
|
||||
run(opts, ['gpg', '--import-ownertrust'],
|
||||
input=f'{KEYID_DSA}:6:\n'.encode('utf-8'))
|
||||
print("Marking keys as fully trusted")
|
||||
run(
|
||||
opts,
|
||||
["gpg", "--import-ownertrust"],
|
||||
input=f"{KEYID_DSA}:6:\n".encode("utf-8"),
|
||||
)
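For context on the helpers in this release script, a rough sketch of how run() behaves in dry-run mode; the Namespace stands in for the script's real option parsing, the snippet assumes it executes in the same module that defines run(), and the key id below is fake.

import argparse

# Hypothetical options object; the real script builds this with argparse.
opts = argparse.Namespace(dryrun=True)

# With dryrun=True, run() rewrites the command as `true -- gpg ...`,
# so cmdstr() prints the quoted command line but nothing executes.
run(opts, ["gpg", "--import-ownertrust"], input=b"0000000000000000:6:\n")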
|
||||
|
||||
93
repo_logging.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Logic for printing user-friendly logs in repo."""
|
||||
|
||||
import logging
|
||||
|
||||
from color import Coloring
|
||||
from error import RepoExitError
|
||||
|
||||
|
||||
SEPARATOR = "=" * 80
|
||||
MAX_PRINT_ERRORS = 5
|
||||
|
||||
|
||||
class _ConfigMock:
|
||||
"""Default coloring config to use when Logging.config is not set."""
|
||||
|
||||
def __init__(self):
|
||||
self.default_values = {"color.ui": "auto"}
|
||||
|
||||
def GetString(self, x):
|
||||
return self.default_values.get(x, None)
|
||||
|
||||
|
||||
class _LogColoring(Coloring):
|
||||
"""Coloring outstream for logging."""
|
||||
|
||||
def __init__(self, config):
|
||||
super().__init__(config, "logs")
|
||||
self.error = self.colorer("error", fg="red")
|
||||
self.warning = self.colorer("warn", fg="yellow")
|
||||
self.levelMap = {
|
||||
"WARNING": self.warning,
|
||||
"ERROR": self.error,
|
||||
}
|
||||
|
||||
|
||||
class _LogColoringFormatter(logging.Formatter):
|
||||
"""Coloring formatter for logging."""
|
||||
|
||||
def __init__(self, config=None, *args, **kwargs):
|
||||
self.config = config if config else _ConfigMock()
|
||||
self.colorer = _LogColoring(self.config)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def format(self, record):
|
||||
"""Formats |record| with color."""
|
||||
msg = super().format(record)
|
||||
colorer = self.colorer.levelMap.get(record.levelname)
|
||||
return msg if not colorer else colorer(msg)
|
||||
|
||||
|
||||
class RepoLogger(logging.Logger):
|
||||
"""Repo Logging Module."""
|
||||
|
||||
def __init__(self, name: str, config=None, **kwargs):
|
||||
super().__init__(name, **kwargs)
|
||||
handler = logging.StreamHandler()
|
||||
handler.setFormatter(_LogColoringFormatter(config))
|
||||
self.addHandler(handler)
|
||||
|
||||
def log_aggregated_errors(self, err: RepoExitError):
|
||||
"""Print all aggregated logs."""
|
||||
self.error(SEPARATOR)
|
||||
|
||||
if not err.aggregate_errors:
|
||||
self.error("Repo command failed: %s", type(err).__name__)
|
||||
self.error("\t%s", str(err))
|
||||
return
|
||||
|
||||
self.error(
|
||||
"Repo command failed due to the following `%s` errors:",
|
||||
type(err).__name__,
|
||||
)
|
||||
self.error(
|
||||
"\n".join(str(e) for e in err.aggregate_errors[:MAX_PRINT_ERRORS])
|
||||
)
|
||||
|
||||
diff = len(err.aggregate_errors) - MAX_PRINT_ERRORS
|
||||
if diff > 0:
|
||||
self.error("+%d additional errors...", diff)
|
||||
149
repo_trace.py
@@ -15,26 +15,157 @@
|
||||
"""Logic for tracing repo interactions.
|
||||
|
||||
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
|
||||
|
||||
Temporary: Tracing is always on. Set `REPO_TRACE=0` to turn off.
|
||||
To also write trace output to stderr, use `repo --trace_to_stderr ...`
|
||||
"""
|
||||
|
||||
import sys
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
import platform_utils
|
||||
|
||||
|
||||
# Env var to implicitly turn on tracing.
|
||||
REPO_TRACE = 'REPO_TRACE'
|
||||
REPO_TRACE = "REPO_TRACE"
|
||||
|
||||
_TRACE = os.environ.get(REPO_TRACE) == '1'
|
||||
# Temporarily set tracing to always on unless the user explicitly sets it to 0.
|
||||
_TRACE = os.environ.get(REPO_TRACE) != "0"
|
||||
_TRACE_TO_STDERR = False
|
||||
_TRACE_FILE = None
|
||||
_TRACE_FILE_NAME = "TRACE_FILE"
|
||||
_MAX_SIZE = 70 # in MiB
|
||||
_NEW_COMMAND_SEP = "+++++++++++++++NEW COMMAND+++++++++++++++++++"
|
||||
|
||||
|
||||
def IsTraceToStderr():
|
||||
"""Whether traces are written to stderr."""
|
||||
return _TRACE_TO_STDERR
|
||||
|
||||
|
||||
def IsTrace():
|
||||
return _TRACE
|
||||
"""Whether tracing is enabled."""
|
||||
return _TRACE
|
||||
|
||||
|
||||
def SetTraceToStderr():
|
||||
"""Enables tracing logging to stderr."""
|
||||
global _TRACE_TO_STDERR
|
||||
_TRACE_TO_STDERR = True
|
||||
|
||||
|
||||
def SetTrace():
|
||||
global _TRACE
|
||||
_TRACE = True
|
||||
"""Enables tracing."""
|
||||
global _TRACE
|
||||
_TRACE = True
|
||||
|
||||
|
||||
def Trace(fmt, *args):
|
||||
if IsTrace():
|
||||
print(fmt % args, file=sys.stderr)
|
||||
def _SetTraceFile(quiet):
|
||||
"""Sets the trace file location."""
|
||||
global _TRACE_FILE
|
||||
_TRACE_FILE = _GetTraceFile(quiet)
|
||||
|
||||
|
||||
class Trace(contextlib.ContextDecorator):
|
||||
"""Used to capture and save git traces."""
|
||||
|
||||
def _time(self):
|
||||
"""Generate nanoseconds of time in a py3.6 safe way"""
|
||||
return int(time.time() * 1e9)
|
||||
|
||||
def __init__(self, fmt, *args, first_trace=False, quiet=True):
|
||||
"""Initialize the object.
|
||||
|
||||
Args:
|
||||
fmt: The format string for the trace.
|
||||
*args: Arguments to pass to formatting.
|
||||
first_trace: Whether this is the first trace of a `repo` invocation.
|
||||
quiet: Whether to suppress notification of trace file location.
|
||||
"""
|
||||
if not IsTrace():
|
||||
return
|
||||
self._trace_msg = fmt % args
|
||||
|
||||
if not _TRACE_FILE:
|
||||
_SetTraceFile(quiet)
|
||||
|
||||
if first_trace:
|
||||
_ClearOldTraces()
|
||||
self._trace_msg = f"{_NEW_COMMAND_SEP} {self._trace_msg}"
|
||||
|
||||
def __enter__(self):
|
||||
if not IsTrace():
|
||||
return self
|
||||
|
||||
print_msg = (
|
||||
f"PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n"
|
||||
)
|
||||
|
||||
with open(_TRACE_FILE, "a") as f:
|
||||
print(print_msg, file=f)
|
||||
|
||||
if _TRACE_TO_STDERR:
|
||||
print(print_msg, file=sys.stderr)
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc):
|
||||
if not IsTrace():
|
||||
return False
|
||||
|
||||
print_msg = (
|
||||
f"PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n"
|
||||
)
|
||||
|
||||
with open(_TRACE_FILE, "a") as f:
|
||||
print(print_msg, file=f)
|
||||
|
||||
if _TRACE_TO_STDERR:
|
||||
print(print_msg, file=sys.stderr)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _GetTraceFile(quiet):
|
||||
"""Get the trace file or create one."""
|
||||
# TODO: refactor to pass repodir to Trace.
|
||||
repo_dir = os.path.dirname(os.path.dirname(__file__))
|
||||
trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
|
||||
if not quiet:
|
||||
print(f"Trace outputs in {trace_file}", file=sys.stderr)
|
||||
return trace_file
|
||||
|
||||
|
||||
def _ClearOldTraces():
|
||||
"""Clear the oldest commands if trace file is too big."""
|
||||
try:
|
||||
with open(_TRACE_FILE, errors="ignore") as f:
|
||||
if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE:
|
||||
return
|
||||
trace_lines = f.readlines()
|
||||
except FileNotFoundError:
|
||||
return
|
||||
|
||||
while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE:
|
||||
for i, line in enumerate(trace_lines):
|
||||
if "END:" in line and _NEW_COMMAND_SEP in line:
|
||||
trace_lines = trace_lines[i + 1 :]
|
||||
break
|
||||
else:
|
||||
# The last chunk is bigger than _MAX_SIZE, so just throw everything
|
||||
# away.
|
||||
trace_lines = []
|
||||
|
||||
while trace_lines and trace_lines[-1] == "\n":
|
||||
trace_lines = trace_lines[:-1]
|
||||
# Write to a temporary file with a unique name in the same filesystem
|
||||
# before replacing the original trace file.
|
||||
temp_dir, temp_prefix = os.path.split(_TRACE_FILE)
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", dir=temp_dir, prefix=temp_prefix, delete=False
|
||||
) as f:
|
||||
f.writelines(trace_lines)
|
||||
platform_utils.rename(f.name, _TRACE_FILE)
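Since Trace changes from a plain function into a context manager in this hunk, a minimal caller-side sketch may help; the traced message is made up, and this assumes the module is imported from a repo checkout.

from repo_trace import SetTrace, Trace

SetTrace()  # redundant while tracing defaults to on, but explicit here

# Each block writes matching "PID ... START:" / "END:" records with
# nanosecond timestamps to the trace file (typically .repo/TRACE_FILE).
# first_trace=True additionally prunes an oversized file and prefixes
# the message with the NEW COMMAND separator.
with Trace("git fetch %s", "origin", first_trace=True, quiet=False):
    pass  # the traced work would go here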
|
||||
|
||||
72
run_tests
@@ -13,49 +13,57 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Wrapper to run pytest with the right settings."""
|
||||
"""Wrapper to run linters and pytest with the right settings."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
def find_pytest():
|
||||
"""Try to locate a good version of pytest."""
|
||||
# If we're in a virtualenv, assume that it's provided the right pytest.
|
||||
if 'VIRTUAL_ENV' in os.environ:
|
||||
return 'pytest'
|
||||
|
||||
# Use the Python 3 version if available.
|
||||
ret = shutil.which('pytest-3')
|
||||
if ret:
|
||||
return ret
|
||||
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Hopefully this is a Python 3 version.
|
||||
ret = shutil.which('pytest')
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
print('%s: unable to find pytest.' % (__file__,), file=sys.stderr)
|
||||
print('%s: Try installing: sudo apt-get install python-pytest' % (__file__,),
|
||||
file=sys.stderr)
|
||||
def run_black():
|
||||
"""Returns the exit code from black."""
|
||||
# Black by default only matches .py files. We have to list standalone
|
||||
# scripts manually.
|
||||
extra_programs = [
|
||||
"repo",
|
||||
"run_tests",
|
||||
"release/update-manpages",
|
||||
]
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "black", "--check", ROOT_DIR] + extra_programs,
|
||||
check=False,
|
||||
).returncode
|
||||
|
||||
|
||||
def run_flake8():
|
||||
"""Returns the exit code from flake8."""
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "flake8", ROOT_DIR], check=False
|
||||
).returncode
|
||||
|
||||
|
||||
def run_isort():
|
||||
"""Returns the exit code from isort."""
|
||||
return subprocess.run(
|
||||
[sys.executable, "-m", "isort", "--check", ROOT_DIR], check=False
|
||||
).returncode
|
||||
|
||||
|
||||
def main(argv):
|
||||
"""The main entry."""
|
||||
# Add the repo tree to PYTHONPATH as the tests expect to be able to import
|
||||
# modules directly.
|
||||
pythonpath = os.path.dirname(os.path.realpath(__file__))
|
||||
oldpythonpath = os.environ.get('PYTHONPATH', None)
|
||||
if oldpythonpath is not None:
|
||||
pythonpath += os.pathsep + oldpythonpath
|
||||
os.environ['PYTHONPATH'] = pythonpath
|
||||
|
||||
pytest = find_pytest()
|
||||
return subprocess.run([pytest] + argv, check=False).returncode
|
||||
"""The main entry."""
|
||||
checks = (
|
||||
lambda: pytest.main(argv),
|
||||
run_black,
|
||||
run_flake8,
|
||||
run_isort,
|
||||
)
|
||||
return 0 if all(not c() for c in checks) else 1
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]))
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
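The rewritten main() above treats each linter as a callable whose non-zero return means failure; the aggregation is easy to miss, so here is a standalone sketch with placeholder checks.

def fake_black():
    return 0  # placeholder: pretend black found nothing


def fake_flake8():
    return 1  # placeholder: pretend flake8 reported problems


checks = (fake_black, fake_flake8)
# all(not c() ...) stops at the first failing check, mirroring the
# short-circuit behavior of the run_tests wrapper shown above.
exit_code = 0 if all(not c() for c in checks) else 1
assert exit_code == 1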
|
||||
|
||||
130
run_tests.vpython3
Normal file
@@ -0,0 +1,130 @@
|
||||
# This is a vpython "spec" file.
|
||||
#
|
||||
# Read more about `vpython` and how to modify this file here:
|
||||
# https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md
|
||||
# List of available wheels:
|
||||
# https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/wheels.md
|
||||
|
||||
python_version: "3.8"
|
||||
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pytest-py3"
|
||||
version: "version:6.2.2"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/py-py2_py3"
|
||||
version: "version:1.10.0"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/iniconfig-py3"
|
||||
version: "version:1.1.1"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/packaging-py3"
|
||||
version: "version:23.0"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pluggy-py3"
|
||||
version: "version:0.13.1"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/toml-py3"
|
||||
version: "version:0.10.1"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pyparsing-py3"
|
||||
version: "version:3.0.7"
|
||||
>
|
||||
|
||||
# Required by pytest==6.2.2
|
||||
wheel: <
|
||||
name: "infra/python/wheels/attrs-py2_py3"
|
||||
version: "version:21.4.0"
|
||||
>
|
||||
|
||||
# Required by packaging==16.8
|
||||
wheel: <
|
||||
name: "infra/python/wheels/six-py2_py3"
|
||||
version: "version:1.16.0"
|
||||
>
|
||||
|
||||
wheel: <
|
||||
name: "infra/python/wheels/black-py3"
|
||||
version: "version:23.1.0"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/mypy-extensions-py3"
|
||||
version: "version:0.4.3"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/tomli-py3"
|
||||
version: "version:2.0.1"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/platformdirs-py3"
|
||||
version: "version:2.5.2"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pathspec-py3"
|
||||
version: "version:0.9.0"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/typing-extensions-py3"
|
||||
version: "version:4.3.0"
|
||||
>
|
||||
|
||||
# Required by black==23.1.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/click-py3"
|
||||
version: "version:8.0.3"
|
||||
>
|
||||
|
||||
wheel: <
|
||||
name: "infra/python/wheels/flake8-py2_py3"
|
||||
version: "version:6.0.0"
|
||||
>
|
||||
|
||||
# Required by flake8==6.0.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/mccabe-py2_py3"
|
||||
version: "version:0.7.0"
|
||||
>
|
||||
|
||||
# Required by flake8==6.0.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pyflakes-py2_py3"
|
||||
version: "version:3.0.1"
|
||||
>
|
||||
|
||||
# Required by flake8==6.0.0
|
||||
wheel: <
|
||||
name: "infra/python/wheels/pycodestyle-py2_py3"
|
||||
version: "version:2.10.0"
|
||||
>
|
||||
|
||||
wheel: <
|
||||
name: "infra/python/wheels/isort-py3"
|
||||
version: "version:5.10.1"
|
||||
>
|
||||
49
setup.py
@@ -16,6 +16,7 @@
|
||||
"""Python packaging for repo."""
|
||||
|
||||
import os
|
||||
|
||||
import setuptools
|
||||
|
||||
|
||||
@@ -23,39 +24,39 @@ TOPDIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
# Rip out the first intro paragraph.
|
||||
with open(os.path.join(TOPDIR, 'README.md')) as fp:
|
||||
with open(os.path.join(TOPDIR, "README.md")) as fp:
|
||||
lines = fp.read().splitlines()[2:]
|
||||
end = lines.index('')
|
||||
long_description = ' '.join(lines[0:end])
|
||||
end = lines.index("")
|
||||
long_description = " ".join(lines[0:end])
|
||||
|
||||
|
||||
# https://packaging.python.org/tutorials/packaging-projects/
|
||||
setuptools.setup(
|
||||
name='repo',
|
||||
version='2',
|
||||
maintainer='Various',
|
||||
maintainer_email='repo-discuss@googlegroups.com',
|
||||
description='Repo helps manage many Git repositories',
|
||||
name="repo",
|
||||
version="2",
|
||||
maintainer="Various",
|
||||
maintainer_email="repo-discuss@googlegroups.com",
|
||||
description="Repo helps manage many Git repositories",
|
||||
long_description=long_description,
|
||||
long_description_content_type='text/plain',
|
||||
url='https://gerrit.googlesource.com/git-repo/',
|
||||
long_description_content_type="text/plain",
|
||||
url="https://gerrit.googlesource.com/git-repo/",
|
||||
project_urls={
|
||||
'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo',
|
||||
"Bug Tracker": "https://issues.gerritcodereview.com/issues?q=is:open%20componentid:1370071", # noqa: E501
|
||||
},
|
||||
# https://pypi.org/classifiers/
|
||||
classifiers=[
|
||||
'Development Status :: 6 - Mature',
|
||||
'Environment :: Console',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Natural Language :: English',
|
||||
'Operating System :: MacOS :: MacOS X',
|
||||
'Operating System :: Microsoft :: Windows :: Windows 10',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3 :: Only',
|
||||
'Topic :: Software Development :: Version Control :: Git',
|
||||
"Development Status :: 6 - Mature",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Natural Language :: English",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: Microsoft :: Windows :: Windows 10",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Topic :: Software Development :: Version Control :: Git",
|
||||
],
|
||||
python_requires='>=3.6',
|
||||
packages=['subcmds'],
|
||||
python_requires=">=3.6",
|
||||
packages=["subcmds"],
|
||||
)
|
||||
|
||||
435
ssh.py
@@ -28,253 +28,264 @@ import platform_utils
|
||||
from repo_trace import Trace
|
||||
|
||||
|
||||
PROXY_PATH = os.path.join(os.path.dirname(__file__), 'git_ssh')
|
||||
PROXY_PATH = os.path.join(os.path.dirname(__file__), "git_ssh")
|
||||
|
||||
|
||||
def _run_ssh_version():
|
||||
"""run ssh -V to display the version number"""
|
||||
return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode()
|
||||
"""run ssh -V to display the version number"""
|
||||
return subprocess.check_output(
|
||||
["ssh", "-V"], stderr=subprocess.STDOUT
|
||||
).decode()
|
||||
|
||||
|
||||
def _parse_ssh_version(ver_str=None):
|
||||
"""parse a ssh version string into a tuple"""
|
||||
if ver_str is None:
|
||||
ver_str = _run_ssh_version()
|
||||
m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str)
|
||||
if m:
|
||||
return tuple(int(x) for x in m.group(1).split('.'))
|
||||
else:
|
||||
return ()
|
||||
"""parse a ssh version string into a tuple"""
|
||||
if ver_str is None:
|
||||
ver_str = _run_ssh_version()
|
||||
m = re.match(r"^OpenSSH_([0-9.]+)(p[0-9]+)?[\s,]", ver_str)
|
||||
if m:
|
||||
return tuple(int(x) for x in m.group(1).split("."))
|
||||
else:
|
||||
return ()
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def version():
|
||||
"""return ssh version as a tuple"""
|
||||
try:
|
||||
return _parse_ssh_version()
|
||||
except FileNotFoundError:
|
||||
print('fatal: ssh not installed', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except subprocess.CalledProcessError:
|
||||
print('fatal: unable to detect ssh version', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
"""return ssh version as a tuple"""
|
||||
try:
|
||||
return _parse_ssh_version()
|
||||
except FileNotFoundError:
|
||||
print("fatal: ssh not installed", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except subprocess.CalledProcessError:
|
||||
print("fatal: unable to detect ssh version", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
|
||||
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
|
||||
URI_SCP = re.compile(r"^([^@:]*@?[^:/]{1,}):")
|
||||
URI_ALL = re.compile(r"^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/")
|
||||
|
||||
|
||||
class ProxyManager:
|
||||
"""Manage various ssh clients & masters that we spawn.
|
||||
"""Manage various ssh clients & masters that we spawn.
|
||||
|
||||
This will take care of sharing state between multiprocessing children, and
|
||||
make sure that if we crash, we don't leak any of the ssh sessions.
|
||||
This will take care of sharing state between multiprocessing children, and
|
||||
make sure that if we crash, we don't leak any of the ssh sessions.
|
||||
|
||||
The code should work with a single-process scenario too, and not add too much
|
||||
overhead due to the manager.
|
||||
"""
|
||||
|
||||
# Path to the ssh program to run which will pass our master settings along.
|
||||
# Set here more as a convenience API.
|
||||
proxy = PROXY_PATH
|
||||
|
||||
def __init__(self, manager):
|
||||
# Protect access to the list of active masters.
|
||||
self._lock = multiprocessing.Lock()
|
||||
# List of active masters (pid). These will be spawned on demand, and we are
|
||||
# responsible for shutting them all down at the end.
|
||||
self._masters = manager.list()
|
||||
# Set of active masters indexed by "host:port" information.
|
||||
# The value isn't used, but multiprocessing doesn't provide a set class.
|
||||
self._master_keys = manager.dict()
|
||||
# Whether ssh masters are known to be broken, so we give up entirely.
|
||||
self._master_broken = manager.Value('b', False)
|
||||
# List of active ssh sessions. Clients will be added & removed as
|
||||
# connections finish, so this list is just for safety & cleanup if we crash.
|
||||
self._clients = manager.list()
|
||||
# Path to directory for holding master sockets.
|
||||
self._sock_path = None
|
||||
|
||||
def __enter__(self):
|
||||
"""Enter a new context."""
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
"""Exit a context & clean up all resources."""
|
||||
self.close()
|
||||
|
||||
def add_client(self, proc):
|
||||
"""Track a new ssh session."""
|
||||
self._clients.append(proc.pid)
|
||||
|
||||
def remove_client(self, proc):
|
||||
"""Remove a completed ssh session."""
|
||||
try:
|
||||
self._clients.remove(proc.pid)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def add_master(self, proc):
|
||||
"""Track a new master connection."""
|
||||
self._masters.append(proc.pid)
|
||||
|
||||
def _terminate(self, procs):
|
||||
"""Kill all |procs|."""
|
||||
for pid in procs:
|
||||
try:
|
||||
os.kill(pid, signal.SIGTERM)
|
||||
os.waitpid(pid, 0)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# The multiprocessing.list() API doesn't provide many standard list()
|
||||
# methods, so we have to manually clear the list.
|
||||
while True:
|
||||
try:
|
||||
procs.pop(0)
|
||||
except:
|
||||
break
|
||||
|
||||
def close(self):
|
||||
"""Close this active ssh session.
|
||||
|
||||
Kill all ssh clients & masters we created, and nuke the socket dir.
|
||||
The code should work with a single-process scenario too, and not add too
|
||||
much overhead due to the manager.
|
||||
"""
|
||||
self._terminate(self._clients)
|
||||
self._terminate(self._masters)
|
||||
|
||||
d = self.sock(create=False)
|
||||
if d:
|
||||
try:
|
||||
platform_utils.rmdir(os.path.dirname(d))
|
||||
except OSError:
|
||||
pass
|
||||
# Path to the ssh program to run which will pass our master settings along.
|
||||
# Set here more as a convenience API.
|
||||
proxy = PROXY_PATH
|
||||
|
||||
def _open_unlocked(self, host, port=None):
|
||||
"""Make sure a ssh master session exists for |host| & |port|.
|
||||
def __init__(self, manager):
|
||||
# Protect access to the list of active masters.
|
||||
self._lock = multiprocessing.Lock()
|
||||
# List of active masters (pid). These will be spawned on demand, and we
|
||||
# are responsible for shutting them all down at the end.
|
||||
self._masters = manager.list()
|
||||
# Set of active masters indexed by "host:port" information.
|
||||
# The value isn't used, but multiprocessing doesn't provide a set class.
|
||||
self._master_keys = manager.dict()
|
||||
# Whether ssh masters are known to be broken, so we give up entirely.
|
||||
self._master_broken = manager.Value("b", False)
|
||||
# List of active ssh sessions. Clients will be added & removed as
|
||||
# connections finish, so this list is just for safety & cleanup if we
|
||||
# crash.
|
||||
self._clients = manager.list()
|
||||
# Path to directory for holding master sockets.
|
||||
self._sock_path = None
|
||||
|
||||
If one doesn't exist already, we'll create it.
|
||||
def __enter__(self):
|
||||
"""Enter a new context."""
|
||||
return self
|
||||
|
||||
We won't grab any locks, so the caller has to do that. This helps keep the
|
||||
business logic of actually creating the master separate from grabbing locks.
|
||||
"""
|
||||
# Check to see whether we already think that the master is running; if we
|
||||
# think it's already running, return right away.
|
||||
if port is not None:
|
||||
key = '%s:%s' % (host, port)
|
||||
else:
|
||||
key = host
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
"""Exit a context & clean up all resources."""
|
||||
self.close()
|
||||
|
||||
if key in self._master_keys:
|
||||
return True
|
||||
def add_client(self, proc):
|
||||
"""Track a new ssh session."""
|
||||
self._clients.append(proc.pid)
|
||||
|
||||
if self._master_broken.value or 'GIT_SSH' in os.environ:
|
||||
# Failed earlier, so don't retry.
|
||||
return False
|
||||
def remove_client(self, proc):
|
||||
"""Remove a completed ssh session."""
|
||||
try:
|
||||
self._clients.remove(proc.pid)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# We will make two calls to ssh; this is the common part of both calls.
|
||||
command_base = ['ssh', '-o', 'ControlPath %s' % self.sock(), host]
|
||||
if port is not None:
|
||||
command_base[1:1] = ['-p', str(port)]
|
||||
def add_master(self, proc):
|
||||
"""Track a new master connection."""
|
||||
self._masters.append(proc.pid)
|
||||
|
||||
# Since the key wasn't in _master_keys, we think that master isn't running.
|
||||
# ...but before actually starting a master, we'll double-check. This can
|
||||
# be important because we can't tell that 'git@myhost.com' is the same
|
||||
# as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
|
||||
check_command = command_base + ['-O', 'check']
|
||||
try:
|
||||
Trace(': %s', ' '.join(check_command))
|
||||
check_process = subprocess.Popen(check_command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
check_process.communicate() # read output, but ignore it...
|
||||
isnt_running = check_process.wait()
|
||||
def _terminate(self, procs):
|
||||
"""Kill all |procs|."""
|
||||
for pid in procs:
|
||||
try:
|
||||
os.kill(pid, signal.SIGTERM)
|
||||
os.waitpid(pid, 0)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if not isnt_running:
|
||||
# Our double-check found that the master _was_ in fact running. Add to
|
||||
# the list of keys.
|
||||
# The multiprocessing.list() API doesn't provide many standard list()
|
||||
# methods, so we have to manually clear the list.
|
||||
while True:
|
||||
try:
|
||||
procs.pop(0)
|
||||
except: # noqa: E722
|
||||
break
|
||||
|
||||
def close(self):
|
||||
"""Close this active ssh session.
|
||||
|
||||
Kill all ssh clients & masters we created, and nuke the socket dir.
|
||||
"""
|
||||
self._terminate(self._clients)
|
||||
self._terminate(self._masters)
|
||||
|
||||
d = self.sock(create=False)
|
||||
if d:
|
||||
try:
|
||||
platform_utils.rmdir(os.path.dirname(d))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _open_unlocked(self, host, port=None):
|
||||
"""Make sure a ssh master session exists for |host| & |port|.
|
||||
|
||||
If one doesn't exist already, we'll create it.
|
||||
|
||||
We won't grab any locks, so the caller has to do that. This helps keep
|
||||
the business logic of actually creating the master separate from
|
||||
grabbing locks.
|
||||
"""
|
||||
# Check to see whether we already think that the master is running; if
|
||||
# we think it's already running, return right away.
|
||||
if port is not None:
|
||||
key = f"{host}:{port}"
|
||||
else:
|
||||
key = host
|
||||
|
||||
if key in self._master_keys:
|
||||
return True
|
||||
|
||||
if self._master_broken.value or "GIT_SSH" in os.environ:
|
||||
# Failed earlier, so don't retry.
|
||||
return False
|
||||
|
||||
# We will make two calls to ssh; this is the common part of both calls.
|
||||
command_base = ["ssh", "-o", "ControlPath %s" % self.sock(), host]
|
||||
if port is not None:
|
||||
command_base[1:1] = ["-p", str(port)]
|
||||
|
||||
# Since the key wasn't in _master_keys, we think that master isn't
|
||||
# running... but before actually starting a master, we'll double-check.
|
||||
# This can be important because we can't tell that 'git@myhost.com'
|
||||
# is the same as 'myhost.com' where "User git" is setup in the user's
|
||||
# ~/.ssh/config file.
|
||||
check_command = command_base + ["-O", "check"]
|
||||
with Trace("Call to ssh (check call): %s", " ".join(check_command)):
|
||||
try:
|
||||
check_process = subprocess.Popen(
|
||||
check_command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
check_process.communicate() # read output, but ignore it...
|
||||
isnt_running = check_process.wait()
|
||||
|
||||
if not isnt_running:
|
||||
# Our double-check found that the master _was_ in fact
|
||||
# running. Add to the list of keys.
|
||||
self._master_keys[key] = True
|
||||
return True
|
||||
except Exception:
|
||||
# Ignore exceptions. We will fall back to the normal command
|
||||
# and print to the log there.
|
||||
pass
|
||||
|
||||
command = command_base[:1] + ["-M", "-N"] + command_base[1:]
|
||||
p = None
|
||||
try:
|
||||
with Trace("Call to ssh: %s", " ".join(command)):
|
||||
p = subprocess.Popen(command)
|
||||
except Exception as e:
|
||||
self._master_broken.value = True
|
||||
print(
|
||||
"\nwarn: cannot enable ssh control master for %s:%s\n%s"
|
||||
% (host, port, str(e)),
|
||||
file=sys.stderr,
|
||||
)
|
||||
return False
|
||||
|
||||
time.sleep(1)
|
||||
ssh_died = p.poll() is not None
|
||||
if ssh_died:
|
||||
return False
|
||||
|
||||
self.add_master(p)
|
||||
self._master_keys[key] = True
|
||||
return True
|
||||
except Exception:
|
||||
# Ignore exceptions. We will fall back to the normal command and print
|
||||
# to the log there.
|
||||
pass
|
||||
|
||||
command = command_base[:1] + ['-M', '-N'] + command_base[1:]
|
||||
try:
|
||||
Trace(': %s', ' '.join(command))
|
||||
p = subprocess.Popen(command)
|
||||
except Exception as e:
|
||||
self._master_broken.value = True
|
||||
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
|
||||
% (host, port, str(e)), file=sys.stderr)
|
||||
return False
|
||||
def _open(self, host, port=None):
|
||||
"""Make sure a ssh master session exists for |host| & |port|.
|
||||
|
||||
time.sleep(1)
|
||||
ssh_died = (p.poll() is not None)
|
||||
if ssh_died:
|
||||
return False
|
||||
If one doesn't exist already, we'll create it.
|
||||
|
||||
self.add_master(p)
|
||||
self._master_keys[key] = True
|
||||
return True
|
||||
This will obtain any necessary locks to avoid inter-process races.
|
||||
"""
|
||||
# Bail before grabbing the lock if we already know that we aren't going
|
||||
# to try creating new masters below.
|
||||
if sys.platform in ("win32", "cygwin"):
|
||||
return False
|
||||
|
||||
def _open(self, host, port=None):
|
||||
"""Make sure a ssh master session exists for |host| & |port|.
|
||||
# Acquire the lock. This is needed to prevent opening multiple masters
|
||||
# for the same host when we're running "repo sync -jN" (for N > 1) _and_
|
||||
# the manifest <remote fetch="ssh://xyz"> specifies a different host
|
||||
# from the one that was passed to repo init.
|
||||
with self._lock:
|
||||
return self._open_unlocked(host, port)
|
||||
|
||||
If one doesn't exist already, we'll create it.
|
||||
def preconnect(self, url):
|
||||
"""If |uri| will create a ssh connection, setup the ssh master for it.""" # noqa: E501
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
scheme = m.group(1)
|
||||
host = m.group(2)
|
||||
if ":" in host:
|
||||
host, port = host.split(":")
|
||||
else:
|
||||
port = None
|
||||
if scheme in ("ssh", "git+ssh", "ssh+git"):
|
||||
return self._open(host, port)
|
||||
return False
|
||||
|
||||
This will obtain any necessary locks to avoid inter-process races.
|
||||
"""
|
||||
# Bail before grabbing the lock if we already know that we aren't going to
|
||||
# try creating new masters below.
|
||||
if sys.platform in ('win32', 'cygwin'):
|
||||
return False
|
||||
m = URI_SCP.match(url)
|
||||
if m:
|
||||
host = m.group(1)
|
||||
return self._open(host)
|
||||
|
||||
# Acquire the lock. This is needed to prevent opening multiple masters for
|
||||
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
|
||||
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
|
||||
# one that was passed to repo init.
|
||||
with self._lock:
|
||||
return self._open_unlocked(host, port)
|
||||
return False
|
||||
|
||||
def preconnect(self, url):
|
||||
"""If |uri| will create a ssh connection, setup the ssh master for it."""
|
||||
m = URI_ALL.match(url)
|
||||
if m:
|
||||
scheme = m.group(1)
|
||||
host = m.group(2)
|
||||
if ':' in host:
|
||||
host, port = host.split(':')
|
||||
else:
|
||||
port = None
|
||||
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
|
||||
return self._open(host, port)
|
||||
return False
|
||||
def sock(self, create=True):
|
||||
"""Return the path to the ssh socket dir.
|
||||
|
||||
m = URI_SCP.match(url)
|
||||
if m:
|
||||
host = m.group(1)
|
||||
return self._open(host)
|
||||
|
||||
return False
|
||||
|
||||
def sock(self, create=True):
|
||||
"""Return the path to the ssh socket dir.
|
||||
|
||||
This has all the master sockets so clients can talk to them.
|
||||
"""
|
||||
if self._sock_path is None:
|
||||
if not create:
|
||||
return None
|
||||
tmp_dir = '/tmp'
|
||||
if not os.path.exists(tmp_dir):
|
||||
tmp_dir = tempfile.gettempdir()
|
||||
if version() < (6, 7):
|
||||
tokens = '%r@%h:%p'
|
||||
else:
|
||||
tokens = '%C' # hash of %l%h%p%r
|
||||
self._sock_path = os.path.join(
|
||||
tempfile.mkdtemp('', 'ssh-', tmp_dir),
|
||||
'master-' + tokens)
|
||||
return self._sock_path
|
||||
This has all the master sockets so clients can talk to them.
|
||||
"""
|
||||
if self._sock_path is None:
|
||||
if not create:
|
||||
return None
|
||||
tmp_dir = "/tmp"
|
||||
if not os.path.exists(tmp_dir):
|
||||
tmp_dir = tempfile.gettempdir()
|
||||
if version() < (6, 7):
|
||||
tokens = "%r@%h:%p"
|
||||
else:
|
||||
tokens = "%C" # hash of %l%h%p%r
|
||||
self._sock_path = os.path.join(
|
||||
tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
|
||||
)
|
||||
return self._sock_path
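To see how the reflowed ProxyManager pieces fit together, here is a rough usage sketch; it assumes it runs from a repo source checkout so ssh.py is importable, and the URL is only an example.

import multiprocessing

import ssh

with multiprocessing.Manager() as manager:
    with ssh.ProxyManager(manager) as proxy:
        # Spawns (or reuses) a control master for the host when the URL
        # is ssh-like; returns False on win32/cygwin or when GIT_SSH is
        # already set in the environment.
        proxy.preconnect("ssh://git@example.com/some/project.git")
        # proxy.proxy (the git_ssh wrapper) and proxy.sock() are what
        # repo then points git at so all clients share the master.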
|
||||
|
||||
@@ -14,36 +14,35 @@
|
||||
|
||||
import os
|
||||
|
||||
|
||||
# A mapping of the subcommand name to the class that implements it.
|
||||
all_commands = {}
|
||||
all_modules = []
|
||||
|
||||
my_dir = os.path.dirname(__file__)
|
||||
for py in os.listdir(my_dir):
|
||||
if py == '__init__.py':
|
||||
continue
|
||||
if py == "__init__.py":
|
||||
continue
|
||||
|
||||
if py.endswith('.py'):
|
||||
name = py[:-3]
|
||||
if py.endswith(".py"):
|
||||
name = py[:-3]
|
||||
|
||||
clsn = name.capitalize()
|
||||
while clsn.find('_') > 0:
|
||||
h = clsn.index('_')
|
||||
clsn = clsn[0:h] + clsn[h + 1:].capitalize()
|
||||
clsn = name.capitalize()
|
||||
while clsn.find("_") > 0:
|
||||
h = clsn.index("_")
|
||||
clsn = clsn[0:h] + clsn[h + 1 :].capitalize()
|
||||
|
||||
mod = __import__(__name__,
|
||||
globals(),
|
||||
locals(),
|
||||
['%s' % name])
|
||||
mod = getattr(mod, name)
|
||||
try:
|
||||
cmd = getattr(mod, clsn)
|
||||
except AttributeError:
|
||||
raise SyntaxError('%s/%s does not define class %s' % (
|
||||
__name__, py, clsn))
|
||||
mod = __import__(__name__, globals(), locals(), ["%s" % name])
|
||||
mod = getattr(mod, name)
|
||||
try:
|
||||
cmd = getattr(mod, clsn)
|
||||
except AttributeError:
|
||||
raise SyntaxError(f"{__name__}/{py} does not define class {clsn}")
|
||||
|
||||
name = name.replace('_', '-')
|
||||
cmd.NAME = name
|
||||
all_commands[name] = cmd
|
||||
name = name.replace("_", "-")
|
||||
cmd.NAME = name
|
||||
all_commands[name] = cmd
|
||||
all_modules.append(mod)
|
||||
|
||||
# Add 'branch' as an alias for 'branches'.
|
||||
all_commands['branch'] = all_commands['branches']
|
||||
all_commands["branch"] = all_commands["branches"]
|
||||
|
||||
@@ -12,20 +12,30 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from collections import defaultdict
|
||||
import collections
|
||||
import functools
|
||||
import itertools
|
||||
import sys
|
||||
|
||||
from command import Command, DEFAULT_LOCAL_JOBS
|
||||
from command import Command
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from error import RepoError
|
||||
from error import RepoExitError
|
||||
from git_command import git
|
||||
from progress import Progress
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class AbandonError(RepoExitError):
|
||||
"""Exit error when abandon command fails."""
|
||||
|
||||
|
||||
class Abandon(Command):
|
||||
COMMON = True
|
||||
helpSummary = "Permanently abandon a development branch"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Permanently abandon a development branch"
|
||||
helpUsage = """
|
||||
%prog [--all | <branchname>] [<project>...]
|
||||
|
||||
This subcommand permanently abandons a development branch by
|
||||
@@ -33,82 +43,113 @@ deleting it (and all its history) from your local repository.
|
||||
|
||||
It is equivalent to "git branch -D <branchname>".
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--all',
|
||||
dest='all', action='store_true',
|
||||
help='delete all branches in all projects')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"--all",
|
||||
dest="all",
|
||||
action="store_true",
|
||||
help="delete all branches in all projects",
|
||||
)
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.all and not args:
|
||||
self.Usage()
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.all and not args:
|
||||
self.Usage()
|
||||
|
||||
if not opt.all:
|
||||
nb = args[0]
|
||||
if not git.check_ref_format('heads/%s' % nb):
|
||||
self.OptionParser.error("'%s' is not a valid branch name" % nb)
|
||||
else:
|
||||
args.insert(0, "'All local branches'")
|
||||
if not opt.all:
|
||||
branches = args[0].split()
|
||||
invalid_branches = [
|
||||
x for x in branches if not git.check_ref_format(f"heads/{x}")
|
||||
]
|
||||
|
||||
def _ExecuteOne(self, all_branches, nb, project):
|
||||
"""Abandon one project."""
|
||||
if all_branches:
|
||||
branches = project.GetBranches()
|
||||
else:
|
||||
branches = [nb]
|
||||
|
||||
ret = {}
|
||||
for name in branches:
|
||||
status = project.AbandonBranch(name)
|
||||
if status is not None:
|
||||
ret[name] = status
|
||||
return (ret, project)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0]
|
||||
err = defaultdict(list)
|
||||
success = defaultdict(list)
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
|
||||
def _ProcessResults(_pool, pm, states):
|
||||
for (results, project) in states:
|
||||
for branch, status in results.items():
|
||||
if status:
|
||||
success[branch].append(project)
|
||||
else:
|
||||
err[branch].append(project)
|
||||
pm.update()
|
||||
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, opt.all, nb),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress('Abandon %s' % (nb,), len(all_projects), quiet=opt.quiet))
|
||||
|
||||
width = max(itertools.chain(
|
||||
[25], (len(x) for x in itertools.chain(success, err))))
|
||||
if err:
|
||||
for br in err.keys():
|
||||
err_msg = "error: cannot abandon %s" % br
|
||||
print(err_msg, file=sys.stderr)
|
||||
for proj in err[br]:
|
||||
print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print('error: no project has local branch(es) : %s' % nb,
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
# Everything below here is displaying status.
|
||||
if opt.quiet:
|
||||
return
|
||||
print('Abandoned branches:')
|
||||
for br in success.keys():
|
||||
if len(all_projects) > 1 and len(all_projects) == len(success[br]):
|
||||
result = "all project"
|
||||
if invalid_branches:
|
||||
self.OptionParser.error(
|
||||
f"{invalid_branches} are not valid branch names"
|
||||
)
|
||||
else:
|
||||
result = "%s" % (
|
||||
('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
|
||||
print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result))
|
||||
args.insert(0, "'All local branches'")
|
||||
|
||||
def _ExecuteOne(self, all_branches, nb, project):
|
||||
"""Abandon one project."""
|
||||
if all_branches:
|
||||
branches = project.GetBranches()
|
||||
else:
|
||||
branches = nb
|
||||
|
||||
ret = {}
|
||||
errors = []
|
||||
for name in branches:
|
||||
status = None
|
||||
try:
|
||||
status = project.AbandonBranch(name)
|
||||
except RepoError as e:
|
||||
status = False
|
||||
errors.append(e)
|
||||
if status is not None:
|
||||
ret[name] = status
|
||||
|
||||
return (ret, project, errors)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0].split()
|
||||
err = collections.defaultdict(list)
|
||||
success = collections.defaultdict(list)
|
||||
aggregate_errors = []
|
||||
all_projects = self.GetProjects(
|
||||
args[1:], all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
||||
|
||||
def _ProcessResults(_pool, pm, states):
|
||||
for results, project, errors in states:
|
||||
for branch, status in results.items():
|
||||
if status:
|
||||
success[branch].append(project)
|
||||
else:
|
||||
err[branch].append(project)
|
||||
aggregate_errors.extend(errors)
|
||||
pm.update(msg="")
|
||||
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, opt.all, nb),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress(
|
||||
f"Abandon {nb}", len(all_projects), quiet=opt.quiet
|
||||
),
|
||||
)
|
||||
|
||||
width = max(
|
||||
itertools.chain(
|
||||
[25], (len(x) for x in itertools.chain(success, err))
|
||||
)
|
||||
)
|
||||
if err:
|
||||
for br in err.keys():
|
||||
err_msg = "error: cannot abandon %s" % br
|
||||
logger.error(err_msg)
|
||||
for proj in err[br]:
|
||||
logger.error(" " * len(err_msg) + " | %s", _RelPath(proj))
|
||||
raise AbandonError(aggregate_errors=aggregate_errors)
|
||||
elif not success:
|
||||
logger.error("error: no project has local branch(es) : %s", nb)
|
||||
raise AbandonError(aggregate_errors=aggregate_errors)
|
||||
else:
|
||||
# Everything below here is displaying status.
|
||||
if opt.quiet:
|
||||
return
|
||||
print("Abandoned branches:")
|
||||
for br in success.keys():
|
||||
if len(all_projects) > 1 and len(all_projects) == len(
|
||||
success[br]
|
||||
):
|
||||
result = "all project"
|
||||
else:
|
||||
result = "%s" % (
|
||||
("\n" + " " * width + "| ").join(
|
||||
_RelPath(p) for p in success[br]
|
||||
)
|
||||
)
|
||||
print(f"{br}{' ' * (width - len(br))}| {result}\n")
|
||||
|
||||
@@ -16,55 +16,56 @@ import itertools
|
||||
import sys
|
||||
|
||||
from color import Coloring
|
||||
from command import Command, DEFAULT_LOCAL_JOBS
|
||||
from command import Command
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
|
||||
|
||||
class BranchColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'branch')
|
||||
self.current = self.printer('current', fg='green')
|
||||
self.local = self.printer('local')
|
||||
self.notinproject = self.printer('notinproject', fg='red')
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "branch")
|
||||
self.current = self.printer("current", fg="green")
|
||||
self.local = self.printer("local")
|
||||
self.notinproject = self.printer("notinproject", fg="red")
|
||||
|
||||
|
||||
class BranchInfo(object):
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.current = 0
|
||||
self.published = 0
|
||||
self.published_equal = 0
|
||||
self.projects = []
|
||||
class BranchInfo:
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.current = 0
|
||||
self.published = 0
|
||||
self.published_equal = 0
|
||||
self.projects = []
|
||||
|
||||
def add(self, b):
|
||||
if b.current:
|
||||
self.current += 1
|
||||
if b.published:
|
||||
self.published += 1
|
||||
if b.revision == b.published:
|
||||
self.published_equal += 1
|
||||
self.projects.append(b)
|
||||
def add(self, b):
|
||||
if b.current:
|
||||
self.current += 1
|
||||
if b.published:
|
||||
self.published += 1
|
||||
if b.revision == b.published:
|
||||
self.published_equal += 1
|
||||
self.projects.append(b)
|
||||
|
||||
@property
|
||||
def IsCurrent(self):
|
||||
return self.current > 0
|
||||
@property
|
||||
def IsCurrent(self):
|
||||
return self.current > 0
|
||||
|
||||
@property
|
||||
def IsSplitCurrent(self):
|
||||
return self.current != 0 and self.current != len(self.projects)
|
||||
@property
|
||||
def IsSplitCurrent(self):
|
||||
return self.current != 0 and self.current != len(self.projects)
|
||||
|
||||
@property
|
||||
def IsPublished(self):
|
||||
return self.published > 0
|
||||
@property
|
||||
def IsPublished(self):
|
||||
return self.published > 0
|
||||
|
||||
@property
|
||||
def IsPublishedEqual(self):
|
||||
return self.published_equal == len(self.projects)
|
||||
@property
|
||||
def IsPublishedEqual(self):
|
||||
return self.published_equal == len(self.projects)
|
||||
|
||||
|
||||
class Branches(Command):
|
||||
COMMON = True
|
||||
helpSummary = "View current topic branches"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "View current topic branches"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
|
||||
Summarizes the currently available topic branches.
|
||||
@@ -95,109 +96,114 @@ the branch appears in, or does not appear in. If no project list
|
||||
is shown, then the branch appears in all projects.
|
||||
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all_branches = {}
|
||||
project_cnt = len(projects)
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
out = BranchColoring(self.manifest.manifestProject.config)
|
||||
all_branches = {}
|
||||
project_cnt = len(projects)
|
||||
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
for name, b in itertools.chain.from_iterable(results):
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
for name, b in itertools.chain.from_iterable(results):
|
||||
if name not in all_branches:
|
||||
all_branches[name] = BranchInfo(name)
|
||||
all_branches[name].add(b)
|
||||
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
expand_project_to_branches,
|
||||
projects,
|
||||
callback=_ProcessResults)
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
expand_project_to_branches,
|
||||
projects,
|
||||
callback=_ProcessResults,
|
||||
)
|
||||
|
||||
names = sorted(all_branches)
|
||||
names = sorted(all_branches)
|
||||
|
||||
if not names:
|
||||
print(' (no branches)', file=sys.stderr)
|
||||
return
|
||||
if not names:
|
||||
print(" (no branches)", file=sys.stderr)
|
||||
return
|
||||
|
||||
width = 25
|
||||
for name in names:
|
||||
if width < len(name):
|
||||
width = len(name)
|
||||
width = 25
|
||||
for name in names:
|
||||
if width < len(name):
|
||||
width = len(name)
|
||||
|
||||
for name in names:
|
||||
i = all_branches[name]
|
||||
in_cnt = len(i.projects)
|
||||
for name in names:
|
||||
i = all_branches[name]
|
||||
in_cnt = len(i.projects)
|
||||
|
||||
if i.IsCurrent:
|
||||
current = '*'
|
||||
hdr = out.current
|
||||
else:
|
||||
current = ' '
|
||||
hdr = out.local
|
||||
|
||||
if i.IsPublishedEqual:
|
||||
published = 'P'
|
||||
elif i.IsPublished:
|
||||
published = 'p'
|
||||
else:
|
||||
published = ' '
|
||||
|
||||
hdr('%c%c %-*s' % (current, published, width, name))
|
||||
out.write(' |')
|
||||
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
non_cur_paths = []
|
||||
if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
|
||||
in_type = 'in'
|
||||
for b in i.projects:
|
||||
if not i.IsSplitCurrent or b.current:
|
||||
paths.append(b.project.relpath)
|
||||
if i.IsCurrent:
|
||||
current = "*"
|
||||
hdr = out.current
|
||||
else:
|
||||
non_cur_paths.append(b.project.relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
in_type = 'not in'
|
||||
have = set()
|
||||
for b in i.projects:
|
||||
have.add(b.project)
|
||||
for p in projects:
|
||||
if p not in have:
|
||||
paths.append(p.relpath)
|
||||
current = " "
|
||||
hdr = out.local
|
||||
|
||||
s = ' %s %s' % (in_type, ', '.join(paths))
|
||||
if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
|
||||
fmt = out.current if i.IsCurrent else fmt
|
||||
fmt(s)
|
||||
else:
|
||||
fmt(' %s:' % in_type)
|
||||
fmt = out.current if i.IsCurrent else out.write
|
||||
for p in paths:
|
||||
if i.IsPublishedEqual:
|
||||
published = "P"
|
||||
elif i.IsPublished:
|
||||
published = "p"
|
||||
else:
|
||||
published = " "
|
||||
|
||||
hdr("%c%c %-*s" % (current, published, width, name))
|
||||
out.write(" |")
|
||||
|
||||
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
||||
if in_cnt < project_cnt:
|
||||
fmt = out.write
|
||||
paths = []
|
||||
non_cur_paths = []
|
||||
if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
|
||||
in_type = "in"
|
||||
for b in i.projects:
|
||||
relpath = _RelPath(b.project)
|
||||
if not i.IsSplitCurrent or b.current:
|
||||
paths.append(relpath)
|
||||
else:
|
||||
non_cur_paths.append(relpath)
|
||||
else:
|
||||
fmt = out.notinproject
|
||||
in_type = "not in"
|
||||
have = set()
|
||||
for b in i.projects:
|
||||
have.add(_RelPath(b.project))
|
||||
for p in projects:
|
||||
if _RelPath(p) not in have:
|
||||
paths.append(_RelPath(p))
|
||||
|
||||
s = f" {in_type} {', '.join(paths)}"
|
||||
if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
|
||||
fmt = out.current if i.IsCurrent else fmt
|
||||
fmt(s)
|
||||
else:
|
||||
fmt(" %s:" % in_type)
|
||||
fmt = out.current if i.IsCurrent else out.write
|
||||
for p in paths:
|
||||
out.nl()
|
||||
fmt(width * " " + " %s" % p)
|
||||
fmt = out.write
|
||||
for p in non_cur_paths:
|
||||
out.nl()
|
||||
fmt(width * " " + " %s" % p)
|
||||
else:
|
||||
out.write(" in all projects")
|
||||
out.nl()
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
fmt = out.write
|
||||
for p in non_cur_paths:
|
||||
out.nl()
|
||||
fmt(width * ' ' + ' %s' % p)
|
||||
else:
|
||||
out.write(' in all projects')
|
||||
out.nl()
|
||||
|
||||
|
||||
def expand_project_to_branches(project):
|
||||
"""Expands a project into a list of branch names & associated information.
|
||||
"""Expands a project into a list of branch names & associated information.
|
||||
|
||||
Args:
|
||||
project: project.Project
|
||||
Args:
|
||||
project: project.Project
|
||||
|
||||
Returns:
|
||||
List[Tuple[str, git_config.Branch]]
|
||||
"""
|
||||
branches = []
|
||||
for name, b in project.GetBranches().items():
|
||||
b.project = project
|
||||
branches.append((name, b))
|
||||
return branches
|
||||
Returns:
|
||||
List[Tuple[str, git_config.Branch]]
|
||||
"""
|
||||
branches = []
|
||||
for name, b in project.GetBranches().items():
|
||||
b.project = project
|
||||
branches.append((name, b))
|
||||
return branches
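For illustration, a minimal sketch of how these per-project results get merged, using the expand_project_to_branches defined above; FakeBranch and FakeProject are hypothetical stand-ins for git_config.Branch and project.Project, not part of repo:

import itertools


class FakeBranch:
    """Hypothetical stand-in for git_config.Branch."""

    def __init__(self, name, current=False):
        self.name = name
        self.current = current


class FakeProject:
    """Hypothetical stand-in for project.Project."""

    def __init__(self, branches):
        self._branches = branches

    def GetBranches(self):
        return {b.name: b for b in self._branches}


# Each parallel worker returns [(branch_name, Branch), ...] for one project;
# the _ProcessResults callback chains the lists and groups them by name.
projects = [
    FakeProject([FakeBranch("dev", current=True)]),
    FakeProject([FakeBranch("dev"), FakeBranch("wip")]),
]
results = [expand_project_to_branches(p) for p in projects]
grouped = {}
for name, b in itertools.chain.from_iterable(results):
    grouped.setdefault(name, []).append(b)
# grouped maps "dev" to two branches and "wip" to one, mirroring how
# BranchInfo.add() accumulates branches across projects.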
|
||||
|
||||
@@ -13,19 +13,42 @@
|
||||
# limitations under the License.
|
||||
|
||||
import functools
|
||||
import sys
|
||||
from typing import NamedTuple
|
||||
|
||||
from command import Command, DEFAULT_LOCAL_JOBS
|
||||
from command import Command
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from error import GitError
|
||||
from error import RepoExitError
|
||||
from progress import Progress
|
||||
from project import Project
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class CheckoutBranchResult(NamedTuple):
|
||||
# Whether the Project is on the branch (i.e. branch exists and no errors)
|
||||
result: bool
|
||||
project: Project
|
||||
error: Exception
|
||||
|
||||
|
||||
class CheckoutCommandError(RepoExitError):
|
||||
"""Exception thrown when checkout command fails."""
|
||||
|
||||
|
||||
class MissingBranchError(RepoExitError):
|
||||
"""Exception thrown when no project has specified branch."""
|
||||
|
||||
|
||||
class Checkout(Command):
|
||||
COMMON = True
|
||||
helpSummary = "Checkout a branch for development"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Checkout a branch for development"
|
||||
helpUsage = """
|
||||
%prog <branchname> [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command checks out an existing branch that was previously
|
||||
created by 'repo start'.
|
||||
|
||||
@@ -33,43 +56,55 @@ The command is equivalent to:
|
||||
|
||||
repo forall [<project>...] -c git checkout <branchname>
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
def _ExecuteOne(self, nb, project):
|
||||
"""Checkout one project."""
|
||||
return (project.CheckoutBranch(nb), project)
|
||||
def _ExecuteOne(self, nb, project):
|
||||
"""Checkout one project."""
|
||||
error = None
|
||||
result = None
|
||||
try:
|
||||
result = project.CheckoutBranch(nb)
|
||||
except GitError as e:
|
||||
error = e
|
||||
return CheckoutBranchResult(result, project, error)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0]
|
||||
err = []
|
||||
success = []
|
||||
all_projects = self.GetProjects(args[1:])
|
||||
def Execute(self, opt, args):
|
||||
nb = args[0]
|
||||
err = []
|
||||
err_projects = []
|
||||
success = []
|
||||
all_projects = self.GetProjects(
|
||||
args[1:], all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
|
||||
def _ProcessResults(_pool, pm, results):
|
||||
for status, project in results:
|
||||
if status is not None:
|
||||
if status:
|
||||
success.append(project)
|
||||
else:
|
||||
err.append(project)
|
||||
pm.update()
|
||||
def _ProcessResults(_pool, pm, results):
|
||||
for result in results:
|
||||
if result.error is not None:
|
||||
err.append(result.error)
|
||||
err_projects.append(result.project)
|
||||
elif result.result:
|
||||
success.append(result.project)
|
||||
pm.update(msg="")
|
||||
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, nb),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet))
|
||||
self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, nb),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
output=Progress(
|
||||
f"Checkout {nb}", len(all_projects), quiet=opt.quiet
|
||||
),
|
||||
)
|
||||
|
||||
if err:
|
||||
for p in err:
|
||||
print("error: %s/: cannot checkout %s" % (p.relpath, nb),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
elif not success:
|
||||
print('error: no project has branch %s' % nb, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if err_projects:
|
||||
for p in err_projects:
|
||||
logger.error("error: %s/: cannot checkout %s", p.relpath, nb)
|
||||
raise CheckoutCommandError(aggregate_errors=err)
|
||||
elif not success:
|
||||
msg = f"error: no project has branch {nb}"
|
||||
logger.error(msg)
|
||||
raise MissingBranchError(msg)
|
||||
|
||||
@@ -14,97 +14,132 @@
|
||||
|
||||
import re
|
||||
import sys
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
|
||||
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
|
||||
from command import Command
|
||||
from error import GitError
|
||||
from git_command import GitCommand
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$")
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class CherryPick(Command):
|
||||
COMMON = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Cherry-pick a change."
|
||||
helpUsage = """
|
||||
%prog <sha1>
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
'%prog' cherry-picks a change from one branch to another.
|
||||
The change id will be updated, and a reference to the old
|
||||
change id will be added.
|
||||
"""
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if len(args) != 1:
|
||||
self.Usage()
|
||||
def ValidateOptions(self, opt, args):
|
||||
if len(args) != 1:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
reference = args[0]
|
||||
def Execute(self, opt, args):
|
||||
reference = args[0]
|
||||
|
||||
p = GitCommand(None,
|
||||
['rev-parse', '--verify', reference],
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
if p.Wait() != 0:
|
||||
print(p.stderr, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
sha1 = p.stdout.strip()
|
||||
p = GitCommand(
|
||||
None,
|
||||
["rev-parse", "--verify", reference],
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
verify_command=True,
|
||||
)
|
||||
try:
|
||||
p.Wait()
|
||||
except GitError:
|
||||
logger.error(p.stderr)
|
||||
raise
|
||||
|
||||
p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
|
||||
if p.Wait() != 0:
|
||||
print("error: Failed to retrieve old commit message", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
old_msg = self._StripHeader(p.stdout)
|
||||
sha1 = p.stdout.strip()
|
||||
|
||||
p = GitCommand(None,
|
||||
['cherry-pick', sha1],
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
status = p.Wait()
|
||||
p = GitCommand(
|
||||
None,
|
||||
["cat-file", "commit", sha1],
|
||||
capture_stdout=True,
|
||||
verify_command=True,
|
||||
)
|
||||
|
||||
print(p.stdout, file=sys.stdout)
|
||||
print(p.stderr, file=sys.stderr)
|
||||
try:
|
||||
p.Wait()
|
||||
except GitError:
|
||||
logger.error("error: Failed to retrieve old commit message")
|
||||
raise
|
||||
|
||||
if status == 0:
|
||||
# The cherry-pick was applied correctly. We just need to edit the
|
||||
# commit message.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
old_msg = self._StripHeader(p.stdout)
|
||||
|
||||
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
|
||||
input=new_msg,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
if p.Wait() != 0:
|
||||
print("error: Failed to update commit message", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
p = GitCommand(
|
||||
None,
|
||||
["cherry-pick", sha1],
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
verify_command=True,
|
||||
)
|
||||
|
||||
else:
|
||||
print('NOTE: When committing (please see above) and editing the commit '
|
||||
'message, please remove the old Change-Id-line and add:')
|
||||
print(self._GetReference(sha1), file=sys.stderr)
|
||||
print(file=sys.stderr)
|
||||
try:
|
||||
p.Wait()
|
||||
except GitError as e:
|
||||
logger.error(e)
|
||||
logger.warning(
|
||||
"NOTE: When committing (please see above) and editing the "
|
||||
"commit message, please remove the old Change-Id-line and "
|
||||
"add:\n%s",
|
||||
self._GetReference(sha1),
|
||||
)
|
||||
raise
|
||||
|
||||
def _IsChangeId(self, line):
|
||||
return CHANGE_ID_RE.match(line)
|
||||
if p.stdout:
|
||||
print(p.stdout.strip(), file=sys.stdout)
|
||||
if p.stderr:
|
||||
print(p.stderr.strip(), file=sys.stderr)
|
||||
|
||||
def _GetReference(self, sha1):
|
||||
return "(cherry picked from commit %s)" % sha1
|
||||
# The cherry-pick was applied correctly. We just need to edit
|
||||
# the commit message.
|
||||
new_msg = self._Reformat(old_msg, sha1)
|
||||
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("") + 1:])
|
||||
p = GitCommand(
|
||||
None,
|
||||
["commit", "--amend", "-F", "-"],
|
||||
input=new_msg,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
verify_command=True,
|
||||
)
|
||||
try:
|
||||
p.Wait()
|
||||
except GitError:
|
||||
logger.error("error: Failed to update commit message")
|
||||
raise
|
||||
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
def _IsChangeId(self, line):
|
||||
return CHANGE_ID_RE.match(line)
|
||||
|
||||
for line in old_msg.splitlines():
|
||||
if not self._IsChangeId(line):
|
||||
new_msg.append(line)
|
||||
def _GetReference(self, sha1):
|
||||
return "(cherry picked from commit %s)" % sha1
|
||||
|
||||
# Add a blank line between the message and the change id/reference
|
||||
try:
|
||||
if new_msg[-1].strip() != "":
|
||||
new_msg.append("")
|
||||
except IndexError:
|
||||
pass
|
||||
def _StripHeader(self, commit_msg):
|
||||
lines = commit_msg.splitlines()
|
||||
return "\n".join(lines[lines.index("") + 1 :])
|
||||
|
||||
new_msg.append(self._GetReference(sha1))
|
||||
return "\n".join(new_msg)
|
||||
def _Reformat(self, old_msg, sha1):
|
||||
new_msg = []
|
||||
|
||||
for line in old_msg.splitlines():
|
||||
if not self._IsChangeId(line):
|
||||
new_msg.append(line)
|
||||
|
||||
# Add a blank line between the message and the change id/reference.
|
||||
try:
|
||||
if new_msg[-1].strip() != "":
|
||||
new_msg.append("")
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
new_msg.append(self._GetReference(sha1))
|
||||
return "\n".join(new_msg)
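As a rough illustration of the rewrite performed by _StripHeader/_Reformat, here is a standalone sketch; reformat_msg is a hypothetical helper mirroring the same steps, and the sample message is made up:

import re

CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$")


def reformat_msg(old_msg, sha1):
    # Drop any Change-Id line, keep the rest of the message.
    lines = [ln for ln in old_msg.splitlines() if not CHANGE_ID_RE.match(ln)]
    # Separate the message from the reference with a blank line.
    if lines and lines[-1].strip():
        lines.append("")
    lines.append("(cherry picked from commit %s)" % sha1)
    return "\n".join(lines)


old_msg = "Fix crash in parser\n\nChange-Id: I" + "0" * 40
print(reformat_msg(old_msg, "1234abcd" * 5))
# Fix crash in parser
#
# (cherry picked from commit 1234abcd1234abcd1234abcd1234abcd1234abcd)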
|
||||
|
||||
@@ -15,55 +15,68 @@
|
||||
import functools
|
||||
import io
|
||||
|
||||
from command import DEFAULT_LOCAL_JOBS, PagedCommand
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from command import PagedCommand
|
||||
|
||||
|
||||
class Diff(PagedCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Show changes between commit and working tree"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Show changes between commit and working tree"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
|
||||
The -u option causes '%prog' to generate diff output with file paths
|
||||
relative to the repository root, so the output can be applied
|
||||
to the Unix 'patch' command.
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-u', '--absolute',
|
||||
dest='absolute', action='store_true',
|
||||
help='paths are relative to the repository root')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-u",
|
||||
"--absolute",
|
||||
dest="absolute",
|
||||
action="store_true",
|
||||
help="paths are relative to the repository root",
|
||||
)
|
||||
|
||||
def _ExecuteOne(self, absolute, project):
|
||||
"""Obtains the diff for a specific project.
|
||||
def _ExecuteOne(self, absolute, local, project):
|
||||
"""Obtains the diff for a specific project.
|
||||
|
||||
Args:
|
||||
absolute: Paths are relative to the root.
|
||||
project: Project to get status of.
|
||||
Args:
|
||||
absolute: Paths are relative to the root.
|
||||
local: a boolean, if True, the path is relative to the local
|
||||
(sub)manifest. If False, the path is relative to the outermost
|
||||
manifest.
|
||||
project: Project to get status of.
|
||||
|
||||
Returns:
|
||||
The status of the project.
|
||||
"""
|
||||
buf = io.StringIO()
|
||||
ret = project.PrintWorkTreeDiff(absolute, output_redir=buf)
|
||||
return (ret, buf.getvalue())
|
||||
Returns:
|
||||
The status of the project.
|
||||
"""
|
||||
buf = io.StringIO()
|
||||
ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
|
||||
return (ret, buf.getvalue())
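The capture pattern used here, shown in isolation; print_diff is a hypothetical stand-in for project.PrintWorkTreeDiff:

import io


def print_diff(output_redir):
    # Hypothetical stand-in: a real project writes its worktree diff here.
    print("diff --git a/foo b/foo", file=output_redir)
    return True


buf = io.StringIO()
state = print_diff(output_redir=buf)
result = (state, buf.getvalue())  # same (ret, output) shape returned by _ExecuteOne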
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(args)
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
ret = 0
|
||||
for (state, output) in results:
|
||||
if output:
|
||||
print(output, end='')
|
||||
if not state:
|
||||
ret = 1
|
||||
return ret
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
ret = 0
|
||||
for state, output in results:
|
||||
if output:
|
||||
print(output, end="")
|
||||
if not state:
|
||||
ret = 1
|
||||
return ret
|
||||
|
||||
return self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, opt.absolute),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
ordered=True)
|
||||
return self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(
|
||||
self._ExecuteOne, opt.absolute, opt.this_manifest_only
|
||||
),
|
||||
all_projects,
|
||||
callback=_ProcessResults,
|
||||
ordered=True,
|
||||
)
|
||||
|
||||
@@ -18,24 +18,24 @@ from manifest_xml import RepoClient
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
|
||||
class Diffmanifests(PagedCommand):
|
||||
""" A command to see logs in projects represented by manifests
|
||||
"""A command to see logs in projects represented by manifests
|
||||
|
||||
This is used to see deeper differences between manifests. Where a simple
|
||||
diff would only show a diff of sha1s for example, this command will display
|
||||
the logs of the project between both sha1s, allowing the user to see the diff at a
|
||||
deeper level.
|
||||
"""
|
||||
This is used to see deeper differences between manifests. Where a simple
|
||||
diff would only show a diff of sha1s for example, this command will display
|
||||
the logs of the project between both sha1s, allowing the user to see the diff at a
|
||||
deeper level.
|
||||
"""
|
||||
|
||||
COMMON = True
|
||||
helpSummary = "Manifest diff utility"
|
||||
helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
|
||||
COMMON = True
|
||||
helpSummary = "Manifest diff utility"
|
||||
helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
|
||||
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The %prog command shows differences between project revisions of manifest1 and
|
||||
manifest2. If manifest2 is not specified, the current manifest.xml will be used
|
||||
instead. Both absolute and relative paths may be used for manifests. Relative
|
||||
@@ -65,142 +65,197 @@ synced and their revisions won't be found.
|
||||
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('--raw',
|
||||
dest='raw', action='store_true',
|
||||
help='display raw diff')
|
||||
p.add_option('--no-color',
|
||||
dest='color', action='store_false', default=True,
|
||||
help='does not display the diff in color')
|
||||
p.add_option('--pretty-format',
|
||||
dest='pretty_format', action='store',
|
||||
metavar='<FORMAT>',
|
||||
help='print the log using a custom git pretty format string')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"--raw", dest="raw", action="store_true", help="display raw diff"
|
||||
)
|
||||
p.add_option(
|
||||
"--no-color",
|
||||
dest="color",
|
||||
action="store_false",
|
||||
default=True,
|
||||
help="does not display the diff in color",
|
||||
)
|
||||
p.add_option(
|
||||
"--pretty-format",
|
||||
dest="pretty_format",
|
||||
action="store",
|
||||
metavar="<FORMAT>",
|
||||
help="print the log using a custom git pretty format string",
|
||||
)
|
||||
|
||||
def _printRawDiff(self, diff, pretty_format=None):
|
||||
for project in diff['added']:
|
||||
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project in diff['removed']:
|
||||
self.printText("R %s %s" % (project.relpath, project.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
|
||||
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||
otherProject.revisionExpr))
|
||||
self.out.nl()
|
||||
|
||||
def _printDiff(self, diff, color=True, pretty_format=None):
|
||||
if diff['added']:
|
||||
self.out.nl()
|
||||
self.printText('added projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['added']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['removed']:
|
||||
self.out.nl()
|
||||
self.printText('removed projects : \n')
|
||||
self.out.nl()
|
||||
for project in diff['removed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' at revision ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff['changed']:
|
||||
self.out.nl()
|
||||
self.printText('changed projects : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['changed']:
|
||||
self.printProject('\t%s' % (project.relpath))
|
||||
self.printText(' changed from ')
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' to ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.out.nl()
|
||||
self._printLogs(project, otherProject, raw=False, color=color,
|
||||
pretty_format=pretty_format)
|
||||
self.out.nl()
|
||||
|
||||
if diff['unreachable']:
|
||||
self.out.nl()
|
||||
self.printText('projects with unreachable revisions : \n')
|
||||
self.out.nl()
|
||||
for project, otherProject in diff['unreachable']:
|
||||
self.printProject('\t%s ' % (project.relpath))
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(' or ')
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.printText(' not found')
|
||||
self.out.nl()
|
||||
|
||||
def _printLogs(self, project, otherProject, raw=False, color=True,
|
||||
pretty_format=None):
|
||||
|
||||
logs = project.getAddedAndRemovedLogs(otherProject,
|
||||
oneline=(pretty_format is None),
|
||||
color=color,
|
||||
pretty_format=pretty_format)
|
||||
if logs['removed']:
|
||||
removedLogs = logs['removed'].split('\n')
|
||||
for log in removedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' R ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printRemoved('\t\t[-] ')
|
||||
self.printText(log)
|
||||
def _printRawDiff(self, diff, pretty_format=None, local=False):
|
||||
_RelPath = lambda p: p.RelPath(local=local)
|
||||
for project in diff["added"]:
|
||||
self.printText(f"A {_RelPath(project)} {project.revisionExpr}")
|
||||
self.out.nl()
|
||||
|
||||
if logs['added']:
|
||||
addedLogs = logs['added'].split('\n')
|
||||
for log in addedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(' A ' + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printAdded('\t\t[+] ')
|
||||
self.printText(log)
|
||||
for project in diff["removed"]:
|
||||
self.printText(f"R {_RelPath(project)} {project.revisionExpr}")
|
||||
self.out.nl()
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not args or len(args) > 2:
|
||||
self.OptionParser.error('missing manifests to diff')
|
||||
for project, otherProject in diff["changed"]:
|
||||
self.printText(
|
||||
f"C {_RelPath(project)} {project.revisionExpr} "
|
||||
f"{otherProject.revisionExpr}"
|
||||
)
|
||||
self.out.nl()
|
||||
self._printLogs(
|
||||
project,
|
||||
otherProject,
|
||||
raw=True,
|
||||
color=False,
|
||||
pretty_format=pretty_format,
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.printText = self.out.nofmt_printer('text')
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer('project', attr='bold')
|
||||
self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold')
|
||||
self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold')
|
||||
self.printRevision = self.out.nofmt_printer('revision', fg='yellow')
|
||||
else:
|
||||
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||
for project, otherProject in diff["unreachable"]:
|
||||
self.printText(
|
||||
f"U {_RelPath(project)} {project.revisionExpr} "
|
||||
f"{otherProject.revisionExpr}"
|
||||
)
|
||||
self.out.nl()
|
||||
|
||||
manifest1 = RepoClient(self.repodir)
|
||||
manifest1.Override(args[0], load_local_manifests=False)
|
||||
if len(args) == 1:
|
||||
manifest2 = self.manifest
|
||||
else:
|
||||
manifest2 = RepoClient(self.repodir)
|
||||
manifest2.Override(args[1], load_local_manifests=False)
|
||||
def _printDiff(self, diff, color=True, pretty_format=None, local=False):
|
||||
_RelPath = lambda p: p.RelPath(local=local)
|
||||
if diff["added"]:
|
||||
self.out.nl()
|
||||
self.printText("added projects : \n")
|
||||
self.out.nl()
|
||||
for project in diff["added"]:
|
||||
self.printProject("\t%s" % (_RelPath(project)))
|
||||
self.printText(" at revision ")
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(diff, pretty_format=opt.pretty_format)
|
||||
else:
|
||||
self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
|
||||
if diff["removed"]:
|
||||
self.out.nl()
|
||||
self.printText("removed projects : \n")
|
||||
self.out.nl()
|
||||
for project in diff["removed"]:
|
||||
self.printProject("\t%s" % (_RelPath(project)))
|
||||
self.printText(" at revision ")
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff["missing"]:
|
||||
self.out.nl()
|
||||
self.printText("missing projects : \n")
|
||||
self.out.nl()
|
||||
for project in diff["missing"]:
|
||||
self.printProject("\t%s" % (_RelPath(project)))
|
||||
self.printText(" at revision ")
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
if diff["changed"]:
|
||||
self.out.nl()
|
||||
self.printText("changed projects : \n")
|
||||
self.out.nl()
|
||||
for project, otherProject in diff["changed"]:
|
||||
self.printProject("\t%s" % (_RelPath(project)))
|
||||
self.printText(" changed from ")
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(" to ")
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.out.nl()
|
||||
self._printLogs(
|
||||
project,
|
||||
otherProject,
|
||||
raw=False,
|
||||
color=color,
|
||||
pretty_format=pretty_format,
|
||||
)
|
||||
self.out.nl()
|
||||
|
||||
if diff["unreachable"]:
|
||||
self.out.nl()
|
||||
self.printText("projects with unreachable revisions : \n")
|
||||
self.out.nl()
|
||||
for project, otherProject in diff["unreachable"]:
|
||||
self.printProject("\t%s " % (_RelPath(project)))
|
||||
self.printRevision(project.revisionExpr)
|
||||
self.printText(" or ")
|
||||
self.printRevision(otherProject.revisionExpr)
|
||||
self.printText(" not found")
|
||||
self.out.nl()
|
||||
|
||||
def _printLogs(
|
||||
self, project, otherProject, raw=False, color=True, pretty_format=None
|
||||
):
|
||||
logs = project.getAddedAndRemovedLogs(
|
||||
otherProject,
|
||||
oneline=(pretty_format is None),
|
||||
color=color,
|
||||
pretty_format=pretty_format,
|
||||
)
|
||||
if logs["removed"]:
|
||||
removedLogs = logs["removed"].split("\n")
|
||||
for log in removedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(" R " + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printRemoved("\t\t[-] ")
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
if logs["added"]:
|
||||
addedLogs = logs["added"].split("\n")
|
||||
for log in addedLogs:
|
||||
if log.strip():
|
||||
if raw:
|
||||
self.printText(" A " + log)
|
||||
self.out.nl()
|
||||
else:
|
||||
self.printAdded("\t\t[+] ")
|
||||
self.printText(log)
|
||||
self.out.nl()
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not args or len(args) > 2:
|
||||
self.OptionParser.error("missing manifests to diff")
|
||||
if opt.this_manifest_only is False:
|
||||
raise self.OptionParser.error(
|
||||
"`diffmanifest` only supports the current tree"
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.printText = self.out.nofmt_printer("text")
|
||||
if opt.color:
|
||||
self.printProject = self.out.nofmt_printer("project", attr="bold")
|
||||
self.printAdded = self.out.nofmt_printer(
|
||||
"green", fg="green", attr="bold"
|
||||
)
|
||||
self.printRemoved = self.out.nofmt_printer(
|
||||
"red", fg="red", attr="bold"
|
||||
)
|
||||
self.printRevision = self.out.nofmt_printer("revision", fg="yellow")
|
||||
else:
|
||||
self.printProject = (
|
||||
self.printAdded
|
||||
) = self.printRemoved = self.printRevision = self.printText
|
||||
|
||||
manifest1 = RepoClient(self.repodir)
|
||||
manifest1.Override(args[0], load_local_manifests=False)
|
||||
if len(args) == 1:
|
||||
manifest2 = self.manifest
|
||||
else:
|
||||
manifest2 = RepoClient(self.repodir)
|
||||
manifest2.Override(args[1], load_local_manifests=False)
|
||||
|
||||
diff = manifest1.projectsDiff(manifest2)
|
||||
if opt.raw:
|
||||
self._printRawDiff(
|
||||
diff,
|
||||
pretty_format=opt.pretty_format,
|
||||
local=opt.this_manifest_only,
|
||||
)
|
||||
else:
|
||||
self._printDiff(
|
||||
diff,
|
||||
color=opt.color,
|
||||
pretty_format=opt.pretty_format,
|
||||
local=opt.this_manifest_only,
|
||||
)
|
||||
|
||||
@@ -16,145 +16,198 @@ import re
|
||||
import sys
|
||||
|
||||
from command import Command
|
||||
from error import GitError, NoSuchProjectError
|
||||
from error import GitError
|
||||
from error import NoSuchProjectError
|
||||
from error import RepoExitError
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
|
||||
|
||||
CHANGE_RE = re.compile(r"^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$")
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class DownloadCommandError(RepoExitError):
|
||||
"""Error raised when download command fails."""
|
||||
|
||||
|
||||
class Download(Command):
|
||||
COMMON = True
|
||||
helpSummary = "Download and checkout a change"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Download and checkout a change"
|
||||
helpUsage = """
|
||||
%prog {[project] change[/patchset]}...
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command downloads a change from the review system and
|
||||
makes it available in your project's local working directory.
|
||||
If no project is specified, try to use the current directory as a project.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-b', '--branch',
|
||||
help='create a new branch first')
|
||||
p.add_option('-c', '--cherry-pick',
|
||||
dest='cherrypick', action='store_true',
|
||||
help="cherry-pick instead of checkout")
|
||||
p.add_option('-x', '--record-origin', action='store_true',
|
||||
help='pass -x when cherry-picking')
|
||||
p.add_option('-r', '--revert',
|
||||
dest='revert', action='store_true',
|
||||
help="revert instead of checkout")
|
||||
p.add_option('-f', '--ff-only',
|
||||
dest='ffonly', action='store_true',
|
||||
help="force fast-forward merge")
|
||||
def _Options(self, p):
|
||||
p.add_option("-b", "--branch", help="create a new branch first")
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--cherry-pick",
|
||||
dest="cherrypick",
|
||||
action="store_true",
|
||||
help="cherry-pick instead of checkout",
|
||||
)
|
||||
p.add_option(
|
||||
"-x",
|
||||
"--record-origin",
|
||||
action="store_true",
|
||||
help="pass -x when cherry-picking",
|
||||
)
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--revert",
|
||||
dest="revert",
|
||||
action="store_true",
|
||||
help="revert instead of checkout",
|
||||
)
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--ff-only",
|
||||
dest="ffonly",
|
||||
action="store_true",
|
||||
help="force fast-forward merge",
|
||||
)
|
||||
|
||||
def _ParseChangeIds(self, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
def _ParseChangeIds(self, opt, args):
|
||||
if not args:
|
||||
self.Usage()
|
||||
|
||||
to_get = []
|
||||
project = None
|
||||
to_get = []
|
||||
project = None
|
||||
|
||||
for a in args:
|
||||
m = CHANGE_RE.match(a)
|
||||
if m:
|
||||
if not project:
|
||||
project = self.GetProjects(".")[0]
|
||||
print('Defaulting to cwd project', project.name)
|
||||
chg_id = int(m.group(1))
|
||||
if m.group(2):
|
||||
ps_id = int(m.group(2))
|
||||
else:
|
||||
ps_id = 1
|
||||
refs = 'refs/changes/%2.2d/%d/' % (chg_id % 100, chg_id)
|
||||
output = project._LsRemote(refs + '*')
|
||||
if output:
|
||||
regex = refs + r'(\d+)'
|
||||
rcomp = re.compile(regex, re.I)
|
||||
for line in output.splitlines():
|
||||
match = rcomp.search(line)
|
||||
if match:
|
||||
ps_id = max(int(match.group(1)), ps_id)
|
||||
to_get.append((project, chg_id, ps_id))
|
||||
else:
|
||||
projects = self.GetProjects([a])
|
||||
if len(projects) > 1:
|
||||
# If the cwd is one of the projects, assume they want that.
|
||||
try:
|
||||
project = self.GetProjects('.')[0]
|
||||
except NoSuchProjectError:
|
||||
project = None
|
||||
if project not in projects:
|
||||
print('error: %s matches too many projects; please re-run inside '
|
||||
'the project checkout.' % (a,), file=sys.stderr)
|
||||
for project in projects:
|
||||
print(' %s/ @ %s' % (project.relpath, project.revisionExpr),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
project = projects[0]
|
||||
print('Defaulting to cwd project', project.name)
|
||||
return to_get
|
||||
for a in args:
|
||||
m = CHANGE_RE.match(a)
|
||||
if m:
|
||||
if not project:
|
||||
project = self.GetProjects(".")[0]
|
||||
print("Defaulting to cwd project", project.name)
|
||||
chg_id = int(m.group(1))
|
||||
if m.group(2):
|
||||
ps_id = int(m.group(2))
|
||||
else:
|
||||
ps_id = 1
|
||||
refs = "refs/changes/%2.2d/%d/" % (chg_id % 100, chg_id)
|
||||
output = project._LsRemote(refs + "*")
|
||||
if output:
|
||||
regex = refs + r"(\d+)"
|
||||
rcomp = re.compile(regex, re.I)
|
||||
for line in output.splitlines():
|
||||
match = rcomp.search(line)
|
||||
if match:
|
||||
ps_id = max(int(match.group(1)), ps_id)
|
||||
to_get.append((project, chg_id, ps_id))
|
||||
else:
|
||||
projects = self.GetProjects(
|
||||
[a], all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
if len(projects) > 1:
|
||||
# If the cwd is one of the projects, assume they want that.
|
||||
try:
|
||||
project = self.GetProjects(".")[0]
|
||||
except NoSuchProjectError:
|
||||
project = None
|
||||
if project not in projects:
|
||||
logger.error(
|
||||
"error: %s matches too many projects; please "
|
||||
"re-run inside the project checkout.",
|
||||
a,
|
||||
)
|
||||
for project in projects:
|
||||
logger.error(
|
||||
" %s/ @ %s",
|
||||
project.RelPath(local=opt.this_manifest_only),
|
||||
project.revisionExpr,
|
||||
)
|
||||
raise NoSuchProjectError()
|
||||
else:
|
||||
project = projects[0]
|
||||
print("Defaulting to cwd project", project.name)
|
||||
return to_get
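A small sketch of how CHANGE_RE splits the change/patchset arguments; the sample values are illustrative:

import re

CHANGE_RE = re.compile(r"^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$")

for arg in ("12345", "12345/6", "12345.6", "platform/build"):
    m = CHANGE_RE.match(arg)
    if m:
        chg_id = int(m.group(1))
        ps_id = int(m.group(2)) if m.group(2) else 1
        print(arg, "->", (chg_id, ps_id))
    else:
        print(arg, "-> treated as a project argument")
# 12345          -> (12345, 1)
# 12345/6        -> (12345, 6)
# 12345.6        -> (12345, 6)
# platform/build -> treated as a project argument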
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.record_origin:
|
||||
if not opt.cherrypick:
|
||||
self.OptionParser.error('-x only makes sense with --cherry-pick')
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.record_origin:
|
||||
if not opt.cherrypick:
|
||||
self.OptionParser.error(
|
||||
"-x only makes sense with --cherry-pick"
|
||||
)
|
||||
|
||||
if opt.ffonly:
|
||||
self.OptionParser.error('-x and --ff are mutually exclusive options')
|
||||
if opt.ffonly:
|
||||
self.OptionParser.error(
|
||||
"-x and --ff are mutually exclusive options"
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
for project, change_id, ps_id in self._ParseChangeIds(args):
|
||||
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||
if not dl:
|
||||
print('[%s] change %d/%d not found'
|
||||
% (project.name, change_id, ps_id),
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
def Execute(self, opt, args):
|
||||
try:
|
||||
self._ExecuteHelper(opt, args)
|
||||
except Exception as e:
|
||||
if isinstance(e, RepoExitError):
|
||||
raise e
|
||||
raise DownloadCommandError(aggregate_errors=[e])
|
||||
|
||||
if not opt.revert and not dl.commits:
|
||||
print('[%s] change %d/%d has already been merged'
|
||||
% (project.name, change_id, ps_id),
|
||||
file=sys.stderr)
|
||||
continue
|
||||
def _ExecuteHelper(self, opt, args):
|
||||
for project, change_id, ps_id in self._ParseChangeIds(opt, args):
|
||||
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||
|
||||
if len(dl.commits) > 1:
|
||||
print('[%s] %d/%d depends on %d unmerged changes:'
|
||||
% (project.name, change_id, ps_id, len(dl.commits)),
|
||||
file=sys.stderr)
|
||||
for c in dl.commits:
|
||||
print(' %s' % (c), file=sys.stderr)
|
||||
if not opt.revert and not dl.commits:
|
||||
logger.error(
|
||||
"[%s] change %d/%d has already been merged",
|
||||
project.name,
|
||||
change_id,
|
||||
ps_id,
|
||||
)
|
||||
continue
|
||||
|
||||
if opt.cherrypick:
|
||||
mode = 'cherry-pick'
|
||||
elif opt.revert:
|
||||
mode = 'revert'
|
||||
elif opt.ffonly:
|
||||
mode = 'fast-forward merge'
|
||||
else:
|
||||
mode = 'checkout'
|
||||
if len(dl.commits) > 1:
|
||||
logger.error(
|
||||
"[%s] %d/%d depends on %d unmerged changes:",
|
||||
project.name,
|
||||
change_id,
|
||||
ps_id,
|
||||
len(dl.commits),
|
||||
)
|
||||
for c in dl.commits:
|
||||
print(" %s" % (c), file=sys.stderr)
|
||||
|
||||
# We'll combine the branch+checkout operation, but all the rest need a
|
||||
# dedicated branch start.
|
||||
if opt.branch and mode != 'checkout':
|
||||
project.StartBranch(opt.branch)
|
||||
if opt.cherrypick:
|
||||
mode = "cherry-pick"
|
||||
elif opt.revert:
|
||||
mode = "revert"
|
||||
elif opt.ffonly:
|
||||
mode = "fast-forward merge"
|
||||
else:
|
||||
mode = "checkout"
|
||||
|
||||
try:
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(dl.commit, ffonly=opt.ffonly,
|
||||
record_origin=opt.record_origin)
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
else:
|
||||
if opt.branch:
|
||||
project.StartBranch(opt.branch, revision=dl.commit)
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
# We'll combine the branch+checkout operation, but all the rest need
|
||||
# a dedicated branch start.
|
||||
if opt.branch and mode != "checkout":
|
||||
project.StartBranch(opt.branch)
|
||||
|
||||
except GitError:
|
||||
print('[%s] Could not complete the %s of %s'
|
||||
% (project.name, mode, dl.commit), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
try:
|
||||
if opt.cherrypick:
|
||||
project._CherryPick(
|
||||
dl.commit,
|
||||
ffonly=opt.ffonly,
|
||||
record_origin=opt.record_origin,
|
||||
)
|
||||
elif opt.revert:
|
||||
project._Revert(dl.commit)
|
||||
elif opt.ffonly:
|
||||
project._FastForward(dl.commit, ffonly=True)
|
||||
else:
|
||||
if opt.branch:
|
||||
project.StartBranch(opt.branch, revision=dl.commit)
|
||||
else:
|
||||
project._Checkout(dl.commit)
|
||||
|
||||
except GitError:
|
||||
logger.error(
|
||||
"[%s] Could not complete the %s of %s",
|
||||
project.name,
|
||||
mode,
|
||||
dl.commit,
|
||||
)
|
||||
raise
|
||||
|
||||
@@ -16,38 +16,44 @@ import errno
|
||||
import functools
|
||||
import io
|
||||
import multiprocessing
|
||||
import re
|
||||
import os
|
||||
import re
|
||||
import signal
|
||||
import sys
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from color import Coloring
|
||||
from command import DEFAULT_LOCAL_JOBS, Command, MirrorSafeCommand, WORKER_BATCH_SIZE
|
||||
from command import Command
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from command import MirrorSafeCommand
|
||||
from command import WORKER_BATCH_SIZE
|
||||
from error import ManifestInvalidRevisionError
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
_CAN_COLOR = [
|
||||
'branch',
|
||||
'diff',
|
||||
'grep',
|
||||
'log',
|
||||
"branch",
|
||||
"diff",
|
||||
"grep",
|
||||
"log",
|
||||
]
|
||||
|
||||
|
||||
class ForallColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'forall')
|
||||
self.project = self.printer('project', attr='bold')
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "forall")
|
||||
self.project = self.printer("project", attr="bold")
|
||||
|
||||
|
||||
class Forall(Command, MirrorSafeCommand):
|
||||
COMMON = False
|
||||
helpSummary = "Run a shell command in each project"
|
||||
helpUsage = """
|
||||
COMMON = False
|
||||
helpSummary = "Run a shell command in each project"
|
||||
helpUsage = """
|
||||
%prog [<project>...] -c <command> [<arg>...]
|
||||
%prog -r str1 [str2] ... -c <command> [<arg>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
Executes the same shell command in each project.
|
||||
|
||||
The -r option allows running the command only on projects matching
|
||||
@@ -84,6 +90,11 @@ REPO_PROJECT is set to the unique name of the project.
|
||||
|
||||
REPO_PATH is the path relative to the root of the client.
|
||||
|
||||
REPO_OUTERPATH is the path of the sub manifest's root relative to the root of
|
||||
the client.
|
||||
|
||||
REPO_INNERPATH is the path relative to the root of the sub manifest.
|
||||
|
||||
REPO_REMOTE is the name of the remote system from the manifest.
|
||||
|
||||
REPO_LREV is the name of the revision from the manifest, translated
|
||||
@@ -120,233 +131,285 @@ terminal and are not redirected.
|
||||
If -e is used, when a command exits unsuccessfully, '%prog' will abort
|
||||
without iterating through the remaining projects.
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
@staticmethod
|
||||
def _cmd_option(option, _opt_str, _value, parser):
|
||||
setattr(parser.values, option.dest, list(parser.rargs))
|
||||
while parser.rargs:
|
||||
del parser.rargs[0]
|
||||
@staticmethod
|
||||
def _cmd_option(option, _opt_str, _value, parser):
|
||||
setattr(parser.values, option.dest, list(parser.rargs))
|
||||
while parser.rargs:
|
||||
del parser.rargs[0]
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-r', '--regex',
|
||||
dest='regex', action='store_true',
|
||||
help='execute the command only on projects matching regex or wildcard expression')
|
||||
p.add_option('-i', '--inverse-regex',
|
||||
dest='inverse_regex', action='store_true',
|
||||
help='execute the command only on projects not matching regex or '
|
||||
'wildcard expression')
|
||||
p.add_option('-g', '--groups',
|
||||
dest='groups',
|
||||
help='execute the command only on projects matching the specified groups')
|
||||
p.add_option('-c', '--command',
|
||||
help='command (and arguments) to execute',
|
||||
dest='command',
|
||||
action='callback',
|
||||
callback=self._cmd_option)
|
||||
p.add_option('-e', '--abort-on-errors',
|
||||
dest='abort_on_errors', action='store_true',
|
||||
help='abort if a command exits unsuccessfully')
|
||||
p.add_option('--ignore-missing', action='store_true',
|
||||
help='silently skip & do not exit non-zero due to missing '
|
||||
'checkouts')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--regex",
|
||||
dest="regex",
|
||||
action="store_true",
|
||||
help="execute the command only on projects matching regex or "
|
||||
"wildcard expression",
|
||||
)
|
||||
p.add_option(
|
||||
"-i",
|
||||
"--inverse-regex",
|
||||
dest="inverse_regex",
|
||||
action="store_true",
|
||||
help="execute the command only on projects not matching regex or "
|
||||
"wildcard expression",
|
||||
)
|
||||
p.add_option(
|
||||
"-g",
|
||||
"--groups",
|
||||
dest="groups",
|
||||
help="execute the command only on projects matching the specified "
|
||||
"groups",
|
||||
)
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--command",
|
||||
help="command (and arguments) to execute",
|
||||
dest="command",
|
||||
action="callback",
|
||||
callback=self._cmd_option,
|
||||
)
|
||||
p.add_option(
|
||||
"-e",
|
||||
"--abort-on-errors",
|
||||
dest="abort_on_errors",
|
||||
action="store_true",
|
||||
help="abort if a command exits unsuccessfully",
|
||||
)
|
||||
p.add_option(
|
||||
"--ignore-missing",
|
||||
action="store_true",
|
||||
help="silently skip & do not exit non-zero due to missing "
|
||||
"checkouts",
|
||||
)
|
||||
|
||||
g = p.get_option_group('--quiet')
|
||||
g.add_option('-p',
|
||||
dest='project_header', action='store_true',
|
||||
help='show project headers before output')
|
||||
p.add_option('--interactive',
|
||||
action='store_true',
|
||||
help='force interactive usage')
|
||||
g = p.get_option_group("--quiet")
|
||||
g.add_option(
|
||||
"-p",
|
||||
dest="project_header",
|
||||
action="store_true",
|
||||
help="show project headers before output",
|
||||
)
|
||||
p.add_option(
|
||||
"--interactive", action="store_true", help="force interactive usage"
|
||||
)
|
||||
|
||||
def WantPager(self, opt):
|
||||
return opt.project_header and opt.jobs == 1
|
||||
def WantPager(self, opt):
|
||||
return opt.project_header and opt.jobs == 1
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.command:
|
||||
self.Usage()
|
||||
def ValidateOptions(self, opt, args):
|
||||
if not opt.command:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
cmd = [opt.command[0]]
|
||||
def Execute(self, opt, args):
|
||||
cmd = [opt.command[0]]
|
||||
all_trees = not opt.this_manifest_only
|
||||
|
||||
shell = True
|
||||
if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]):
|
||||
shell = False
|
||||
shell = True
|
||||
if re.compile(r"^[a-z0-9A-Z_/\.-]+$").match(cmd[0]):
|
||||
shell = False
|
||||
|
||||
if shell:
|
||||
cmd.append(cmd[0])
|
||||
cmd.extend(opt.command[1:])
|
||||
if shell:
|
||||
cmd.append(cmd[0])
|
||||
cmd.extend(opt.command[1:])
|
||||
|
||||
# Historically, forall operated interactively, and in serial. If the user
|
||||
# has selected 1 job, then default to interactive mode.
|
||||
if opt.jobs == 1:
|
||||
opt.interactive = True
|
||||
# Historically, forall operated interactively, and in serial. If the
|
||||
# user has selected 1 job, then default to interactive mode.
|
||||
if opt.jobs == 1:
|
||||
opt.interactive = True
|
||||
|
||||
if opt.project_header \
|
||||
and not shell \
|
||||
and cmd[0] == 'git':
|
||||
# If this is a direct git command that can enable colorized
|
||||
# output and the user prefers coloring, add --color into the
|
||||
# command line because we are going to wrap the command into
|
||||
# a pipe and git won't know coloring should activate.
|
||||
#
|
||||
for cn in cmd[1:]:
|
||||
if not cn.startswith('-'):
|
||||
break
|
||||
else:
|
||||
cn = None
|
||||
if cn and cn in _CAN_COLOR:
|
||||
class ColorCmd(Coloring):
|
||||
def __init__(self, config, cmd):
|
||||
Coloring.__init__(self, config, cmd)
|
||||
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
|
||||
cmd.insert(cmd.index(cn) + 1, '--color')
|
||||
if opt.project_header and not shell and cmd[0] == "git":
|
||||
# If this is a direct git command that can enable colorized
|
||||
# output and the user prefers coloring, add --color into the
|
||||
# command line because we are going to wrap the command into
|
||||
# a pipe and git won't know coloring should activate.
|
||||
#
|
||||
for cn in cmd[1:]:
|
||||
if not cn.startswith("-"):
|
||||
break
|
||||
else:
|
||||
cn = None
|
||||
if cn and cn in _CAN_COLOR:
|
||||
|
||||
mirror = self.manifest.IsMirror
|
||||
rc = 0
|
||||
class ColorCmd(Coloring):
|
||||
def __init__(self, config, cmd):
|
||||
Coloring.__init__(self, config, cmd)
|
||||
|
||||
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
|
||||
cmd.insert(cmd.index(cn) + 1, "--color")
|
||||
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
self.manifest.Override(smart_sync_manifest_path)
|
||||
mirror = self.manifest.IsMirror
|
||||
rc = 0
|
||||
|
||||
if opt.regex:
|
||||
projects = self.FindProjects(args)
|
||||
elif opt.inverse_regex:
|
||||
projects = self.FindProjects(args, inverse=True)
|
||||
else:
|
||||
projects = self.GetProjects(args, groups=opt.groups)
|
||||
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||
smart_sync_manifest_path = os.path.join(
|
||||
self.manifest.manifestProject.worktree, smart_sync_manifest_name
|
||||
)
|
||||
|
||||
os.environ['REPO_COUNT'] = str(len(projects))
|
||||
if os.path.isfile(smart_sync_manifest_path):
|
||||
self.manifest.Override(smart_sync_manifest_path)
|
||||
|
||||
try:
|
||||
config = self.manifest.manifestProject.config
|
||||
with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
|
||||
results_it = pool.imap(
|
||||
functools.partial(DoWorkWrapper, mirror, opt, cmd, shell, config),
|
||||
enumerate(projects),
|
||||
chunksize=WORKER_BATCH_SIZE)
|
||||
first = True
|
||||
for (r, output) in results_it:
|
||||
if output:
|
||||
if first:
|
||||
first = False
|
||||
elif opt.project_header:
|
||||
print()
|
||||
# To simplify the DoWorkWrapper, take care of automatic newlines.
|
||||
end = '\n'
|
||||
if output[-1] == '\n':
|
||||
end = ''
|
||||
print(output, end=end)
|
||||
rc = rc or r
|
||||
if r != 0 and opt.abort_on_errors:
|
||||
raise Exception('Aborting due to previous error')
|
||||
except (KeyboardInterrupt, WorkerKeyboardInterrupt):
|
||||
# Catch KeyboardInterrupt raised inside and outside of workers
|
||||
rc = rc or errno.EINTR
|
||||
except Exception as e:
|
||||
# Catch any other exceptions raised
|
||||
print('forall: unhandled error, terminating the pool: %s: %s' %
|
||||
(type(e).__name__, e),
|
||||
file=sys.stderr)
|
||||
rc = rc or getattr(e, 'errno', 1)
|
||||
if rc != 0:
|
||||
sys.exit(rc)
|
||||
if opt.regex:
|
||||
projects = self.FindProjects(args, all_manifests=all_trees)
|
||||
elif opt.inverse_regex:
|
||||
projects = self.FindProjects(
|
||||
args, inverse=True, all_manifests=all_trees
|
||||
)
|
||||
else:
|
||||
projects = self.GetProjects(
|
||||
args, groups=opt.groups, all_manifests=all_trees
|
||||
)
|
||||
|
||||
os.environ["REPO_COUNT"] = str(len(projects))
|
||||
|
||||
try:
|
||||
config = self.manifest.manifestProject.config
|
||||
with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
|
||||
results_it = pool.imap(
|
||||
functools.partial(
|
||||
DoWorkWrapper, mirror, opt, cmd, shell, config
|
||||
),
|
||||
enumerate(projects),
|
||||
chunksize=WORKER_BATCH_SIZE,
|
||||
)
|
||||
first = True
|
||||
for r, output in results_it:
|
||||
if output:
|
||||
if first:
|
||||
first = False
|
||||
elif opt.project_header:
|
||||
print()
|
||||
# To simplify the DoWorkWrapper, take care of automatic
|
||||
# newlines.
|
||||
end = "\n"
|
||||
if output[-1] == "\n":
|
||||
end = ""
|
||||
print(output, end=end)
|
||||
rc = rc or r
|
||||
if r != 0 and opt.abort_on_errors:
|
||||
raise Exception("Aborting due to previous error")
|
||||
except (KeyboardInterrupt, WorkerKeyboardInterrupt):
|
||||
# Catch KeyboardInterrupt raised inside and outside of workers
|
||||
rc = rc or errno.EINTR
|
||||
except Exception as e:
|
||||
# Catch any other exceptions raised
|
||||
logger.error(
|
||||
"forall: unhandled error, terminating the pool: %s: %s",
|
||||
type(e).__name__,
|
||||
e,
|
||||
)
|
||||
rc = rc or getattr(e, "errno", 1)
|
||||
if rc != 0:
|
||||
sys.exit(rc)
|
||||
|
||||
|
||||
class WorkerKeyboardInterrupt(Exception):
|
||||
""" Keyboard interrupt exception for worker processes. """
|
||||
"""Keyboard interrupt exception for worker processes."""
|
||||
|
||||
|
||||
def InitWorker():
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
|
||||
|
||||
def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
|
||||
""" A wrapper around the DoWork() method.
|
||||
"""A wrapper around the DoWork() method.
|
||||
|
||||
Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
|
||||
``Exception``-based exception to stop it flooding the console with stacktraces
|
||||
and making the parent hang indefinitely.
|
||||
Catch the KeyboardInterrupt exceptions here and re-raise them as a
|
||||
different, ``Exception``-based exception to stop it flooding the console
|
||||
with stacktraces and making the parent hang indefinitely.
|
||||
|
||||
"""
|
||||
cnt, project = args
|
||||
try:
|
||||
return DoWork(project, mirror, opt, cmd, shell, cnt, config)
|
||||
except KeyboardInterrupt:
|
||||
print('%s: Worker interrupted' % project.name)
|
||||
raise WorkerKeyboardInterrupt()
|
||||
"""
|
||||
cnt, project = args
|
||||
try:
|
||||
return DoWork(project, mirror, opt, cmd, shell, cnt, config)
|
||||
except KeyboardInterrupt:
|
||||
print("%s: Worker interrupted" % project.name)
|
||||
raise WorkerKeyboardInterrupt()
|
||||
|
||||
|
||||
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||
env = os.environ.copy()
|
||||
env = os.environ.copy()
|
||||
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ''
|
||||
env[name] = val
|
||||
def setenv(name, val):
|
||||
if val is None:
|
||||
val = ""
|
||||
env[name] = val
|
||||
|
||||
setenv('REPO_PROJECT', project.name)
|
||||
setenv('REPO_PATH', project.relpath)
|
||||
setenv('REPO_REMOTE', project.remote.name)
|
||||
try:
|
||||
# If we aren't in a fully synced state and we don't have the ref the manifest
|
||||
# wants, then this will fail. Ignore it for the purposes of this code.
|
||||
lrev = '' if mirror else project.GetRevisionId()
|
||||
except ManifestInvalidRevisionError:
|
||||
lrev = ''
|
||||
setenv('REPO_LREV', lrev)
|
||||
setenv('REPO_RREV', project.revisionExpr)
|
||||
setenv('REPO_UPSTREAM', project.upstream)
|
||||
setenv('REPO_DEST_BRANCH', project.dest_branch)
|
||||
setenv('REPO_I', str(cnt + 1))
|
||||
for annotation in project.annotations:
|
||||
setenv("REPO__%s" % (annotation.name), annotation.value)
|
||||
setenv("REPO_PROJECT", project.name)
|
||||
setenv("REPO_OUTERPATH", project.manifest.path_prefix)
|
||||
setenv("REPO_INNERPATH", project.relpath)
|
||||
setenv("REPO_PATH", project.RelPath(local=opt.this_manifest_only))
|
||||
setenv("REPO_REMOTE", project.remote.name)
|
||||
try:
|
||||
# If we aren't in a fully synced state and we don't have the ref the
|
||||
# manifest wants, then this will fail. Ignore it for the purposes of
|
||||
# this code.
|
||||
lrev = "" if mirror else project.GetRevisionId()
|
||||
except ManifestInvalidRevisionError:
|
||||
lrev = ""
|
||||
setenv("REPO_LREV", lrev)
|
||||
setenv("REPO_RREV", project.revisionExpr)
|
||||
setenv("REPO_UPSTREAM", project.upstream)
|
||||
setenv("REPO_DEST_BRANCH", project.dest_branch)
|
||||
setenv("REPO_I", str(cnt + 1))
|
||||
for annotation in project.annotations:
|
||||
setenv("REPO__%s" % (annotation.name), annotation.value)
|
||||
|
||||
if mirror:
|
||||
setenv('GIT_DIR', project.gitdir)
|
||||
cwd = project.gitdir
|
||||
else:
|
||||
cwd = project.worktree
|
||||
if mirror:
|
||||
setenv("GIT_DIR", project.gitdir)
|
||||
cwd = project.gitdir
|
||||
else:
|
||||
cwd = project.worktree
|
||||
|
||||
if not os.path.exists(cwd):
|
||||
# Allow the user to silently ignore missing checkouts so they can run on
|
||||
# partial checkouts (good for infra recovery tools).
|
||||
if opt.ignore_missing:
|
||||
return (0, '')
|
||||
if not os.path.exists(cwd):
|
||||
# Allow the user to silently ignore missing checkouts so they can run on
|
||||
# partial checkouts (good for infra recovery tools).
|
||||
if opt.ignore_missing:
|
||||
return (0, "")
|
||||
|
||||
output = ''
|
||||
if ((opt.project_header and opt.verbose)
|
||||
or not opt.project_header):
|
||||
output = 'skipping %s/' % project.relpath
|
||||
return (1, output)
|
||||
output = ""
|
||||
if (opt.project_header and opt.verbose) or not opt.project_header:
|
||||
output = "skipping %s/" % project.RelPath(
|
||||
local=opt.this_manifest_only
|
||||
)
|
||||
return (1, output)
|
||||
|
||||
if opt.verbose:
|
||||
stderr = subprocess.STDOUT
|
||||
else:
|
||||
stderr = subprocess.DEVNULL
|
||||
if opt.verbose:
|
||||
stderr = subprocess.STDOUT
|
||||
else:
|
||||
stderr = subprocess.DEVNULL
|
||||
|
||||
stdin = None if opt.interactive else subprocess.DEVNULL
|
||||
stdin = None if opt.interactive else subprocess.DEVNULL
|
||||
|
||||
result = subprocess.run(
|
||||
cmd, cwd=cwd, shell=shell, env=env, check=False,
|
||||
encoding='utf-8', errors='replace',
|
||||
stdin=stdin, stdout=subprocess.PIPE, stderr=stderr)
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
cwd=cwd,
|
||||
shell=shell,
|
||||
env=env,
|
||||
check=False,
|
||||
encoding="utf-8",
|
||||
errors="replace",
|
||||
stdin=stdin,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=stderr,
|
||||
)
|
||||
|
||||
output = result.stdout
|
||||
if opt.project_header:
|
||||
if output:
|
||||
buf = io.StringIO()
|
||||
out = ForallColoring(config)
|
||||
out.redirect(buf)
|
||||
if mirror:
|
||||
project_header_path = project.name
|
||||
else:
|
||||
project_header_path = project.relpath
|
||||
out.project('project %s/' % project_header_path)
|
||||
out.nl()
|
||||
buf.write(output)
|
||||
output = buf.getvalue()
|
||||
return (result.returncode, output)
|
||||
output = result.stdout
|
||||
if opt.project_header:
|
||||
if output:
|
||||
buf = io.StringIO()
|
||||
out = ForallColoring(config)
|
||||
out.redirect(buf)
|
||||
if mirror:
|
||||
project_header_path = project.name
|
||||
else:
|
||||
project_header_path = project.RelPath(
|
||||
local=opt.this_manifest_only
|
||||
)
|
||||
out.project("project %s/" % project_header_path)
|
||||
out.nl()
|
||||
buf.write(output)
|
||||
output = buf.getvalue()
|
||||
return (result.returncode, output)
|
||||
|
||||
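For context on the execution path above: forall copies the environment, exports the REPO_* variables for the current project, runs the user command with subprocess, and hands (returncode, output) back to the parent for printing. A minimal sketch of that shape, assuming a hypothetical run_in_project() helper that only exports REPO_PATH (the real subcmds/forall.py sets many more variables and also handles mirrors, project headers, and coloring):

import os
import subprocess


def run_in_project(cmd, project_path, extra_env=None, verbose=False):
    """Run |cmd| inside |project_path| with REPO_*-style variables exported."""
    env = os.environ.copy()
    env["REPO_PATH"] = project_path  # illustrative; forall exports many more
    if extra_env:
        env.update(extra_env)

    # Mirror the stderr/stdin policy above: only surface stderr when verbose.
    stderr = subprocess.STDOUT if verbose else subprocess.DEVNULL
    result = subprocess.run(
        cmd,
        cwd=project_path,
        env=env,
        check=False,
        encoding="utf-8",
        errors="replace",
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        stderr=stderr,
    )
    return result.returncode, result.stdout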
46
subcmds/gitc_delete.py
@@ -1,46 +0,0 @@
|
||||
# Copyright (C) 2015 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
from command import Command, GitcClientCommand
|
||||
import platform_utils
|
||||
|
||||
|
||||
class GitcDelete(Command, GitcClientCommand):
|
||||
COMMON = True
|
||||
visible_everywhere = False
|
||||
helpSummary = "Delete a GITC Client."
|
||||
helpUsage = """
|
||||
%prog
|
||||
"""
|
||||
helpDescription = """
|
||||
This subcommand deletes the current GITC client, deleting the GITC manifest
|
||||
and all locally downloaded sources.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-f', '--force',
|
||||
dest='force', action='store_true',
|
||||
help='force the deletion (no prompt)')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if not opt.force:
|
||||
prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' %
|
||||
self.gitc_manifest.gitc_client_name)
|
||||
response = input(prompt).lower()
|
||||
if not response == 'yes':
|
||||
print('Response was not "yes"\n Exiting...')
|
||||
sys.exit(1)
|
||||
platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
|
||||
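The delete flow above is a guarded recursive removal: prompt unless --force, require a literal "yes", then remove the client directory. A rough sketch of the same pattern, using shutil.rmtree in place of platform_utils.rmtree and an illustrative confirm_delete() name:

import shutil
import sys


def confirm_delete(path, force=False):
    """Prompt before recursively deleting |path| (sketch only)."""
    if not force:
        response = input("This will delete: %s\nAre you sure? (yes/no) " % path)
        if response.lower() != "yes":
            print('Response was not "yes"\n Exiting...')
            sys.exit(1)
    shutil.rmtree(path)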
74
subcmds/gitc_init.py
@@ -1,74 +0,0 @@
|
||||
# Copyright (C) 2015 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import gitc_utils
|
||||
from command import GitcAvailableCommand
|
||||
from manifest_xml import GitcManifest
|
||||
from subcmds import init
|
||||
import wrapper
|
||||
|
||||
|
||||
class GitcInit(init.Init, GitcAvailableCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Initialize a GITC Client."
|
||||
helpUsage = """
|
||||
%prog [options] [client name]
|
||||
"""
|
||||
helpDescription = """
|
||||
The '%prog' command is run to initialize a new GITC client for use
|
||||
with the GITC file system.
|
||||
|
||||
This command will setup the client directory, initialize repo, just
|
||||
like repo init does, and then downloads the manifest collection
|
||||
and installs it in the .repo/directory of the GITC client.
|
||||
|
||||
Once this is done, a GITC manifest is generated by pulling the HEAD
|
||||
SHA for each project and generates the properly formatted XML file
|
||||
and installs it as .manifest in the GITC client directory.
|
||||
|
||||
The -c argument is required to specify the GITC client name.
|
||||
|
||||
The optional -f argument can be used to specify the manifest file to
|
||||
use for this GITC client.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
super()._Options(p, gitc_init=True)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
gitc_client = gitc_utils.parse_clientdir(os.getcwd())
|
||||
if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client):
|
||||
print('fatal: Please update your repo command. See go/gitc for instructions.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
|
||||
gitc_client)
|
||||
super().Execute(opt, args)
|
||||
|
||||
manifest_file = self.manifest.manifestFile
|
||||
if opt.manifest_file:
|
||||
if not os.path.exists(opt.manifest_file):
|
||||
print('fatal: Specified manifest file %s does not exist.' %
|
||||
opt.manifest_file)
|
||||
sys.exit(1)
|
||||
manifest_file = opt.manifest_file
|
||||
|
||||
manifest = GitcManifest(self.repodir, gitc_client)
|
||||
manifest.Override(manifest_file)
|
||||
gitc_utils.generate_gitc_manifest(None, manifest)
|
||||
print('Please run `cd %s` to view your GITC client.' %
|
||||
os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client))
|
||||
521
subcmds/grep.py
@@ -14,27 +14,52 @@
|
||||
|
||||
import functools
|
||||
import sys
|
||||
from typing import NamedTuple
|
||||
|
||||
from color import Coloring
|
||||
from command import DEFAULT_LOCAL_JOBS, PagedCommand
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from command import PagedCommand
|
||||
from error import GitError
|
||||
from error import InvalidArgumentsError
|
||||
from error import SilentRepoExitError
|
||||
from git_command import GitCommand
|
||||
from project import Project
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class GrepColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'grep')
|
||||
self.project = self.printer('project', attr='bold')
|
||||
self.fail = self.printer('fail', fg='red')
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "grep")
|
||||
self.project = self.printer("project", attr="bold")
|
||||
self.fail = self.printer("fail", fg="red")
|
||||
|
||||
|
||||
class ExecuteOneResult(NamedTuple):
|
||||
"""Result from an execute instance."""
|
||||
|
||||
project: Project
|
||||
rc: int
|
||||
stdout: str
|
||||
stderr: str
|
||||
error: GitError
|
||||
|
||||
|
||||
class GrepCommandError(SilentRepoExitError):
|
||||
"""Grep command failure. Since Grep command
|
||||
output already outputs errors ensure that
|
||||
aggregate errors exit silently."""
|
||||
|
||||
|
||||
class Grep(PagedCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Print lines matching a pattern"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Print lines matching a pattern"
|
||||
helpUsage = """
|
||||
%prog {pattern | -e pattern} [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
Search for the specified patterns in all project files.
|
||||
|
||||
# Boolean Options
|
||||
@@ -62,214 +87,318 @@ contain a line that matches both expressions:
|
||||
repo grep --all-match -e NODE -e Unexpected
|
||||
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
@staticmethod
|
||||
def _carry_option(_option, opt_str, value, parser):
|
||||
pt = getattr(parser.values, 'cmd_argv', None)
|
||||
if pt is None:
|
||||
pt = []
|
||||
setattr(parser.values, 'cmd_argv', pt)
|
||||
@staticmethod
|
||||
def _carry_option(_option, opt_str, value, parser):
|
||||
pt = getattr(parser.values, "cmd_argv", None)
|
||||
if pt is None:
|
||||
pt = []
|
||||
setattr(parser.values, "cmd_argv", pt)
|
||||
|
||||
if opt_str == '-(':
|
||||
pt.append('(')
|
||||
elif opt_str == '-)':
|
||||
pt.append(')')
|
||||
else:
|
||||
pt.append(opt_str)
|
||||
if opt_str == "-(":
|
||||
pt.append("(")
|
||||
elif opt_str == "-)":
|
||||
pt.append(")")
|
||||
else:
|
||||
pt.append(opt_str)
|
||||
|
||||
if value is not None:
|
||||
pt.append(value)
|
||||
if value is not None:
|
||||
pt.append(value)
|
||||
|
||||
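The _carry_option callback above is how repo grep forwards flags untouched to the underlying `git grep`: rather than giving each option its own dest, the optparse callback appends the raw flag (and its value, if any) onto a cmd_argv list that later becomes the git command line. A self-contained toy version of the pattern (the option names here are illustrative):

import optparse


def carry(option, opt_str, value, parser):
    # Stash the flag verbatim instead of storing it into a dest.
    argv = getattr(parser.values, "cmd_argv", None)
    if argv is None:
        argv = []
        setattr(parser.values, "cmd_argv", argv)
    argv.append(opt_str)
    if value is not None:
        argv.append(value)


parser = optparse.OptionParser()
parser.add_option("-e", action="callback", callback=carry, type="str")
parser.add_option("-i", "--ignore-case", action="callback", callback=carry)

opts, _args = parser.parse_args(["-i", "-e", "TODO"])
print(getattr(opts, "cmd_argv", []))  # ['-i', '-e', 'TODO']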
def _CommonOptions(self, p):
|
||||
"""Override common options slightly."""
|
||||
super()._CommonOptions(p, opt_v=False)
|
||||
def _CommonOptions(self, p):
|
||||
"""Override common options slightly."""
|
||||
super()._CommonOptions(p, opt_v=False)
|
||||
|
||||
def _Options(self, p):
|
||||
g = p.add_option_group('Sources')
|
||||
g.add_option('--cached',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Search the index, instead of the work tree')
|
||||
g.add_option('-r', '--revision',
|
||||
dest='revision', action='append', metavar='TREEish',
|
||||
help='Search TREEish, instead of the work tree')
|
||||
def _Options(self, p):
|
||||
g = p.add_option_group("Sources")
|
||||
g.add_option(
|
||||
"--cached",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Search the index, instead of the work tree",
|
||||
)
|
||||
g.add_option(
|
||||
"-r",
|
||||
"--revision",
|
||||
dest="revision",
|
||||
action="append",
|
||||
metavar="TREEish",
|
||||
help="Search TREEish, instead of the work tree",
|
||||
)
|
||||
|
||||
g = p.add_option_group('Pattern')
|
||||
g.add_option('-e',
|
||||
action='callback', callback=self._carry_option,
|
||||
metavar='PATTERN', type='str',
|
||||
help='Pattern to search for')
|
||||
g.add_option('-i', '--ignore-case',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Ignore case differences')
|
||||
g.add_option('-a', '--text',
|
||||
action='callback', callback=self._carry_option,
|
||||
help="Process binary files as if they were text")
|
||||
g.add_option('-I',
|
||||
action='callback', callback=self._carry_option,
|
||||
help="Don't match the pattern in binary files")
|
||||
g.add_option('-w', '--word-regexp',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Match the pattern only at word boundaries')
|
||||
g.add_option('-v', '--invert-match',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Select non-matching lines')
|
||||
g.add_option('-G', '--basic-regexp',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Use POSIX basic regexp for patterns (default)')
|
||||
g.add_option('-E', '--extended-regexp',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Use POSIX extended regexp for patterns')
|
||||
g.add_option('-F', '--fixed-strings',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Use fixed strings (not regexp) for pattern')
|
||||
g = p.add_option_group("Pattern")
|
||||
g.add_option(
|
||||
"-e",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
metavar="PATTERN",
|
||||
type="str",
|
||||
help="Pattern to search for",
|
||||
)
|
||||
g.add_option(
|
||||
"-i",
|
||||
"--ignore-case",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Ignore case differences",
|
||||
)
|
||||
g.add_option(
|
||||
"-a",
|
||||
"--text",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Process binary files as if they were text",
|
||||
)
|
||||
g.add_option(
|
||||
"-I",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Don't match the pattern in binary files",
|
||||
)
|
||||
g.add_option(
|
||||
"-w",
|
||||
"--word-regexp",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Match the pattern only at word boundaries",
|
||||
)
|
||||
g.add_option(
|
||||
"-v",
|
||||
"--invert-match",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Select non-matching lines",
|
||||
)
|
||||
g.add_option(
|
||||
"-G",
|
||||
"--basic-regexp",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Use POSIX basic regexp for patterns (default)",
|
||||
)
|
||||
g.add_option(
|
||||
"-E",
|
||||
"--extended-regexp",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Use POSIX extended regexp for patterns",
|
||||
)
|
||||
g.add_option(
|
||||
"-F",
|
||||
"--fixed-strings",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Use fixed strings (not regexp) for pattern",
|
||||
)
|
||||
|
||||
g = p.add_option_group('Pattern Grouping')
|
||||
g.add_option('--all-match',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Limit match to lines that have all patterns')
|
||||
g.add_option('--and', '--or', '--not',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Boolean operators to combine patterns')
|
||||
g.add_option('-(', '-)',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Boolean operator grouping')
|
||||
g = p.add_option_group("Pattern Grouping")
|
||||
g.add_option(
|
||||
"--all-match",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Limit match to lines that have all patterns",
|
||||
)
|
||||
g.add_option(
|
||||
"--and",
|
||||
"--or",
|
||||
"--not",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Boolean operators to combine patterns",
|
||||
)
|
||||
g.add_option(
|
||||
"-(",
|
||||
"-)",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Boolean operator grouping",
|
||||
)
|
||||
|
||||
g = p.add_option_group('Output')
|
||||
g.add_option('-n',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Prefix the line number to matching lines')
|
||||
g.add_option('-C',
|
||||
action='callback', callback=self._carry_option,
|
||||
metavar='CONTEXT', type='str',
|
||||
help='Show CONTEXT lines around match')
|
||||
g.add_option('-B',
|
||||
action='callback', callback=self._carry_option,
|
||||
metavar='CONTEXT', type='str',
|
||||
help='Show CONTEXT lines before match')
|
||||
g.add_option('-A',
|
||||
action='callback', callback=self._carry_option,
|
||||
metavar='CONTEXT', type='str',
|
||||
help='Show CONTEXT lines after match')
|
||||
g.add_option('-l', '--name-only', '--files-with-matches',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Show only file names containing matching lines')
|
||||
g.add_option('-L', '--files-without-match',
|
||||
action='callback', callback=self._carry_option,
|
||||
help='Show only file names not containing matching lines')
|
||||
g = p.add_option_group("Output")
|
||||
g.add_option(
|
||||
"-n",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Prefix the line number to matching lines",
|
||||
)
|
||||
g.add_option(
|
||||
"-C",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
metavar="CONTEXT",
|
||||
type="str",
|
||||
help="Show CONTEXT lines around match",
|
||||
)
|
||||
g.add_option(
|
||||
"-B",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
metavar="CONTEXT",
|
||||
type="str",
|
||||
help="Show CONTEXT lines before match",
|
||||
)
|
||||
g.add_option(
|
||||
"-A",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
metavar="CONTEXT",
|
||||
type="str",
|
||||
help="Show CONTEXT lines after match",
|
||||
)
|
||||
g.add_option(
|
||||
"-l",
|
||||
"--name-only",
|
||||
"--files-with-matches",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Show only file names containing matching lines",
|
||||
)
|
||||
g.add_option(
|
||||
"-L",
|
||||
"--files-without-match",
|
||||
action="callback",
|
||||
callback=self._carry_option,
|
||||
help="Show only file names not containing matching lines",
|
||||
)
|
||||
|
||||
def _ExecuteOne(self, cmd_argv, project):
|
||||
"""Process one project."""
|
||||
try:
|
||||
p = GitCommand(project,
|
||||
cmd_argv,
|
||||
bare=False,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True)
|
||||
except GitError as e:
|
||||
return (project, -1, None, str(e))
|
||||
def _ExecuteOne(self, cmd_argv, project):
|
||||
"""Process one project."""
|
||||
try:
|
||||
p = GitCommand(
|
||||
project,
|
||||
cmd_argv,
|
||||
bare=False,
|
||||
capture_stdout=True,
|
||||
capture_stderr=True,
|
||||
verify_command=True,
|
||||
)
|
||||
except GitError as e:
|
||||
return ExecuteOneResult(project, -1, None, str(e), e)
|
||||
|
||||
return (project, p.Wait(), p.stdout, p.stderr)
|
||||
try:
|
||||
error = None
|
||||
rc = p.Wait()
|
||||
except GitError as e:
|
||||
rc = 1
|
||||
error = e
|
||||
return ExecuteOneResult(project, rc, p.stdout, p.stderr, error)
|
||||
|
||||
@staticmethod
|
||||
def _ProcessResults(full_name, have_rev, _pool, out, results):
|
||||
git_failed = False
|
||||
bad_rev = False
|
||||
have_match = False
|
||||
@staticmethod
|
||||
def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
|
||||
git_failed = False
|
||||
bad_rev = False
|
||||
have_match = False
|
||||
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
||||
errors = []
|
||||
|
||||
for project, rc, stdout, stderr in results:
|
||||
if rc < 0:
|
||||
git_failed = True
|
||||
out.project('--- project %s ---' % project.relpath)
|
||||
out.nl()
|
||||
out.fail('%s', stderr)
|
||||
out.nl()
|
||||
continue
|
||||
for result in results:
|
||||
if result.rc < 0:
|
||||
git_failed = True
|
||||
out.project("--- project %s ---" % _RelPath(result.project))
|
||||
out.nl()
|
||||
out.fail("%s", result.stderr)
|
||||
out.nl()
|
||||
errors.append(result.error)
|
||||
continue
|
||||
|
||||
if rc:
|
||||
# no results
|
||||
if stderr:
|
||||
if have_rev and 'fatal: ambiguous argument' in stderr:
|
||||
bad_rev = True
|
||||
else:
|
||||
out.project('--- project %s ---' % project.relpath)
|
||||
out.nl()
|
||||
out.fail('%s', stderr.strip())
|
||||
out.nl()
|
||||
continue
|
||||
have_match = True
|
||||
if result.rc:
|
||||
# no results
|
||||
if result.stderr:
|
||||
if (
|
||||
have_rev
|
||||
and "fatal: ambiguous argument" in result.stderr
|
||||
):
|
||||
bad_rev = True
|
||||
else:
|
||||
out.project(
|
||||
"--- project %s ---" % _RelPath(result.project)
|
||||
)
|
||||
out.nl()
|
||||
out.fail("%s", result.stderr.strip())
|
||||
out.nl()
|
||||
if result.error is not None:
|
||||
errors.append(result.error)
|
||||
continue
|
||||
have_match = True
|
||||
|
||||
# We cut the last element, to avoid a blank line.
|
||||
r = stdout.split('\n')
|
||||
r = r[0:-1]
|
||||
# We cut the last element, to avoid a blank line.
|
||||
r = result.stdout.split("\n")
|
||||
r = r[0:-1]
|
||||
|
||||
if have_rev and full_name:
|
||||
for line in r:
|
||||
rev, line = line.split(':', 1)
|
||||
out.write("%s", rev)
|
||||
out.write(':')
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
elif full_name:
|
||||
for line in r:
|
||||
out.project(project.relpath)
|
||||
out.write('/')
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
else:
|
||||
for line in r:
|
||||
print(line)
|
||||
if have_rev and full_name:
|
||||
for line in r:
|
||||
rev, line = line.split(":", 1)
|
||||
out.write("%s", rev)
|
||||
out.write(":")
|
||||
out.project(_RelPath(result.project))
|
||||
out.write("/")
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
elif full_name:
|
||||
for line in r:
|
||||
out.project(_RelPath(result.project))
|
||||
out.write("/")
|
||||
out.write("%s", line)
|
||||
out.nl()
|
||||
else:
|
||||
for line in r:
|
||||
print(line)
|
||||
|
||||
return (git_failed, bad_rev, have_match)
|
||||
return (git_failed, bad_rev, have_match, errors)
|
||||
|
||||
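The output shaping in _ProcessResults is mostly string surgery on each matched line: when searching a revision with --full-name, `git grep` emits rev:path:match and the project-relative path is spliced in after the revision; with --full-name alone the path is simply prefixed. A small illustrative helper (format_grep_line is a made-up name, not part of the command):

def format_grep_line(line, project_relpath, have_rev, full_name):
    """Reproduce the per-line prefixing done above (illustration only)."""
    if have_rev and full_name:
        rev, rest = line.split(":", 1)
        return "%s:%s/%s" % (rev, project_relpath, rest)
    if full_name:
        return "%s/%s" % (project_relpath, line)
    return line


print(format_grep_line("HEAD:src/main.c:TODO fix", "platform/art", True, True))
# HEAD:platform/art/src/main.c:TODO fix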
def Execute(self, opt, args):
|
||||
out = GrepColoring(self.manifest.manifestProject.config)
|
||||
def Execute(self, opt, args):
|
||||
out = GrepColoring(self.manifest.manifestProject.config)
|
||||
|
||||
cmd_argv = ['grep']
|
||||
if out.is_on:
|
||||
cmd_argv.append('--color')
|
||||
cmd_argv.extend(getattr(opt, 'cmd_argv', []))
|
||||
cmd_argv = ["grep"]
|
||||
if out.is_on:
|
||||
cmd_argv.append("--color")
|
||||
cmd_argv.extend(getattr(opt, "cmd_argv", []))
|
||||
|
||||
if '-e' not in cmd_argv:
|
||||
if not args:
|
||||
self.Usage()
|
||||
cmd_argv.append('-e')
|
||||
cmd_argv.append(args[0])
|
||||
args = args[1:]
|
||||
if "-e" not in cmd_argv:
|
||||
if not args:
|
||||
self.Usage()
|
||||
cmd_argv.append("-e")
|
||||
cmd_argv.append(args[0])
|
||||
args = args[1:]
|
||||
|
||||
projects = self.GetProjects(args)
|
||||
projects = self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
|
||||
full_name = False
|
||||
if len(projects) > 1:
|
||||
cmd_argv.append('--full-name')
|
||||
full_name = True
|
||||
full_name = False
|
||||
if len(projects) > 1:
|
||||
cmd_argv.append("--full-name")
|
||||
full_name = True
|
||||
|
||||
have_rev = False
|
||||
if opt.revision:
|
||||
if '--cached' in cmd_argv:
|
||||
print('fatal: cannot combine --cached and --revision', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
have_rev = True
|
||||
cmd_argv.extend(opt.revision)
|
||||
cmd_argv.append('--')
|
||||
have_rev = False
|
||||
if opt.revision:
|
||||
if "--cached" in cmd_argv:
|
||||
msg = "fatal: cannot combine --cached and --revision"
|
||||
logger.error(msg)
|
||||
raise InvalidArgumentsError(msg)
|
||||
have_rev = True
|
||||
cmd_argv.extend(opt.revision)
|
||||
cmd_argv.append("--")
|
||||
|
||||
git_failed, bad_rev, have_match = self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, cmd_argv),
|
||||
projects,
|
||||
callback=functools.partial(self._ProcessResults, full_name, have_rev),
|
||||
output=out,
|
||||
ordered=True)
|
||||
git_failed, bad_rev, have_match, errors = self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
functools.partial(self._ExecuteOne, cmd_argv),
|
||||
projects,
|
||||
callback=functools.partial(
|
||||
self._ProcessResults, full_name, have_rev, opt
|
||||
),
|
||||
output=out,
|
||||
ordered=True,
|
||||
)
|
||||
|
||||
if git_failed:
|
||||
sys.exit(1)
|
||||
elif have_match:
|
||||
sys.exit(0)
|
||||
elif have_rev and bad_rev:
|
||||
for r in opt.revision:
|
||||
print("error: can't search revision %s" % r, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
sys.exit(1)
|
||||
if git_failed:
|
||||
raise GrepCommandError(
|
||||
"error: git failures", aggregate_errors=errors
|
||||
)
|
||||
elif have_match:
|
||||
sys.exit(0)
|
||||
elif have_rev and bad_rev:
|
||||
for r in opt.revision:
|
||||
logger.error("error: can't search revision %s", r)
|
||||
raise GrepCommandError(aggregate_errors=errors)
|
||||
|
||||
271
subcmds/help.py
@@ -16,165 +16,178 @@ import re
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from subcmds import all_commands
|
||||
from color import Coloring
|
||||
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
|
||||
import gitc_utils
|
||||
from command import MirrorSafeCommand
|
||||
from command import PagedCommand
|
||||
from error import RepoExitError
|
||||
from subcmds import all_commands
|
||||
from wrapper import Wrapper
|
||||
|
||||
|
||||
class InvalidHelpCommand(RepoExitError):
|
||||
"""Invalid command passed into help."""
|
||||
|
||||
|
||||
class Help(PagedCommand, MirrorSafeCommand):
|
||||
COMMON = False
|
||||
helpSummary = "Display detailed help on a command"
|
||||
helpUsage = """
|
||||
COMMON = False
|
||||
helpSummary = "Display detailed help on a command"
|
||||
helpUsage = """
|
||||
%prog [--all|command]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
Displays detailed usage information about a command.
|
||||
"""
|
||||
|
||||
def _PrintCommands(self, commandNames):
|
||||
"""Helper to display |commandNames| summaries."""
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
maxlen = max(maxlen, len(name))
|
||||
fmt = ' %%-%ds %%s' % maxlen
|
||||
def _PrintCommands(self, commandNames):
|
||||
"""Helper to display |commandNames| summaries."""
|
||||
maxlen = 0
|
||||
for name in commandNames:
|
||||
maxlen = max(maxlen, len(name))
|
||||
fmt = " %%-%ds %%s" % maxlen
|
||||
|
||||
for name in commandNames:
|
||||
command = all_commands[name]()
|
||||
try:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
summary = ''
|
||||
print(fmt % (name, summary))
|
||||
for name in commandNames:
|
||||
command = all_commands[name]()
|
||||
try:
|
||||
summary = command.helpSummary.strip()
|
||||
except AttributeError:
|
||||
summary = ""
|
||||
print(fmt % (name, summary))
|
||||
|
||||
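_PrintCommands builds its column layout by measuring the longest command name and baking that width into a %-<N>s format string. The same trick in isolation, with a few representative summaries (the exact set printed depends on which commands are registered):

commands = {
    "help": "Display detailed help on a command",
    "sync": "Update working tree to the latest revision",
    "upload": "Upload changes for code review",
}

width = max(len(name) for name in commands)
fmt = "  %%-%ds  %%s" % width  # e.g. "  %-6s  %s"
for name in sorted(commands):
    print(fmt % (name, commands[name]))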
def _PrintAllCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
self.PrintAllCommandsBody()
|
||||
def _PrintAllCommands(self):
|
||||
print("usage: repo COMMAND [ARGS]")
|
||||
self.PrintAllCommandsBody()
|
||||
|
||||
def PrintAllCommandsBody(self):
|
||||
print('The complete list of recognized repo commands is:')
|
||||
commandNames = list(sorted(all_commands))
|
||||
self._PrintCommands(commandNames)
|
||||
print("See 'repo help <command>' for more information on a "
|
||||
'specific command.')
|
||||
print('Bug reports:', Wrapper().BUG_URL)
|
||||
def PrintAllCommandsBody(self):
|
||||
print("The complete list of recognized repo commands is:")
|
||||
commandNames = list(sorted(all_commands))
|
||||
self._PrintCommands(commandNames)
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a "
|
||||
"specific command."
|
||||
)
|
||||
print("Bug reports:", Wrapper().BUG_URL)
|
||||
|
||||
def _PrintCommonCommands(self):
|
||||
print('usage: repo COMMAND [ARGS]')
|
||||
self.PrintCommonCommandsBody()
|
||||
def _PrintCommonCommands(self):
|
||||
print("usage: repo COMMAND [ARGS]")
|
||||
self.PrintCommonCommandsBody()
|
||||
|
||||
def PrintCommonCommandsBody(self):
|
||||
print('The most commonly used repo commands are:')
|
||||
def PrintCommonCommandsBody(self):
|
||||
print("The most commonly used repo commands are:")
|
||||
|
||||
def gitc_supported(cmd):
|
||||
if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
|
||||
return True
|
||||
if self.client.isGitcClient:
|
||||
return True
|
||||
if isinstance(cmd, GitcClientCommand):
|
||||
return False
|
||||
if gitc_utils.get_gitc_manifest_dir():
|
||||
return True
|
||||
return False
|
||||
commandNames = list(
|
||||
sorted(
|
||||
name for name, command in all_commands.items() if command.COMMON
|
||||
)
|
||||
)
|
||||
self._PrintCommands(commandNames)
|
||||
|
||||
commandNames = list(sorted([name
|
||||
for name, command in all_commands.items()
|
||||
if command.COMMON and gitc_supported(command)]))
|
||||
self._PrintCommands(commandNames)
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a specific "
|
||||
"command.\nSee 'repo help --all' for a complete list of recognized "
|
||||
"commands."
|
||||
)
|
||||
print("Bug reports:", Wrapper().BUG_URL)
|
||||
|
||||
print(
|
||||
"See 'repo help <command>' for more information on a specific command.\n"
|
||||
"See 'repo help --all' for a complete list of recognized commands.")
|
||||
print('Bug reports:', Wrapper().BUG_URL)
|
||||
def _PrintCommandHelp(self, cmd, header_prefix=""):
|
||||
class _Out(Coloring):
|
||||
def __init__(self, gc):
|
||||
Coloring.__init__(self, gc, "help")
|
||||
self.heading = self.printer("heading", attr="bold")
|
||||
self._first = True
|
||||
|
||||
def _PrintCommandHelp(self, cmd, header_prefix=''):
|
||||
class _Out(Coloring):
|
||||
def __init__(self, gc):
|
||||
Coloring.__init__(self, gc, 'help')
|
||||
self.heading = self.printer('heading', attr='bold')
|
||||
self._first = True
|
||||
def _PrintSection(self, heading, bodyAttr):
|
||||
try:
|
||||
body = getattr(cmd, bodyAttr)
|
||||
except AttributeError:
|
||||
return
|
||||
if body == "" or body is None:
|
||||
return
|
||||
|
||||
def _PrintSection(self, heading, bodyAttr):
|
||||
try:
|
||||
body = getattr(cmd, bodyAttr)
|
||||
except AttributeError:
|
||||
return
|
||||
if body == '' or body is None:
|
||||
return
|
||||
if not self._first:
|
||||
self.nl()
|
||||
self._first = False
|
||||
|
||||
if not self._first:
|
||||
self.nl()
|
||||
self._first = False
|
||||
self.heading("%s%s", header_prefix, heading)
|
||||
self.nl()
|
||||
self.nl()
|
||||
|
||||
self.heading('%s%s', header_prefix, heading)
|
||||
self.nl()
|
||||
self.nl()
|
||||
me = "repo %s" % cmd.NAME
|
||||
body = body.strip()
|
||||
body = body.replace("%prog", me)
|
||||
|
||||
me = 'repo %s' % cmd.NAME
|
||||
body = body.strip()
|
||||
body = body.replace('%prog', me)
|
||||
# Extract the title, but skip any trailing {#anchors}.
|
||||
asciidoc_hdr = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$")
|
||||
for para in body.split("\n\n"):
|
||||
if para.startswith(" "):
|
||||
self.write("%s", para)
|
||||
self.nl()
|
||||
self.nl()
|
||||
continue
|
||||
|
||||
# Extract the title, but skip any trailing {#anchors}.
|
||||
asciidoc_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$')
|
||||
for para in body.split("\n\n"):
|
||||
if para.startswith(' '):
|
||||
self.write('%s', para)
|
||||
self.nl()
|
||||
self.nl()
|
||||
continue
|
||||
m = asciidoc_hdr.match(para)
|
||||
if m:
|
||||
self.heading("%s%s", header_prefix, m.group(1))
|
||||
self.nl()
|
||||
self.nl()
|
||||
continue
|
||||
|
||||
m = asciidoc_hdr.match(para)
|
||||
if m:
|
||||
self.heading('%s%s', header_prefix, m.group(1))
|
||||
self.nl()
|
||||
self.nl()
|
||||
continue
|
||||
lines = textwrap.wrap(
|
||||
para.replace(" ", " "),
|
||||
width=80,
|
||||
break_long_words=False,
|
||||
break_on_hyphens=False,
|
||||
)
|
||||
for line in lines:
|
||||
self.write("%s", line)
|
||||
self.nl()
|
||||
self.nl()
|
||||
|
||||
lines = textwrap.wrap(para.replace(' ', ' '), width=80,
|
||||
break_long_words=False, break_on_hyphens=False)
|
||||
for line in lines:
|
||||
self.write('%s', line)
|
||||
self.nl()
|
||||
self.nl()
|
||||
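The help formatter walks the description paragraph by paragraph: indented paragraphs pass through verbatim, `# Heading {#anchor}` lines become headings with the anchor stripped (the asciidoc_hdr regex), and everything else is re-wrapped to 80 columns with textwrap. A compact, standalone sketch of that loop on a toy body string:

import re
import textwrap

body = """
# Summary {#summary}

The '%prog' command is run once per client checkout.

  indented example lines are kept verbatim
"""

asciidoc_hdr = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$")
for para in body.strip("\n").split("\n\n"):
    if para.startswith(" "):
        print(para)  # literal block: keep as-is
        continue
    m = asciidoc_hdr.match(para)
    if m:
        print(m.group(1).strip())  # heading text, {#anchor} dropped
        continue
    for line in textwrap.wrap(
        para, width=80, break_long_words=False, break_on_hyphens=False
    ):
        print(line)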
out = _Out(self.client.globalConfig)
|
||||
out._PrintSection("Summary", "helpSummary")
|
||||
cmd.OptionParser.print_help()
|
||||
out._PrintSection("Description", "helpDescription")
|
||||
|
||||
out = _Out(self.client.globalConfig)
|
||||
out._PrintSection('Summary', 'helpSummary')
|
||||
cmd.OptionParser.print_help()
|
||||
out._PrintSection('Description', 'helpDescription')
|
||||
def _PrintAllCommandHelp(self):
|
||||
for name in sorted(all_commands):
|
||||
cmd = all_commands[name](manifest=self.manifest)
|
||||
self._PrintCommandHelp(cmd, header_prefix=f"[{name}] ")
|
||||
|
||||
def _PrintAllCommandHelp(self):
|
||||
for name in sorted(all_commands):
|
||||
cmd = all_commands[name](manifest=self.manifest)
|
||||
self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-a",
|
||||
"--all",
|
||||
dest="show_all",
|
||||
action="store_true",
|
||||
help="show the complete list of commands",
|
||||
)
|
||||
p.add_option(
|
||||
"--help-all",
|
||||
dest="show_all_help",
|
||||
action="store_true",
|
||||
help="show the --help of all commands",
|
||||
)
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-a', '--all',
|
||||
dest='show_all', action='store_true',
|
||||
help='show the complete list of commands')
|
||||
p.add_option('--help-all',
|
||||
dest='show_all_help', action='store_true',
|
||||
help='show the --help of all commands')
|
||||
def Execute(self, opt, args):
|
||||
if len(args) == 0:
|
||||
if opt.show_all_help:
|
||||
self._PrintAllCommandHelp()
|
||||
elif opt.show_all:
|
||||
self._PrintAllCommands()
|
||||
else:
|
||||
self._PrintCommonCommands()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if len(args) == 0:
|
||||
if opt.show_all_help:
|
||||
self._PrintAllCommandHelp()
|
||||
elif opt.show_all:
|
||||
self._PrintAllCommands()
|
||||
else:
|
||||
self._PrintCommonCommands()
|
||||
elif len(args) == 1:
|
||||
name = args[0]
|
||||
|
||||
elif len(args) == 1:
|
||||
name = args[0]
|
||||
try:
|
||||
cmd = all_commands[name](manifest=self.manifest)
|
||||
except KeyError:
|
||||
print(
|
||||
"repo: '%s' is not a repo command." % name, file=sys.stderr
|
||||
)
|
||||
raise InvalidHelpCommand(name)
|
||||
|
||||
try:
|
||||
cmd = all_commands[name](manifest=self.manifest)
|
||||
except KeyError:
|
||||
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
self._PrintCommandHelp(cmd)
|
||||
|
||||
self._PrintCommandHelp(cmd)
|
||||
|
||||
else:
|
||||
self._PrintCommandHelp(self)
|
||||
else:
|
||||
self._PrintCommandHelp(self)
|
||||
|
||||
405
subcmds/info.py
@@ -14,208 +14,243 @@
|
||||
|
||||
import optparse
|
||||
|
||||
from command import PagedCommand
|
||||
from color import Coloring
|
||||
from git_refs import R_M, R_HEADS
|
||||
from command import PagedCommand
|
||||
from git_refs import R_HEADS
|
||||
from git_refs import R_M
|
||||
|
||||
|
||||
class _Coloring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
|
||||
|
||||
class Info(PagedCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||
helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
|
||||
COMMON = True
|
||||
helpSummary = (
|
||||
"Get info on the manifest branch, current branch or unmerged branches"
|
||||
)
|
||||
helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-d', '--diff',
|
||||
dest='all', action='store_true',
|
||||
help="show full info and commit diff including remote branches")
|
||||
p.add_option('-o', '--overview',
|
||||
dest='overview', action='store_true',
|
||||
help='show overview of all local commits')
|
||||
p.add_option('-c', '--current-branch',
|
||||
dest="current_branch", action="store_true",
|
||||
help="consider only checked out branches")
|
||||
p.add_option('--no-current-branch',
|
||||
dest='current_branch', action='store_false',
|
||||
help='consider all local branches')
|
||||
# Turn this into a warning & remove this someday.
|
||||
p.add_option('-b',
|
||||
dest='current_branch', action='store_true',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
p.add_option('-l', '--local-only',
|
||||
dest="local", action="store_true",
|
||||
help="disable all remote operations")
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-d",
|
||||
"--diff",
|
||||
dest="all",
|
||||
action="store_true",
|
||||
help="show full info and commit diff including remote branches",
|
||||
)
|
||||
p.add_option(
|
||||
"-o",
|
||||
"--overview",
|
||||
dest="overview",
|
||||
action="store_true",
|
||||
help="show overview of all local commits",
|
||||
)
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--current-branch",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help="consider only checked out branches",
|
||||
)
|
||||
p.add_option(
|
||||
"--no-current-branch",
|
||||
dest="current_branch",
|
||||
action="store_false",
|
||||
help="consider all local branches",
|
||||
)
|
||||
# Turn this into a warning & remove this someday.
|
||||
p.add_option(
|
||||
"-b",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help=optparse.SUPPRESS_HELP,
|
||||
)
|
||||
p.add_option(
|
||||
"-l",
|
||||
"--local-only",
|
||||
dest="local",
|
||||
action="store_true",
|
||||
help="disable all remote operations",
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.heading = self.out.printer('heading', attr='bold')
|
||||
self.headtext = self.out.nofmt_printer('headtext', fg='yellow')
|
||||
self.redtext = self.out.printer('redtext', fg='red')
|
||||
self.sha = self.out.printer("sha", fg='yellow')
|
||||
self.text = self.out.nofmt_printer('text')
|
||||
self.dimtext = self.out.printer('dimtext', attr='dim')
|
||||
def Execute(self, opt, args):
|
||||
self.out = _Coloring(self.client.globalConfig)
|
||||
self.heading = self.out.printer("heading", attr="bold")
|
||||
self.headtext = self.out.nofmt_printer("headtext", fg="yellow")
|
||||
self.redtext = self.out.printer("redtext", fg="red")
|
||||
self.sha = self.out.printer("sha", fg="yellow")
|
||||
self.text = self.out.nofmt_printer("text")
|
||||
self.dimtext = self.out.printer("dimtext", attr="dim")
|
||||
|
||||
self.opt = opt
|
||||
self.opt = opt
|
||||
|
||||
manifestConfig = self.manifest.manifestProject.config
|
||||
mergeBranch = manifestConfig.GetBranch("default").merge
|
||||
manifestGroups = (manifestConfig.GetString('manifest.groups')
|
||||
or 'all,-notdefault')
|
||||
if not opt.this_manifest_only:
|
||||
self.manifest = self.manifest.outer_client
|
||||
manifestConfig = self.manifest.manifestProject.config
|
||||
mergeBranch = manifestConfig.GetBranch("default").merge
|
||||
manifestGroups = self.manifest.GetGroupsStr()
|
||||
|
||||
self.heading("Manifest branch: ")
|
||||
if self.manifest.default.revisionExpr:
|
||||
self.headtext(self.manifest.default.revisionExpr)
|
||||
self.out.nl()
|
||||
self.heading("Manifest merge branch: ")
|
||||
self.headtext(mergeBranch)
|
||||
self.out.nl()
|
||||
self.heading("Manifest groups: ")
|
||||
self.headtext(manifestGroups)
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
if not opt.overview:
|
||||
self.printDiffInfo(args)
|
||||
else:
|
||||
self.printCommitOverview(args)
|
||||
|
||||
def printSeparator(self):
|
||||
self.text("----------------------------")
|
||||
self.out.nl()
|
||||
|
||||
def printDiffInfo(self, args):
|
||||
# We let exceptions bubble up to main as they'll be well structured.
|
||||
projs = self.GetProjects(args)
|
||||
|
||||
for p in projs:
|
||||
self.heading("Project: ")
|
||||
self.headtext(p.name)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Mount path: ")
|
||||
self.headtext(p.worktree)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Current revision: ")
|
||||
self.headtext(p.GetRevisionId())
|
||||
self.out.nl()
|
||||
|
||||
currentBranch = p.CurrentBranch
|
||||
if currentBranch:
|
||||
self.heading('Current branch: ')
|
||||
self.headtext(currentBranch)
|
||||
self.heading("Manifest branch: ")
|
||||
if self.manifest.default.revisionExpr:
|
||||
self.headtext(self.manifest.default.revisionExpr)
|
||||
self.out.nl()
|
||||
self.heading("Manifest merge branch: ")
|
||||
# The manifest might not have a merge branch if it isn't in a git repo,
|
||||
# e.g. if `repo init --standalone-manifest` is used.
|
||||
self.headtext(mergeBranch or "")
|
||||
self.out.nl()
|
||||
self.heading("Manifest groups: ")
|
||||
self.headtext(manifestGroups)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Manifest revision: ")
|
||||
self.headtext(p.revisionExpr)
|
||||
self.out.nl()
|
||||
self.printSeparator()
|
||||
|
||||
localBranches = list(p.GetBranches().keys())
|
||||
self.heading("Local Branches: ")
|
||||
self.redtext(str(len(localBranches)))
|
||||
if localBranches:
|
||||
self.text(" [")
|
||||
self.text(", ".join(localBranches))
|
||||
self.text("]")
|
||||
self.out.nl()
|
||||
if not opt.overview:
|
||||
self._printDiffInfo(opt, args)
|
||||
else:
|
||||
self._printCommitOverview(opt, args)
|
||||
|
||||
if self.opt.all:
|
||||
self.findRemoteLocalDiff(p)
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
def findRemoteLocalDiff(self, project):
|
||||
# Fetch all the latest commits.
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
branch = self.manifest.manifestProject.config.GetBranch('default').merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS):]
|
||||
logTarget = R_M + branch
|
||||
|
||||
bareTmp = project.bare_git._bare
|
||||
project.bare_git._bare = False
|
||||
localCommits = project.bare_git.rev_list(
|
||||
'--abbrev=8',
|
||||
'--abbrev-commit',
|
||||
'--pretty=oneline',
|
||||
logTarget + "..",
|
||||
'--')
|
||||
|
||||
originCommits = project.bare_git.rev_list(
|
||||
'--abbrev=8',
|
||||
'--abbrev-commit',
|
||||
'--pretty=oneline',
|
||||
".." + logTarget,
|
||||
'--')
|
||||
project.bare_git._bare = bareTmp
|
||||
|
||||
self.heading("Local Commits: ")
|
||||
self.redtext(str(len(localCommits)))
|
||||
self.dimtext(" (on current branch)")
|
||||
self.out.nl()
|
||||
|
||||
for c in localCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
self.heading("Remote Commits: ")
|
||||
self.redtext(str(len(originCommits)))
|
||||
self.out.nl()
|
||||
|
||||
for c in originCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
def printCommitOverview(self, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches()]
|
||||
br = [x for x in br if x]
|
||||
if self.opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
self.out.nl()
|
||||
self.heading('Projects Overview')
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
self.out.nl()
|
||||
self.headtext(project.relpath)
|
||||
def printSeparator(self):
|
||||
self.text("----------------------------")
|
||||
self.out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
self.text('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or '',
|
||||
date))
|
||||
self.out.nl()
|
||||
def _printDiffInfo(self, opt, args):
|
||||
# We let exceptions bubble up to main as they'll be well structured.
|
||||
projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
|
||||
|
||||
for commit in commits:
|
||||
split = commit.split()
|
||||
self.text('{0:38}{1} '.format('', '-'))
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
for p in projs:
|
||||
self.heading("Project: ")
|
||||
self.headtext(p.name)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Mount path: ")
|
||||
self.headtext(p.worktree)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Current revision: ")
|
||||
self.headtext(p.GetRevisionId())
|
||||
self.out.nl()
|
||||
|
||||
currentBranch = p.CurrentBranch
|
||||
if currentBranch:
|
||||
self.heading("Current branch: ")
|
||||
self.headtext(currentBranch)
|
||||
self.out.nl()
|
||||
|
||||
self.heading("Manifest revision: ")
|
||||
self.headtext(p.revisionExpr)
|
||||
self.out.nl()
|
||||
|
||||
localBranches = list(p.GetBranches().keys())
|
||||
self.heading("Local Branches: ")
|
||||
self.redtext(str(len(localBranches)))
|
||||
if localBranches:
|
||||
self.text(" [")
|
||||
self.text(", ".join(localBranches))
|
||||
self.text("]")
|
||||
self.out.nl()
|
||||
|
||||
if self.opt.all:
|
||||
self.findRemoteLocalDiff(p)
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
def findRemoteLocalDiff(self, project):
|
||||
# Fetch all the latest commits.
|
||||
if not self.opt.local:
|
||||
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||
|
||||
branch = self.manifest.manifestProject.config.GetBranch("default").merge
|
||||
if branch.startswith(R_HEADS):
|
||||
branch = branch[len(R_HEADS) :]
|
||||
logTarget = R_M + branch
|
||||
|
||||
bareTmp = project.bare_git._bare
|
||||
project.bare_git._bare = False
|
||||
localCommits = project.bare_git.rev_list(
|
||||
"--abbrev=8",
|
||||
"--abbrev-commit",
|
||||
"--pretty=oneline",
|
||||
logTarget + "..",
|
||||
"--",
|
||||
)
|
||||
|
||||
originCommits = project.bare_git.rev_list(
|
||||
"--abbrev=8",
|
||||
"--abbrev-commit",
|
||||
"--pretty=oneline",
|
||||
".." + logTarget,
|
||||
"--",
|
||||
)
|
||||
project.bare_git._bare = bareTmp
|
||||
|
||||
self.heading("Local Commits: ")
|
||||
self.redtext(str(len(localCommits)))
|
||||
self.dimtext(" (on current branch)")
|
||||
self.out.nl()
|
||||
|
||||
for c in localCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
self.printSeparator()
|
||||
|
||||
self.heading("Remote Commits: ")
|
||||
self.redtext(str(len(originCommits)))
|
||||
self.out.nl()
|
||||
|
||||
for c in originCommits:
|
||||
split = c.split()
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
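findRemoteLocalDiff compares the checkout against the manifest merge target by running `git rev-list` twice: `<target>..` lists commits only present locally and `..<target>` lists commits only present upstream. A hedged standalone equivalent using subprocess (the git dir and ref below are placeholders, not values taken from this diff):

import subprocess


def rev_list_oneline(git_dir, range_spec):
    """Return abbreviated one-line commits for |range_spec| (sketch only)."""
    out = subprocess.run(
        ["git", "--git-dir", git_dir, "rev-list",
         "--abbrev=8", "--abbrev-commit", "--pretty=oneline", range_spec, "--"],
        check=True, encoding="utf-8", stdout=subprocess.PIPE,
    ).stdout
    return [line for line in out.splitlines() if line]


# Hypothetical usage:
# local_only = rev_list_oneline("proj/.git", "refs/remotes/m/main..")
# remote_only = rev_list_oneline("proj/.git", "..refs/remotes/m/main")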
def _printCommitOverview(self, opt, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
):
|
||||
br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
|
||||
br = [x for x in br if x]
|
||||
if self.opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
self.out.nl()
|
||||
self.heading("Projects Overview")
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
self.out.nl()
|
||||
self.headtext(project.RelPath(local=opt.this_manifest_only))
|
||||
self.out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
self.text(
|
||||
"%s %-33s (%2d commit%s, %s)"
|
||||
% (
|
||||
branch.name == project.CurrentBranch and "*" or " ",
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and "s" or "",
|
||||
date,
|
||||
)
|
||||
)
|
||||
self.out.nl()
|
||||
|
||||
for commit in commits:
|
||||
split = commit.split()
|
||||
self.text(f"{'':38}{'-'} ")
|
||||
self.sha(split[0] + " ")
|
||||
self.text(" ".join(split[1:]))
|
||||
self.out.nl()
|
||||
|
||||
728
subcmds/init.py
@@ -13,30 +13,33 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import urllib.parse
|
||||
|
||||
from color import Coloring
|
||||
from command import InteractiveCommand, MirrorSafeCommand
|
||||
from error import ManifestParseError
|
||||
from project import SyncBuffer
|
||||
from git_config import GitConfig
|
||||
from git_command import git_require, MIN_GIT_VERSION_SOFT, MIN_GIT_VERSION_HARD
|
||||
import fetch
|
||||
import git_superproject
|
||||
import platform_utils
|
||||
from command import InteractiveCommand
|
||||
from command import MirrorSafeCommand
|
||||
from error import RepoUnhandledExceptionError
|
||||
from error import UpdateManifestError
|
||||
from git_command import git_require
|
||||
from git_command import MIN_GIT_VERSION_HARD
|
||||
from git_command import MIN_GIT_VERSION_SOFT
|
||||
from repo_logging import RepoLogger
|
||||
from wrapper import Wrapper
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
_REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")
|
||||
|
||||
|
||||
class Init(InteractiveCommand, MirrorSafeCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Initialize a repo client checkout in the current directory"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
MULTI_MANIFEST_SUPPORT = True
|
||||
helpSummary = "Initialize a repo client checkout in the current directory"
|
||||
helpUsage = """
|
||||
%prog [options] [manifest url]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command is run once to install and initialize repo.
|
||||
The latest repo source code and manifest collection is downloaded
|
||||
from the server and is installed in the .repo/ directory in the
|
||||
@@ -85,461 +88,310 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
|
||||
to update the working directory files.
|
||||
"""
|
||||
|
||||
def _CommonOptions(self, p):
|
||||
"""Disable due to re-use of Wrapper()."""
|
||||
def _CommonOptions(self, p):
|
||||
"""Disable due to re-use of Wrapper()."""
|
||||
|
||||
def _Options(self, p, gitc_init=False):
|
||||
Wrapper().InitParser(p, gitc_init=gitc_init)
|
||||
def _Options(self, p):
|
||||
Wrapper().InitParser(p)
|
||||
m = p.add_option_group("Multi-manifest")
|
||||
m.add_option(
|
||||
"--outer-manifest",
|
||||
action="store_true",
|
||||
default=True,
|
||||
help="operate starting at the outermost manifest",
|
||||
)
|
||||
m.add_option(
|
||||
"--no-outer-manifest",
|
||||
dest="outer_manifest",
|
||||
action="store_false",
|
||||
help="do not operate on outer manifests",
|
||||
)
|
||||
m.add_option(
|
||||
"--this-manifest-only",
|
||||
action="store_true",
|
||||
default=None,
|
||||
help="only operate on this (sub)manifest",
|
||||
)
|
||||
m.add_option(
|
||||
"--no-this-manifest-only",
|
||||
"--all-manifests",
|
||||
dest="this_manifest_only",
|
||||
action="store_false",
|
||||
help="operate on this manifest and its submanifests",
|
||||
)
|
||||
|
||||
def _RegisteredEnvironmentOptions(self):
|
||||
return {'REPO_MANIFEST_URL': 'manifest_url',
|
||||
'REPO_MIRROR_LOCATION': 'reference'}
|
||||
def _RegisteredEnvironmentOptions(self):
|
||||
return {
|
||||
"REPO_MANIFEST_URL": "manifest_url",
|
||||
"REPO_MIRROR_LOCATION": "reference",
|
||||
}
|
||||
|
||||
def _CloneSuperproject(self, opt):
|
||||
"""Clone the superproject based on the superproject's url and branch.
|
||||
def _SyncManifest(self, opt):
|
||||
"""Call manifestProject.Sync with arguments from opt.
|
||||
|
||||
Args:
|
||||
opt: Program options returned from optparse. See _Options().
|
||||
"""
|
||||
superproject = git_superproject.Superproject(self.manifest,
|
||||
self.repodir,
|
||||
self.git_event_log,
|
||||
quiet=opt.quiet)
|
||||
sync_result = superproject.Sync()
|
||||
if not sync_result.success:
|
||||
print('warning: git update of superproject failed, repo sync will not '
|
||||
'use superproject to fetch source; while this error is not fatal, '
|
||||
'and you can continue to run repo sync, please run repo init with '
|
||||
'the --no-use-superproject option to stop seeing this warning',
|
||||
file=sys.stderr)
|
||||
if sync_result.fatal and opt.use_superproject is not None:
|
||||
sys.exit(1)
|
||||
Args:
|
||||
opt: options from optparse.
|
||||
"""
|
||||
# Normally this value is set when instantiating the project, but the
|
||||
# manifest project is special and is created when instantiating the
|
||||
# manifest which happens before we parse options.
|
||||
self.manifest.manifestProject.clone_depth = opt.manifest_depth
|
||||
clone_filter_for_depth = (
|
||||
"blob:none" if (_REPO_ALLOW_SHALLOW == "0") else None
|
||||
)
|
||||
if not self.manifest.manifestProject.Sync(
|
||||
manifest_url=opt.manifest_url,
|
||||
manifest_branch=opt.manifest_branch,
|
||||
standalone_manifest=opt.standalone_manifest,
|
||||
groups=opt.groups,
|
||||
platform=opt.platform,
|
||||
mirror=opt.mirror,
|
||||
dissociate=opt.dissociate,
|
||||
reference=opt.reference,
|
||||
worktree=opt.worktree,
|
||||
submodules=opt.submodules,
|
||||
archive=opt.archive,
|
||||
partial_clone=opt.partial_clone,
|
||||
clone_filter=opt.clone_filter,
|
||||
partial_clone_exclude=opt.partial_clone_exclude,
|
||||
clone_filter_for_depth=clone_filter_for_depth,
|
||||
clone_bundle=opt.clone_bundle,
|
||||
git_lfs=opt.git_lfs,
|
||||
use_superproject=opt.use_superproject,
|
||||
verbose=opt.verbose,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
tags=opt.tags,
|
||||
depth=opt.depth,
|
||||
git_event_log=self.git_event_log,
|
||||
manifest_name=opt.manifest_name,
|
||||
):
|
||||
manifest_name = opt.manifest_name
|
||||
raise UpdateManifestError(
|
||||
f"Unable to sync manifest {manifest_name}"
|
||||
)
|
||||
|
||||
def _SyncManifest(self, opt):
|
||||
m = self.manifest.manifestProject
|
||||
is_new = not m.Exists
|
||||
def _Prompt(self, prompt, value):
|
||||
print("%-10s [%s]: " % (prompt, value), end="", flush=True)
|
||||
a = sys.stdin.readline().strip()
|
||||
if a == "":
|
||||
return value
|
||||
return a
|
||||
|
||||
# If repo has already been initialized, we take -u with the absence of
|
||||
# --standalone-manifest to mean "transition to a standard repo set up",
|
||||
# which necessitates starting fresh.
|
||||
# If --standalone-manifest is set, we always tear everything down and start
|
||||
# anew.
|
||||
if not is_new:
|
||||
was_standalone_manifest = m.config.GetString('manifest.standalone')
|
||||
if was_standalone_manifest and not opt.manifest_url:
|
||||
print('fatal: repo was initialized with a standalone manifest, '
|
||||
'cannot be re-initialized without --manifest-url/-u')
|
||||
sys.exit(1)
|
||||
def _ShouldConfigureUser(self, opt, existing_checkout):
|
||||
gc = self.client.globalConfig
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
if opt.standalone_manifest or (was_standalone_manifest and
|
||||
opt.manifest_url):
|
||||
m.config.ClearCache()
|
||||
if m.gitdir and os.path.exists(m.gitdir):
|
||||
platform_utils.rmtree(m.gitdir)
|
||||
if m.worktree and os.path.exists(m.worktree):
|
||||
platform_utils.rmtree(m.worktree)
|
||||
# If we don't have local settings, get from global.
|
||||
if not mp.config.Has("user.name") or not mp.config.Has("user.email"):
|
||||
if not gc.Has("user.name") or not gc.Has("user.email"):
|
||||
return True
|
||||
|
||||
is_new = not m.Exists
|
||||
if is_new:
|
||||
if not opt.manifest_url:
|
||||
print('fatal: manifest url is required.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
mp.config.SetString("user.name", gc.GetString("user.name"))
|
||||
mp.config.SetString("user.email", gc.GetString("user.email"))
|
||||
|
||||
if not opt.quiet:
|
||||
print('Downloading manifest from %s' %
|
||||
(GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),),
|
||||
file=sys.stderr)
|
||||
if not opt.quiet and not existing_checkout or opt.verbose:
|
||||
print()
|
||||
print(
|
||||
"Your identity is: %s <%s>"
|
||||
% (
|
||||
mp.config.GetString("user.name"),
|
||||
mp.config.GetString("user.email"),
|
||||
)
|
||||
)
|
||||
print(
|
||||
"If you want to change this, please re-run 'repo init' with "
|
||||
"--config-name"
|
||||
)
|
||||
return False
|
||||
|
||||
# The manifest project object doesn't keep track of the path on the
|
||||
# server where this git is located, so let's save that here.
|
||||
mirrored_manifest_git = None
|
||||
if opt.reference:
|
||||
manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:]
|
||||
mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
|
||||
if not mirrored_manifest_git.endswith(".git"):
|
||||
mirrored_manifest_git += ".git"
|
||||
if not os.path.exists(mirrored_manifest_git):
|
||||
mirrored_manifest_git = os.path.join(opt.reference,
|
||||
'.repo/manifests.git')
|
||||
def _ConfigureUser(self, opt):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
m._InitGitDir(mirror_git=mirrored_manifest_git)
|
||||
while True:
|
||||
if not opt.quiet:
|
||||
print()
|
||||
name = self._Prompt("Your Name", mp.UserName)
|
||||
email = self._Prompt("Your Email", mp.UserEmail)
|
||||
|
||||
# If standalone_manifest is set, mark the project as "standalone" -- we'll
|
||||
# still do much of the manifests.git set up, but will avoid actual syncs to
|
||||
# a remote.
|
||||
standalone_manifest = False
|
||||
if opt.standalone_manifest:
|
||||
standalone_manifest = True
|
||||
m.config.SetString('manifest.standalone', opt.manifest_url)
|
||||
elif not opt.manifest_url and not opt.manifest_branch:
|
||||
# If -u is set and --standalone-manifest is not, then we're not in
|
||||
# standalone mode. Otherwise, use config to infer what we were in the last
|
||||
# init.
|
||||
standalone_manifest = bool(m.config.GetString('manifest.standalone'))
|
||||
if not standalone_manifest:
|
||||
m.config.SetString('manifest.standalone', None)
|
||||
if not opt.quiet:
|
||||
print()
|
||||
print(f"Your identity is: {name} <{email}>")
|
||||
print("is this correct [y/N]? ", end="", flush=True)
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ("yes", "y", "t", "true"):
|
||||
break
|
||||
|
||||
self._ConfigureDepth(opt)
|
||||
if name != mp.UserName:
|
||||
mp.config.SetString("user.name", name)
|
||||
if email != mp.UserEmail:
|
||||
mp.config.SetString("user.email", email)
|
||||
|
||||
# Set the remote URL before the remote branch as we might need it below.
|
||||
if opt.manifest_url:
|
||||
r = m.GetRemote(m.remote.name)
|
||||
r.url = opt.manifest_url
|
||||
r.ResetFetch()
|
||||
r.Save()
|
||||
def _HasColorSet(self, gc):
|
||||
for n in ["ui", "diff", "status"]:
|
||||
if gc.Has("color.%s" % n):
|
||||
return True
|
||||
return False
|
||||
|
||||
if not standalone_manifest:
|
||||
if opt.manifest_branch:
|
||||
if opt.manifest_branch == 'HEAD':
|
||||
opt.manifest_branch = m.ResolveRemoteHead()
|
||||
if opt.manifest_branch is None:
|
||||
print('fatal: unable to resolve HEAD', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
m.revisionExpr = opt.manifest_branch
|
||||
else:
|
||||
if is_new:
|
||||
default_branch = m.ResolveRemoteHead()
|
||||
if default_branch is None:
|
||||
# If the remote doesn't have HEAD configured, default to master.
|
||||
default_branch = 'refs/heads/master'
|
||||
m.revisionExpr = default_branch
|
||||
def _ConfigureColor(self):
|
||||
gc = self.client.globalConfig
|
||||
if self._HasColorSet(gc):
|
||||
return
|
||||
|
||||
class _Test(Coloring):
|
||||
def __init__(self):
|
||||
Coloring.__init__(self, gc, "test color display")
|
||||
self._on = True
|
||||
|
||||
out = _Test()
|
||||
|
||||
print()
|
||||
print("Testing colorized output (for 'repo diff', 'repo status'):")
|
||||
|
||||
for c in ["black", "red", "green", "yellow", "blue", "magenta", "cyan"]:
|
||||
out.write(" ")
|
||||
out.printer(fg=c)(" %-6s ", c)
|
||||
out.write(" ")
|
||||
out.printer(fg="white", bg="black")(" %s " % "white")
|
||||
out.nl()
|
||||
|
||||
for c in ["bold", "dim", "ul", "reverse"]:
|
||||
out.write(" ")
|
||||
out.printer(fg="black", attr=c)(" %-6s ", c)
|
||||
out.nl()
|
||||
|
||||
print(
|
||||
"Enable color display in this user account (y/N)? ",
|
||||
end="",
|
||||
flush=True,
|
||||
)
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ("y", "yes", "t", "true", "on"):
|
||||
gc.SetString("color.ui", "auto")
|
||||
|
||||
def _DisplayResult(self):
|
||||
if self.manifest.IsMirror:
|
||||
init_type = "mirror "
|
||||
else:
|
||||
m.PreSync()
|
||||
init_type = ""
|
||||
|
||||
groups = re.split(r'[,\s]+', opt.groups)
|
||||
all_platforms = ['linux', 'darwin', 'windows']
|
||||
platformize = lambda x: 'platform-' + x
|
||||
if opt.platform == 'auto':
|
||||
if (not opt.mirror and
|
||||
not m.config.GetString('repo.mirror') == 'true'):
|
||||
groups.append(platformize(platform.system().lower()))
|
||||
elif opt.platform == 'all':
|
||||
groups.extend(map(platformize, all_platforms))
|
||||
elif opt.platform in all_platforms:
|
||||
groups.append(platformize(opt.platform))
|
||||
elif opt.platform != 'none':
|
||||
print('fatal: invalid platform flag', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
groups = [x for x in groups if x]
|
||||
groupstr = ','.join(groups)
|
||||
if opt.platform == 'auto' and groupstr == self.manifest.GetDefaultGroupsStr():
|
||||
groupstr = None
|
||||
m.config.SetString('manifest.groups', groupstr)
|
||||
|
||||
if opt.reference:
|
||||
m.config.SetString('repo.reference', opt.reference)
|
||||
|
||||
if opt.dissociate:
|
||||
m.config.SetBoolean('repo.dissociate', opt.dissociate)
|
||||
|
||||
if opt.worktree:
|
||||
if opt.mirror:
|
||||
print('fatal: --mirror and --worktree are incompatible',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
if opt.submodules:
|
||||
print('fatal: --submodules and --worktree are incompatible',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
m.config.SetBoolean('repo.worktree', opt.worktree)
|
||||
if is_new:
|
||||
m.use_git_worktrees = True
|
||||
print('warning: --worktree is experimental!', file=sys.stderr)
|
||||
|
||||
if opt.archive:
|
||||
if is_new:
|
||||
m.config.SetBoolean('repo.archive', opt.archive)
|
||||
else:
|
||||
print('fatal: --archive is only supported when initializing a new '
|
||||
'workspace.', file=sys.stderr)
|
||||
print('Either delete the .repo folder in this workspace, or initialize '
|
||||
'in another location.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.mirror:
|
||||
if is_new:
|
||||
m.config.SetBoolean('repo.mirror', opt.mirror)
|
||||
else:
|
||||
print('fatal: --mirror is only supported when initializing a new '
|
||||
'workspace.', file=sys.stderr)
|
||||
print('Either delete the .repo folder in this workspace, or initialize '
|
||||
'in another location.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.partial_clone is not None:
|
||||
if opt.mirror:
|
||||
print('fatal: --mirror and --partial-clone are mutually exclusive',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
m.config.SetBoolean('repo.partialclone', opt.partial_clone)
|
||||
if opt.clone_filter:
|
||||
m.config.SetString('repo.clonefilter', opt.clone_filter)
|
||||
elif m.config.GetBoolean('repo.partialclone'):
|
||||
opt.clone_filter = m.config.GetString('repo.clonefilter')
|
||||
else:
|
||||
opt.clone_filter = None
|
||||
|
||||
if opt.partial_clone_exclude is not None:
|
||||
m.config.SetString('repo.partialcloneexclude', opt.partial_clone_exclude)
|
||||
|
||||
if opt.clone_bundle is None:
|
||||
opt.clone_bundle = False if opt.partial_clone else True
|
||||
else:
|
||||
m.config.SetBoolean('repo.clonebundle', opt.clone_bundle)
|
||||
|
||||
if opt.submodules:
|
||||
m.config.SetBoolean('repo.submodules', opt.submodules)
|
||||
|
||||
if opt.use_superproject is not None:
|
||||
m.config.SetBoolean('repo.superproject', opt.use_superproject)
|
||||
|
||||
if standalone_manifest:
|
||||
if is_new:
|
||||
manifest_name = 'default.xml'
|
||||
manifest_data = fetch.fetch_file(opt.manifest_url, verbose=opt.verbose)
|
||||
dest = os.path.join(m.worktree, manifest_name)
|
||||
os.makedirs(os.path.dirname(dest), exist_ok=True)
|
||||
with open(dest, 'wb') as f:
|
||||
f.write(manifest_data)
|
||||
return
|
||||
|
||||
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet, verbose=opt.verbose,
|
||||
clone_bundle=opt.clone_bundle,
|
||||
current_branch_only=opt.current_branch_only,
|
||||
tags=opt.tags, submodules=opt.submodules,
|
||||
clone_filter=opt.clone_filter,
|
||||
partial_clone_exclude=self.manifest.PartialCloneExclude):
|
||||
r = m.GetRemote(m.remote.name)
|
||||
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
|
||||
|
||||
# Better delete the manifest git dir if we created it; otherwise next
|
||||
# time (when user fixes problems) we won't go through the "is_new" logic.
|
||||
if is_new:
|
||||
platform_utils.rmtree(m.gitdir)
|
||||
sys.exit(1)
|
||||
|
||||
if opt.manifest_branch:
|
||||
m.MetaBranchSwitch(submodules=opt.submodules)
|
||||
|
||||
syncbuf = SyncBuffer(m.config)
|
||||
m.Sync_LocalHalf(syncbuf, submodules=opt.submodules)
|
||||
syncbuf.Finish()
|
||||
|
||||
if is_new or m.CurrentBranch is None:
|
||||
if not m.StartBranch('default'):
|
||||
print('fatal: cannot create default in manifest', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _LinkManifest(self, name):
|
||||
if not name:
|
||||
print('fatal: manifest name (-m) is required.', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
self.manifest.Link(name)
|
||||
except ManifestParseError as e:
|
||||
print("fatal: manifest '%s' not available" % name, file=sys.stderr)
|
||||
print('fatal: %s' % str(e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
  def _Prompt(self, prompt, value):
    print('%-10s [%s]: ' % (prompt, value), end='')
    # TODO: When we require Python 3, use flush=True w/print above.
    sys.stdout.flush()
    a = sys.stdin.readline().strip()
    if a == '':
      return value
    return a
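The prompt helper returns the stored value when the user just presses Enter. A standalone sketch of the same behavior, exercised with a fake stdin (the function name is made up; repo's real code is the _Prompt method above):

# Standalone sketch (not part of repo) of the "empty answer keeps the
# default" behavior of _Prompt, exercised with a fake stdin.
import io
import sys


def prompt_with_default(prompt, value, stdin=None):
    stdin = stdin or sys.stdin
    print('%-10s [%s]: ' % (prompt, value), end='', flush=True)
    answer = stdin.readline().strip()
    return value if answer == '' else answer


assert prompt_with_default('Your Name', 'Alice', io.StringIO('\n')) == 'Alice'
assert prompt_with_default('Your Name', 'Alice', io.StringIO('Bob\n')) == 'Bob'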
|
||||
|
||||
  def _ShouldConfigureUser(self, opt):
    gc = self.client.globalConfig
    mp = self.manifest.manifestProject

    # If we don't have local settings, get from global.
    if not mp.config.Has('user.name') or not mp.config.Has('user.email'):
      if not gc.Has('user.name') or not gc.Has('user.email'):
        return True

      mp.config.SetString('user.name', gc.GetString('user.name'))
      mp.config.SetString('user.email', gc.GetString('user.email'))

    if not opt.quiet:
      print()
      print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
                                           mp.config.GetString('user.email')))
      print("If you want to change this, please re-run 'repo init' with --config-name")
    return False
|
||||
|
||||
def _ConfigureUser(self, opt):
|
||||
mp = self.manifest.manifestProject
|
||||
|
||||
while True:
|
||||
if not opt.quiet:
|
||||
print()
|
||||
name = self._Prompt('Your Name', mp.UserName)
|
||||
email = self._Prompt('Your Email', mp.UserEmail)
|
||||
print(
|
||||
"repo %shas been initialized in %s"
|
||||
% (init_type, self.manifest.topdir)
|
||||
)
|
||||
|
||||
if not opt.quiet:
|
||||
print()
|
||||
print('Your identity is: %s <%s>' % (name, email))
|
||||
print('is this correct [y/N]? ', end='')
|
||||
# TODO: When we require Python 3, use flush=True w/print above.
|
||||
sys.stdout.flush()
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ('yes', 'y', 't', 'true'):
|
||||
break
|
||||
current_dir = os.getcwd()
|
||||
if current_dir != self.manifest.topdir:
|
||||
print(
|
||||
"If this is not the directory in which you want to initialize "
|
||||
"repo, please run:"
|
||||
)
|
||||
print(" rm -r %s" % os.path.join(self.manifest.topdir, ".repo"))
|
||||
print("and try again.")
|
||||
|
||||
if name != mp.UserName:
|
||||
mp.config.SetString('user.name', name)
|
||||
if email != mp.UserEmail:
|
||||
mp.config.SetString('user.email', email)
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.reference:
|
||||
opt.reference = os.path.expanduser(opt.reference)
|
||||
|
||||
  def _HasColorSet(self, gc):
    for n in ['ui', 'diff', 'status']:
      if gc.Has('color.%s' % n):
        return True
    return False
|
||||
# Check this here, else manifest will be tagged "not new" and init won't
|
||||
# be possible anymore without removing the .repo/manifests directory.
|
||||
if opt.mirror:
|
||||
if opt.archive:
|
||||
self.OptionParser.error(
|
||||
"--mirror and --archive cannot be used " "together."
|
||||
)
|
||||
if opt.use_superproject is not None:
|
||||
self.OptionParser.error(
|
||||
"--mirror and --use-superproject cannot be "
|
||||
"used together."
|
||||
)
|
||||
if opt.archive and opt.use_superproject is not None:
|
||||
self.OptionParser.error(
|
||||
"--archive and --use-superproject cannot be used " "together."
|
||||
)
|
||||
|
||||
def _ConfigureColor(self):
|
||||
gc = self.client.globalConfig
|
||||
if self._HasColorSet(gc):
|
||||
return
|
||||
if opt.standalone_manifest and (
|
||||
opt.manifest_branch or opt.manifest_name != "default.xml"
|
||||
):
|
||||
self.OptionParser.error(
|
||||
"--manifest-branch and --manifest-name cannot"
|
||||
" be used with --standalone-manifest."
|
||||
)
|
||||
|
||||
class _Test(Coloring):
|
||||
def __init__(self):
|
||||
Coloring.__init__(self, gc, 'test color display')
|
||||
self._on = True
|
||||
out = _Test()
|
||||
if args:
|
||||
if opt.manifest_url:
|
||||
self.OptionParser.error(
|
||||
"--manifest-url option and URL argument both specified: "
|
||||
"only use one to select the manifest URL."
|
||||
)
|
||||
|
||||
print()
|
||||
print("Testing colorized output (for 'repo diff', 'repo status'):")
|
||||
opt.manifest_url = args.pop(0)
|
||||
|
||||
for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']:
|
||||
out.write(' ')
|
||||
out.printer(fg=c)(' %-6s ', c)
|
||||
out.write(' ')
|
||||
out.printer(fg='white', bg='black')(' %s ' % 'white')
|
||||
out.nl()
|
||||
if args:
|
||||
self.OptionParser.error("too many arguments to init")
|
||||
|
||||
for c in ['bold', 'dim', 'ul', 'reverse']:
|
||||
out.write(' ')
|
||||
out.printer(fg='black', attr=c)(' %-6s ', c)
|
||||
out.nl()
|
||||
def Execute(self, opt, args):
|
||||
git_require(MIN_GIT_VERSION_HARD, fail=True)
|
||||
if not git_require(MIN_GIT_VERSION_SOFT):
|
||||
logger.warning(
|
||||
"repo: warning: git-%s+ will soon be required; "
|
||||
"please upgrade your version of git to maintain "
|
||||
"support.",
|
||||
".".join(str(x) for x in MIN_GIT_VERSION_SOFT),
|
||||
)
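git_require (defined in git_command.py) gates features on the installed git version by comparing version tuples. A rough sketch of that comparison; the constants shown are illustrative placeholders, not necessarily the values repo currently ships:

# Rough sketch of git_require-style gating: compare the installed git
# version tuple against a minimum.  The constants here are illustrative
# placeholders, not necessarily repo's current values.
MIN_GIT_VERSION_SOFT = (1, 9, 1)
MIN_GIT_VERSION_HARD = (1, 7, 2)


def meets_requirement(installed, minimum):
    return tuple(installed) >= tuple(minimum)


assert meets_requirement((2, 34, 1), MIN_GIT_VERSION_SOFT)
assert not meets_requirement((1, 8, 0), MIN_GIT_VERSION_SOFT)
assert meets_requirement((1, 8, 0), MIN_GIT_VERSION_HARD)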
|
||||
|
||||
print('Enable color display in this user account (y/N)? ', end='')
|
||||
# TODO: When we require Python 3, use flush=True w/print above.
|
||||
sys.stdout.flush()
|
||||
a = sys.stdin.readline().strip().lower()
|
||||
if a in ('y', 'yes', 't', 'true', 'on'):
|
||||
gc.SetString('color.ui', 'auto')
|
||||
rp = self.manifest.repoProject
|
||||
|
||||
def _ConfigureDepth(self, opt):
|
||||
"""Configure the depth we'll sync down.
|
||||
# Handle new --repo-url requests.
|
||||
if opt.repo_url:
|
||||
remote = rp.GetRemote("origin")
|
||||
remote.url = opt.repo_url
|
||||
remote.Save()
|
||||
|
||||
Args:
|
||||
opt: Options from optparse. We care about opt.depth.
|
||||
"""
|
||||
# Opt.depth will be non-None if user actually passed --depth to repo init.
|
||||
if opt.depth is not None:
|
||||
if opt.depth > 0:
|
||||
# Positive values will set the depth.
|
||||
depth = str(opt.depth)
|
||||
else:
|
||||
# Negative numbers will clear the depth; passing None to SetString
|
||||
# will do that.
|
||||
depth = None
|
||||
# Handle new --repo-rev requests.
|
||||
if opt.repo_rev:
|
||||
wrapper = Wrapper()
|
||||
try:
|
||||
remote_ref, rev = wrapper.check_repo_rev(
|
||||
rp.worktree,
|
||||
opt.repo_rev,
|
||||
repo_verify=opt.repo_verify,
|
||||
quiet=opt.quiet,
|
||||
)
|
||||
except wrapper.CloneFailure as e:
|
||||
err_msg = "fatal: double check your --repo-rev setting."
|
||||
logger.error(err_msg)
|
||||
self.git_event_log.ErrorEvent(err_msg)
|
||||
raise RepoUnhandledExceptionError(e)
|
||||
|
||||
# We store the depth in the main manifest project.
|
||||
self.manifest.manifestProject.config.SetString('repo.depth', depth)
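The depth handling maps the --depth flag onto the repo.depth config entry: positive values are stored as strings, zero or negative values clear the key. A small sketch of that mapping with a made-up helper name:

# Made-up helper showing how a --depth value maps onto the string stored
# in the repo.depth config key by the code above.
def depth_config_value(depth):
    if depth is None:
        # --depth not passed: caller leaves the config untouched.
        return None
    if depth > 0:
        return str(depth)
    # Zero or negative clears the setting; SetString(None) deletes the key.
    return None


assert depth_config_value(1) == "1"
assert depth_config_value(-1) is None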
|
||||
branch = rp.GetBranch("default")
|
||||
branch.merge = remote_ref
|
||||
rp.work_git.reset("--hard", rev)
|
||||
branch.Save()
|
||||
|
||||
def _DisplayResult(self, opt):
|
||||
if self.manifest.IsMirror:
|
||||
init_type = 'mirror '
|
||||
else:
|
||||
init_type = ''
|
||||
if opt.worktree:
|
||||
# Older versions of git supported worktree, but had dangerous gc
|
||||
# bugs.
|
||||
git_require((2, 15, 0), fail=True, msg="git gc worktree corruption")
|
||||
|
||||
if not opt.quiet:
|
||||
print()
|
||||
print('repo %shas been initialized in %s' %
|
||||
(init_type, self.manifest.topdir))
|
||||
# Provide a short notice that we're reinitializing an existing checkout.
|
||||
# Sometimes developers might not realize that they're in one, or that
|
||||
# repo doesn't do nested checkouts.
|
||||
existing_checkout = self.manifest.manifestProject.Exists
|
||||
if not opt.quiet and existing_checkout:
|
||||
print(
|
||||
"repo: reusing existing repo client checkout in",
|
||||
self.manifest.topdir,
|
||||
)
|
||||
|
||||
current_dir = os.getcwd()
|
||||
if current_dir != self.manifest.topdir:
|
||||
print('If this is not the directory in which you want to initialize '
|
||||
'repo, please run:')
|
||||
print(' rm -r %s/.repo' % self.manifest.topdir)
|
||||
print('and try again.')
|
||||
self._SyncManifest(opt)
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.reference:
|
||||
opt.reference = os.path.expanduser(opt.reference)
|
||||
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
|
||||
if opt.config_name or self._ShouldConfigureUser(
|
||||
opt, existing_checkout
|
||||
):
|
||||
self._ConfigureUser(opt)
|
||||
self._ConfigureColor()
|
||||
|
||||
# Check this here, else manifest will be tagged "not new" and init won't be
|
||||
# possible anymore without removing the .repo/manifests directory.
|
||||
if opt.mirror:
|
||||
if opt.archive:
|
||||
self.OptionParser.error('--mirror and --archive cannot be used '
|
||||
'together.')
|
||||
if opt.use_superproject is not None:
|
||||
self.OptionParser.error('--mirror and --use-superproject cannot be '
|
||||
'used together.')
|
||||
|
||||
if opt.standalone_manifest and (opt.manifest_branch or
|
||||
opt.manifest_name != 'default.xml'):
|
||||
self.OptionParser.error('--manifest-branch and --manifest-name cannot'
|
||||
' be used with --standalone-manifest.')
|
||||
|
||||
if args:
|
||||
if opt.manifest_url:
|
||||
self.OptionParser.error(
|
||||
'--manifest-url option and URL argument both specified: only use '
|
||||
'one to select the manifest URL.')
|
||||
|
||||
opt.manifest_url = args.pop(0)
|
||||
|
||||
if args:
|
||||
self.OptionParser.error('too many arguments to init')
|
||||
|
||||
def Execute(self, opt, args):
|
||||
git_require(MIN_GIT_VERSION_HARD, fail=True)
|
||||
if not git_require(MIN_GIT_VERSION_SOFT):
|
||||
print('repo: warning: git-%s+ will soon be required; please upgrade your '
|
||||
'version of git to maintain support.'
|
||||
% ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),),
|
||||
file=sys.stderr)
|
||||
|
||||
rp = self.manifest.repoProject
|
||||
|
||||
# Handle new --repo-url requests.
|
||||
if opt.repo_url:
|
||||
remote = rp.GetRemote('origin')
|
||||
remote.url = opt.repo_url
|
||||
remote.Save()
|
||||
|
||||
# Handle new --repo-rev requests.
|
||||
if opt.repo_rev:
|
||||
wrapper = Wrapper()
|
||||
remote_ref, rev = wrapper.check_repo_rev(
|
||||
rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet)
|
||||
branch = rp.GetBranch('default')
|
||||
branch.merge = remote_ref
|
||||
rp.work_git.reset('--hard', rev)
|
||||
branch.Save()
|
||||
|
||||
if opt.worktree:
|
||||
# Older versions of git supported worktree, but had dangerous gc bugs.
|
||||
git_require((2, 15, 0), fail=True, msg='git gc worktree corruption')
|
||||
|
||||
self._SyncManifest(opt)
|
||||
self._LinkManifest(opt.manifest_name)
|
||||
|
||||
if self.manifest.manifestProject.config.GetBoolean('repo.superproject'):
|
||||
self._CloneSuperproject(opt)
|
||||
|
||||
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
|
||||
if opt.config_name or self._ShouldConfigureUser(opt):
|
||||
self._ConfigureUser(opt)
|
||||
self._ConfigureColor()
|
||||
|
||||
self._DisplayResult(opt)
|
||||
if not opt.quiet:
|
||||
self._DisplayResult()
|
||||
|
||||
160  subcmds/list.py
@@ -14,17 +14,18 @@
|
||||
|
||||
import os
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
from command import Command
|
||||
from command import MirrorSafeCommand
|
||||
|
||||
|
||||
class List(Command, MirrorSafeCommand):
|
||||
COMMON = True
|
||||
helpSummary = "List projects and their associated directories"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "List projects and their associated directories"
|
||||
helpUsage = """
|
||||
%prog [-f] [<project>...]
|
||||
%prog [-f] -r str1 [str2]...
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
List all projects; pass '.' to list the project for the cwd.
|
||||
|
||||
By default, only projects that currently exist in the checkout are shown. If
|
||||
@@ -35,68 +36,103 @@ groups, then also pass --groups all.
|
||||
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-r', '--regex',
|
||||
dest='regex', action='store_true',
|
||||
help='filter the project list based on regex or wildcard matching of strings')
|
||||
p.add_option('-g', '--groups',
|
||||
dest='groups',
|
||||
help='filter the project list based on the groups the project is in')
|
||||
p.add_option('-a', '--all',
|
||||
action='store_true',
|
||||
help='show projects regardless of checkout state')
|
||||
p.add_option('-n', '--name-only',
|
||||
dest='name_only', action='store_true',
|
||||
help='display only the name of the repository')
|
||||
p.add_option('-p', '--path-only',
|
||||
dest='path_only', action='store_true',
|
||||
help='display only the path of the repository')
|
||||
p.add_option('-f', '--fullpath',
|
||||
dest='fullpath', action='store_true',
|
||||
help='display the full work tree path instead of the relative path')
|
||||
p.add_option('--relative-to', metavar='PATH',
|
||||
help='display paths relative to this one (default: top of repo client checkout)')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--regex",
|
||||
dest="regex",
|
||||
action="store_true",
|
||||
help="filter the project list based on regex or wildcard matching "
|
||||
"of strings",
|
||||
)
|
||||
p.add_option(
|
||||
"-g",
|
||||
"--groups",
|
||||
dest="groups",
|
||||
help="filter the project list based on the groups the project is "
|
||||
"in",
|
||||
)
|
||||
p.add_option(
|
||||
"-a",
|
||||
"--all",
|
||||
action="store_true",
|
||||
help="show projects regardless of checkout state",
|
||||
)
|
||||
p.add_option(
|
||||
"-n",
|
||||
"--name-only",
|
||||
dest="name_only",
|
||||
action="store_true",
|
||||
help="display only the name of the repository",
|
||||
)
|
||||
p.add_option(
|
||||
"-p",
|
||||
"--path-only",
|
||||
dest="path_only",
|
||||
action="store_true",
|
||||
help="display only the path of the repository",
|
||||
)
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--fullpath",
|
||||
dest="fullpath",
|
||||
action="store_true",
|
||||
help="display the full work tree path instead of the relative path",
|
||||
)
|
||||
p.add_option(
|
||||
"--relative-to",
|
||||
metavar="PATH",
|
||||
help="display paths relative to this one (default: top of repo "
|
||||
"client checkout)",
|
||||
)
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.fullpath and opt.name_only:
|
||||
self.OptionParser.error('cannot combine -f and -n')
|
||||
def ValidateOptions(self, opt, args):
|
||||
if opt.fullpath and opt.name_only:
|
||||
self.OptionParser.error("cannot combine -f and -n")
|
||||
|
||||
# Resolve any symlinks so the output is stable.
|
||||
if opt.relative_to:
|
||||
opt.relative_to = os.path.realpath(opt.relative_to)
|
||||
# Resolve any symlinks so the output is stable.
|
||||
if opt.relative_to:
|
||||
opt.relative_to = os.path.realpath(opt.relative_to)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
"""List all projects and the associated directories.
|
||||
def Execute(self, opt, args):
|
||||
"""List all projects and the associated directories.
|
||||
|
||||
This may be possible to do with 'repo forall', but repo newbies have
|
||||
trouble figuring that out. The idea here is that it should be more
|
||||
discoverable.
|
||||
This may be possible to do with 'repo forall', but repo newbies have
|
||||
trouble figuring that out. The idea here is that it should be more
|
||||
discoverable.
|
||||
|
||||
Args:
|
||||
opt: The options.
|
||||
args: Positional args. Can be a list of projects to list, or empty.
|
||||
"""
|
||||
if not opt.regex:
|
||||
projects = self.GetProjects(args, groups=opt.groups, missing_ok=opt.all)
|
||||
else:
|
||||
projects = self.FindProjects(args)
|
||||
Args:
|
||||
opt: The options.
|
||||
args: Positional args. Can be a list of projects to list, or empty.
|
||||
"""
|
||||
if not opt.regex:
|
||||
projects = self.GetProjects(
|
||||
args,
|
||||
groups=opt.groups,
|
||||
missing_ok=opt.all,
|
||||
all_manifests=not opt.this_manifest_only,
|
||||
)
|
||||
else:
|
||||
projects = self.FindProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
|
||||
    def _getpath(x):
      if opt.fullpath:
        return x.worktree
      if opt.relative_to:
        return os.path.relpath(x.worktree, opt.relative_to)
      return x.relpath

        def _getpath(x):
            if opt.fullpath:
                return x.worktree
            if opt.relative_to:
                return os.path.relpath(x.worktree, opt.relative_to)
            return x.RelPath(local=opt.this_manifest_only)

    lines = []
    for project in projects:
      if opt.name_only and not opt.path_only:
        lines.append("%s" % (project.name))
      elif opt.path_only and not opt.name_only:
        lines.append("%s" % (_getpath(project)))
      else:
        lines.append("%s : %s" % (_getpath(project), project.name))

        lines = []
        for project in projects:
            if opt.name_only and not opt.path_only:
                lines.append("%s" % (project.name))
            elif opt.path_only and not opt.name_only:
                lines.append("%s" % (_getpath(project)))
            else:
                lines.append(f"{_getpath(project)} : {project.name}")

    if lines:
      lines.sort()
      print('\n'.join(lines))

        if lines:
            lines.sort()
            print("\n".join(lines))
|
||||
|
||||
@@ -17,15 +17,19 @@ import os
|
||||
import sys
|
||||
|
||||
from command import PagedCommand
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class Manifest(PagedCommand):
|
||||
COMMON = False
|
||||
helpSummary = "Manifest inspection utility"
|
||||
helpUsage = """
|
||||
COMMON = False
|
||||
helpSummary = "Manifest inspection utility"
|
||||
helpUsage = """
|
||||
%prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r]
|
||||
"""
|
||||
_helpDescription = """
|
||||
_helpDescription = """
|
||||
|
||||
With the -o option, exports the current manifest for inspection.
|
||||
The manifest and (if present) local_manifests/ are combined
|
||||
@@ -40,83 +44,136 @@ when the manifest was generated. The 'dest-branch' attribute is set
|
||||
to indicate the remote ref to push changes to via 'repo upload'.
|
||||
"""
|
||||
|
||||
@property
|
||||
def helpDescription(self):
|
||||
helptext = self._helpDescription + '\n'
|
||||
r = os.path.dirname(__file__)
|
||||
r = os.path.dirname(r)
|
||||
with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd:
|
||||
for line in fd:
|
||||
helptext += line
|
||||
return helptext
|
||||
@property
|
||||
def helpDescription(self):
|
||||
helptext = self._helpDescription + "\n"
|
||||
r = os.path.dirname(__file__)
|
||||
r = os.path.dirname(r)
|
||||
with open(os.path.join(r, "docs", "manifest-format.md")) as fd:
|
||||
for line in fd:
|
||||
helptext += line
|
||||
return helptext
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-r', '--revision-as-HEAD',
|
||||
dest='peg_rev', action='store_true',
|
||||
help='save revisions as current HEAD')
|
||||
p.add_option('-m', '--manifest-name',
|
||||
help='temporary manifest to use for this sync', metavar='NAME.xml')
|
||||
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
|
||||
default=True, action='store_false',
|
||||
help='if in -r mode, do not write the upstream field '
|
||||
'(only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive)')
|
||||
p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch',
|
||||
default=True, action='store_false',
|
||||
help='if in -r mode, do not write the dest-branch field '
|
||||
'(only of use if the branch names for a sha1 manifest are '
|
||||
'sensitive)')
|
||||
p.add_option('--json', default=False, action='store_true',
|
||||
help='output manifest in JSON format (experimental)')
|
||||
p.add_option('--pretty', default=False, action='store_true',
|
||||
help='format output for humans to read')
|
||||
p.add_option('--no-local-manifests', default=False, action='store_true',
|
||||
dest='ignore_local_manifests', help='ignore local manifests')
|
||||
p.add_option('-o', '--output-file',
|
||||
dest='output_file',
|
||||
default='-',
|
||||
help='file to save the manifest to',
|
||||
metavar='-|NAME.xml')
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-r",
|
||||
"--revision-as-HEAD",
|
||||
dest="peg_rev",
|
||||
action="store_true",
|
||||
help="save revisions as current HEAD",
|
||||
)
|
||||
p.add_option(
|
||||
"-m",
|
||||
"--manifest-name",
|
||||
help="temporary manifest to use for this sync",
|
||||
metavar="NAME.xml",
|
||||
)
|
||||
p.add_option(
|
||||
"--suppress-upstream-revision",
|
||||
dest="peg_rev_upstream",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="if in -r mode, do not write the upstream field "
|
||||
"(only of use if the branch names for a sha1 manifest are "
|
||||
"sensitive)",
|
||||
)
|
||||
p.add_option(
|
||||
"--suppress-dest-branch",
|
||||
dest="peg_rev_dest_branch",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="if in -r mode, do not write the dest-branch field "
|
||||
"(only of use if the branch names for a sha1 manifest are "
|
||||
"sensitive)",
|
||||
)
|
||||
p.add_option(
|
||||
"--json",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="output manifest in JSON format (experimental)",
|
||||
)
|
||||
p.add_option(
|
||||
"--pretty",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="format output for humans to read",
|
||||
)
|
||||
p.add_option(
|
||||
"--no-local-manifests",
|
||||
default=False,
|
||||
action="store_true",
|
||||
dest="ignore_local_manifests",
|
||||
help="ignore local manifests",
|
||||
)
|
||||
p.add_option(
|
||||
"-o",
|
||||
"--output-file",
|
||||
dest="output_file",
|
||||
default="-",
|
||||
help="file to save the manifest to. (Filename prefix for "
|
||||
"multi-tree.)",
|
||||
metavar="-|NAME.xml",
|
||||
)
|
||||
|
||||
def _Output(self, opt):
|
||||
# If alternate manifest is specified, override the manifest file that we're using.
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name, False)
|
||||
def _Output(self, opt):
|
||||
# If alternate manifest is specified, override the manifest file that
|
||||
# we're using.
|
||||
if opt.manifest_name:
|
||||
self.manifest.Override(opt.manifest_name, False)
|
||||
|
||||
if opt.output_file == '-':
|
||||
fd = sys.stdout
|
||||
else:
|
||||
fd = open(opt.output_file, 'w')
|
||||
for manifest in self.ManifestList(opt):
|
||||
output_file = opt.output_file
|
||||
if output_file == "-":
|
||||
fd = sys.stdout
|
||||
else:
|
||||
if manifest.path_prefix:
|
||||
output_file = (
|
||||
f"{opt.output_file}:"
|
||||
f'{manifest.path_prefix.replace("/", "%2f")}'
|
||||
)
|
||||
fd = open(output_file, "w")
|
||||
|
||||
self.manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
|
||||
manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
|
||||
|
||||
if opt.json:
|
||||
print('warning: --json is experimental!', file=sys.stderr)
|
||||
doc = self.manifest.ToDict(peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch)
|
||||
if opt.json:
|
||||
logger.warning("warning: --json is experimental!")
|
||||
doc = manifest.ToDict(
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch,
|
||||
)
|
||||
|
||||
        json_settings = {
            # JSON style guide says Unicode characters are fully allowed.
            'ensure_ascii': False,
            # We use 2 space indent to match JSON style guide.
            'indent': 2 if opt.pretty else None,
            'separators': (',', ': ') if opt.pretty else (',', ':'),
            'sort_keys': True,
        }
|
||||
fd.write(json.dumps(doc, **json_settings))
|
||||
else:
|
||||
self.manifest.Save(fd,
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch)
|
||||
fd.close()
|
||||
if opt.output_file != '-':
|
||||
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
|
||||
json_settings = {
|
||||
# JSON style guide says Unicode characters are fully
|
||||
# allowed.
|
||||
"ensure_ascii": False,
|
||||
# We use 2 space indent to match JSON style guide.
|
||||
"indent": 2 if opt.pretty else None,
|
||||
"separators": (",", ": ") if opt.pretty else (",", ":"),
|
||||
"sort_keys": True,
|
||||
}
|
||||
fd.write(json.dumps(doc, **json_settings))
|
||||
else:
|
||||
manifest.Save(
|
||||
fd,
|
||||
peg_rev=opt.peg_rev,
|
||||
peg_rev_upstream=opt.peg_rev_upstream,
|
||||
peg_rev_dest_branch=opt.peg_rev_dest_branch,
|
||||
)
|
||||
if output_file != "-":
|
||||
fd.close()
|
||||
if manifest.path_prefix:
|
||||
logger.warning(
|
||||
"Saved %s submanifest to %s",
|
||||
manifest.path_prefix,
|
||||
output_file,
|
||||
)
|
||||
else:
|
||||
logger.warning("Saved manifest to %s", output_file)
|
||||
|
||||
def ValidateOptions(self, opt, args):
|
||||
if args:
|
||||
self.Usage()
|
||||
def ValidateOptions(self, opt, args):
|
||||
if args:
|
||||
self.Usage()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
self._Output(opt)
|
||||
def Execute(self, opt, args):
|
||||
self._Output(opt)
|
||||
|
||||
@@ -19,12 +19,12 @@ from command import PagedCommand
|
||||
|
||||
|
||||
class Overview(PagedCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Display overview of unmerged project branches"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Display overview of unmerged project branches"
|
||||
helpUsage = """
|
||||
%prog [--current-branch] [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command is used to display an overview of the projects branches,
|
||||
and list any local commits that have not yet been merged into the project.
|
||||
|
||||
@@ -33,59 +33,77 @@ branches currently checked out in each project. By default, all branches
|
||||
are displayed.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
p.add_option('-c', '--current-branch',
|
||||
dest="current_branch", action="store_true",
|
||||
help="consider only checked out branches")
|
||||
p.add_option('--no-current-branch',
|
||||
dest='current_branch', action='store_false',
|
||||
help='consider all local branches')
|
||||
# Turn this into a warning & remove this someday.
|
||||
p.add_option('-b',
|
||||
dest='current_branch', action='store_true',
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
def _Options(self, p):
|
||||
p.add_option(
|
||||
"-c",
|
||||
"--current-branch",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help="consider only checked out branches",
|
||||
)
|
||||
p.add_option(
|
||||
"--no-current-branch",
|
||||
dest="current_branch",
|
||||
action="store_false",
|
||||
help="consider all local branches",
|
||||
)
|
||||
# Turn this into a warning & remove this someday.
|
||||
p.add_option(
|
||||
"-b",
|
||||
dest="current_branch",
|
||||
action="store_true",
|
||||
help=optparse.SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(args):
|
||||
br = [project.GetUploadableBranch(x)
|
||||
for x in project.GetBranches()]
|
||||
br = [x for x in br if x]
|
||||
if opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
def Execute(self, opt, args):
|
||||
all_branches = []
|
||||
for project in self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
):
|
||||
br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
|
||||
br = [x for x in br if x]
|
||||
if opt.current_branch:
|
||||
br = [x for x in br if x.name == project.CurrentBranch]
|
||||
all_branches.extend(br)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
self.text = self.printer('text')
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
self.project = self.printer("header", attr="bold")
|
||||
self.text = self.printer("text")
|
||||
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.text("Deprecated. See repo info -o.")
|
||||
out.nl()
|
||||
out.project('Projects Overview')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.text("Deprecated. See repo info -o.")
|
||||
out.nl()
|
||||
out.project('project %s/' % project.relpath)
|
||||
out.project("Projects Overview")
|
||||
out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print('%s %-33s (%2d commit%s, %s)' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or ' ',
|
||||
date))
|
||||
for commit in commits:
|
||||
print('%-35s - %s' % ('', commit))
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
out.project(
|
||||
"project %s/"
|
||||
% project.RelPath(local=opt.this_manifest_only)
|
||||
)
|
||||
out.nl()
|
||||
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print(
|
||||
"%s %-33s (%2d commit%s, %s)"
|
||||
% (
|
||||
branch.name == project.CurrentBranch and "*" or " ",
|
||||
branch.name,
|
||||
len(commits),
|
||||
len(commits) != 1 and "s" or " ",
|
||||
date,
|
||||
)
|
||||
)
|
||||
for commit in commits:
|
||||
print("%-35s - %s" % ("", commit))
|
||||
|
||||
110  subcmds/prune.py
@@ -15,67 +15,79 @@
|
||||
import itertools
|
||||
|
||||
from color import Coloring
|
||||
from command import DEFAULT_LOCAL_JOBS, PagedCommand
|
||||
from command import DEFAULT_LOCAL_JOBS
|
||||
from command import PagedCommand
|
||||
|
||||
|
||||
class Prune(PagedCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Prune (delete) already merged topics"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Prune (delete) already merged topics"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
"""
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
|
||||
|
||||
def _ExecuteOne(self, project):
|
||||
"""Process one project."""
|
||||
return project.PruneHeads()
|
||||
def _ExecuteOne(self, project):
|
||||
"""Process one project."""
|
||||
return project.PruneHeads()
|
||||
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(args)
|
||||
def Execute(self, opt, args):
|
||||
projects = self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
|
||||
# NB: Should be able to refactor this module to display summary as results
|
||||
# come back from children.
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
return list(itertools.chain.from_iterable(results))
|
||||
# NB: Should be able to refactor this module to display summary as
|
||||
# results come back from children.
|
||||
def _ProcessResults(_pool, _output, results):
|
||||
return list(itertools.chain.from_iterable(results))
|
||||
|
||||
all_branches = self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
self._ExecuteOne,
|
||||
projects,
|
||||
callback=_ProcessResults,
|
||||
ordered=True)
|
||||
all_branches = self.ExecuteInParallel(
|
||||
opt.jobs,
|
||||
self._ExecuteOne,
|
||||
projects,
|
||||
callback=_ProcessResults,
|
||||
ordered=True,
|
||||
)
|
||||
|
||||
if not all_branches:
|
||||
return
|
||||
if not all_branches:
|
||||
return
|
||||
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'status')
|
||||
self.project = self.printer('header', attr='bold')
|
||||
class Report(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "status")
|
||||
self.project = self.printer("header", attr="bold")
|
||||
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.project('Pending Branches')
|
||||
out.nl()
|
||||
|
||||
project = None
|
||||
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
out.project('project %s/' % project.relpath)
|
||||
out = Report(all_branches[0].project.config)
|
||||
out.project("Pending Branches")
|
||||
out.nl()
|
||||
|
||||
print('%s %-33s ' % (
|
||||
branch.name == project.CurrentBranch and '*' or ' ',
|
||||
branch.name), end='')
|
||||
project = None
|
||||
|
||||
if not branch.base_exists:
|
||||
print('(ignoring: tracking branch is gone: %s)' % (branch.base,))
|
||||
else:
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print('(%2d commit%s, %s)' % (
|
||||
len(commits),
|
||||
len(commits) != 1 and 's' or ' ',
|
||||
date))
|
||||
for branch in all_branches:
|
||||
if project != branch.project:
|
||||
project = branch.project
|
||||
out.nl()
|
||||
out.project(
|
||||
"project %s/"
|
||||
% project.RelPath(local=opt.this_manifest_only)
|
||||
)
|
||||
out.nl()
|
||||
|
||||
print(
|
||||
"%s %-33s "
|
||||
% (
|
||||
branch.name == project.CurrentBranch and "*" or " ",
|
||||
branch.name,
|
||||
),
|
||||
end="",
|
||||
)
|
||||
|
||||
if not branch.base_exists:
|
||||
print(f"(ignoring: tracking branch is gone: {branch.base})")
|
||||
else:
|
||||
commits = branch.commits
|
||||
date = branch.date
|
||||
print(
|
||||
"(%2d commit%s, %s)"
|
||||
% (len(commits), len(commits) != 1 and "s" or " ", date)
|
||||
)
|
||||
|
||||
@@ -17,148 +17,198 @@ import sys
|
||||
from color import Coloring
|
||||
from command import Command
|
||||
from git_command import GitCommand
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class RebaseColoring(Coloring):
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, 'rebase')
|
||||
self.project = self.printer('project', attr='bold')
|
||||
self.fail = self.printer('fail', fg='red')
|
||||
def __init__(self, config):
|
||||
Coloring.__init__(self, config, "rebase")
|
||||
self.project = self.printer("project", attr="bold")
|
||||
self.fail = self.printer("fail", fg="red")
|
||||
|
||||
|
||||
class Rebase(Command):
|
||||
COMMON = True
|
||||
helpSummary = "Rebase local branches on upstream branch"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Rebase local branches on upstream branch"
|
||||
helpUsage = """
|
||||
%prog {[<project>...] | -i <project>...}
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
'%prog' uses git rebase to move local changes in the current topic branch to
|
||||
the HEAD of the upstream history, useful when you have made commits in a topic
|
||||
branch but need to incorporate new upstream changes "underneath" them.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
g = p.get_option_group('--quiet')
|
||||
g.add_option('-i', '--interactive',
|
||||
dest="interactive", action="store_true",
|
||||
help="interactive rebase (single project only)")
|
||||
def _Options(self, p):
|
||||
g = p.get_option_group("--quiet")
|
||||
g.add_option(
|
||||
"-i",
|
||||
"--interactive",
|
||||
dest="interactive",
|
||||
action="store_true",
|
||||
help="interactive rebase (single project only)",
|
||||
)
|
||||
|
||||
p.add_option('--fail-fast',
|
||||
dest='fail_fast', action='store_true',
|
||||
help='stop rebasing after first error is hit')
|
||||
p.add_option('-f', '--force-rebase',
|
||||
dest='force_rebase', action='store_true',
|
||||
help='pass --force-rebase to git rebase')
|
||||
p.add_option('--no-ff',
|
||||
dest='ff', default=True, action='store_false',
|
||||
help='pass --no-ff to git rebase')
|
||||
p.add_option('--autosquash',
|
||||
dest='autosquash', action='store_true',
|
||||
help='pass --autosquash to git rebase')
|
||||
p.add_option('--whitespace',
|
||||
dest='whitespace', action='store', metavar='WS',
|
||||
help='pass --whitespace to git rebase')
|
||||
p.add_option('--auto-stash',
|
||||
dest='auto_stash', action='store_true',
|
||||
help='stash local modifications before starting')
|
||||
p.add_option('-m', '--onto-manifest',
|
||||
dest='onto_manifest', action='store_true',
|
||||
help='rebase onto the manifest version instead of upstream '
|
||||
'HEAD (this helps to make sure the local tree stays '
|
||||
'consistent if you previously synced to a manifest)')
|
||||
p.add_option(
|
||||
"--fail-fast",
|
||||
dest="fail_fast",
|
||||
action="store_true",
|
||||
help="stop rebasing after first error is hit",
|
||||
)
|
||||
p.add_option(
|
||||
"-f",
|
||||
"--force-rebase",
|
||||
dest="force_rebase",
|
||||
action="store_true",
|
||||
help="pass --force-rebase to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--no-ff",
|
||||
dest="ff",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="pass --no-ff to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--autosquash",
|
||||
dest="autosquash",
|
||||
action="store_true",
|
||||
help="pass --autosquash to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--whitespace",
|
||||
dest="whitespace",
|
||||
action="store",
|
||||
metavar="WS",
|
||||
help="pass --whitespace to git rebase",
|
||||
)
|
||||
p.add_option(
|
||||
"--auto-stash",
|
||||
dest="auto_stash",
|
||||
action="store_true",
|
||||
help="stash local modifications before starting",
|
||||
)
|
||||
p.add_option(
|
||||
"-m",
|
||||
"--onto-manifest",
|
||||
dest="onto_manifest",
|
||||
action="store_true",
|
||||
help="rebase onto the manifest version instead of upstream "
|
||||
"HEAD (this helps to make sure the local tree stays "
|
||||
"consistent if you previously synced to a manifest)",
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(args)
|
||||
one_project = len(all_projects) == 1
|
||||
def Execute(self, opt, args):
|
||||
all_projects = self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
one_project = len(all_projects) == 1
|
||||
|
||||
if opt.interactive and not one_project:
|
||||
print('error: interactive rebase not supported with multiple projects',
|
||||
file=sys.stderr)
|
||||
if len(args) == 1:
|
||||
print('note: project %s is mapped to more than one path' % (args[0],),
|
||||
file=sys.stderr)
|
||||
return 1
|
||||
if opt.interactive and not one_project:
|
||||
logger.error(
|
||||
"error: interactive rebase not supported with multiple projects"
|
||||
)
|
||||
|
||||
# Setup the common git rebase args that we use for all projects.
|
||||
common_args = ['rebase']
|
||||
if opt.whitespace:
|
||||
common_args.append('--whitespace=%s' % opt.whitespace)
|
||||
if opt.quiet:
|
||||
common_args.append('--quiet')
|
||||
if opt.force_rebase:
|
||||
common_args.append('--force-rebase')
|
||||
if not opt.ff:
|
||||
common_args.append('--no-ff')
|
||||
if opt.autosquash:
|
||||
common_args.append('--autosquash')
|
||||
if opt.interactive:
|
||||
common_args.append('-i')
|
||||
if len(args) == 1:
|
||||
logger.warning(
|
||||
"note: project %s is mapped to more than one path", args[0]
|
||||
)
|
||||
|
||||
config = self.manifest.manifestProject.config
|
||||
out = RebaseColoring(config)
|
||||
out.redirect(sys.stdout)
|
||||
return 1
|
||||
|
||||
ret = 0
|
||||
for project in all_projects:
|
||||
if ret and opt.fail_fast:
|
||||
break
|
||||
# Setup the common git rebase args that we use for all projects.
|
||||
common_args = ["rebase"]
|
||||
if opt.whitespace:
|
||||
common_args.append("--whitespace=%s" % opt.whitespace)
|
||||
if opt.quiet:
|
||||
common_args.append("--quiet")
|
||||
if opt.force_rebase:
|
||||
common_args.append("--force-rebase")
|
||||
if not opt.ff:
|
||||
common_args.append("--no-ff")
|
||||
if opt.autosquash:
|
||||
common_args.append("--autosquash")
|
||||
if opt.interactive:
|
||||
common_args.append("-i")
|
||||
|
||||
      cb = project.CurrentBranch
      if not cb:
        if one_project:
          print("error: project %s has a detached HEAD" % project.relpath,
                file=sys.stderr)
          return 1
        # ignore branches with detached HEADs
        continue
|
||||
config = self.manifest.manifestProject.config
|
||||
out = RebaseColoring(config)
|
||||
out.redirect(sys.stdout)
|
||||
_RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
|
||||
|
||||
upbranch = project.GetBranch(cb)
|
||||
if not upbranch.LocalMerge:
|
||||
if one_project:
|
||||
print("error: project %s does not track any remote branches"
|
||||
% project.relpath, file=sys.stderr)
|
||||
return 1
|
||||
# ignore branches without remotes
|
||||
continue
|
||||
ret = 0
|
||||
for project in all_projects:
|
||||
if ret and opt.fail_fast:
|
||||
break
|
||||
|
||||
args = common_args[:]
|
||||
if opt.onto_manifest:
|
||||
args.append('--onto')
|
||||
args.append(project.revisionExpr)
|
||||
cb = project.CurrentBranch
|
||||
if not cb:
|
||||
if one_project:
|
||||
logger.error(
|
||||
"error: project %s has a detached HEAD",
|
||||
_RelPath(project),
|
||||
)
|
||||
return 1
|
||||
# Ignore branches with detached HEADs.
|
||||
continue
|
||||
|
||||
args.append(upbranch.LocalMerge)
|
||||
upbranch = project.GetBranch(cb)
|
||||
if not upbranch.LocalMerge:
|
||||
if one_project:
|
||||
logger.error(
|
||||
"error: project %s does not track any remote branches",
|
||||
_RelPath(project),
|
||||
)
|
||||
return 1
|
||||
# Ignore branches without remotes.
|
||||
continue
|
||||
|
||||
out.project('project %s: rebasing %s -> %s',
|
||||
project.relpath, cb, upbranch.LocalMerge)
|
||||
out.nl()
|
||||
out.flush()
|
||||
args = common_args[:]
|
||||
if opt.onto_manifest:
|
||||
args.append("--onto")
|
||||
args.append(project.revisionExpr)
|
||||
|
||||
needs_stash = False
|
||||
if opt.auto_stash:
|
||||
stash_args = ["update-index", "--refresh", "-q"]
|
||||
args.append(upbranch.LocalMerge)
|
||||
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
needs_stash = True
|
||||
# Dirty index, requires stash...
|
||||
stash_args = ["stash"]
|
||||
out.project(
|
||||
"project %s: rebasing %s -> %s",
|
||||
_RelPath(project),
|
||||
cb,
|
||||
upbranch.LocalMerge,
|
||||
)
|
||||
out.nl()
|
||||
out.flush()
|
||||
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
ret += 1
|
||||
continue
|
||||
needs_stash = False
|
||||
if opt.auto_stash:
|
||||
stash_args = ["update-index", "--refresh", "-q"]
|
||||
|
||||
if GitCommand(project, args).Wait() != 0:
|
||||
ret += 1
|
||||
continue
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
needs_stash = True
|
||||
# Dirty index, requires stash...
|
||||
stash_args = ["stash"]
|
||||
|
||||
if needs_stash:
|
||||
stash_args.append('pop')
|
||||
stash_args.append('--quiet')
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
ret += 1
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
ret += 1
|
||||
continue
|
||||
|
||||
if ret:
|
||||
out.fail('%i projects had errors', ret)
|
||||
out.nl()
|
||||
if GitCommand(project, args).Wait() != 0:
|
||||
ret += 1
|
||||
continue
|
||||
|
||||
return ret
|
||||
if needs_stash:
|
||||
stash_args.append("pop")
|
||||
stash_args.append("--quiet")
|
||||
if GitCommand(project, stash_args).Wait() != 0:
|
||||
ret += 1
|
||||
|
||||
if ret:
|
||||
msg_fmt = "%d projects had errors"
|
||||
self.git_event_log.ErrorEvent(msg_fmt % (ret), msg_fmt)
|
||||
out.fail(msg_fmt, ret)
|
||||
out.nl()
|
||||
|
||||
return ret
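The option flags translate one-for-one into git rebase arguments before any project is processed. A sketch of that assembly driven by a stand-in options object rather than repo's optparse values:

# Sketch of the common_args assembly above, driven by a stand-in options
# object instead of repo's real optparse values.
from types import SimpleNamespace


def rebase_args(opt):
    args = ["rebase"]
    if opt.whitespace:
        args.append("--whitespace=%s" % opt.whitespace)
    if opt.quiet:
        args.append("--quiet")
    if opt.force_rebase:
        args.append("--force-rebase")
    if not opt.ff:
        args.append("--no-ff")
    if opt.autosquash:
        args.append("--autosquash")
    if opt.interactive:
        args.append("-i")
    return args


opt = SimpleNamespace(whitespace="fix", quiet=False, force_rebase=True,
                      ff=True, autosquash=False, interactive=False)
assert rebase_args(opt) == ["rebase", "--whitespace=fix", "--force-rebase"]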
|
||||
|
||||
@@ -12,21 +12,30 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from optparse import SUPPRESS_HELP
|
||||
import sys
|
||||
import optparse
|
||||
|
||||
from command import Command, MirrorSafeCommand
|
||||
from subcmds.sync import _PostRepoUpgrade
|
||||
from command import Command
|
||||
from command import MirrorSafeCommand
|
||||
from error import RepoExitError
|
||||
from repo_logging import RepoLogger
|
||||
from subcmds.sync import _PostRepoFetch
|
||||
from subcmds.sync import _PostRepoUpgrade
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class SelfupdateError(RepoExitError):
|
||||
"""Exit error for failed selfupdate command."""
|
||||
|
||||
|
||||
class Selfupdate(Command, MirrorSafeCommand):
|
||||
COMMON = False
|
||||
helpSummary = "Update repo to the latest version"
|
||||
helpUsage = """
|
||||
COMMON = False
|
||||
helpSummary = "Update repo to the latest version"
|
||||
helpUsage = """
|
||||
%prog
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command upgrades repo to the latest version, if a
|
||||
newer version is available.
|
||||
|
||||
@@ -34,28 +43,34 @@ Normally this is done automatically by 'repo sync' and does not
|
||||
need to be performed by an end-user.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
g = p.add_option_group('repo Version options')
|
||||
g.add_option('--no-repo-verify',
|
||||
dest='repo_verify', default=True, action='store_false',
|
||||
help='do not verify repo source code')
|
||||
g.add_option('--repo-upgraded',
|
||||
dest='repo_upgraded', action='store_true',
|
||||
help=SUPPRESS_HELP)
|
||||
def _Options(self, p):
|
||||
g = p.add_option_group("repo Version options")
|
||||
g.add_option(
|
||||
"--no-repo-verify",
|
||||
dest="repo_verify",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="do not verify repo source code",
|
||||
)
|
||||
g.add_option(
|
||||
"--repo-upgraded",
|
||||
dest="repo_upgraded",
|
||||
action="store_true",
|
||||
help=optparse.SUPPRESS_HELP,
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
def Execute(self, opt, args):
|
||||
rp = self.manifest.repoProject
|
||||
rp.PreSync()
|
||||
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest)
|
||||
if opt.repo_upgraded:
|
||||
_PostRepoUpgrade(self.manifest)
|
||||
|
||||
else:
|
||||
if not rp.Sync_NetworkHalf():
|
||||
print("error: can't update repo", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
result = rp.Sync_NetworkHalf()
|
||||
if result.error:
|
||||
logger.error("error: can't update repo")
|
||||
raise SelfupdateError(aggregate_errors=[result.error])
|
||||
|
||||
rp.bare_git.gc('--auto')
|
||||
_PostRepoFetch(rp,
|
||||
repo_verify=opt.repo_verify,
|
||||
verbose=True)
|
||||
rp.bare_git.gc("--auto")
|
||||
_PostRepoFetch(rp, repo_verify=opt.repo_verify, verbose=True)
|
||||
|
||||
@@ -16,18 +16,18 @@ from subcmds.sync import Sync
|
||||
|
||||
|
||||
class Smartsync(Sync):
|
||||
COMMON = True
|
||||
helpSummary = "Update working tree to the latest known good revision"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Update working tree to the latest known good revision"
|
||||
helpUsage = """
|
||||
%prog [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command is a shortcut for sync -s.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
Sync._Options(self, p, show_smart=False)
|
||||
def _Options(self, p):
|
||||
Sync._Options(self, p, show_smart=False)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
opt.smart_sync = True
|
||||
Sync.Execute(self, opt, args)
|
||||
def Execute(self, opt, args):
|
||||
opt.smart_sync = True
|
||||
Sync.Execute(self, opt, args)
|
||||
|
||||
159  subcmds/stage.py
@@ -17,95 +17,118 @@ import sys
|
||||
from color import Coloring
|
||||
from command import InteractiveCommand
|
||||
from git_command import GitCommand
|
||||
from repo_logging import RepoLogger
|
||||
|
||||
|
||||
logger = RepoLogger(__file__)
|
||||
|
||||
|
||||
class _ProjectList(Coloring):
|
||||
def __init__(self, gc):
|
||||
Coloring.__init__(self, gc, 'interactive')
|
||||
self.prompt = self.printer('prompt', fg='blue', attr='bold')
|
||||
self.header = self.printer('header', attr='bold')
|
||||
self.help = self.printer('help', fg='red', attr='bold')
|
||||
def __init__(self, gc):
|
||||
Coloring.__init__(self, gc, "interactive")
|
||||
self.prompt = self.printer("prompt", fg="blue", attr="bold")
|
||||
self.header = self.printer("header", attr="bold")
|
||||
self.help = self.printer("help", fg="red", attr="bold")
|
||||
|
||||
|
||||
class Stage(InteractiveCommand):
|
||||
COMMON = True
|
||||
helpSummary = "Stage file(s) for commit"
|
||||
helpUsage = """
|
||||
COMMON = True
|
||||
helpSummary = "Stage file(s) for commit"
|
||||
helpUsage = """
|
||||
%prog -i [<project>...]
|
||||
"""
|
||||
helpDescription = """
|
||||
helpDescription = """
|
||||
The '%prog' command stages files to prepare the next commit.
|
||||
"""
|
||||
|
||||
def _Options(self, p):
|
||||
g = p.get_option_group('--quiet')
|
||||
g.add_option('-i', '--interactive',
|
||||
dest='interactive', action='store_true',
|
||||
help='use interactive staging')
|
||||
def _Options(self, p):
|
||||
g = p.get_option_group("--quiet")
|
||||
g.add_option(
|
||||
"-i",
|
||||
"--interactive",
|
||||
dest="interactive",
|
||||
action="store_true",
|
||||
help="use interactive staging",
|
||||
)
|
||||
|
||||
def Execute(self, opt, args):
|
||||
if opt.interactive:
|
||||
self._Interactive(opt, args)
|
||||
else:
|
||||
self.Usage()
|
||||
def Execute(self, opt, args):
|
||||
if opt.interactive:
|
||||
self._Interactive(opt, args)
|
||||
else:
|
||||
self.Usage()
|
||||
|
||||
def _Interactive(self, opt, args):
|
||||
all_projects = [p for p in self.GetProjects(args) if p.IsDirty()]
|
||||
if not all_projects:
|
||||
print('no projects have uncommitted modifications', file=sys.stderr)
|
||||
return
|
||||
def _Interactive(self, opt, args):
|
||||
all_projects = [
|
||||
p
|
||||
for p in self.GetProjects(
|
||||
args, all_manifests=not opt.this_manifest_only
|
||||
)
|
||||
if p.IsDirty()
|
||||
]
|
||||
if not all_projects:
|
||||
logger.error("no projects have uncommitted modifications")
|
||||
return
|
||||
|
||||
out = _ProjectList(self.manifest.manifestProject.config)
|
||||
while True:
|
||||
out.header(' %s', 'project')
|
||||
out.nl()
|
||||
out = _ProjectList(self.manifest.manifestProject.config)
|
||||
while True:
|
||||
out.header(" %s", "project")
|
||||
out.nl()
|
||||
|
||||
for i in range(len(all_projects)):
|
||||
project = all_projects[i]
|
||||
out.write('%3d: %s', i + 1, project.relpath + '/')
|
||||
out.nl()
|
||||
out.nl()
|
||||
for i in range(len(all_projects)):
|
||||
project = all_projects[i]
|
||||
out.write(
|
||||
"%3d: %s",
|
||||
i + 1,
|
||||
project.RelPath(local=opt.this_manifest_only) + "/",
|
||||
)
|
||||
out.nl()
|
||||
out.nl()
|
||||
|
||||
out.write('%3d: (', 0)
|
||||
out.prompt('q')
|
||||
out.write('uit)')
|
||||
out.nl()
|
||||
out.write("%3d: (", 0)
|
||||
out.prompt("q")
|
||||
out.write("uit)")
|
||||
out.nl()
|
||||
|
||||
out.prompt('project> ')
|
||||
try:
|
||||
a = sys.stdin.readline()
|
||||
except KeyboardInterrupt:
|
||||
out.nl()
|
||||
break
|
||||
if a == '':
|
||||
out.nl()
|
||||
break
|
||||
out.prompt("project> ")
|
||||
out.flush()
|
||||
try:
|
||||
a = sys.stdin.readline()
|
||||
except KeyboardInterrupt:
|
||||
out.nl()
|
||||
break
|
||||
if a == "":
|
||||
out.nl()
|
||||
break
|
||||
|
||||
a = a.strip()
|
||||
if a.lower() in ('q', 'quit', 'exit'):
|
||||
break
|
||||
if not a:
|
||||
continue
|
||||
a = a.strip()
|
||||
if a.lower() in ("q", "quit", "exit"):
|
||||
break
|
||||
if not a:
|
||||
continue
|
||||
|
||||
try:
|
||||
a_index = int(a)
|
||||
except ValueError:
|
||||
a_index = None
|
||||
try:
|
||||
a_index = int(a)
|
||||
except ValueError:
|
||||
a_index = None
|
||||
|
||||
if a_index is not None:
|
||||
if a_index == 0:
|
||||
break
|
||||
if 0 < a_index and a_index <= len(all_projects):
|
||||
_AddI(all_projects[a_index - 1])
|
||||
continue
|
||||
if a_index is not None:
|
||||
if a_index == 0:
|
||||
break
|
||||
if 0 < a_index and a_index <= len(all_projects):
|
||||
_AddI(all_projects[a_index - 1])
|
||||
continue
|
||||
|
||||
projects = [p for p in all_projects if a in [p.name, p.relpath]]
|
||||
if len(projects) == 1:
|
||||
_AddI(projects[0])
|
||||
continue
|
||||
print('Bye.')
|
||||
projects = [
|
||||
p
|
||||
for p in all_projects
|
||||
if a in [p.name, p.RelPath(local=opt.this_manifest_only)]
|
||||
]
|
||||
if len(projects) == 1:
|
||||
_AddI(projects[0])
|
||||
continue
|
||||
print("Bye.")
|
||||
|
||||
|
||||
def _AddI(project):
|
||||
p = GitCommand(project, ['add', '--interactive'], bare=False)
|
||||
p.Wait()
|
||||
p = GitCommand(project, ["add", "--interactive"], bare=False)
|
||||
p.Wait()
|
||||
|
||||
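The _Interactive method above is a numbered-menu loop: list the dirty projects, read a selection from stdin, and accept either an index, a project name, or q/quit/exit/0 to leave. A standalone sketch of that loop, with a hypothetical pick() helper and plain strings standing in for repo Project objects:

# Standalone sketch of the selection loop used by `repo stage -i` above.
# Purely illustrative; no repo internals are involved.
import sys


def pick(items):
    while True:
        for i, name in enumerate(items):
            print("%3d: %s" % (i + 1, name))
        print("%3d: (quit)" % 0)
        sys.stdout.write("project> ")
        sys.stdout.flush()
        line = sys.stdin.readline()
        if line == "":  # EOF (e.g. Ctrl-D)
            return None
        choice = line.strip()
        if choice.lower() in ("q", "quit", "exit"):
            return None
        if not choice:
            continue
        try:
            index = int(choice)
        except ValueError:
            index = None
        if index is not None:
            if index == 0:
                return None
            if 0 < index <= len(items):
                return items[index - 1]
            continue  # out-of-range number: show the menu again
        matches = [name for name in items if name == choice]
        if len(matches) == 1:
            return matches[0]


if __name__ == "__main__":
    print("picked:", pick(["project/a", "project/b"]))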
subcmds/start.py (218 lines changed)

@@ -13,130 +13,136 @@
 # limitations under the License.

 import functools
 import os
 import sys
+from typing import NamedTuple

-from command import Command, DEFAULT_LOCAL_JOBS
-from git_config import IsImmutable
+from command import Command
+from command import DEFAULT_LOCAL_JOBS
+from error import RepoExitError
 from git_command import git
-import gitc_utils
+from git_config import IsImmutable
 from progress import Progress
-from project import SyncBuffer
+from project import Project
+from repo_logging import RepoLogger


+logger = RepoLogger(__file__)


+class ExecuteOneResult(NamedTuple):
+    project: Project
+    error: Exception


+class StartError(RepoExitError):
+    """Exit error for failed start command."""


 class Start(Command):
-  COMMON = True
-  helpSummary = "Start a new branch for development"
-  helpUsage = """
+    COMMON = True
+    helpSummary = "Start a new branch for development"
+    helpUsage = """
 %prog <newbranchname> [--all | <project>...]
 """
-  helpDescription = """
+    helpDescription = """
 '%prog' begins a new branch of development, starting from the
 revision specified in the manifest.
 """
-  PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+    PARALLEL_JOBS = DEFAULT_LOCAL_JOBS

-  def _Options(self, p):
-    p.add_option('--all',
-                 dest='all', action='store_true',
-                 help='begin branch in all projects')
-    p.add_option('-r', '--rev', '--revision', dest='revision',
-                 help='point branch at this revision instead of upstream')
-    p.add_option('--head', '--HEAD',
-                 dest='revision', action='store_const', const='HEAD',
-                 help='abbreviation for --rev HEAD')
+    def _Options(self, p):
+        p.add_option(
+            "--all",
+            dest="all",
+            action="store_true",
+            help="begin branch in all projects",
+        )
+        p.add_option(
+            "-r",
+            "--rev",
+            "--revision",
+            dest="revision",
+            help="point branch at this revision instead of upstream",
+        )
+        p.add_option(
+            "--head",
+            "--HEAD",
+            dest="revision",
+            action="store_const",
+            const="HEAD",
+            help="abbreviation for --rev HEAD",
+        )

-  def ValidateOptions(self, opt, args):
-    if not args:
-      self.Usage()
+    def ValidateOptions(self, opt, args):
+        if not args:
+            self.Usage()

-    nb = args[0]
-    if not git.check_ref_format('heads/%s' % nb):
-      self.OptionParser.error("'%s' is not a valid name" % nb)
+        nb = args[0]
+        if not git.check_ref_format("heads/%s" % nb):
+            self.OptionParser.error("'%s' is not a valid name" % nb)

-  def _ExecuteOne(self, revision, nb, project):
-    """Start one project."""
-    # If the current revision is immutable, such as a SHA1, a tag or
-    # a change, then we can't push back to it. Substitute with
-    # dest_branch, if defined; or with manifest default revision instead.
-    branch_merge = ''
-    if IsImmutable(project.revisionExpr):
-      if project.dest_branch:
-        branch_merge = project.dest_branch
-      else:
-        branch_merge = self.manifest.default.revisionExpr
+    def _ExecuteOne(self, revision, nb, project):
+        """Start one project."""
+        # If the current revision is immutable, such as a SHA1, a tag or
+        # a change, then we can't push back to it. Substitute with
+        # dest_branch, if defined; or with manifest default revision instead.
+        branch_merge = ""
+        error = None
+        if IsImmutable(project.revisionExpr):
+            if project.dest_branch:
+                branch_merge = project.dest_branch
+            else:
+                branch_merge = self.manifest.default.revisionExpr

-    try:
-      ret = project.StartBranch(
-          nb, branch_merge=branch_merge, revision=revision)
-    except Exception as e:
-      print('error: unable to checkout %s: %s' % (project.name, e), file=sys.stderr)
-      ret = False
-    return (ret, project)
+        try:
+            project.StartBranch(
+                nb, branch_merge=branch_merge, revision=revision
+            )
+        except Exception as e:
+            logger.error("error: unable to checkout %s: %s", project.name, e)
+            error = e
+        return ExecuteOneResult(project, error)

-  def Execute(self, opt, args):
-    nb = args[0]
-    err = []
-    projects = []
-    if not opt.all:
-      projects = args[1:]
-      if len(projects) < 1:
-        projects = ['.']  # start it in the local project by default
+    def Execute(self, opt, args):
+        nb = args[0]
+        err_projects = []
+        err = []
+        projects = []
+        if not opt.all:
+            projects = args[1:]
+            if len(projects) < 1:
+                projects = ["."]  # start it in the local project by default

-    all_projects = self.GetProjects(projects,
-                                    missing_ok=bool(self.gitc_manifest))
+        all_projects = self.GetProjects(
+            projects,
+            all_manifests=not opt.this_manifest_only,
+        )

-    # This must happen after we find all_projects, since GetProjects may need
-    # the local directory, which will disappear once we save the GITC manifest.
-    if self.gitc_manifest:
-      gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest,
-                                       missing_ok=True)
-      for project in gitc_projects:
-        if project.old_revision:
-          project.already_synced = True
-        else:
-          project.already_synced = False
-          project.old_revision = project.revisionExpr
-        project.revisionExpr = None
-      # Save the GITC manifest.
-      gitc_utils.save_manifest(self.gitc_manifest)
+        def _ProcessResults(_pool, pm, results):
+            for result in results:
+                if result.error:
+                    err_projects.append(result.project)
+                    err.append(result.error)
+                pm.update(msg="")

         # Make sure we have a valid CWD
         if not os.path.exists(os.getcwd()):
             os.chdir(self.manifest.topdir)

+        self.ExecuteInParallel(
+            opt.jobs,
+            functools.partial(self._ExecuteOne, opt.revision, nb),
+            all_projects,
+            callback=_ProcessResults,
+            output=Progress(
+                f"Starting {nb}", len(all_projects), quiet=opt.quiet
+            ),
+        )

-      pm = Progress('Syncing %s' % nb, len(all_projects), quiet=opt.quiet)
-      for project in all_projects:
-        gitc_project = self.gitc_manifest.paths[project.relpath]
-        # Sync projects that have not been opened.
-        if not gitc_project.already_synced:
-          proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir,
-                                       project.relpath)
-          project.worktree = proj_localdir
-          if not os.path.exists(proj_localdir):
-            os.makedirs(proj_localdir)
-          project.Sync_NetworkHalf()
-          sync_buf = SyncBuffer(self.manifest.manifestProject.config)
-          project.Sync_LocalHalf(sync_buf)
-          project.revisionId = gitc_project.old_revision
-        pm.update()
-      pm.end()

-    def _ProcessResults(_pool, pm, results):
-      for (result, project) in results:
-        if not result:
-          err.append(project)
-        pm.update()

-    self.ExecuteInParallel(
-        opt.jobs,
-        functools.partial(self._ExecuteOne, opt.revision, nb),
-        all_projects,
-        callback=_ProcessResults,
-        output=Progress('Starting %s' % (nb,), len(all_projects), quiet=opt.quiet))

-    if err:
-      for p in err:
-        print("error: %s/: cannot start %s" % (p.relpath, nb),
-              file=sys.stderr)
-      sys.exit(1)
+        if err_projects:
+            for p in err_projects:
+                logger.error(
+                    "error: %s/: cannot start %s",
+                    p.RelPath(local=opt.this_manifest_only),
+                    nb,
+                )
+            msg_fmt = "cannot start %d project(s)"
+            self.git_event_log.ErrorEvent(
+                msg_fmt % (len(err_projects)), msg_fmt
+            )
+            raise StartError(aggregate_errors=err)
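The rewritten start command collects per-project failures instead of exiting on the first error: each worker returns a small NamedTuple carrying the project and an optional exception, and the caller aggregates those results before deciding to raise. A rough sketch of that pattern, assuming a plain ThreadPoolExecutor and invented helper names in place of repo's ExecuteInParallel and Project types:

# Sketch of the result/error-aggregation pattern used by the new start.py.
# Names (OneResult, start_branch, main) are illustrative only.
from concurrent.futures import ThreadPoolExecutor
from typing import NamedTuple, Optional


class OneResult(NamedTuple):
    name: str
    error: Optional[Exception]


def start_branch(name):
    try:
        if name.endswith("broken"):
            raise RuntimeError("cannot checkout")
        return OneResult(name, None)
    except Exception as e:  # never let one project abort the whole run
        return OneResult(name, e)


def main(projects):
    failed = []
    with ThreadPoolExecutor() as pool:
        for result in pool.map(start_branch, projects):
            if result.error:
                failed.append(result)
    for r in failed:
        print(f"error: {r.name}/: cannot start branch: {r.error}")
    return 1 if failed else 0


if __name__ == "__main__":
    raise SystemExit(main(["app/ok", "lib/broken"]))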
subcmds/status.py

@@ -17,19 +17,19 @@ import glob
 import io
 import os

-from command import DEFAULT_LOCAL_JOBS, PagedCommand

 from color import Coloring
+from command import DEFAULT_LOCAL_JOBS
+from command import PagedCommand
 import platform_utils


 class Status(PagedCommand):
-  COMMON = True
-  helpSummary = "Show the working tree status"
-  helpUsage = """
+    COMMON = True
+    helpSummary = "Show the working tree status"
+    helpUsage = """
 %prog [<project>...]
 """
-  helpDescription = """
+    helpDescription = """
 '%prog' compares the working tree to the staging area (aka index),
 and the most recent commit on this branch (HEAD), in each project
 specified. A summary is displayed, one line per file where there
@@ -76,104 +76,128 @@ the following meanings:
  d: deleted ( in index, not in work tree )

 """
-  PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
+    PARALLEL_JOBS = DEFAULT_LOCAL_JOBS

-  def _Options(self, p):
-    p.add_option('-o', '--orphans',
-                 dest='orphans', action='store_true',
-                 help="include objects in working directory outside of repo projects")
+    def _Options(self, p):
+        p.add_option(
+            "-o",
+            "--orphans",
+            dest="orphans",
+            action="store_true",
+            help="include objects in working directory outside of repo "
+            "projects",
+        )

-  def _StatusHelper(self, quiet, project):
-    """Obtains the status for a specific project.
+    def _StatusHelper(self, quiet, local, project):
+        """Obtains the status for a specific project.

-    Obtains the status for a project, redirecting the output to
-    the specified object.
+        Obtains the status for a project, redirecting the output to
+        the specified object.

-    Args:
-      quiet: Where to output the status.
-      project: Project to get status of.
+        Args:
+            quiet: Where to output the status.
+            local: a boolean, if True, the path is relative to the local
+                (sub)manifest. If false, the path is relative to the outermost
+                manifest.
+            project: Project to get status of.

-    Returns:
-      The status of the project.
-    """
-    buf = io.StringIO()
-    ret = project.PrintWorkTreeStatus(quiet=quiet, output_redir=buf)
-    return (ret, buf.getvalue())
+        Returns:
+            The status of the project.
+        """
+        buf = io.StringIO()
+        ret = project.PrintWorkTreeStatus(
+            quiet=quiet, output_redir=buf, local=local
+        )
+        return (ret, buf.getvalue())

-  def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
-    """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
-    status_header = ' --\t'
-    for item in dirs:
-      if not platform_utils.isdir(item):
-        outstring.append(''.join([status_header, item]))
-        continue
-      if item in proj_dirs:
-        continue
-      if item in proj_dirs_parents:
-        self._FindOrphans(glob.glob('%s/.*' % item) +
-                          glob.glob('%s/*' % item),
-                          proj_dirs, proj_dirs_parents, outstring)
-        continue
-      outstring.append(''.join([status_header, item, '/']))
+    def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
+        """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""  # noqa: E501
+        status_header = " --\t"
+        for item in dirs:
+            if not platform_utils.isdir(item):
+                outstring.append("".join([status_header, item]))
+                continue
+            if item in proj_dirs:
+                continue
+            if item in proj_dirs_parents:
+                self._FindOrphans(
+                    glob.glob("%s/.*" % item) + glob.glob("%s/*" % item),
+                    proj_dirs,
+                    proj_dirs_parents,
+                    outstring,
+                )
+                continue
+            outstring.append("".join([status_header, item, "/"]))

-  def Execute(self, opt, args):
-    all_projects = self.GetProjects(args)
+    def Execute(self, opt, args):
+        all_projects = self.GetProjects(
+            args, all_manifests=not opt.this_manifest_only
+        )

-    def _ProcessResults(_pool, _output, results):
-      ret = 0
-      for (state, output) in results:
-        if output:
-          print(output, end='')
-        if state == 'CLEAN':
-          ret += 1
-      return ret
+        def _ProcessResults(_pool, _output, results):
+            ret = 0
+            for state, output in results:
+                if output:
+                    print(output, end="")
+                if state == "CLEAN":
+                    ret += 1
+            return ret

-    counter = self.ExecuteInParallel(
-        opt.jobs,
-        functools.partial(self._StatusHelper, opt.quiet),
-        all_projects,
-        callback=_ProcessResults,
-        ordered=True)
+        counter = self.ExecuteInParallel(
+            opt.jobs,
+            functools.partial(
+                self._StatusHelper, opt.quiet, opt.this_manifest_only
+            ),
+            all_projects,
+            callback=_ProcessResults,
+            ordered=True,
+        )

-    if not opt.quiet and len(all_projects) == counter:
-      print('nothing to commit (working directory clean)')
+        if not opt.quiet and len(all_projects) == counter:
+            print("nothing to commit (working directory clean)")

-    if opt.orphans:
-      proj_dirs = set()
-      proj_dirs_parents = set()
-      for project in self.GetProjects(None, missing_ok=True):
-        proj_dirs.add(project.relpath)
-        (head, _tail) = os.path.split(project.relpath)
-        while head != "":
-          proj_dirs_parents.add(head)
-          (head, _tail) = os.path.split(head)
-      proj_dirs.add('.repo')
+        if opt.orphans:
+            proj_dirs = set()
+            proj_dirs_parents = set()
+            for project in self.GetProjects(
+                None, missing_ok=True, all_manifests=not opt.this_manifest_only
+            ):
+                relpath = project.RelPath(local=opt.this_manifest_only)
+                proj_dirs.add(relpath)
+                (head, _tail) = os.path.split(relpath)
+                while head != "":
+                    proj_dirs_parents.add(head)
+                    (head, _tail) = os.path.split(head)
+            proj_dirs.add(".repo")

-      class StatusColoring(Coloring):
-        def __init__(self, config):
-          Coloring.__init__(self, config, 'status')
-          self.project = self.printer('header', attr='bold')
-          self.untracked = self.printer('untracked', fg='red')
+            class StatusColoring(Coloring):
+                def __init__(self, config):
+                    Coloring.__init__(self, config, "status")
+                    self.project = self.printer("header", attr="bold")
+                    self.untracked = self.printer("untracked", fg="red")

-      orig_path = os.getcwd()
-      try:
-        os.chdir(self.manifest.topdir)
+            orig_path = os.getcwd()
+            try:
+                os.chdir(self.manifest.topdir)

-        outstring = []
-        self._FindOrphans(glob.glob('.*') +
-                          glob.glob('*'),
-                          proj_dirs, proj_dirs_parents, outstring)
+                outstring = []
+                self._FindOrphans(
+                    glob.glob(".*") + glob.glob("*"),
+                    proj_dirs,
+                    proj_dirs_parents,
+                    outstring,
+                )

-        if outstring:
-          output = StatusColoring(self.client.globalConfig)
-          output.project('Objects not within a project (orphans)')
-          output.nl()
-          for entry in outstring:
-            output.untracked(entry)
-            output.nl()
-        else:
-          print('No orphan files or directories')
+                if outstring:
+                    output = StatusColoring(self.client.globalConfig)
+                    output.project("Objects not within a project (orphans)")
+                    output.nl()
+                    for entry in outstring:
+                        output.untracked(entry)
+                        output.nl()
+                else:
+                    print("No orphan files or directories")

-      finally:
-        # Restore CWD.
-        os.chdir(orig_path)
+            finally:
+                # Restore CWD.
+                os.chdir(orig_path)
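The status command above binds its fixed arguments with functools.partial, runs the per-project helper in parallel, and buffers each project's output in io.StringIO so results can be printed in a stable order and clean projects counted. A simplified sketch of that flow, with an invented status_helper and plain string project names standing in for repo's Project objects and ExecuteInParallel:

# Sketch of the ordered parallel status-collection pattern: bind fixed args,
# buffer each worker's output, print in input order, count clean projects.
# Worker logic and project names are made up for illustration.
import functools
import io
from concurrent.futures import ThreadPoolExecutor


def status_helper(quiet, project):
    buf = io.StringIO()
    state = "CLEAN" if project.endswith("clean") else "DIRTY"
    if not quiet or state != "CLEAN":
        buf.write(f"project {project}/  ({state.lower()})\n")
    return state, buf.getvalue()


def main(projects, quiet=False):
    worker = functools.partial(status_helper, quiet)
    clean = 0
    with ThreadPoolExecutor() as pool:
        for state, output in pool.map(worker, projects):  # preserves order
            if output:
                print(output, end="")
            if state == "CLEAN":
                clean += 1
    if not quiet and clean == len(projects):
        print("nothing to commit (working directory clean)")


if __name__ == "__main__":
    main(["device/clean", "kernel/dirty"])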
subcmds/sync.py (3006 lines changed)
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.