Mirror of https://gerrit.googlesource.com/git-repo, synced 2026-02-14 17:50:19 +00:00
Compare commits
23 Commits
| Author | SHA1 | Date |
|---|---|---|
| | a621254b26 | |
| | f159ce0f9e | |
| | 802cd0c601 | |
| | 100a214315 | |
| | 8051cdb629 | |
| | 43549d8d08 | |
| | 55b7125d6a | |
| | d793553804 | |
| | ea5239ddd9 | |
| | 1b8714937c | |
| | 50a2c0e368 | |
| | 35af2f8daf | |
| | e287fa760b | |
| | 3593a10643 | |
| | 003684b6e5 | |
| | 0297f8312c | |
| | 7b3afcab7a | |
| | eda6b1ead7 | |
| | 4364a79088 | |
| | a98a5ebc6d | |
| | f8d342beac | |
| | 6d2e8c8237 | |
| | a24185ee6c | |
.github/workflows/test-ci.yml (vendored, 8 changed lines)
@@ -14,18 +14,18 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ['3.6', '3.7', '3.8', '3.9', '3.10']
     runs-on: ${{ matrix.os }}
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
-        pip install tox tox-gh-actions
+        python -m pip install tox tox-gh-actions
    - name: Test with tox
      run: tox
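A quick aside on why the new matrix entries are quoted: an unquoted `3.10` is read by YAML as a number and collapses to `3.1`, which would hand setup-python the wrong interpreter version. A minimal Python sketch of the same collapse (YAML's plain-scalar number rules mirror this):

```python
# Unquoted YAML scalars like 3.10 become floats, and the trailing zero is not
# significant, so the value reaching the workflow would be "3.1".
# Quoting each entry ('3.10') keeps it a string and preserves the digits.
print(float("3.10"))       # 3.1
print(str(float("3.10")))  # '3.1' -- not the Python version we wanted
```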
@@ -143,23 +143,14 @@ internal processes for accessing the restricted keys.
 ***
 
 ```sh
-# Set the gpg key directory.
-$ export GNUPGHOME=~/.gnupg/repo/
-
-# Verify the listed key is "Repo Maintainer".
-$ gpg -K
-
-# Pick whatever branch or commit you want to tag.
-$ r=main
-
 # Pick the new version.
-$ t=1.12.10
+$ t=v2.30
 
-# Create the signed tag.
-$ git tag -s v$t -u "Repo Maintainer <repo@android.kernel.org>" -m "repo $t" $r
+# Create a new signed tag with the current HEAD.
+$ ./release/sign-tag.py $t
 
 # Verify the signed tag.
-$ git show v$t
+$ git show $t
 ```
 
 ### Push the new release
@@ -168,11 +159,11 @@ Once you're ready to make the release available to everyone, push it to the
 `stable` branch.
 
 Make sure you never push the tag itself to the stable branch!
-Only push the commit -- notice the use of `$t` and `$r` below.
+Only push the commit -- note the use of `^0` below.
 
 ```sh
-$ git push https://gerrit-review.googlesource.com/git-repo v$t
-$ git push https://gerrit-review.googlesource.com/git-repo $r:stable
+$ git push https://gerrit-review.googlesource.com/git-repo $t
+$ git push https://gerrit-review.googlesource.com/git-repo $t^0:stable
 ```
 
 If something goes horribly wrong, you can force push the previous version to the
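The `^0` suffix is what keeps the tag object itself off the `stable` branch: `<tag>^0` peels an annotated tag down to the commit it points at, so only that commit gets pushed. A small sketch, assuming a local checkout that already has an annotated tag named `v2.30` (the tag name here is just an example):

```python
import subprocess

def rev_parse(ref):
    # Resolve a ref to an object id with plain `git rev-parse`.
    return subprocess.run(['git', 'rev-parse', ref],
                          capture_output=True, text=True, check=True).stdout.strip()

tag_object = rev_parse('v2.30')       # id of the annotated tag object itself
peeled_commit = rev_parse('v2.30^0')  # id of the commit the tag points at
print(tag_object != peeled_commit)    # True for an annotated (signed) tag
```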
@@ -195,7 +186,9 @@ You can create a short changelog using the command:
 ```sh
 # If you haven't pushed to the stable branch yet, you can use origin/stable.
 # If you have pushed, change origin/stable to the previous release tag.
-$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
+# This assumes "main" is the current tagged release. If it's newer, change it
+# to the current release tag too.
+$ git log --format="%h (%aN) %s" --no-merges origin/stable..main
 ```
 
 ## Project References
@@ -22,6 +22,7 @@ import re
 import ssl
 import subprocess
 import sys
+from typing import Union
 import urllib.error
 import urllib.request
 
@@ -117,7 +118,7 @@ class GitConfig(object):
       return self.defaults.Has(name, include_defaults=True)
     return False
 
-  def GetInt(self, name):
+  def GetInt(self, name: str) -> Union[int, None]:
     """Returns an integer from the configuration file.
 
     This follows the git config syntax.
@@ -126,7 +127,7 @@ class GitConfig(object):
       name: The key to lookup.
 
     Returns:
-      None if the value was not defined, or is not a boolean.
+      None if the value was not defined, or is not an int.
       Otherwise, the number itself.
     """
     v = self.GetString(name)
@@ -152,6 +153,9 @@ class GitConfig(object):
     try:
       return int(v, base=base) * mult
     except ValueError:
+      print(
+          f"warning: expected {name} to represent an integer, got {v} instead",
+          file=sys.stderr)
       return None
 
   def DumpConfigDict(self):
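For context on the "git config syntax" the docstring refers to: integer values may carry a k/m/g suffix meaning 1024, 1024^2, 1024^3, which is what the `int(v, base=base) * mult` line above handles before warning and returning None. A minimal standalone sketch of that idea (not the repo implementation itself):

```python
def parse_git_int(v):
    """Parse '10k' -> 10240, '1m' -> 1048576, plain digits unchanged, bad input -> None."""
    multipliers = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}
    v = v.strip().lower()
    mult = 1
    if v and v[-1] in multipliers:
        mult = multipliers[v[-1]]
        v = v[:-1]
    try:
        return int(v, base=0) * mult  # base=0 also accepts 0x.. hex literals
    except ValueError:
        return None                   # mirrors GetInt's warning-plus-None path

print(parse_git_int('10k'))   # 10240
print(parse_git_int('oops'))  # None
```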
@@ -169,7 +173,7 @@ class GitConfig(object):
       config_dict[key] = self.GetString(key)
     return config_dict
 
-  def GetBoolean(self, name):
+  def GetBoolean(self, name: str) -> Union[str, None]:
     """Returns a boolean from the configuration file.
     None : The value was not defined, or is not a boolean.
     True : The value was set to true or yes.
@@ -183,6 +187,8 @@ class GitConfig(object):
       return True
     if v in ('false', 'no'):
       return False
+    print(f"warning: expected {name} to represent a boolean, got {v} instead",
+          file=sys.stderr)
     return None
 
   def SetBoolean(self, name, value):
@@ -191,7 +197,7 @@ class GitConfig(object):
     value = 'true' if value else 'false'
     self.SetString(name, value)
 
-  def GetString(self, name, all_keys=False):
+  def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
     """Get the first value for a key, or None if it is not defined.
 
     This configuration file is used first, if the key is not
@@ -110,7 +110,7 @@ class EventLog(object):
     return {
         'event': event_name,
         'sid': self._full_sid,
-        'thread': threading.currentThread().getName(),
+        'thread': threading.current_thread().name,
         'time': datetime.datetime.utcnow().isoformat() + 'Z',
     }
 
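The rename is a deprecation cleanup: `threading.currentThread()` and `getName()` are legacy camelCase aliases (flagged as deprecated since Python 3.10), and the attribute form is the supported spelling. For reference:

```python
import threading

# threading.current_thread().name replaces the deprecated
# threading.currentThread().getName() spelling.
print(threading.current_thread().name)  # 'MainThread' when run in the main thread
```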
@@ -3505,7 +3505,7 @@ class ManifestProject(MetaProject):
   @property
   def partial_clone_exclude(self):
     """Partial clone exclude string"""
-    return self.config.GetBoolean('repo.partialcloneexclude')
+    return self.config.GetString('repo.partialcloneexclude')
 
   @property
   def manifest_platform(self):
repo (2 changed lines)
@@ -149,7 +149,7 @@ if not REPO_REV:
 BUG_URL = 'https://bugs.chromium.org/p/gerrit/issues/entry?template=Repo+tool+issue'
 
 # increment this whenever we make important changes to this script
-VERSION = (2, 29)
+VERSION = (2, 30)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
subcmds/sync.py (158 changed lines)
@@ -21,7 +21,6 @@ import multiprocessing
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import shutil
 import socket
 import sys
 import tempfile
@@ -66,11 +65,9 @@ from wrapper import Wrapper
 from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
-# Env var to implicitly turn off object backups.
-REPO_BACKUP_OBJECTS = 'REPO_BACKUP_OBJECTS'
-_BACKUP_OBJECTS = os.environ.get(REPO_BACKUP_OBJECTS) != '0'
 
-# Env var to implicitly turn auto-gc back on.
+# Env var to implicitly turn auto-gc back on. This was added to allow a user to
+# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
 _REPO_AUTO_GC = 'REPO_AUTO_GC'
 _AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'
 
@@ -473,6 +470,7 @@ later is required to fix a server side protocol bug.
     """
     start = time.time()
     success = False
+    remote_fetched = False
     buf = io.StringIO()
     try:
       sync_result = project.Sync_NetworkHalf(
@@ -490,6 +488,7 @@ later is required to fix a server side protocol bug.
           clone_filter=project.manifest.CloneFilter,
           partial_clone_exclude=project.manifest.PartialCloneExclude)
       success = sync_result.success
+      remote_fetched = sync_result.remote_fetched
 
       output = buf.getvalue()
       if (opt.verbose or not success) and output:
@@ -507,8 +506,7 @@ later is required to fix a server side protocol bug.
       raise
 
     finish = time.time()
-    return _FetchOneResult(success, project, start, finish,
-                           sync_result.remote_fetched)
+    return _FetchOneResult(success, project, start, finish, remote_fetched)
 
   @classmethod
   def _FetchInitChild(cls, ssh_proxy):
@@ -738,33 +736,6 @@ later is required to fix a server side protocol bug.
         callback=_ProcessResults,
         output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
 
-  def _backup_cruft(self, bare_git):
-    """Save a copy of any cruft from `git gc`."""
-    # Find any cruft packs in the current gitdir, and save them.
-    # b/221065125 (repo sync complains that objects are missing). This does
-    # not prevent that state, but makes it so that the missing objects are
-    # available.
-    objdir = bare_git._project.objdir
-    pack_dir = os.path.join(objdir, 'pack')
-    bak_dir = os.path.join(objdir, '.repo', 'pack.bak')
-    if not _BACKUP_OBJECTS or not platform_utils.isdir(pack_dir):
-      return
-    files = set(platform_utils.listdir(pack_dir))
-    to_backup = []
-    for f in files:
-      base, ext = os.path.splitext(f)
-      if base + '.mtimes' in files:
-        to_backup.append(f)
-    if to_backup:
-      os.makedirs(bak_dir, exist_ok=True)
-      for fname in to_backup:
-        bak_fname = os.path.join(bak_dir, fname)
-        if not os.path.exists(bak_fname):
-          with Trace('%s saved %s', bare_git._project.name, fname):
-            # Use a tmp file so that we are sure of a complete copy.
-            shutil.copy(os.path.join(pack_dir, fname), bak_fname + '.tmp')
-            shutil.move(bak_fname + '.tmp', bak_fname)
-
   @staticmethod
   def _GetPreciousObjectsState(project: Project, opt):
     """Get the preciousObjects state for the project.
@@ -804,19 +775,18 @@ later is required to fix a server side protocol bug.
       print(f'\r{relpath}: project not found in manifest.', file=sys.stderr)
       return False
 
-  def _RepairPreciousObjectsState(self, project: Project, opt):
+  def _SetPreciousObjectsState(self, project: Project, opt):
     """Correct the preciousObjects state for the project.
 
     Args:
-      project (Project): the project to examine, and possibly correct.
-      opt (optparse.Values): options given to sync.
+      project: the project to examine, and possibly correct.
+      opt: options given to sync.
     """
     expected = self._GetPreciousObjectsState(project, opt)
     actual = project.config.GetBoolean('extensions.preciousObjects') or False
-    relpath = project.RelPath(local = opt.this_manifest_only)
+    relpath = project.RelPath(local=opt.this_manifest_only)
 
-    if (expected != actual and
-        not project.config.GetBoolean('repo.preservePreciousObjects')):
+    if expected != actual:
       # If this is unexpected, log it and repair.
       Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
       if expected:
@@ -845,13 +815,19 @@ later is required to fix a server side protocol bug.
     to potentially mark objects precious, so that `git gc` does not discard
     shared objects.
     """
-    pm = Progress(f'{"" if opt.auto_gc else "NOT "}Garbage collecting',
-                  len(projects), delay=False, quiet=opt.quiet)
+    if not opt.auto_gc:
+      # Just repair preciousObjects state, and return.
+      for project in projects:
+        self._SetPreciousObjectsState(project, opt)
+      return
+
+    pm = Progress('Garbage collecting', len(projects), delay=False,
+                  quiet=opt.quiet)
     pm.update(inc=0, msg='prescan')
 
     tidy_dirs = {}
     for project in projects:
-      self._RepairPreciousObjectsState(project, opt)
+      self._SetPreciousObjectsState(project, opt)
 
       project.config.SetString('gc.autoDetach', 'false')
       # Only call git gc once per objdir, but call pack-refs for the remainder.
@@ -866,28 +842,16 @@ later is required to fix a server side protocol bug.
           project.bare_git,
       )
 
-    if not opt.auto_gc:
-      pm.end()
-      return
-
     jobs = opt.jobs
 
-    gc_args = ['--auto']
-    backup_cruft = False
-    if git_require((2, 37, 0)):
-      gc_args.append('--cruft')
-      backup_cruft = True
-    pack_refs_args = ()
     if jobs < 2:
       for (run_gc, bare_git) in tidy_dirs.values():
         pm.update(msg=bare_git._project.name)
 
         if run_gc:
-          bare_git.gc(*gc_args)
+          bare_git.gc('--auto')
         else:
-          bare_git.pack_refs(*pack_refs_args)
-        if backup_cruft:
-          self._backup_cruft(bare_git)
+          bare_git.pack_refs()
       pm.end()
       return
 
@@ -902,17 +866,15 @@ later is required to fix a server side protocol bug.
       try:
         try:
           if run_gc:
-            bare_git.gc(*gc_args, config=config)
+            bare_git.gc('--auto', config=config)
           else:
-            bare_git.pack_refs(*pack_refs_args, config=config)
+            bare_git.pack_refs(config=config)
         except GitError:
           err_event.set()
         except Exception:
          err_event.set()
          raise
       finally:
-        if backup_cruft:
-          self._backup_cruft(bare_git)
         pm.finish(bare_git._project.name)
         sem.release()
 
@@ -1224,9 +1186,49 @@ later is required to fix a server side protocol bug.
 
     if opt.auto_gc is None and _AUTO_GC:
       print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
-            file=sys.stderr)
+            f'{_REPO_AUTO_GC} is deprecated and will be removed in a future',
+            'release. Use `--auto-gc` instead.', file=sys.stderr)
       opt.auto_gc = True
 
+  def _ValidateOptionsWithManifest(self, opt, mp):
+    """Like ValidateOptions, but after we've updated the manifest.
+
+    Needed to handle sync-xxx option defaults in the manifest.
+
+    Args:
+      opt: The options to process.
+      mp: The manifest project to pull defaults from.
+    """
+    if not opt.jobs:
+      # If the user hasn't made a choice, use the manifest value.
+      opt.jobs = mp.manifest.default.sync_j
+    if opt.jobs:
+      # If --jobs has a non-default value, propagate it as the default for
+      # --jobs-xxx flags too.
+      if not opt.jobs_network:
+        opt.jobs_network = opt.jobs
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = opt.jobs
+    else:
+      # Neither user nor manifest have made a choice, so setup defaults.
+      if not opt.jobs_network:
+        opt.jobs_network = 1
+      if not opt.jobs_checkout:
+        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
+      opt.jobs = os.cpu_count()
+
+    # Try to stay under user rlimit settings.
+    #
+    # Since each worker requires at 3 file descriptors to run `git fetch`, use
+    # that to scale down the number of jobs.  Unfortunately there isn't an easy
+    # way to determine this reliably as systems change, but it was last measured
+    # by hand in 2011.
+    soft_limit, _ = _rlimit_nofile()
+    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
+    opt.jobs = min(opt.jobs, jobs_soft_limit)
+    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
+    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+
   def Execute(self, opt, args):
     manifest = self.outer_manifest
     if not opt.outer_manifest:
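A worked example of the file-descriptor cap computed above: with a soft `RLIMIT_NOFILE` of 256 (the value the unit tests patch in further down), the formula allows at most 83 parallel jobs, which is where the 83s in `test_cli_jobs` come from.

```python
# max(1, (soft_limit - 5) // 3): reserve a few descriptors for repo itself,
# then budget roughly three per concurrent `git fetch`.
soft_limit = 256
jobs_soft_limit = max(1, (soft_limit - 5) // 3)
print(jobs_soft_limit)  # 83
```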
@@ -1277,35 +1279,9 @@ later is required to fix a server side protocol bug.
       else:
         print('Skipping update of local manifest project.')
 
-    # Now that the manifests are up-to-date, setup the jobs value.
-    if opt.jobs is None:
-      # User has not made a choice, so use the manifest settings.
-      opt.jobs = mp.default.sync_j
-    if opt.jobs is not None:
-      # Neither user nor manifest have made a choice.
-      if opt.jobs_network is None:
-        opt.jobs_network = opt.jobs
-      if opt.jobs_checkout is None:
-        opt.jobs_checkout = opt.jobs
-    # Setup defaults if jobs==0.
-    if not opt.jobs:
-      if not opt.jobs_network:
-        opt.jobs_network = 1
-      if not opt.jobs_checkout:
-        opt.jobs_checkout = DEFAULT_LOCAL_JOBS
-      opt.jobs = os.cpu_count()
-
-    # Try to stay under user rlimit settings.
-    #
-    # Since each worker requires at 3 file descriptors to run `git fetch`, use
-    # that to scale down the number of jobs.  Unfortunately there isn't an easy
-    # way to determine this reliably as systems change, but it was last measured
-    # by hand in 2011.
-    soft_limit, _ = _rlimit_nofile()
-    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
-    opt.jobs = min(opt.jobs, jobs_soft_limit)
-    opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
-    opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
+    # Now that the manifests are up-to-date, setup options whose defaults might
+    # be in the manifest.
+    self._ValidateOptionsWithManifest(opt, mp)
 
     superproject_logging_data = {}
     self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
@@ -42,21 +42,21 @@ class GitCommandTest(unittest.TestCase):
 
   def test_alternative_setting_when_matching(self):
     r = git_command._build_env(
-        objdir = 'zap/objects',
+        objdir = os.path.join('zap', 'objects'),
         gitdir = 'zap'
     )
 
     self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
-    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), 'zap/objects')
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
 
   def test_alternative_setting_when_different(self):
     r = git_command._build_env(
-        objdir = 'wow/objects',
+        objdir = os.path.join('wow', 'objects'),
         gitdir = 'zap'
     )
 
-    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), 'zap/objects')
-    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), 'wow/objects')
+    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
+    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
 
 
 class GitCallUnitTest(unittest.TestCase):
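The switch to `os.path.join` is what lets these assertions pass on Windows, where the native separator is a backslash; a hard-coded `'zap/objects'` only matches on POSIX. For illustration:

```python
import os

# 'zap/objects' on Linux/macOS, 'zap\\objects' on Windows, so joining with
# os.path.join keeps the expectation aligned with what _build_env produces.
print(os.path.join('zap', 'objects'))
print(os.sep)
```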
@@ -117,7 +117,8 @@ class GitRequireTests(unittest.TestCase):
   """Test the git_require helper."""
 
   def setUp(self):
-    ver = wrapper.GitVersion(1, 2, 3, 4)
+    self.wrapper = wrapper.Wrapper()
+    ver = self.wrapper.GitVersion(1, 2, 3, 4)
     mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
 
   def tearDown(self):
@@ -200,7 +200,3 @@ class GitConfigReadWriteTests(unittest.TestCase):
     for key, value in TESTS:
       self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
     self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])
-
-
-if __name__ == '__main__':
-  unittest.main()
@@ -366,7 +366,3 @@ class SuperprojectTestCase(unittest.TestCase):
                          'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
                          '<superproject name="superproject"/>'
                          '</manifest>')
-
-
-if __name__ == '__main__':
-  unittest.main()
@@ -369,7 +369,7 @@ class EventLogTestCase(unittest.TestCase):
     server_thread.start()
 
     with server_ready:
-      server_ready.wait()
+      server_ready.wait(timeout=120)
 
     self._event_log_module.StartEvent()
     path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
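Without a timeout, `Condition.wait()` blocks forever if the server thread dies before notifying, which can hang the whole test run; with `timeout=` it simply returns `False` after the deadline. A tiny sketch:

```python
import threading

server_ready = threading.Condition()
with server_ready:
    # Nobody ever calls notify() here, so this returns False after ~0.1s
    # instead of blocking forever.
    print(server_ready.wait(timeout=0.1))  # False
```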
@@ -385,7 +385,3 @@ class EventLogTestCase(unittest.TestCase):
     # Check for 'start' event specific fields.
     self.assertIn('argv', start_event)
     self.assertIsInstance(start_event['argv'], list)
-
-
-if __name__ == '__main__':
-  unittest.main()
@@ -117,7 +117,7 @@ class ManifestParseTestCase(unittest.TestCase):
 
   def getXmlManifest(self, data):
     """Helper to initialize a manifest for testing."""
-    with open(self.manifest_file, 'w') as fp:
+    with open(self.manifest_file, 'w', encoding="utf-8") as fp:
       fp.write(data)
     return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
 
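Passing `encoding="utf-8"` matters because `open()` otherwise falls back to the locale's preferred encoding, which is not UTF-8 on some Windows configurations and makes the manifest round-trip nondeterministic. You can see the platform default with:

```python
import locale

# The text encoding open() would use when encoding= is omitted;
# 'cp1252' is a common answer on Windows, 'UTF-8' on most Linux systems.
print(locale.getpreferredencoding(False))
```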
@@ -428,7 +428,7 @@ class IncludeElementTests(ManifestParseTestCase):
     def parse(name):
       name = self.encodeXmlAttr(name)
       # Setup target of the include.
-      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w') as fp:
+      with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp:
         fp.write(f'<manifest><include name="{name}"/></manifest>')
 
       manifest = self.getXmlManifest("""
@@ -519,22 +519,22 @@ class ProjectElementTests(ManifestParseTestCase):
     """)
 
     manifest = parse('a/path/', 'foo')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo/')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
     manifest = parse('a/path', 'foo//////')
-    self.assertEqual(manifest.projects[0].gitdir,
-                     os.path.join(self.tempdir, '.repo/projects/foo.git'))
-    self.assertEqual(manifest.projects[0].objdir,
-                     os.path.join(self.tempdir, '.repo/project-objects/a/path.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                     os.path.join(self.tempdir, '.repo', 'projects', 'foo.git'))
+    self.assertEqual(os.path.normpath(manifest.projects[0].objdir),
+                     os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git'))
 
   def test_toplevel_path(self):
     """Check handling of path=. specially."""
@@ -551,8 +551,8 @@ class ProjectElementTests(ManifestParseTestCase):
 
     for path in ('.', './', './/', './//'):
       manifest = parse('server/path', path)
-      self.assertEqual(manifest.projects[0].gitdir,
-                       os.path.join(self.tempdir, '.repo/projects/..git'))
+      self.assertEqual(os.path.normpath(manifest.projects[0].gitdir),
+                       os.path.join(self.tempdir, '.repo', 'projects', '..git'))
 
   def test_bad_path_name_checks(self):
     """Check handling of bad path & name attributes."""
@@ -578,7 +578,7 @@ class ProjectElementTests(ManifestParseTestCase):
       parse('', 'ok')
 
     for path in INVALID_FS_PATHS:
-      if not path or path.endswith('/'):
+      if not path or path.endswith('/') or path.endswith(os.path.sep):
        continue
 
      with self.assertRaises(error.ManifestInvalidPathError):
@@ -22,6 +22,7 @@ import tempfile
 import unittest
 
 import error
+import manifest_xml
 import git_command
 import git_config
 import platform_utils
@@ -384,7 +385,7 @@ class MigrateWorkTreeTests(unittest.TestCase):
 
       # Make sure the dir was transformed into a symlink.
       self.assertTrue(dotgit.is_symlink())
-      self.assertEqual(os.readlink(dotgit), '../../.repo/projects/src/test.git')
+      self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))
 
       # Make sure files were moved over.
       gitdir = tempdir / '.repo/projects/src/test.git'
@@ -411,3 +412,81 @@ class MigrateWorkTreeTests(unittest.TestCase):
         self.assertTrue((dotgit / name).is_file())
       for name in self._SYMLINKS:
         self.assertTrue((dotgit / name).is_symlink())
+
+
+class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
+  """Ensure properties are fetched properly."""
+
+  def setUpManifest(self, tempdir):
+    repo_trace._TRACE_FILE = os.path.join(tempdir, 'TRACE_FILE_from_test')
+
+    repodir = os.path.join(tempdir, '.repo')
+    manifest_dir = os.path.join(repodir, 'manifests')
+    manifest_file = os.path.join(
+        repodir, manifest_xml.MANIFEST_FILE_NAME)
+    local_manifest_dir = os.path.join(
+        repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
+    os.mkdir(repodir)
+    os.mkdir(manifest_dir)
+    manifest = manifest_xml.XmlManifest(repodir, manifest_file)
+
+    return project.ManifestProject(
+        manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)
+
+  def test_manifest_config_properties(self):
+    """Test we are fetching the manifest config properties correctly."""
+
+    with TempGitTree() as tempdir:
+      fakeproj = self.setUpManifest(tempdir)
+
+      # Set property using the expected Set method, then ensure
+      # the porperty functions are using the correct Get methods.
+      fakeproj.config.SetString(
+          'manifest.standalone', 'https://chicken/manifest.git')
+      self.assertEqual(
+          fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')
+
+      fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
+      self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')
+
+      fakeproj.config.SetString('repo.reference', 'mirror/ref')
+      self.assertEqual(fakeproj.reference, 'mirror/ref')
+
+      fakeproj.config.SetBoolean('repo.dissociate', False)
+      self.assertFalse(fakeproj.dissociate)
+
+      fakeproj.config.SetBoolean('repo.archive', False)
+      self.assertFalse(fakeproj.archive)
+
+      fakeproj.config.SetBoolean('repo.mirror', False)
+      self.assertFalse(fakeproj.mirror)
+
+      fakeproj.config.SetBoolean('repo.worktree', False)
+      self.assertFalse(fakeproj.use_worktree)
+
+      fakeproj.config.SetBoolean('repo.clonebundle', False)
+      self.assertFalse(fakeproj.clone_bundle)
+
+      fakeproj.config.SetBoolean('repo.submodules', False)
+      self.assertFalse(fakeproj.submodules)
+
+      fakeproj.config.SetBoolean('repo.git-lfs', False)
+      self.assertFalse(fakeproj.git_lfs)
+
+      fakeproj.config.SetBoolean('repo.superproject', False)
+      self.assertFalse(fakeproj.use_superproject)
+
+      fakeproj.config.SetBoolean('repo.partialclone', False)
+      self.assertFalse(fakeproj.partial_clone)
+
+      fakeproj.config.SetString('repo.depth', '48')
+      self.assertEqual(fakeproj.depth, '48')
+
+      fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
+      self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')
+
+      fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
+      self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')
+
+      fakeproj.config.SetString('manifest.platform', 'auto')
+      self.assertEqual(fakeproj.manifest_platform, 'auto')
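The `repo.partialcloneexclude` case here is the one the earlier `ManifestProject.partial_clone_exclude` hunk addresses: the stored value is a path list, so reading it with `GetBoolean` would warn and yield `None`, while `GetString` returns it untouched. A rough sketch of that tri-state boolean parsing, for contrast:

```python
# Sketch of git-style boolean handling (true/yes, false/no, anything else -> None),
# versus simply returning the raw string; not the repo implementation itself.
def as_boolean(v):
    if v in ('true', 'yes'):
        return True
    if v in ('false', 'no'):
        return False
    return None  # 'third_party/big_repo' lands here, which is why GetString is needed

print(as_boolean('third_party/big_repo'))  # None
```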
@@ -13,11 +13,13 @@
 # limitations under the License.
 
 """Unittests for the subcmds/sync.py module."""
 
+import os
 import unittest
 from unittest import mock
 
 import pytest
 
+import command
 from subcmds import sync
 
@@ -43,6 +45,51 @@ def test_get_current_branch_only(use_superproject, cli_args, result):
     assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
 
 
+# Used to patch os.cpu_count() for reliable results.
+OS_CPU_COUNT = 24
+
+@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
+    # No user or manifest settings.
+    ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
+    # No user settings, so manifest settings control.
+    ([], 3, 3, 3, 3),
+    # User settings, but no manifest.
+    (['--jobs=4'], None, 4, 4, 4),
+    (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
+    (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
+    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
+    (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
+    (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
+    (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
+    # User settings with manifest settings.
+    (['--jobs=4'], 3, 4, 4, 4),
+    (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
+    (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
+    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
+    (['--jobs-network=5'], 3, 3, 5, 3),
+    (['--jobs-checkout=6'], 3, 3, 3, 6),
+    (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
+    # Settings that exceed rlimits get capped.
+    (['--jobs=1000000'], None, 83, 83, 83),
+    ([], 1000000, 83, 83, 83),
+])
+def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
+  """Tests --jobs option behavior."""
+  mp = mock.MagicMock()
+  mp.manifest.default.sync_j = jobs_manifest
+
+  cmd = sync.Sync()
+  opts, args = cmd.OptionParser.parse_args(argv)
+  cmd.ValidateOptions(opts, args)
+
+  with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
+    with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
+      cmd._ValidateOptionsWithManifest(opts, mp)
+      assert opts.jobs == jobs
+      assert opts.jobs_network == jobs_net
+      assert opts.jobs_checkout == jobs_check
+
+
 class GetPreciousObjectsState(unittest.TestCase):
   """Tests for _GetPreciousObjectsState."""
 
@@ -59,12 +59,12 @@ class RepoWrapperUnitTest(RepoWrapperTestCase):
   def test_python_constraints(self):
     """The launcher should never require newer than main.py."""
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
     self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_SOFT)
+                            self.wrapper.MIN_PYTHON_VERSION_SOFT)
     # Make sure the versions are themselves in sync.
-    self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT,
-                            wrapper.MIN_PYTHON_VERSION_HARD)
+    self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT,
+                            self.wrapper.MIN_PYTHON_VERSION_HARD)
 
   def test_init_parser(self):
     """Make sure 'init' GetParser works."""
@@ -159,7 +159,9 @@ class RunCommand(RepoWrapperTestCase):
   def test_capture(self):
     """Check capture_output handling."""
     ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True)
-    self.assertEqual(ret.stdout, 'hi\n')
+    # echo command appends OS specific linesep, but on Windows + Git Bash
+    # we get UNIX ending, so we allow both.
+    self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n'])
 
   def test_check(self):
     """Check check handling."""
@@ -456,7 +458,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown')
 
   def test_explicit_tag(self):
@@ -465,7 +467,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
     self.assertEqual('refs/tags/v1.0', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
 
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
       self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown')
 
   def test_branch_name(self):
@@ -500,7 +502,7 @@ class ResolveRepoRev(GitCheckoutTestCase):
 
   def test_unknown(self):
     """Check unknown ref/commit argument."""
-    with self.assertRaises(wrapper.CloneFailure):
+    with self.assertRaises(self.wrapper.CloneFailure):
      self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya')
 
 
@@ -551,7 +553,3 @@ class CheckRepoRev(GitCheckoutTestCase):
     rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False)
     self.assertEqual('refs/heads/stable', rrev)
     self.assertEqual(self.REV_LIST[1], lrev)
-
-
-if __name__ == '__main__':
-  unittest.main()
tox.ini (12 changed lines)
@@ -15,7 +15,7 @@
 # https://tox.readthedocs.io/
 
 [tox]
-envlist = py36, py37, py38, py39
+envlist = py36, py37, py38, py39, py310
 
 [gh-actions]
 python =
@@ -23,11 +23,17 @@ python =
     3.7: py37
     3.8: py38
     3.9: py39
+    3.10: py310
 
 [testenv]
-deps = pytest
-commands = {envpython} run_tests
+deps =
+    pytest
+    pytest-timeout
+commands = {envpython} run_tests {posargs}
 setenv =
     GIT_AUTHOR_NAME = Repo test author
     GIT_COMMITTER_NAME = Repo test committer
     EMAIL = repo@gerrit.nodomain
+
+[pytest]
+timeout = 300
wrapper.py (14 changed lines)
@@ -12,12 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-try:
-  from importlib.machinery import SourceFileLoader
-  _loader = lambda *args: SourceFileLoader(*args).load_module()
-except ImportError:
-  import imp
-  _loader = lambda *args: imp.load_source(*args)
+import importlib.machinery
+import importlib.util
 import os
 
 
@@ -31,5 +27,9 @@ _wrapper_module = None
 def Wrapper():
   global _wrapper_module
   if not _wrapper_module:
-    _wrapper_module = _loader('wrapper', WrapperPath())
+    modname = 'wrapper'
+    loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
+    spec = importlib.util.spec_from_loader(modname, loader)
+    _wrapper_module = importlib.util.module_from_spec(spec)
+    loader.exec_module(_wrapper_module)
   return _wrapper_module
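The replacement follows the standard importlib recipe for loading a source file from an explicit path; the old `imp` module is deprecated and removed in Python 3.12, and `SourceFileLoader` happily loads a file with no `.py` extension, which suits the extensionless `repo` launcher script. A standalone usage sketch with a hypothetical path (repo itself passes `WrapperPath()` here):

```python
import importlib.machinery
import importlib.util

def load_module_from_path(modname, path):
    """Load `path` as a module named `modname` without touching sys.path."""
    loader = importlib.machinery.SourceFileLoader(modname, path)
    spec = importlib.util.spec_from_loader(modname, loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    return module

# Hypothetical example; any readable Python source path works:
# mod = load_module_from_path('wrapper', '/path/to/repo')
```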