Mirror of https://gerrit.googlesource.com/git-repo
Synced 2026-02-14 17:50:19 +00:00

Compare commits (18 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5554572f02 | |
| | 97ca50f5f9 | |
| | 8896b68926 | |
| | fec8cd6704 | |
| | b8139bdcf8 | |
| | 26fa3180fb | |
| | d379e77f44 | |
| | 4217a82bec | |
| | 208f344950 | |
| | 138c8a9ff5 | |
| | 9b57aa00f6 | |
| | b1d1ece2fb | |
| | 449b23b698 | |
| | e5fb6e585f | |
| | 48e4137eba | |
| | 172c58398b | |
| | aa506db8a7 | |
| | 14c61d2c9d | |
```diff
@@ -33,9 +33,8 @@ you have newer versions installed, your choices are:
 * Modify the [repo launcher]'s shebang to suite your environment.
 * Download an older version of the [repo launcher] and don't upgrade it.
-  Be aware that there is no guarantee old repo launchers are WILL work with
-  current versions of repo. Bug reports using old launchers will not be
-  accepted.
+  Be aware that we do not guarantee old repo launchers will work with current
+  versions of repo. Bug reports using old launchers will not be accepted.

 ## When to drop support
```
```diff
@@ -135,6 +135,8 @@ def GetEventTargetPath():
     if retval == 0:
         # Strip trailing carriage-return in path.
         path = p.stdout.rstrip("\n")
+        if path == "":
+            return None
     elif retval != 1:
         # `git config --get` is documented to produce an exit status of `1`
         # if the requested variable is not present in the configuration.
```
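The comment in this hunk leans on documented `git config --get` behavior: exit status 0 with empty output means the key is set but blank, 1 means the key is absent, and anything else is an error. A minimal standalone sketch of that decision logic (not the repo implementation; the key name below is only an example):

```python
import subprocess

# Sketch only: interpret `git config --get` the way the hunk above does.
# `user.name` is just an example key, not the one repo reads.
p = subprocess.run(
    ["git", "config", "--get", "user.name"],
    capture_output=True,
    text=True,
)
if p.returncode == 0:
    # Key is present; an empty value is treated the same as unset.
    value = p.stdout.rstrip("\n") or None
elif p.returncode == 1:
    # Documented: exit status 1 means the variable is not in the config.
    value = None
else:
    raise RuntimeError(f"git config failed: {p.stderr.strip()}")
print(value)
```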
```diff
@@ -38,6 +38,8 @@ import tempfile
 import threading


+# Timeout when sending events via socket (applies to connect, send)
+SOCK_TIMEOUT = 0.5  # in seconds
 # BaseEventLog __init__ Counter that is consistent within the same process
 p_init_count = 0

```
```diff
@@ -296,6 +298,7 @@ class BaseEventLog:
                 with socket.socket(
                     socket.AF_UNIX, socket.SOCK_STREAM
                 ) as sock:
+                    sock.settimeout(SOCK_TIMEOUT)
                     sock.connect(path)
                     self._WriteLog(sock.sendall)
                     return f"af_unix:stream:{path}"
```
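For context, the new `SOCK_TIMEOUT` is applied via `settimeout()`, which bounds both `connect()` and `sendall()`. A self-contained sketch of that pattern, assuming a placeholder socket path (the repo code wraps this in `_WriteLog` and its own error handling):

```python
import socket

SOCK_TIMEOUT = 0.5  # seconds, mirroring the constant added above

path = "/tmp/example-trace2.sock"  # hypothetical af_unix event target
try:
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        # settimeout() bounds connect() and sendall(), so a hung or
        # unresponsive event target cannot stall the caller indefinitely.
        sock.settimeout(SOCK_TIMEOUT)
        sock.connect(path)
        sock.sendall(b'{"event": "example"}\n')
except (socket.timeout, OSError):
    pass  # trace2 event logging is best-effort
```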
main.py (11 changed lines)
```diff
@@ -270,10 +270,14 @@ class _Repo:
             self._PrintHelp(short=True)
             return 1

-        run = lambda: self._RunLong(name, gopts, argv) or 0
+        git_trace2_event_log = EventLog()
+        run = (
+            lambda: self._RunLong(name, gopts, argv, git_trace2_event_log) or 0
+        )
         with Trace(
-            "starting new command: %s",
+            "starting new command: %s [sid=%s]",
             ", ".join([name] + argv),
+            git_trace2_event_log.full_sid,
             first_trace=True,
         ):
             if gopts.trace_python:
```
```diff
@@ -290,12 +294,11 @@ class _Repo:
             result = run()
             return result

-    def _RunLong(self, name, gopts, argv):
+    def _RunLong(self, name, gopts, argv, git_trace2_event_log):
         """Execute the (longer running) requested subcommand."""
         result = 0
         SetDefaultColoring(gopts.color)

-        git_trace2_event_log = EventLog()
         outer_client = RepoClient(self.repodir)
         repo_client = outer_client
         if gopts.submanifest_path:
```
```diff
@@ -133,8 +133,8 @@ def normalize_url(url: str) -> str:
     url = url.rstrip("/")
     parsed_url = urllib.parse.urlparse(url)

-    # This matches patterns like "git@github.com:foo/bar".
-    scp_like_url_re = r"^[^:]+@[^:]+:[^/]+/"
+    # This matches patterns like "git@github.com:foo".
+    scp_like_url_re = r"^[^/:]+@[^/:]+:[^/]+"

     # If our URL is missing a schema and matches git's
     # SCP-like syntax we should convert it to a proper
```
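A quick check of what the tightened pattern accepts and rejects (sketch only; the real `normalize_url` also verifies the URL has no scheme before rewriting it, and the URLs below are illustrative):

```python
import re

scp_like_url_re = r"^[^/:]+@[^/:]+:[^/]+"  # the new pattern from the hunk above

print(bool(re.match(scp_like_url_re, "git@github.com:foo")))      # True: no trailing path required
print(bool(re.match(scp_like_url_re, "git@github.com:foo/bar")))  # True
print(bool(re.match(scp_like_url_re, "/git@foo.com:bar/baf")))    # False: / before the : is rejected
print(bool(re.match(scp_like_url_re, "git@foo.com/baf")))         # False: no : separator at all
```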
project.py (84 changed lines)
```diff
@@ -1277,7 +1277,20 @@ class Project:
         if is_new:
             self._InitGitDir(force_sync=force_sync, quiet=quiet)
         else:
-            self._UpdateHooks(quiet=quiet)
+            try:
+                # At this point, it's possible that gitdir points to an old
+                # objdir (e.g. name changed, but objdir exists). Check
+                # references to ensure that's not the case. See
+                # https://issues.gerritcodereview.com/40013418 for more
+                # details.
+                self._CheckDirReference(self.objdir, self.gitdir)
+
+                self._UpdateHooks(quiet=quiet)
+            except GitError as e:
+                if not force_sync:
+                    raise e
+                # Let _InitGitDir fix the issue, force_sync is always True here.
+                self._InitGitDir(force_sync=True, quiet=quiet)
         self._InitRemote()

         if self.UseAlternates:
```
```diff
@@ -1623,9 +1636,9 @@ class Project:
         elif pub == head:
             # All published commits are merged, and thus we are a
             # strict subset. We can fast-forward safely.
-            syncbuf.later1(self, _doff)
+            syncbuf.later1(self, _doff, not verbose)
             if submodules:
-                syncbuf.later1(self, _dosubmodules)
+                syncbuf.later1(self, _dosubmodules, not verbose)
             return

         # Examine the local commits not in the remote. Find the
```
```diff
@@ -1684,10 +1697,10 @@ class Project:
             def _dorebase():
                 self._Rebase(upstream="%s^1" % last_mine, onto=revid)

-            syncbuf.later2(self, _dorebase)
+            syncbuf.later2(self, _dorebase, not verbose)
             if submodules:
-                syncbuf.later2(self, _dosubmodules)
-            syncbuf.later2(self, _docopyandlink)
+                syncbuf.later2(self, _dosubmodules, not verbose)
+            syncbuf.later2(self, _docopyandlink, not verbose)
         elif local_changes:
             try:
                 self._ResetHard(revid)
```
```diff
@@ -1698,9 +1711,9 @@ class Project:
                 fail(e)
                 return
         else:
-            syncbuf.later1(self, _doff)
+            syncbuf.later1(self, _doff, not verbose)
             if submodules:
-                syncbuf.later1(self, _dosubmodules)
+                syncbuf.later1(self, _dosubmodules, not verbose)

     def AddCopyFile(self, src, dest, topdir):
         """Mark |src| for copying to |dest| (relative to |topdir|).
```
```diff
@@ -1835,7 +1848,7 @@ class Project:
                     platform_utils.remove(path)
                 except OSError as e:
                     if e.errno != errno.ENOENT:
-                        logger.error("error: %s: Failed to remove: %s", path, e)
+                        logger.warning("%s: Failed to remove: %s", path, e)
                         failed = True
                         errors.append(e)
             dirs[:] = [
```
```diff
@@ -1854,7 +1867,7 @@ class Project:
                     platform_utils.remove(d)
                 except OSError as e:
                     if e.errno != errno.ENOENT:
-                        logger.error("error: %s: Failed to remove: %s", d, e)
+                        logger.warning("%s: Failed to remove: %s", d, e)
                         failed = True
                         errors.append(e)
                 elif not platform_utils.listdir(d):
```
```diff
@@ -1862,18 +1875,30 @@ class Project:
                     platform_utils.rmdir(d)
                 except OSError as e:
                     if e.errno != errno.ENOENT:
-                        logger.error("error: %s: Failed to remove: %s", d, e)
+                        logger.warning("%s: Failed to remove: %s", d, e)
                         failed = True
                         errors.append(e)
         if failed:
-            logger.error(
-                "error: %s: Failed to delete obsolete checkout.",
-                self.RelPath(local=False),
-            )
-            logger.error(
-                " Remove manually, then run `repo sync -l`.",
-            )
-            raise DeleteWorktreeError(aggregate_errors=errors)
+            rename_path = (
+                f"{self.worktree}_repo_to_be_deleted_{int(time.time())}"
+            )
+            try:
+                platform_utils.rename(self.worktree, rename_path)
+                logger.warning(
+                    "warning: renamed %s to %s. You can delete it, but you "
+                    "might need elevated permissions (e.g. root)",
+                    self.worktree,
+                    rename_path,
+                )
+                # Rename successful! Clear the errors.
+                errors = []
+            except OSError:
+                logger.error(
+                    "%s: Failed to delete obsolete checkout.\n",
+                    " Remove manually, then run `repo sync -l`.",
+                    self.RelPath(local=False),
+                )
+                raise DeleteWorktreeError(aggregate_errors=errors)

         # Try deleting parent dirs if they are empty.
         path = self.worktree
```
```diff
@@ -2870,10 +2895,12 @@ class Project:
         if GitCommand(self, cmd).Wait() != 0:
             raise GitError(f"{self.name} rebase {upstream} ", project=self.name)

-    def _FastForward(self, head, ffonly=False):
+    def _FastForward(self, head, ffonly=False, quiet=True):
         cmd = ["merge", "--no-stat", head]
         if ffonly:
             cmd.append("--ff-only")
+        if quiet:
+            cmd.append("-q")
         if GitCommand(self, cmd).Wait() != 0:
             raise GitError(f"{self.name} merge {head} ", project=self.name)
```
```diff
@@ -3746,17 +3773,20 @@ class _Failure:


 class _Later:
-    def __init__(self, project, action):
+    def __init__(self, project, action, quiet):
         self.project = project
         self.action = action
+        self.quiet = quiet

     def Run(self, syncbuf):
         out = syncbuf.out
-        out.project("project %s/", self.project.RelPath(local=False))
-        out.nl()
+        if not self.quiet:
+            out.project("project %s/", self.project.RelPath(local=False))
+            out.nl()
         try:
             self.action()
-            out.nl()
+            if not self.quiet:
+                out.nl()
             return True
         except GitError:
             out.nl()
```
```diff
@@ -3792,11 +3822,11 @@ class SyncBuffer:
         self._failures.append(_Failure(project, err))
         self._MarkUnclean()

-    def later1(self, project, what):
-        self._later_queue1.append(_Later(project, what))
+    def later1(self, project, what, quiet):
+        self._later_queue1.append(_Later(project, what, quiet))

-    def later2(self, project, what):
-        self._later_queue2.append(_Later(project, what))
+    def later2(self, project, what, quiet):
+        self._later_queue2.append(_Later(project, what, quiet))

     def Finish(self):
         self._PrintMessages()
```
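Taken together with the project.py call sites above, callers now pass `not verbose` through `later1`/`later2`, so the per-project banner is only printed for verbose syncs. A tiny standalone sketch of the same quiet-flag flow, using simplified names rather than the repo classes:

```python
class LaterSketch:
    """Simplified stand-in for _Later: run a deferred action, banner optional."""

    def __init__(self, name, action, quiet):
        self.name = name
        self.action = action
        self.quiet = quiet

    def run(self):
        if not self.quiet:
            print(f"project {self.name}/")
        self.action()


verbose = False
LaterSketch("foo", lambda: None, quiet=not verbose).run()  # default sync: silent
verbose = True
LaterSketch("foo", lambda: None, quiet=not verbose).run()  # verbose sync: prints banner
```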
repo (45 changed lines)
```diff
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2008 The Android Open Source Project
 #
```
```diff
@@ -79,7 +79,7 @@ def check_python_version():
     major = ver.major
     minor = ver.minor

-    # Try to re-exec the version specific Python 3 if needed.
+    # Try to re-exec the version specific Python if needed.
     if (major, minor) < MIN_PYTHON_VERSION_SOFT:
         # Python makes releases ~once a year, so try our min version +10 to help
         # bridge the gap. This is the fallback anyways so perf isn't critical.
```
```diff
@@ -96,36 +96,10 @@ def check_python_version():
                 break
             reexec(f"python{min_major}.{min_minor - inc}")

-    # Try the generic Python 3 wrapper, but only if it's new enough. If it
-    # isn't, we want to just give up below and make the user resolve things.
-    try:
-        proc = subprocess.Popen(
-            [
-                "python3",
-                "-c",
-                "import sys; "
-                "print(sys.version_info.major, sys.version_info.minor)",
-            ],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
-        (output, _) = proc.communicate()
-        python3_ver = tuple(int(x) for x in output.decode("utf-8").split())
-    except (OSError, subprocess.CalledProcessError):
-        python3_ver = None
-
-    # If the python3 version looks like it's new enough, give it a try.
-    if (
-        python3_ver
-        and python3_ver >= MIN_PYTHON_VERSION_HARD
-        and python3_ver != (major, minor)
-    ):
-        reexec("python3")
-
     # We're still here, so diagnose things for the user.
     if (major, minor) < MIN_PYTHON_VERSION_HARD:
         print(
-            "repo: error: Python 3 version is too old; "
+            "repo: error: Python version is too old; "
             "Please use Python {}.{} or newer.".format(
                 *MIN_PYTHON_VERSION_HARD
             ),
```
```diff
@@ -150,7 +124,7 @@ if not REPO_REV:
 BUG_URL = "https://issues.gerritcodereview.com/issues/new?component=1370071"

 # increment this whenever we make important changes to this script
-VERSION = (2, 40)
+VERSION = (2, 42)

 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (2, 3)
```
```diff
@@ -1245,7 +1219,6 @@ class Requirements:
             with open(path, "rb") as f:
                 data = f.read()
         except OSError:
-            # NB: EnvironmentError is used for Python 2 & 3 compatibility.
             # If we couldn't open the file, assume it's an old source tree.
             return None

```
```diff
@@ -1364,13 +1337,9 @@ def _Version():
     print(f"git {ParseGitVersion().full}")
     print(f"Python {sys.version}")
     uname = platform.uname()
-    if sys.version_info.major < 3:
-        # Python 3 returns a named tuple, but Python 2 is simpler.
-        print(uname)
-    else:
-        print(f"OS {uname.system} {uname.release} ({uname.version})")
-        processor = uname.processor if uname.processor else "unknown"
-        print(f"CPU {uname.machine} ({processor})")
+    print(f"OS {uname.system} {uname.release} ({uname.version})")
+    processor = uname.processor if uname.processor else "unknown"
+    print(f"CPU {uname.machine} ({processor})")
     print("Bug reports:", BUG_URL)
     sys.exit(0)

```
```diff
@@ -21,6 +21,7 @@ import multiprocessing
 import netrc
 import optparse
 import os
+from pathlib import Path
 import sys
 import tempfile
 import time
```
```diff
@@ -82,16 +83,50 @@ from wrapper import Wrapper
 _ONE_DAY_S = 24 * 60 * 60

 # Env var to implicitly turn auto-gc back on. This was added to allow a user to
 # revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
 _REPO_AUTO_GC = "REPO_AUTO_GC"
 _AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"

 _REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")

 logger = RepoLogger(__file__)


+def _SafeCheckoutOrder(checkouts: List[Project]) -> List[List[Project]]:
+    """Generate a sequence of checkouts that is safe to perform. The client
+    should checkout everything from n-th index before moving to n+1.
+
+    This is only useful if manifest contains nested projects.
+
+    E.g. if foo, foo/bar and foo/bar/baz are project paths, then foo needs to
+    finish before foo/bar can proceed, and foo/bar needs to finish before
+    foo/bar/baz."""
+    res = [[]]
+    current = res[0]
+
+    # depth_stack contains a current stack of parent paths.
+    depth_stack = []
+    # checkouts are iterated in asc order by relpath. That way, it can easily be
+    # determined if the previous checkout is parent of the current checkout.
+    for checkout in sorted(checkouts, key=lambda x: x.relpath):
+        checkout_path = Path(checkout.relpath)
+        while depth_stack:
+            try:
+                checkout_path.relative_to(depth_stack[-1])
+            except ValueError:
+                # Path.relative_to returns ValueError if paths are not relative.
+                # TODO(sokcevic): Switch to is_relative_to once min supported
+                # version is py3.9.
+                depth_stack.pop()
+            else:
+                if len(depth_stack) >= len(res):
+                    # Another depth created.
+                    res.append([])
+                break
+
+        current = res[len(depth_stack)]
+        current.append(checkout)
+        depth_stack.append(checkout_path)
+
+    return res
+
+
 class _FetchOneResult(NamedTuple):
     """_FetchOne return value.

```
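The grouping described in the docstring can be exercised directly; this sketch mirrors the new unit tests further down and assumes it is run from a git-repo checkout where `subcmds.sync` is importable:

```python
from types import SimpleNamespace

from subcmds import sync

# Any object with a .relpath attribute works, as the unit tests below show.
projects = [
    SimpleNamespace(relpath=p) for p in ("foo/bar/baz", "foo", "foo/bar", "bar")
]
groups = sync._SafeCheckoutOrder(projects)
print([[p.relpath for p in group] for group in groups])
# Expected: [['bar', 'foo'], ['foo/bar'], ['foo/bar/baz']]
```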
```diff
@@ -618,7 +653,7 @@ later is required to fix a server side protocol bug.

         if not use_super:
             continue
-        m.superproject.SetQuiet(opt.quiet)
+        m.superproject.SetQuiet(not opt.verbose)
         print_messages = git_superproject.PrintMessages(
             opt.use_superproject, m
         )
```
```diff
@@ -1040,15 +1075,21 @@ later is required to fix a server side protocol bug.
                 pm.update(msg=project.name)
             return ret

-        proc_res = self.ExecuteInParallel(
-            opt.jobs_checkout,
-            functools.partial(
-                self._CheckoutOne, opt.detach_head, opt.force_sync, opt.verbose
-            ),
-            all_projects,
-            callback=_ProcessResults,
-            output=Progress("Checking out", len(all_projects), quiet=opt.quiet),
-        )
+        for projects in _SafeCheckoutOrder(all_projects):
+            proc_res = self.ExecuteInParallel(
+                opt.jobs_checkout,
+                functools.partial(
+                    self._CheckoutOne,
+                    opt.detach_head,
+                    opt.force_sync,
+                    opt.verbose,
+                ),
+                projects,
+                callback=_ProcessResults,
+                output=Progress(
+                    "Checking out", len(all_projects), quiet=opt.quiet
+                ),
+            )

         self._local_sync_state.Save()
         return proc_res and not err_results
```
```diff
@@ -1501,7 +1542,7 @@ later is required to fix a server side protocol bug.
         buf = TeeStringIO(sys.stdout)
         try:
             result = mp.Sync_NetworkHalf(
-                quiet=opt.quiet,
+                quiet=not opt.verbose,
                 output_redir=buf,
                 verbose=opt.verbose,
                 current_branch_only=self._GetCurrentBranchOnly(
```
```diff
@@ -1544,9 +1585,7 @@ later is required to fix a server side protocol bug.
             mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
         )
         if not clean:
-            raise UpdateManifestError(
-                aggregate_errors=errors, project=mp.name
-            )
+            raise UpdateManifestError(aggregate_errors=errors)
         self._ReloadManifest(manifest_name, mp.manifest)

     def ValidateOptions(self, opt, args):
```
```diff
@@ -1577,16 +1616,6 @@ later is required to fix a server side protocol bug.
         if opt.prune is None:
             opt.prune = True

-        if opt.auto_gc is None and _AUTO_GC:
-            logger.error(
-                "Will run `git gc --auto` because %s is set. %s is deprecated "
-                "and will be removed in a future release. Use `--auto-gc` "
-                "instead.",
-                _REPO_AUTO_GC,
-                _REPO_AUTO_GC,
-            )
-            opt.auto_gc = True
-
     def _ValidateOptionsWithManifest(self, opt, mp):
         """Like ValidateOptions, but after we've updated the manifest.

```
```diff
@@ -1630,7 +1659,7 @@ later is required to fix a server side protocol bug.
         errors = []
         try:
             self._ExecuteHelper(opt, args, errors)
-        except RepoExitError:
+        except (RepoExitError, RepoChangedException):
             raise
         except (KeyboardInterrupt, Exception) as e:
             raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
```
```diff
@@ -72,3 +72,12 @@ def tmp_home_dir(monkeypatch, tmp_path_factory):
     the function scope.
     """
     return _set_home(monkeypatch, tmp_path_factory.mktemp("home"))
+
+
+@pytest.fixture(autouse=True)
+def setup_user_identity(monkeysession, scope="session"):
+    """Set env variables for author and committer name and email."""
+    monkeysession.setenv("GIT_AUTHOR_NAME", "Foo Bar")
+    monkeysession.setenv("GIT_COMMITTER_NAME", "Foo Bar")
+    monkeysession.setenv("GIT_AUTHOR_EMAIL", "foo@bar.baz")
+    monkeysession.setenv("GIT_COMMITTER_EMAIL", "foo@bar.baz")
```
```diff
@@ -1139,6 +1139,20 @@ class NormalizeUrlTests(ManifestParseTestCase):
             "http://foo.com/bar/baz", manifest_xml.normalize_url(url)
         )

+        url = "http://foo.com/bar/"
+        self.assertEqual("http://foo.com/bar", manifest_xml.normalize_url(url))
+
+    def test_has_leading_slash(self):
+        """SCP-like syntax except a / comes before the : which git disallows."""
+        url = "/git@foo.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "gi/t@foo.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
+        url = "git@fo/o.com:bar/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
     def test_has_no_scheme(self):
         """Deal with cases where we have no scheme, but we also
         aren't dealing with the git SCP-like syntax
```
```diff
@@ -1146,9 +1160,15 @@ class NormalizeUrlTests(ManifestParseTestCase):
         url = "foo.com/baf/bat"
         self.assertEqual(url, manifest_xml.normalize_url(url))

+        url = "foo.com/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
         url = "git@foo.com/baf/bat"
         self.assertEqual(url, manifest_xml.normalize_url(url))

+        url = "git@foo.com/baf"
+        self.assertEqual(url, manifest_xml.normalize_url(url))
+
         url = "/file/path/here"
         self.assertEqual(url, manifest_xml.normalize_url(url))
```
```diff
@@ -1157,3 +1177,30 @@ class NormalizeUrlTests(ManifestParseTestCase):
         self.assertEqual(
             "ssh://git@foo.com/bar/baf", manifest_xml.normalize_url(url)
         )
+
+        url = "git@foo.com:bar/"
+        self.assertEqual(
+            "ssh://git@foo.com/bar", manifest_xml.normalize_url(url)
+        )
+
+    def test_remote_url_resolution(self):
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="git@github.com:org2/",
+            manifestUrl="git@github.com:org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
+
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="ssh://git@github.com/org2/",
+            manifestUrl="git@github.com:org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
+
+        remote = manifest_xml._XmlRemote(
+            name="foo",
+            fetch="git@github.com:org2/",
+            manifestUrl="ssh://git@github.com/org2/custom_manifest.git",
+        )
+        self.assertEqual("ssh://git@github.com/org2", remote.resolvedFetchUrl)
```
```diff
@@ -304,6 +304,32 @@ class LocalSyncState(unittest.TestCase):
         self.assertEqual(self.state.GetFetchTime(projA), 5)


+class SafeCheckoutOrder(unittest.TestCase):
+    def test_no_nested(self):
+        p_f = mock.MagicMock(relpath="f")
+        p_foo = mock.MagicMock(relpath="foo")
+        out = sync._SafeCheckoutOrder([p_f, p_foo])
+        self.assertEqual(out, [[p_f, p_foo]])
+
+    def test_basic_nested(self):
+        p_foo = p_foo = mock.MagicMock(relpath="foo")
+        p_foo_bar = mock.MagicMock(relpath="foo/bar")
+        out = sync._SafeCheckoutOrder([p_foo, p_foo_bar])
+        self.assertEqual(out, [[p_foo], [p_foo_bar]])
+
+    def test_complex_nested(self):
+        p_foo = mock.MagicMock(relpath="foo")
+        p_foo_bar = mock.MagicMock(relpath="foo/bar")
+        p_foo_bar_baz_baq = mock.MagicMock(relpath="foo/bar/baz/baq")
+        p_bar = mock.MagicMock(relpath="bar")
+        out = sync._SafeCheckoutOrder(
+            [p_foo_bar_baz_baq, p_foo, p_foo_bar, p_bar]
+        )
+        self.assertEqual(
+            out, [[p_bar, p_foo], [p_foo_bar], [p_foo_bar_baz_baq]]
+        )
+
+
 class GetPreciousObjectsState(unittest.TestCase):
     """Tests for _GetPreciousObjectsState."""
```