1
0
mirror of https://git.yoctoproject.org/meta-arm synced 2026-01-13 15:31:22 +00:00

arm/lib: Specify the FVP environment variables explicitly

It is sometimes useful to be able to configure the behavior of FVPs
using environment variables, e.g. for licensing or plugins.

Add a new FVP option, FVP_ENV_PASSTHROUGH, which explicitly specifies
the Bitbake variables that are passed through to the FVP environment
(in a similar way to BB_ENV_PASSTHROUGH). This ensures that:

 * FVPs launched via runfvp have a reproducible environment
 * FVPs launched via testimage (which run from an isolated Bitbake task)
   can receive environment variables

Change the self-tests to use cwd instead of PATH to find the mock FVPs,
as the PATH environment variable is no longer passed through.

Issue-Id: SCM-4964
Signed-off-by: Peter Hoyes <Peter.Hoyes@arm.com>
Change-Id: Idf6ac6d41fda4cd5f950bc383c2fc1fa1acdf4e3
Signed-off-by: Jon Mason <jon.mason@arm.com>
This commit is contained in:
Peter Hoyes
2022-09-13 13:08:50 +01:00
committed by Jon Mason
parent 7c3e597b4c
commit 820a55d348
8 changed files with 28 additions and 7 deletions

View File

@@ -116,6 +116,14 @@ Arbitrary extra arguments that are passed directly to the FVP. For example:
FVP_EXTRA_ARGS = "--simlimit 60"
```
### `FVP_ENV_PASSTHROUGH`
The FVP is launched with an isolated set of environment variables. Add the name of a Bitbake variable to this list to pass it through to the FVP environment. For example:
```
FVP_ENV_PASSTHROUGH = "ARMLMD_LICENSE_FILE FM_TRACE_PLUGINS"
```
[AEM]: https://developer.arm.com/tools-and-software/simulation-models/fixed-virtual-platforms/arm-ecosystem-models
[FVP]: https://developer.arm.com/tools-and-software/simulation-models/fixed-virtual-platforms

View File

@@ -23,6 +23,8 @@ FVP_CONSOLE ?= ""
FVP_CONSOLES[default] ?= "${FVP_CONSOLE}"
# Arbitrary extra arguments
FVP_EXTRA_ARGS ?= ""
# Bitbake variables to pass to the FVP environment
FVP_ENV_PASSTHROUGH ?= ""
EXTRA_IMAGEDEPENDS += "${FVP_PROVIDER}"
@@ -66,6 +68,10 @@ python do_write_fvpboot_conf() {
data["terminals"] = getFlags("FVP_TERMINALS")
data["args"] = shlex.split(d.getVar("FVP_EXTRA_ARGS") or "")
data["env"] = {}
for var in d.getVar("FVP_ENV_PASSTHROUGH").split():
data["env"][var] = d.getVar(var)
os.makedirs(os.path.dirname(conffile), exist_ok=True)
with open(conffile, "wt") as f:
json.dump(data, f)

View File

@@ -51,6 +51,7 @@ def load(config_file):
sanitise("terminals", {})
sanitise("args", [])
sanitise("consoles", {})
sanitise("env", {})
if not config["exe"]:
raise ValueError("Required value FVP_EXE not set in machine configuration")

View File

@@ -60,7 +60,10 @@ class FVPRunner:
cli = cli_from_config(config, terminal_choice)
cli += extra_args
self._logger.debug(f"Constructed FVP call: {cli}")
self._fvp_process = await asyncio.create_subprocess_exec(*cli, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
self._fvp_process = await asyncio.create_subprocess_exec(
*cli,
stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
env=config['env'])
def detect_terminals(line):
m = re.match(r"^(\S+): Listening for serial connection on port (\d+)$", line)

View File

@@ -20,11 +20,10 @@ class RunFVPTests(OESelftestTestCase):
on exit code 0 or fail the test, otherwise return the CompletedProcess
instance.
"""
# Put the test directory in PATH so that any mock FVPs are found first
newenv = {"PATH": str(testdir) + ":" + os.environ["PATH"]}
cli = [runfvp,] + list(args)
print(f"Calling {cli}")
ret = subprocess.run(cli, env=newenv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
# Set cwd to testdir so that any mock FVPs are found
ret = subprocess.run(cli, cwd=testdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
if should_succeed:
self.assertEqual(ret.returncode, 0, f"runfvp exit {ret.returncode}, output: {ret.stdout}")
return ret.stdout
@@ -40,8 +39,6 @@ class RunFVPTests(OESelftestTestCase):
self.run_fvp("--this-is-an-invalid-option", should_succeed=False)
def test_run_auto_tests(self):
newenv = {"PATH": str(testdir) + ":" + os.environ["PATH"]}
cases = list(testdir.glob("auto-*.json"))
if not cases:
self.fail("No tests found")
@@ -79,6 +76,7 @@ class ConfFileTests(OESelftestTestCase):
self.assertTrue("terminals" in conf)
self.assertTrue("args" in conf)
self.assertTrue("consoles" in conf)
self.assertTrue("env" in conf)
class RunnerTests(OESelftestTestCase):
@@ -97,6 +95,7 @@ class RunnerTests(OESelftestTestCase):
"applications": {'a1': 'file'},
"terminals": {},
"args": ['--extra-arg'],
"env": {"FOO": "BAR"}
}))
m.assert_called_once_with('/usr/bin/FVP_Binary',
@@ -106,4 +105,5 @@ class RunnerTests(OESelftestTestCase):
'--extra-arg',
stdin=unittest.mock.ANY,
stdout=unittest.mock.ANY,
stderr=unittest.mock.ANY)
stderr=unittest.mock.ANY,
env={"FOO":"BAR"})

View File

@@ -1,3 +1,4 @@
{
"fvp-bindir": ".",
"exe": "auto-basic.sh"
}

View File

@@ -1,4 +1,5 @@
{
"fvp-bindir": ".",
"exe": "test-parameters.py",
"parameters": {
"board.cow": "moo",

View File

@@ -1,4 +1,5 @@
{
"fvp-bindir": ".",
"exe": "test-parameters.py",
"parameters": {
"board.cow": "moo"