From: Weilin Wang <[email protected]>
Add a check to return early from the metric validation test when "perf list
metric" does not output any metrics. This would happen when NO_JEVENTS=1 is
set or on a system where no metrics are supported.
Signed-off-by: Weilin Wang <[email protected]>
---
tools/perf/tests/shell/lib/perf_metric_validation.py | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/tools/perf/tests/shell/lib/perf_metric_validation.py b/tools/perf/tests/shell/lib/perf_metric_validation.py
index a2d235252183..0b94216c9c46 100644
--- a/tools/perf/tests/shell/lib/perf_metric_validation.py
+++ b/tools/perf/tests/shell/lib/perf_metric_validation.py
@@ -95,7 +95,7 @@ class Validator:
indent=4)
def get_results(self, idx: int = 0):
- return self.results[idx]
+ return self.results.get(idx)
def get_bounds(self, lb, ub, error, alias={}, ridx: int = 0) -> list:
"""
@@ -173,7 +173,10 @@ class Validator:
pcnt = 0
tcnt = 0
rerun = list()
- for name, val in self.get_results().items():
+ results = self.get_results()
+ if not results:
+ return
+ for name, val in results.items():
if val < 0:
negmetric[name] = val
rerun.append(name)
@@ -532,6 +535,9 @@ class Validator:
'''
if not self.collectlist:
self.parse_perf_metrics()
+ if not self.metrics:
+ print("No metric found for testing")
+ return 0
self.create_rules()
for i in range(0, len(self.workloads)):
self.wlidx = i
--
2.42.0
On Wed, May 22, 2024 at 1:43 PM <[email protected]> wrote:
>
> From: Weilin Wang <[email protected]>
>
> Add a check to return early from the metric validation test when "perf list
> metric" does not output any metrics. This would happen when NO_JEVENTS=1 is
> set or on a system where no metrics are supported.
>
>
> Signed-off-by: Weilin Wang <[email protected]>
Tested-by: Ian Rogers <[email protected]>
Thanks,
Ian
> ---
> tools/perf/tests/shell/lib/perf_metric_validation.py | 10 ++++++++--
> 1 file changed, 8 insertions(+), 2 deletions(-)
>
> diff --git a/tools/perf/tests/shell/lib/perf_metric_validation.py b/tools/perf/tests/shell/lib/perf_metric_validation.py
> index a2d235252183..0b94216c9c46 100644
> --- a/tools/perf/tests/shell/lib/perf_metric_validation.py
> +++ b/tools/perf/tests/shell/lib/perf_metric_validation.py
> @@ -95,7 +95,7 @@ class Validator:
> indent=4)
>
> def get_results(self, idx: int = 0):
> - return self.results[idx]
> + return self.results.get(idx)
>
> def get_bounds(self, lb, ub, error, alias={}, ridx: int = 0) -> list:
> """
> @@ -173,7 +173,10 @@ class Validator:
> pcnt = 0
> tcnt = 0
> rerun = list()
> - for name, val in self.get_results().items():
> + results = self.get_results()
> + if not results:
> + return
> + for name, val in results.items():
> if val < 0:
> negmetric[name] = val
> rerun.append(name)
> @@ -532,6 +535,9 @@ class Validator:
> '''
> if not self.collectlist:
> self.parse_perf_metrics()
> + if not self.metrics:
> + print("No metric found for testing")
> + return 0
> self.create_rules()
> for i in range(0, len(self.workloads)):
> self.wlidx = i
> --
> 2.42.0
>