From 214078cfe75c610c6823cda46a2c5705b1bbd607 Mon Sep 17 00:00:00 2001
From: Jian Cai
Date: Sat, 6 Jul 2019 11:49:37 -0700
Subject: toolchain-utils: migrate Telemetry test results from chartjson to histograms

Shift to histograms, as the chartjson format is being deprecated for
Telemetry tests.

BUG=chromium:967868
TEST=Local tests.

Change-Id: I0645c6f10a93a454cc50090d2b790c9f386d9358
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/third_party/toolchain-utils/+/1691318
Reviewed-by: Manoj Gupta
Tested-by: Jian Cai
---
 crosperf/default-telemetry-results.json | 17 +++++-----
 crosperf/results_cache.py               | 59 ++++++++++++++++++++++++++++-----
 2 files changed, 59 insertions(+), 17 deletions(-)

(limited to 'crosperf')

diff --git a/crosperf/default-telemetry-results.json b/crosperf/default-telemetry-results.json
index 4f5ccf91..5352c161 100644
--- a/crosperf/default-telemetry-results.json
+++ b/crosperf/default-telemetry-results.json
@@ -68,15 +68,15 @@
     "warm@@timeToOnload_avg__summary"
   ],
   "speedometer": [
-    "Total__Total",
-    "Total__summary"
+    "RunsPerMinute",
+    "Total"
   ],
   "speedometer2": [
-    "RunsPerMinute__summary",
-    "Total__summary"
+    "RunsPerMinute",
+    "Total"
   ],
   "octane": [
-    "Total__Score"
+    "Total.Score"
  ],
   "jsgamebench": [
     "Score__Score"
@@ -148,13 +148,12 @@
     "warm@@timeToOnload_avg__summary"
   ],
   "kraken": [
-    "Total__Total",
-    "Total__summary"
+    "Total"
   ],
   "jetstream": [
-    "Score__summary"
+    "Score"
   ],
   "cros_ui_smoothness": [
-    "ui_percentage_smooth__summary"
+    "ui_percentage_smooth"
   ]
 }
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index bef78cb4..54569808 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -251,6 +251,8 @@ class Result(object):
     return out

   def GetResultsFile(self):
+    if self.suite == 'telemetry_Crosperf':
+      return self.FindFilesInResultsDir('-name histograms.json').splitlines()
     return self.FindFilesInResultsDir('-name results-chart.json').splitlines()

   def GetPerfDataFiles(self):
@@ -262,8 +264,12 @@ class Result(object):
   def GetDataMeasurementsFiles(self):
     result = self.FindFilesInResultsDir('-name perf_measurements').splitlines()
     if not result:
-      result = \
-          self.FindFilesInResultsDir('-name results-chart.json').splitlines()
+      if self.suite == 'telemetry_Crosperf':
+        result = \
+            self.FindFilesInResultsDir('-name histograms.json').splitlines()
+      else:
+        result = \
+            self.FindFilesInResultsDir('-name results-chart.json').splitlines()
     return result

   def _CheckDebugPath(self, option, path):
@@ -367,16 +373,15 @@ class Result(object):
       # Grab keyvals from the directory.
       self.ProcessResults()

-  def ProcessJsonResults(self):
+  def ProcessChartResults(self):
     # Open and parse the json results file generated by telemetry/test_that.
     if not self.results_file:
       raise IOError('No results file found.')
     filename = self.results_file[0]
     if not filename.endswith('.json'):
       raise IOError('Attempt to call json on non-json file: %s' % filename)
     if not os.path.exists(filename):
-      return {}
+      raise IOError('%s does not exist' % filename)

     keyvals = {}
     with open(filename, 'r') as f:
@@ -406,13 +411,51 @@ class Result(object):
           keyvals[keyname] = new_value
     return keyvals

+  def ProcessHistogramsResults(self):
+    # Open and parse the json results file generated by telemetry/test_that.
+    if not self.results_file:
+      raise IOError('No results file found.')
+    filename = self.results_file[0]
+    if not filename.endswith('.json'):
+      raise IOError('Attempt to call json on non-json file: %s' % filename)
+    if not os.path.exists(filename):
+      raise IOError('%s does not exist' % filename)
+
+    keyvals = {}
+    with open(filename) as f:
+      histograms = json.load(f)
+      for obj in histograms:
+        if 'name' not in obj or 'sampleValues' not in obj:
+          continue
+        metric_name = obj['name']
+        vals = obj['sampleValues']
+        if isinstance(vals, list):
+          result = float(sum(vals)) / len(vals)
+        else:
+          result = vals
+        unit = obj['unit']
+        if metric_name not in keyvals:
+          keyvals[metric_name] = [[result], unit]
+        else:
+          # In case the benchmark has multiple stories, collect every mean.
+          keyvals[metric_name][0].append(result)
+    for metric_name in keyvals:
+      vals = keyvals[metric_name][0]
+      unit = keyvals[metric_name][1]
+      result = float(sum(vals)) / len(vals)
+      keyvals[metric_name] = [result, unit]
+    return keyvals
+
   def ProcessResults(self, use_cache=False):
     # Note that this function doesn't know anything about whether there is a
     # cache hit or miss. It should process results agnostic of the cache hit
     # state.
-    if self.results_file and self.results_file[0].find(
-        'results-chart.json') != -1:
-      self.keyvals = self.ProcessJsonResults()
+    if self.results_file and self.suite == 'telemetry_Crosperf' and \
+       'histograms.json' in self.results_file[0]:
+      self.keyvals = self.ProcessHistogramsResults()
+    elif self.results_file and self.suite != 'telemetry_Crosperf' and \
+        'results-chart.json' in self.results_file[0]:
+      self.keyvals = self.ProcessChartResults()
     else:
       if not use_cache:
         print('\n ** WARNING **: Had to use deprecated output-method to '
--
cgit v1.2.3
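
For reference, below is a minimal, self-contained sketch of the reduction that the new
ProcessHistogramsResults() applies to a Telemetry histograms.json file: each histogram's
sampleValues are averaged first, then the per-story means for the same metric are averaged
again. The sample data, the helper name summarize_histograms, and the unit strings are
illustrative only; they are not taken from the patch or from a real benchmark run.

import json

# Illustrative histograms.json content (hypothetical values, not from a real run).
SAMPLE_HISTOGRAMS = json.loads("""
[
  {"name": "Total", "unit": "ms_smallerIsBetter", "sampleValues": [3200.0, 3400.0]},
  {"name": "Total", "unit": "ms_smallerIsBetter", "sampleValues": [3000.0]},
  {"name": "RunsPerMinute", "unit": "unitless_biggerIsBetter", "sampleValues": 90.0}
]
""")


def summarize_histograms(histograms):
  """Collapse histogram entries into {metric: [mean, unit]}, mirroring the
  averaging done by ProcessHistogramsResults() in the patch above."""
  keyvals = {}
  for obj in histograms:
    # Entries without samples (e.g. shared diagnostics) are skipped, as in the patch.
    if 'name' not in obj or 'sampleValues' not in obj:
      continue
    vals = obj['sampleValues']
    # sampleValues may be a list of samples or a single scalar.
    mean = float(sum(vals)) / len(vals) if isinstance(vals, list) else vals
    keyvals.setdefault(obj['name'], [[], obj['unit']])[0].append(mean)
  # Second pass: average the per-story means so each metric maps to [value, unit].
  return {name: [float(sum(means)) / len(means), unit]
          for name, (means, unit) in keyvals.items()}


print(summarize_histograms(SAMPLE_HISTOGRAMS))
# {'Total': [3150.0, 'ms_smallerIsBetter'], 'RunsPerMinute': [90.0, 'unitless_biggerIsBetter']}

Note the two-pass averaging: each story contributes one mean, so a story with many samples
does not outweigh a story with few.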