Diffstat (limited to 'fdo_scripts')
-rwxr-xr-x  fdo_scripts/divide_and_merge_profiles.py       145
-rwxr-xr-x  fdo_scripts/divide_and_merge_profiles_test.py  132
-rwxr-xr-x  fdo_scripts/profile_cycler.py                  205
-rw-r--r--  fdo_scripts/summarize_hot_blocks.py            184
-rw-r--r--  fdo_scripts/vanilla_vs_fdo.py                  334
5 files changed, 1000 insertions, 0 deletions
diff --git a/fdo_scripts/divide_and_merge_profiles.py b/fdo_scripts/divide_and_merge_profiles.py
new file mode 100755
index 00000000..c75e353a
--- /dev/null
+++ b/fdo_scripts/divide_and_merge_profiles.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Script to divide and merge profiles."""
+
+import copy
+import optparse
+import os
+import pickle
+import re
+import sys
+import tempfile
+
+import build_chrome_browser
+import lock_machine
+import run_tests
+from utils import command_executer
+from utils import logger
+
+
+class ProfileMerger:
+ def __init__(self, inputs, output, chunk_size, merge_program, multipliers):
+ self._inputs = inputs
+ self._output = output
+ self._chunk_size = chunk_size
+ self._merge_program = merge_program
+ self._multipliers = multipliers
+ self._ce = command_executer.GetCommandExecuter()
+ self._l = logger.GetLogger()
+
+ def _GetFilesSetForInputDir(self, input_dir):
+ output_file = tempfile.mktemp()
+ command = "find %s -name '*.gcda' -o -name '*.imports' > %s" % (input_dir, output_file)
+ self._ce.RunCommand(command)
+ files = open(output_file, "r").read()
+ files_set = set([])
+ for f in files.splitlines():
+ stripped_file = f.replace(input_dir, "", 1)
+ stripped_file = stripped_file.lstrip("/")
+ files_set.add(stripped_file)
+ return files_set
+
+ def _PopulateFilesSet(self):
+ self._files_set = set([])
+ for i in self._inputs:
+ current_files_set = self._GetFilesSetForInputDir(i)
+ self._files_set.update(current_files_set)
+
+ def _GetSubset(self):
+ ret = []
+ for i in range(self._chunk_size):
+ if not self._files_set:
+ break
+ ret.append(self._files_set.pop())
+ return ret
+
+ def _CopyFilesTree(self, input_dir, files, output_dir):
+ for f in files:
+ src_file = os.path.join(input_dir, f)
+ dst_file = os.path.join(output_dir, f)
+ if not os.path.isdir(os.path.dirname(dst_file)):
+ command = "mkdir -p %s" % os.path.dirname(dst_file)
+ self._ce.RunCommand(command)
+ command = "cp %s %s" % (src_file, dst_file)
+ self._ce.RunCommand(command)
+
+ def _DoChunkMerge(self, current_files):
+ temp_dirs = []
+ for i in self._inputs:
+ temp_dir = tempfile.mkdtemp()
+ temp_dirs.append(temp_dir)
+ self._CopyFilesTree(i, current_files, temp_dir)
+ # Now do the merge.
+ command = ("%s --inputs=%s --output=%s" %
+ (self._merge_program,
+ ",".join(temp_dirs),
+ self._output))
+ if self._multipliers:
+ command = ("%s --multipliers=%s" %
+ (command, self._multipliers))
+ ret = self._ce.RunCommand(command)
+ assert ret == 0, "%s command failed!" % command
+ for temp_dir in temp_dirs:
+ command = "rm -rf %s" % temp_dir
+ self._ce.RunCommand(command)
+
+ def DoMerge(self):
+ self._PopulateFilesSet()
+ while True:
+ current_files = self._GetSubset()
+ if not current_files:
+ break
+ self._DoChunkMerge(current_files)
+
+
+def Main(argv):
+ """The main function."""
+ # Common initializations
+### command_executer.InitCommandExecuter(True)
+ command_executer.InitCommandExecuter()
+ l = logger.GetLogger()
+ ce = command_executer.GetCommandExecuter()
+ parser = optparse.OptionParser()
+ parser.add_option("--inputs",
+ dest="inputs",
+ help="Comma-separated input profile directories to merge.")
+ parser.add_option("--output",
+ dest="output",
+ help="Output profile directory.")
+ parser.add_option("--chunk_size",
+ dest="chunk_size",
+ default="50",
+ help="Chunk size to divide up the profiles into.")
+ parser.add_option("--merge_program",
+ dest="merge_program",
+ default="/home/xur/bin/profile_merge_v15.par",
+ help="Merge program to use to do the actual merge.")
+ parser.add_option("--multipliers",
+ dest="multipliers",
+                    help="Multipliers to use when merging (optional).")
+
+ options, _ = parser.parse_args(argv)
+
+ if not all([options.inputs,
+ options.output,]):
+ l.LogError("Must supply --inputs and --output")
+ return 1
+
+ try:
+ pm = ProfileMerger(options.inputs.split(","), options.output,
+ int(options.chunk_size), options.merge_program,
+ options.multipliers)
+ pm.DoMerge()
+ retval = 0
+  except Exception:
+ retval = 1
+ finally:
+ print "My work is done..."
+ return retval
+
+
+if __name__ == "__main__":
+ retval = Main(sys.argv)
+ sys.exit(retval)
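
For reference, here is a minimal standalone sketch of the chunked divide-and-merge strategy that ProfileMerger implements. It walks the input trees with os.walk and copies with shutil instead of shelling out to find/cp, and merge_program stands in for an external profile-merge tool (the .par path used above is site-specific); treat it as an illustration under those assumptions rather than a drop-in replacement.

import os
import shutil
import subprocess
import tempfile


def collect_relative_paths(input_dirs, suffixes=(".gcda", ".imports")):
    # Union of profile-file paths, relative to whichever input tree holds them.
    rel_paths = set()
    for input_dir in input_dirs:
        for root, _, files in os.walk(input_dir):
            for name in files:
                if name.endswith(suffixes):
                    rel_paths.add(os.path.relpath(os.path.join(root, name), input_dir))
    return rel_paths


def merge_in_chunks(input_dirs, output_dir, merge_program, chunk_size=50):
    # Merge a bounded number of files per invocation so the merge tool never
    # has to hold the whole profile set at once.
    remaining = sorted(collect_relative_paths(input_dirs))
    for start in range(0, len(remaining), chunk_size):
        chunk = remaining[start:start + chunk_size]
        temp_dirs = []
        try:
            for input_dir in input_dirs:
                temp_dir = tempfile.mkdtemp()
                temp_dirs.append(temp_dir)
                for rel in chunk:
                    src = os.path.join(input_dir, rel)
                    if not os.path.exists(src):
                        continue
                    dst = os.path.join(temp_dir, rel)
                    if not os.path.isdir(os.path.dirname(dst)):
                        os.makedirs(os.path.dirname(dst))
                    shutil.copy(src, dst)
            # All chunks accumulate into the same output directory.
            subprocess.check_call([merge_program,
                                   "--inputs=%s" % ",".join(temp_dirs),
                                   "--output=%s" % output_dir])
        finally:
            for temp_dir in temp_dirs:
                shutil.rmtree(temp_dir, ignore_errors=True)
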
diff --git a/fdo_scripts/divide_and_merge_profiles_test.py b/fdo_scripts/divide_and_merge_profiles_test.py
new file mode 100755
index 00000000..f42db4e9
--- /dev/null
+++ b/fdo_scripts/divide_and_merge_profiles_test.py
@@ -0,0 +1,132 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+__author__ = "asharif@google.com (Ahmad Sharif)"
+
+import os
+import random
+import shutil
+import tempfile
+import unittest
+
+from utils import command_executer
+from utils import misc
+
+
+class DivideAndMergeProfilesTest(unittest.TestCase):
+ def tearDown(self):
+ shutil.rmtree(self._program_dir)
+ for profile_dir in self._profile_dirs:
+ shutil.rmtree(profile_dir)
+
+ def setUp(self):
+ self._ce = command_executer.GetCommandExecuter()
+ self._program_dir = tempfile.mkdtemp()
+ self._writeProgram()
+ self._writeMakefile()
+ with misc.WorkingDirectory(self._program_dir):
+ self._ce.RunCommand("make")
+ num_profile_dirs = 2
+ self._profile_dirs = []
+ for i in range(num_profile_dirs):
+ profile_dir = tempfile.mkdtemp()
+ command = ("GCOV_PREFIX_STRIP=%s GCOV_PREFIX=$(/bin/pwd) "
+ " %s/program" %
+ (profile_dir.count("/"),
+ self._program_dir))
+ with misc.WorkingDirectory(profile_dir):
+ self._ce.RunCommand(command)
+ self._profile_dirs.append(profile_dir)
+ self._merge_program = "/home/build/static/projects/crosstool/profile-merge/v14.5/profile_merge.par"
+
+ def _writeMakefile(self):
+ makefile_contents = """
+CC = gcc
+
+CFLAGS = -fprofile-generate
+
+SRCS=$(wildcard *.c)
+
+OBJS=$(SRCS:.c=.o)
+
+all: program
+
+program: $(OBJS)
+ $(CC) -o $@ $^ $(CFLAGS)
+
+%.o: %.c
+ $(CC) -c -o $@ $^ $(CFLAGS)"""
+ makefile = os.path.join(self._program_dir, "Makefile")
+ with open(makefile, "w") as f:
+ print >> f, makefile_contents
+
+ def _writeProgram(self, num_files=100):
+ for i in range(num_files):
+ current_file = os.path.join(self._program_dir, "%s.c" % i)
+ with open(current_file, "w") as f:
+ if i != num_files - 1:
+ print >> f, "extern void foo%s();" % (i + 1)
+ print >> f, "void foo%s(){foo%s();}" % (i, i + 1)
+ else:
+ print >> f, "void foo%s(){printf(\"\");}" % i
+ if i == 0:
+ print >> f, "int main(){foo%s(); return 0;}" % i
+
+ def testMerge(self):
+ reference_output = self._getReferenceOutput()
+ my_output = self._getMyOutput()
+
+ ret = self._diffOutputs(reference_output, my_output)
+ shutil.rmtree(my_output)
+ shutil.rmtree(reference_output)
+ self.assertTrue(ret == 0)
+
+ def _diffOutputs(self, reference, mine):
+ command = "diff -uNr %s %s" % (reference, mine)
+ return self._ce.RunCommand(command)
+
+ def _getMyOutput(self, args=""):
+ my_output = tempfile.mkdtemp()
+ my_merge_program = os.path.join(os.path.dirname(__file__),
+ "divide_and_merge_profiles.py")
+ command = ("python %s --inputs=%s --output=%s "
+ "--chunk_size=10 "
+ "--merge_program=%s "
+ "%s" %
+ (my_merge_program,
+ ",".join(self._profile_dirs),
+ my_output,
+ self._merge_program,
+ args))
+ self._ce.RunCommand(command)
+ return my_output
+
+ def _getReferenceOutput(self, args=""):
+ # First do a regular merge.
+ reference_output = tempfile.mkdtemp()
+ command = ("%s --inputs=%s --output=%s %s" %
+ (self._merge_program,
+ ",".join(self._profile_dirs),
+ reference_output,
+ args))
+ self._ce.RunCommand(command)
+ return reference_output
+
+ def testMergeWithMultipliers(self):
+ num_profiles = len(self._profile_dirs)
+ multipliers = [str(random.randint(0, num_profiles)) \
+ for _ in range(num_profiles)]
+ args = "--multipliers=%s" % ",".join(multipliers)
+
+ reference_output = self._getReferenceOutput(args)
+ my_output = self._getMyOutput(args)
+
+ ret = self._diffOutputs(reference_output, my_output)
+
+ shutil.rmtree(my_output)
+ shutil.rmtree(reference_output)
+ self.assertTrue(ret == 0)
+
+if __name__ == "__main__":
+ unittest.main()
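
As a side note, the test fixture above relies on gcov's GCOV_PREFIX / GCOV_PREFIX_STRIP environment variables to redirect the .gcda output of the instrumented program into a fresh directory per run. A minimal sketch of that mechanism follows; the paths and strip depth are illustrative, not taken from the test.

import os
import subprocess

def run_instrumented(program_path, profile_dir, strip_depth):
    # GCOV_PREFIX_STRIP drops strip_depth leading components from the .gcda
    # path that was compiled into the binary; GCOV_PREFIX is then prepended,
    # so the profile data lands under profile_dir instead of the build tree.
    env = dict(os.environ)
    env["GCOV_PREFIX"] = profile_dir
    env["GCOV_PREFIX_STRIP"] = str(strip_depth)
    subprocess.check_call([program_path], env=env)

# Example with hypothetical paths: write profiles for ./program under /tmp/prof1.
# run_instrumented("./program", "/tmp/prof1", strip_depth=3)
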
diff --git a/fdo_scripts/profile_cycler.py b/fdo_scripts/profile_cycler.py
new file mode 100755
index 00000000..efdafffa
--- /dev/null
+++ b/fdo_scripts/profile_cycler.py
@@ -0,0 +1,205 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Script to profile a page cycler, and get it back to the host."""
+
+import copy
+import optparse
+import os
+import pickle
+import re
+import sys
+import tempfile
+import time
+
+import build_chrome_browser
+import cros_login
+import lock_machine
+import run_tests
+from utils import command_executer
+from utils import logger
+from utils import misc
+
+
+class CyclerProfiler:
+ REMOTE_TMP_DIR = "/tmp"
+
+ def __init__(self, chromeos_root, board, cycler, profile_dir, remote):
+ self._chromeos_root = chromeos_root
+ self._cycler = cycler
+ self._profile_dir = profile_dir
+ self._remote = remote
+ self._board = board
+ self._ce = command_executer.GetCommandExecuter()
+ self._l = logger.GetLogger()
+
+ self._gcov_prefix = os.path.join(self.REMOTE_TMP_DIR,
+ self._GetProfileDir())
+
+ def _GetProfileDir(self):
+ return misc.GetCtargetFromBoard(self._board, self._chromeos_root)
+
+ def _CopyTestData(self):
+ page_cycler_dir = os.path.join(self._chromeos_root,
+ "distfiles",
+ "target",
+ "chrome-src-internal",
+ "src",
+ "data",
+ "page_cycler")
+ if not os.path.isdir(page_cycler_dir):
+ raise Exception("Page cycler dir %s not found!" % page_cycler_dir)
+ self._ce.CopyFiles(page_cycler_dir,
+ os.path.join(self.REMOTE_TMP_DIR, "page_cycler"),
+ dest_machine=self._remote,
+ chromeos_root=self._chromeos_root,
+ recursive=True,
+ dest_cros=True)
+
+ def _PrepareTestData(self):
+ # chmod files so everyone can read them.
+ command = ("cd %s && find page_cycler -type f | xargs chmod a+r" %
+ self.REMOTE_TMP_DIR)
+ self._ce.CrosRunCommand(command, chromeos_root=self._chromeos_root,
+ machine=self._remote)
+ command = ("cd %s && find page_cycler -type d | xargs chmod a+rx" %
+ self.REMOTE_TMP_DIR)
+ self._ce.CrosRunCommand(command, chromeos_root=self._chromeos_root,
+ machine=self._remote)
+
+ def _CopyProfileToHost(self):
+ dest_dir = os.path.join(self._profile_dir,
+ os.path.basename(self._gcov_prefix))
+ # First remove the dir if it exists already
+ if os.path.exists(dest_dir):
+ command = "rm -rf %s" % dest_dir
+ self._ce.RunCommand(command)
+
+ # Strip out the initial prefix for the Chrome directory before doing the
+ # copy.
+ chrome_dir_prefix = misc.GetChromeSrcDir()
+
+ command = "mkdir -p %s" % dest_dir
+ self._ce.RunCommand(command)
+ self._ce.CopyFiles(self._gcov_prefix,
+ dest_dir,
+ src_machine=self._remote,
+ chromeos_root=self._chromeos_root,
+ recursive=True,
+ src_cros=True)
+
+ def _RemoveRemoteProfileDir(self):
+ command = "rm -rf %s" % self._gcov_prefix
+ self._ce.CrosRunCommand(command, chromeos_root=self._chromeos_root,
+ machine=self._remote)
+
+ def _LaunchCycler(self, cycler):
+ command = ("DISPLAY=:0 "
+ "XAUTHORITY=/home/chronos/.Xauthority "
+ "GCOV_PREFIX=%s "
+ "GCOV_PREFIX_STRIP=3 "
+ "/opt/google/chrome/chrome "
+ "--no-sandbox "
+ "--renderer-clean-exit "
+ "--user-data-dir=$(mktemp -d) "
+ "--url \"file:///%s/page_cycler/%s/start.html?iterations=10&auto=1\" "
+ "--enable-file-cookies "
+ "--no-first-run "
+ "--js-flags=expose_gc &" %
+ (self._gcov_prefix,
+ self.REMOTE_TMP_DIR,
+ cycler))
+
+ self._ce.CrosRunCommand(command, chromeos_root=self._chromeos_root,
+ machine=self._remote,
+ command_timeout=60)
+
+ def _PkillChrome(self, signal="9"):
+ command = "pkill -%s chrome" % signal
+ self._ce.CrosRunCommand(command, chromeos_root=self._chromeos_root,
+ machine=self._remote)
+
+ def DoProfile(self):
+ # Copy the page cycler data to the remote
+ self._CopyTestData()
+ self._PrepareTestData()
+ self._RemoveRemoteProfileDir()
+
+ for cycler in self._cycler.split(","):
+ self._ProfileOneCycler(cycler)
+
+ # Copy the profile back
+ self._CopyProfileToHost()
+
+ def _ProfileOneCycler(self, cycler):
+ # With aura, all that's needed is a stop/start ui.
+ self._PkillChrome()
+ cros_login.RestartUI(self._remote, self._chromeos_root, login=False)
+ # Run the cycler
+ self._LaunchCycler(cycler)
+ self._PkillChrome(signal="INT")
+ # Let libgcov dump the profile.
+ # TODO(asharif): There is a race condition here. Fix it later.
+ time.sleep(30)
+
+
+def Main(argv):
+ """The main function."""
+ # Common initializations
+### command_executer.InitCommandExecuter(True)
+ command_executer.InitCommandExecuter()
+ l = logger.GetLogger()
+ ce = command_executer.GetCommandExecuter()
+ parser = optparse.OptionParser()
+ parser.add_option("--cycler",
+ dest="cycler",
+ default="alexa_us",
+ help=("Comma-separated cyclers to profile. "
+                          "Example: alexa_us,moz,moz2. "
+                          "Use 'all' to profile all cyclers."))
+ parser.add_option("--chromeos_root",
+ dest="chromeos_root",
+ default="../../",
+                    help="The chromeos root directory.")
+ parser.add_option("--board",
+ dest="board",
+ default="x86-zgb",
+ help="The target board.")
+ parser.add_option("--remote",
+ dest="remote",
+ help=("The remote chromeos machine that"
+ " has the profile image."))
+ parser.add_option("--profile_dir",
+ dest="profile_dir",
+ default="profile_dir",
+ help="Store profiles in this directory.")
+
+ options, _ = parser.parse_args(argv)
+
+ all_cyclers = ["alexa_us", "bloat", "dhtml", "dom",
+ "intl1", "intl2", "morejs", "morejsnp",
+ "moz", "moz2"]
+
+ if options.cycler == "all":
+ options.cycler = ",".join(all_cyclers)
+
+ try:
+ cp = CyclerProfiler(options.chromeos_root,
+ options.board,
+ options.cycler,
+ options.profile_dir,
+ options.remote)
+ cp.DoProfile()
+ retval = 0
+ except Exception as e:
+ retval = 1
+ print e
+ finally:
+ print "Exiting..."
+ return retval
+
+
+if __name__ == "__main__":
+ retval = Main(sys.argv)
+ sys.exit(retval)
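
A typical invocation of the new profiler script is sketched below, with placeholder values for the remote device and paths (none of these come from the change itself). It assumes the remote is already running a Chrome image built with -fprofile-generate.

import subprocess

# Placeholder host, board and paths; the flags mirror the options defined above.
subprocess.check_call([
    "python", "fdo_scripts/profile_cycler.py",
    "--chromeos_root=/home/me/chromeos",
    "--board=x86-zgb",
    "--remote=172.16.0.42",
    "--cycler=alexa_us,moz",      # or --cycler=all for every known cycler
    "--profile_dir=profile_dir",  # where the .gcda tree is copied back to
])
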
diff --git a/fdo_scripts/summarize_hot_blocks.py b/fdo_scripts/summarize_hot_blocks.py
new file mode 100644
index 00000000..c19f5477
--- /dev/null
+++ b/fdo_scripts/summarize_hot_blocks.py
@@ -0,0 +1,184 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Summarize hottest basic blocks found while doing a ChromeOS FDO build.
+
+Here is an example execution:
+
+ summarize_hot_blocks.py
+ --data_dir=~/chromeos/chroot/var/cache/chromeos-chrome/ --cutoff=10000
+ --output_dir=/home/x/y
+
+With the cutoff, it will ignore any basic blocks that have a count less
+than what is specified (in this example, 10000).
+The script looks inside the given directory (typically the directory where
+the object files are generated) for files with *.profile and *.optimized
+suffixes. To get these, the following flags were added to the compiler
+invocation within vanilla_vs_fdo.py in the profile-use phase.
+
+ "-fdump-tree-optimized-blocks-lineno "
+ "-fdump-ipa-profile-blocks-lineno "
+
+Here is an example of the *.profile and *.optimized file contents:
+
+# BLOCK 7 freq:3901 count:60342, starting at line 92
+# PRED: 6 [39.0%] count:60342 (true,exec)
+ [url_canon_internal.cc : 92:28] MEM[(const char * *)source_6(D) + 16B] = D.28080_17;
+ [url_canon_internal.cc : 93:41] MEM[(struct Component *)parsed_4(D) + 16B] = MEM[(const struct Component &)repl_1(D) + 80];
+# SUCC: 8 [100.0%] count:60342 (fallthru,exec)
+# BLOCK 8 freq:10000 count:154667, starting at line 321
+# PRED: 7 [100.0%] count:60342 (fallthru,exec) 6 [61.0%] count:94325 (false,exec)
+ [url_canon_internal.cc : 321:51] # DEBUG D#10 => [googleurl/src/url_canon_internal.cc : 321] &parsed_4(D)->host
+
+This script finds the blocks with the highest counts and shows the first line
+of each block, so that it is easy to identify the origin of each basic block.
+
+"""
+
+__author__ = "llozano@google.com (Luis Lozano)"
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from utils import command_executer
+
+
+# Given a line, check if it has a block count and return it.
+# Return -1 if there is no match
+def GetBlockCount(line):
+ match_obj = re.match(".*# BLOCK \d+ .*count:(\d+)", line)
+ if match_obj:
+ return int(match_obj.group(1))
+ else:
+ return -1
+
+
+class Collector(object):
+ def __init__(self, data_dir, cutoff, output_dir, tempdir):
+ self._data_dir = data_dir
+ self._cutoff = cutoff
+ self._output_dir = output_dir
+ self._tempdir = tempdir
+ self._ce = command_executer.GetCommandExecuter()
+
+ def CollectFileList(self, file_exp, list_file):
+ command = ("find %s -type f -name '%s' > %s" %
+ (self._data_dir, file_exp,
+ os.path.join(self._tempdir, list_file)))
+ ret = self._ce.RunCommand(command)
+ if ret:
+ raise Exception("Failed: %s" % command)
+
+ def SummarizeLines(self, data_file):
+ sum_lines = []
+ search_lno = False
+ for line in data_file:
+ count = GetBlockCount(line)
+ if count != -1:
+ if count >= self._cutoff:
+ search_lno = True
+ sum_line = line.strip()
+ sum_count = count
+ # look for a line that starts with line number information
+ elif search_lno and re.match("^\s*\[.*: \d*:\d*]", line):
+ search_lno = False
+ sum_lines.append("%d:%s: %s %s" %
+ (sum_count, data_file.name, sum_line, line))
+ return sum_lines
+
+ # Look for blocks in the data file that have a count larger than the cutoff
+ # and generate a sorted summary file of the hottest blocks.
+ def SummarizeFile(self, data_file, sum_file):
+ with open(data_file, "r") as f:
+ sum_lines = self.SummarizeLines(f)
+
+ # sort reverse the list in place by the block count number
+ sum_lines.sort(key=GetBlockCount, reverse=True)
+
+ with open(sum_file, "w") as sf:
+ sf.write("".join(sum_lines))
+
+    print "Generated file summary: ", sum_file
+
+ # Find hottest blocks in the list of files, generate a sorted summary for
+ # each file and then do a sorted merge of all the summaries.
+ def SummarizeList(self, list_file, summary_file):
+ with open(os.path.join(self._tempdir, list_file)) as f:
+ sort_list = []
+ for file_name in f:
+ file_name = file_name.strip()
+ sum_file = "%s.sum" % file_name
+ sort_list.append("%s%s" % (sum_file, chr(0)))
+ self.SummarizeFile(file_name, sum_file)
+
+ tmp_list_file = os.path.join(self._tempdir, "file_list.dat")
+ with open(tmp_list_file, "w") as file_list_file:
+ for x in sort_list:
+ file_list_file.write(x)
+
+ merge_command = ("sort -nr -t: -k1 --merge --files0-from=%s > %s " %
+ (tmp_list_file, summary_file))
+
+ ret = self._ce.RunCommand(merge_command)
+ if ret:
+ raise Exception("Failed: %s" % merge_command)
+ print "Generated general summary: ", summary_file
+
+ def SummarizePreOptimized(self, summary_file):
+ self.CollectFileList("*.profile", "chrome.profile.list")
+ self.SummarizeList("chrome.profile.list",
+ os.path.join(self._output_dir, summary_file))
+
+ def SummarizeOptimized(self, summary_file):
+ self.CollectFileList("*.optimized", "chrome.optimized.list")
+ self.SummarizeList("chrome.optimized.list",
+ os.path.join(self._output_dir, summary_file))
+
+
+def Main(argv):
+ command_executer.InitCommandExecuter()
+ usage = ("usage: %prog --data_dir=<dir> --cutoff=<value> "
+ "--output_dir=<dir> [--keep_tmp]")
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option("--data_dir",
+ dest="data_dir",
+ help=("directory where the FDO (*.profile and "
+ "*.optimized) files are located"))
+ parser.add_option("--cutoff",
+ dest="cutoff",
+ help="Minimum count to consider for each basic block")
+ parser.add_option("--output_dir",
+ dest="output_dir",
+                    help=("directory where summary data will be generated "
+ "(pre_optimized.txt, optimized.txt)"))
+ parser.add_option("--keep_tmp",
+ action="store_true",
+ dest="keep_tmp",
+ default=False,
+                    help=("Keep directory with temporary files "
+ "(for debugging purposes)"))
+ options = parser.parse_args(argv)[0]
+ if not all((options.data_dir, options.cutoff, options.output_dir)):
+ parser.print_help()
+ sys.exit(1)
+
+ tempdir = tempfile.mkdtemp()
+
+ co = Collector(options.data_dir, int(options.cutoff), options.output_dir,
+ tempdir)
+ co.SummarizePreOptimized("pre_optimized.txt")
+ co.SummarizeOptimized("optimized.txt")
+
+ if not options.keep_tmp:
+ shutil.rmtree(tempdir, ignore_errors=True)
+
+ return 0
+
+if __name__ == "__main__":
+ retval = Main(sys.argv)
+ sys.exit(retval)
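
The summarizer keys everything off GetBlockCount's regular expression. The following self-contained check runs that expression against the sample dump lines quoted in the module docstring above; the regex is copied from the script, the rest is illustrative.

import re

def get_block_count(line):
    # Mirrors GetBlockCount() above: return the block's execution count,
    # or -1 when the line is not a "# BLOCK" header.
    match = re.match(r".*# BLOCK \d+ .*count:(\d+)", line)
    return int(match.group(1)) if match else -1

assert get_block_count(
    "# BLOCK 8 freq:10000 count:154667, starting at line 321") == 154667
assert get_block_count(
    "  [url_canon_internal.cc : 321:51] # DEBUG D#10 => ...") == -1
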
diff --git a/fdo_scripts/vanilla_vs_fdo.py b/fdo_scripts/vanilla_vs_fdo.py
new file mode 100644
index 00000000..3cf5f4b6
--- /dev/null
+++ b/fdo_scripts/vanilla_vs_fdo.py
@@ -0,0 +1,334 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Script to build chrome with FDO and compare performance against no FDO."""
+
+import getpass
+import optparse
+import os
+import sys
+
+import image_chromeos
+import setup_chromeos
+from utils import command_executer
+from utils import misc
+from utils import logger
+
+
+class Patcher(object):
+ def __init__(self, dir_to_patch, patch_file):
+ self._dir_to_patch = dir_to_patch
+ self._patch_file = patch_file
+ self._base_patch_command = "patch -p0 %%s < %s" % patch_file
+ self._ce = command_executer.GetCommandExecuter()
+
+ def _RunPatchCommand(self, args):
+ patch_command = self._base_patch_command % args
+ command = ("cd %s && %s" % (self._dir_to_patch,
+ patch_command))
+ return self._ce.RunCommand(command)
+
+ def _ApplyPatch(self, args):
+ full_args = "%s --dry-run" % args
+ ret = self._RunPatchCommand(full_args)
+ if ret:
+ raise Exception("Patch dry run failed!")
+ ret = self._RunPatchCommand(args)
+ if ret:
+ raise Exception("Patch application failed!")
+
+ def __enter__(self):
+ self._ApplyPatch("")
+
+ def __exit__(self, type, value, traceback):
+ self._ApplyPatch("-R")
+
+
+class FDOComparator(object):
+ def __init__(self, board, remotes, ebuild_version, plus_pgo, minus_pgo,
+ update_pgo, chromeos_root):
+ self._board = board
+ self._remotes = remotes
+ self._ebuild_version = ebuild_version
+ self._remote = remotes.split(",")[0]
+ self._chromeos_root = chromeos_root
+ self._profile_dir = "profile_dir"
+ self._profile_path = os.path.join(self._chromeos_root,
+ "src",
+ "scripts",
+ os.path.basename(self._profile_dir))
+ self._plus_pgo = plus_pgo
+ self._minus_pgo = minus_pgo
+ self._update_pgo = update_pgo
+
+ self._ce = command_executer.GetCommandExecuter()
+ self._l = logger.GetLogger()
+
+ def _CheckoutChromeOS(self):
+ if not os.path.exists(self._chromeos_root):
+ setup_chromeos_args = [setup_chromeos.__file__,
+ "--dir=%s" % self._chromeos_root,
+ "--minilayout"]
+ setup_chromeos.Main(setup_chromeos_args)
+
+ def _BuildChromeOSUsingBinaries(self):
+ image_dir = misc.GetImageDir(self._chromeos_root, self._board)
+ command = "equery-%s l chromeos" % self._board
+ ret = self._ce.ChrootRunCommand(self._chromeos_root, command)
+ if ret:
+ command = misc.GetSetupBoardCommand(self._board,
+ usepkg=True)
+ ret = self._ce.ChrootRunCommand(self._chromeos_root,
+ command)
+ if ret:
+ raise Exception("Couldn't run setup_board!")
+ command = misc.GetBuildPackagesCommand(self._board,
+ True)
+ ret = self._ce.ChrootRunCommand(self._chromeos_root,
+ command)
+ if ret:
+ raise Exception("Couldn't run build_packages!")
+
+ def _ReportMismatches(self, build_log):
+ mismatch_signature = "-Wcoverage-mismatch"
+ mismatches = build_log.count(mismatch_signature)
+ self._l.LogOutput("Total mismatches: %s" % mismatches)
+ stale_files = set([])
+ for line in build_log.splitlines():
+ if mismatch_signature in line:
+ filename = line.split(":")[0]
+ stale_files.add(filename)
+ self._l.LogOutput("Total stale files: %s" % len(stale_files))
+
+ def _BuildChromeAndImage(self, ebuild_version="", env_dict={}, cflags="",
+ cxxflags="", ldflags="", label="",
+ build_image_args=""):
+ env_string = misc.GetEnvStringFromDict(env_dict)
+ if not label:
+ label = " ".join([env_string,
+ cflags,
+ cxxflags,
+ ldflags,
+ ebuild_version])
+ label = label.strip()
+ label = misc.GetFilenameFromString(label)
+ if not misc.DoesLabelExist(self._chromeos_root, self._board, label):
+ build_chrome_browser_args = ["--clean",
+ "--chromeos_root=%s" % self._chromeos_root,
+ "--board=%s" % self._board,
+ "--env=%r" % env_string,
+ "--cflags=%r" % cflags,
+ "--cxxflags=%r" % cxxflags,
+ "--ldflags=%r" % ldflags,
+ "--ebuild_version=%s" % ebuild_version,
+ "--build_image_args=%s" % build_image_args]
+
+ build_chrome_browser = os.path.join(os.path.dirname(__file__),
+ "..",
+ "build_chrome_browser.py")
+ command = "python %s %s" % (build_chrome_browser,
+ " ".join(build_chrome_browser_args))
+ ret, out, err = self._ce.RunCommand(command,
+ return_output=True)
+ if "-fprofile-use" in cxxflags:
+ self._ReportMismatches(out)
+
+ if ret:
+ raise Exception("Couldn't build chrome browser!")
+ misc.LabelLatestImage(self._chromeos_root, self._board, label)
+ return label
+
+ def _TestLabels(self, labels):
+ experiment_file = "pgo_experiment.txt"
+ experiment_header = """
+ board: %s
+ remote: %s
+ """ % (self._board, self._remotes)
+ experiment_tests = """
+ benchmark: desktopui_PyAutoPerfTests {
+ iterations: 1
+ }
+ """
+ with open(experiment_file, "w") as f:
+ print >>f, experiment_header
+ print >>f, experiment_tests
+ for label in labels:
+ # TODO(asharif): Fix crosperf so it accepts labels with symbols
+ crosperf_label = label
+ crosperf_label = crosperf_label.replace("-", "minus")
+ crosperf_label = crosperf_label.replace("+", "plus")
+ experiment_image = """
+ %s {
+ chromeos_image: %s
+ }
+ """ % (crosperf_label,
+ os.path.join(misc.GetImageDir(self._chromeos_root, self._board),
+ label,
+ "chromiumos_test_image.bin"))
+ print >>f, experiment_image
+ crosperf = os.path.join(os.path.dirname(__file__),
+ "..",
+ "crosperf",
+ "crosperf")
+ command = "%s %s" % (crosperf, experiment_file)
+ ret = self._ce.RunCommand(command)
+ if ret:
+ raise Exception("Couldn't run crosperf!")
+
+ def _ImageRemote(self, label):
+ image_path = os.path.join(misc.GetImageDir(self._chromeos_root,
+ self._board),
+ label,
+ "chromiumos_test_image.bin")
+ image_chromeos_args = [image_chromeos.__file__,
+ "--chromeos_root=%s" % self._chromeos_root,
+ "--image=%s" % image_path,
+ "--remote=%s" % self._remote,
+ "--board=%s" % self._board]
+ image_chromeos.Main(image_chromeos_args)
+
+ def _ProfileRemote(self):
+ profile_cycler = os.path.join(os.path.dirname(__file__),
+ "profile_cycler.py")
+ profile_cycler_args = ["--chromeos_root=%s" % self._chromeos_root,
+ "--cycler=all",
+ "--board=%s" % self._board,
+ "--profile_dir=%s" % self._profile_path,
+ "--remote=%s" % self._remote]
+ command = "python %s %s" % (profile_cycler, " ".join(profile_cycler_args))
+ ret = self._ce.RunCommand(command)
+ if ret:
+ raise Exception("Couldn't profile cycler!")
+
+ def _BuildGenerateImage(self):
+ # TODO(asharif): add cflags as well.
+ labels_list = ["fprofile-generate", self._ebuild_version]
+ label = "_".join(labels_list)
+ generate_label = self._BuildChromeAndImage(
+ env_dict={"USE": "chrome_internal -pgo pgo_generate"},
+ label=label,
+ ebuild_version=self._ebuild_version,
+ build_image_args="--rootfs_boost_size=400")
+ return generate_label
+
+ def _BuildUseImage(self):
+ ctarget = misc.GetCtargetFromBoard(self._board, self._chromeos_root)
+ chroot_profile_dir = os.path.join("/home/%s/trunk" % getpass.getuser(),
+ "src",
+ "scripts",
+ self._profile_dir,
+ ctarget)
+ cflags = ("-fprofile-use "
+ "-fprofile-correction "
+ "-Wno-error "
+ "-fdump-tree-optimized-blocks-lineno "
+ "-fdump-ipa-profile-blocks-lineno "
+ "-fno-vpt "
+ "-fprofile-dir=%s" %
+ chroot_profile_dir)
+ labels_list = ["updated_pgo", self._ebuild_version]
+ label = "_".join(labels_list)
+ pgo_use_label = self._BuildChromeAndImage(
+ env_dict={"USE": "chrome_internal -pgo"},
+ cflags=cflags,
+ cxxflags=cflags,
+ ldflags=cflags,
+ label=label,
+ ebuild_version=self._ebuild_version)
+ return pgo_use_label
+
+ def DoAll(self):
+ self._CheckoutChromeOS()
+ self._BuildChromeOSUsingBinaries()
+ labels = []
+
+ if self._minus_pgo:
+ minus_pgo = self._BuildChromeAndImage(env_dict={"USE": "chrome_internal -pgo"},
+ ebuild_version=self._ebuild_version)
+ labels.append(minus_pgo)
+ if self._plus_pgo:
+ plus_pgo = self._BuildChromeAndImage(env_dict={"USE": "chrome_internal pgo"},
+ ebuild_version=self._ebuild_version)
+ labels.append(plus_pgo)
+
+ if self._update_pgo:
+ if not os.path.exists(self._profile_path):
+ # Build Chrome with -fprofile-generate
+ generate_label = self._BuildGenerateImage()
+ # Image to the remote box.
+ self._ImageRemote(generate_label)
+ # Profile it using all page cyclers.
+ self._ProfileRemote()
+
+ # Use the profile directory to rebuild it.
+ updated_pgo_label = self._BuildUseImage()
+ labels.append(updated_pgo_label)
+
+ # Run crosperf on all images now.
+ self._TestLabels(labels)
+ return 0
+
+
+def Main(argv):
+ """The main function."""
+ # Common initializations
+### command_executer.InitCommandExecuter(True)
+ command_executer.InitCommandExecuter()
+ parser = optparse.OptionParser()
+ parser.add_option("--remote",
+ dest="remote",
+ help="Remote machines to run tests on.")
+ parser.add_option("--board",
+ dest="board",
+ default="x86-zgb",
+ help="The target board.")
+ parser.add_option("--ebuild_version",
+ dest="ebuild_version",
+ default="",
+ help="The Chrome ebuild version to use.")
+ parser.add_option("--plus_pgo",
+ dest="plus_pgo",
+ action="store_true",
+ default=False,
+ help="Build USE=+pgo.")
+ parser.add_option("--minus_pgo",
+ dest="minus_pgo",
+ action="store_true",
+ default=False,
+ help="Build USE=-pgo.")
+ parser.add_option("--update_pgo",
+ dest="update_pgo",
+ action="store_true",
+ default=False,
+ help="Update pgo and build Chrome with the update.")
+ parser.add_option("--chromeos_root",
+ dest="chromeos_root",
+ default=False,
+ help="The chromeos root directory")
+ options, _ = parser.parse_args(argv)
+ if not options.board:
+ print "Please give a board."
+ return 1
+ if not options.remote:
+ print "Please give at least one remote machine."
+ return 1
+ if not options.chromeos_root:
+ print "Please provide the chromeos root directory."
+ return 1
+ if not any((options.minus_pgo, options.plus_pgo, options.update_pgo)):
+ print "Please provide at least one build option."
+ return 1
+ fc = FDOComparator(options.board,
+ options.remote,
+ options.ebuild_version,
+ options.plus_pgo,
+ options.minus_pgo,
+ options.update_pgo,
+ os.path.expanduser(options.chromeos_root))
+ return fc.DoAll()
+
+
+if __name__ == "__main__":
+ retval = Main(sys.argv)
+ sys.exit(retval)
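
Finally, an illustrative end-to-end run of vanilla_vs_fdo.py, again with placeholder remotes and paths. With --update_pgo, the script images the first remote with a -fprofile-generate build, profiles all page cyclers on it, rebuilds Chrome with the fresh profile, and then hands every built image to crosperf.

import subprocess

# Placeholder addresses and paths; --minus_pgo/--plus_pgo/--update_pgo pick
# which of the three Chrome builds get produced and compared.
subprocess.check_call([
    "python", "fdo_scripts/vanilla_vs_fdo.py",
    "--board=x86-zgb",
    "--remote=172.16.0.42,172.16.0.43",  # first remote is used for profiling
    "--chromeos_root=/home/me/chromeos",
    "--minus_pgo",    # baseline:  USE="chrome_internal -pgo"
    "--plus_pgo",     # prebuilt:  USE="chrome_internal pgo"
    "--update_pgo",   # regenerate the profile and rebuild with it
])
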