aboutsummaryrefslogtreecommitdiff
path: root/cwp/interpreter
diff options
context:
space:
mode:
authorLuis Lozano <llozano@chromium.org>2013-03-15 14:44:13 -0700
committerChromeBot <chrome-bot@google.com>2013-03-15 15:51:37 -0700
commitf81680c018729fd4499e1e200d04b48c4b90127c (patch)
tree940608da8374604b82edfdb2d7df55d065f05d4c /cwp/interpreter
parent2296ee0b914aba5bba07becab4ff68884ce9b8a5 (diff)
downloadtoolchain-utils-f81680c018729fd4499e1e200d04b48c4b90127c.tar.gz
Cleaned up directory after copy of tools from perforce directory
Got rid of stale copies of some tools like "crosperf" and moved all files under v14 directory (that came from perforce) into the top directory. BUG=None TEST=None Change-Id: I408d17a36ceb00e74db71403d2351fd466a14f8e Reviewed-on: https://gerrit-int.chromium.org/33887 Tested-by: Luis Lozano <llozano@chromium.org> Reviewed-by: Yunlian Jiang <yunlian@google.com> Commit-Queue: Luis Lozano <llozano@chromium.org>
Diffstat (limited to 'cwp/interpreter')
-rw-r--r--cwp/interpreter/app_engine_pull.py245
-rw-r--r--cwp/interpreter/symbolizer.py131
2 files changed, 376 insertions, 0 deletions
diff --git a/cwp/interpreter/app_engine_pull.py b/cwp/interpreter/app_engine_pull.py
new file mode 100644
index 00000000..65f67940
--- /dev/null
+++ b/cwp/interpreter/app_engine_pull.py
@@ -0,0 +1,245 @@
+#!/usr/bin/python
+# Copyright 2012 Google Inc. All Rights Reserved.
+# Author: mrdmnd@ (Matt Redmond)
+"""A client to pull data from Bartlett.
+
+Inspired by //depot/google3/experimental/mobile_gwp/database/app_engine_pull.py
+
+The server houses perf.data.gz, board, chrome version for each upload.
+This script first authenticates with a proper @google.com account, then
+downloads a sample (if it's not already cached) and unzips perf.data
+
+ Authenticate(): Gets login info and returns an auth token
+ DownloadSamples(): Download and unzip samples.
+ _GetServePage(): Pulls /serve page from the app engine server
+ _DownloadSampleFromServer(): Downloads a local compressed copy of a sample
+ _UncompressSample(): Decompresses a sample, deleting the compressed version.
+"""
+import cookielib
+import getpass
+import gzip
+import optparse
+import os
+import urllib
+import urllib2
+
+SERVER_NAME = "http://chromeoswideprofiling.appspot.com"
+APP_NAME = "chromeoswideprofiling"
+DELIMITER = "~"
+
+
def Authenticate(server_name):
  """Gets credentials from the user and attempts to retrieve an auth token.

  TODO: Accept OAuth2 instead of password.
  Args:
    server_name: (string) URL that the app engine code is living on.
  Returns:
    authtoken: (string) The authorization token that can be used
               to grab other pages, or None if login failed.
  """
  if server_name.endswith("/"):
    server_name = server_name.rstrip("/")
  # Grab username and password from user through stdin.
  username = raw_input("Email (must be @google.com account): ")
  password = getpass.getpass("Password: ")
  # Use a cookie to authenticate with GAE.
  cookiejar = cookielib.LWPCookieJar()
  opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))
  urllib2.install_opener(opener)
  # Get an AuthToken from the Google accounts service.
  auth_uri = "https://www.google.com/accounts/ClientLogin"
  authreq_data = urllib.urlencode({"Email": username,
                                   "Passwd": password,
                                   "service": "ah",
                                   "source": APP_NAME,
                                   "accountType": "HOSTED_OR_GOOGLE"})
  auth_req = urllib2.Request(auth_uri, data=authreq_data)
  try:
    auth_resp = urllib2.urlopen(auth_req)
  except urllib2.URLError:
    # print() function form parses under both Python 2 and Python 3.
    print("Error logging in to Google accounts service.")
    return None
  body = auth_resp.read()
  # The auth response contains several newline-delimited key=value fields;
  # we only care about the value after "Auth=".
  auth_resp_dict = dict(x.split("=") for x in body.split("\n") if x)
  authtoken = auth_resp_dict["Auth"]
  return authtoken
+
+
def DownloadSamples(server_name, authtoken, output_dir, start, stop):
  """Downloads every sample in [start, stop) and writes the unzipped
  versions into the output directory.

  Args:
    server_name: (string) URL that the app engine code is living on.
    authtoken: (string) Authorization token.
    output_dir: (string) Filepath to write output to.
    start: (int) Index to start downloading from, starting at top.
    stop: (int) Index to stop downloading, non-inclusive. -1 for end.
  Returns:
    None
  """
  if server_name.endswith("/"):
    server_name = server_name.rstrip("/")

  serve_page_string = _GetServePage(server_name, authtoken)
  if serve_page_string is None:
    print("Error getting /serve page.")
    return

  # The /serve page lists one sample record per "</br>" separator.
  sample_list = serve_page_string.split("</br>")
  print("Will download:")
  sample_list_subset = sample_list[start:stop]
  for sample in sample_list_subset:
    print(sample)
  for sample in sample_list_subset:
    assert sample, "Sample should be valid."
    # Each record is DELIMITER-joined: key ~ time ~ md5 ~ board ~ version.
    sample_info = [s.strip() for s in sample.split(DELIMITER)]
    key = sample_info[0]
    time = sample_info[1]
    time = time.replace(" ", "_")  # No space between date and time.
    # sample_md5 = sample_info[2] is currently unused.
    board = sample_info[3]
    version = sample_info[4]

    # Put a compressed copy of the sample in the output directory, then
    # decompress it in place.
    _DownloadSampleFromServer(server_name, authtoken, key, time, board,
                              version, output_dir)
    _UncompressSample(key, time, board, version, output_dir)
+
+
def _BuildFilenameFromParams(key, time, board, version):
  """Constructs the canonical on-disk name for a sample.

  Args:
    key: (string) Key indexing our sample in the datastore.
    time: (string) Date that the sample was uploaded.
    board: (string) Board that the sample was taken on.
    version: (string) Version string from /etc/lsb-release
  Returns:
    filename (string)
  """
  return DELIMITER.join((key, time, board, version))
+
+
def _DownloadSampleFromServer(server_name, authtoken, key, time, board,
                              version, output_dir):
  """Downloads one compressed sample (<filename>.gz) into output_dir.

  Args:
    server_name: (string) URL that the app engine code is living on.
    authtoken: (string) Authorization token.
    key: (string) Key indexing our sample in the datastore
    time: (string) Date that the sample was uploaded.
    board: (string) Board that the sample was taken on.
    version: (string) Version string from /etc/lsb-release
    output_dir: (string) Filepath to write to output to.
  Returns:
    None
  """
  filename = _BuildFilenameFromParams(key, time, board, version)
  compressed_filename = filename+".gz"

  if os.path.exists(os.path.join(output_dir, filename)):
    print("Already downloaded %s, skipping." % filename)
    return

  # GAE login handshake: hit /_ah/login with the auth token and a
  # "continue" URL pointing at the sample we actually want.
  serv_uri = server_name + "/serve/" + key
  serv_args = {"continue": serv_uri, "auth": authtoken}
  full_serv_uri = server_name + "/_ah/login?%s" % urllib.urlencode(serv_args)
  serv_req = urllib2.Request(full_serv_uri)
  serv_resp = urllib2.urlopen(serv_req)
  # Binary mode ("wb", not "w+"): the payload is gzip data. The context
  # manager guarantees the handle is closed even if the read/write raises.
  with open(os.path.join(output_dir, compressed_filename), "wb") as f:
    f.write(serv_resp.read())
+
+
def _UncompressSample(key, time, board, version, output_dir):
  """Uncompresses a given sample.gz file and deletes the compressed version.

  Args:
    key: (string) Sample key to uncompress.
    time: (string) Date that the sample was uploaded.
    board: (string) Board that the sample was taken on.
    version: (string) Version string from /etc/lsb-release
    output_dir: (string) Filepath to find sample key in.
  Returns:
    None
  """
  filename = _BuildFilenameFromParams(key, time, board, version)
  compressed_filename = filename+".gz"

  if os.path.exists(os.path.join(output_dir, filename)):
    print("Already decompressed %s, skipping." % filename)
    return

  # Context managers close both handles even if the copy raises, so a
  # failed decompression cannot leak file descriptors.
  with gzip.open(os.path.join(output_dir, compressed_filename), "rb") as in_file:
    with open(os.path.join(output_dir, filename), "wb") as out_file:
      out_file.write(in_file.read())
  os.remove(os.path.join(output_dir, compressed_filename))
+
+
def _DeleteSampleFromServer(server_name, authtoken, key):
  """Deletes a sample from the datastore by opening its /del page.

  Args:
    server_name: (string) URL that the app engine code is living on.
    authtoken: (string) Authorization token.
    key: (string) Key to delete.
  Returns:
    None
  """
  delete_uri = server_name + "/del/" + key
  login_args = urllib.urlencode({"continue": delete_uri, "auth": authtoken})
  request = urllib2.Request("%s/_ah/login?%s" % (server_name, login_args))
  urllib2.urlopen(request)
+
+
def _GetServePage(server_name, authtoken):
  """Fetches the /serve page, which lists all sample keys.

  Args:
    server_name: (string) URL the app engine code is living on.
    authtoken: (string) Authorization token.
  Returns:
    The text of the /serve page (including HTML tags)
  """
  continue_uri = server_name + "/serve"
  login_args = urllib.urlencode({"continue": continue_uri, "auth": authtoken})
  request = urllib2.Request(server_name + "/_ah/login?%s" % login_args)
  response = urllib2.urlopen(request)
  return response.read()
+
+
def main():
  """Parses flags, authenticates, and downloads all requested samples.

  Returns:
    0 on success, 1 on any flag or authentication error.
  """
  parser = optparse.OptionParser()
  parser.add_option("--output_dir", dest="output_dir", action="store",
                    help="Path to output perf data files.")
  # type="int" is required: without it, user-supplied --start/--stop values
  # arrive as strings, and the list slice in DownloadSamples would raise
  # TypeError (the defaults 0/-1 were already ints, masking the bug).
  parser.add_option("--start", dest="start_ind", action="store", type="int",
                    default=0, help="Start index.")
  parser.add_option("--stop", dest="stop_ind", action="store", type="int",
                    default=-1, help="Stop index.")
  options = parser.parse_args()[0]
  if not options.output_dir:
    print("Must specify --output_dir.")
    return 1
  if not os.path.exists(options.output_dir):
    print("Specified output_dir does not exist.")
    return 1

  authtoken = Authenticate(SERVER_NAME)
  if not authtoken:
    print("Could not obtain authtoken, exiting.")
    return 1
  DownloadSamples(SERVER_NAME, authtoken, options.output_dir,
                  options.start_ind, options.stop_ind)
  print("Downloaded samples.")
  return 0
+
# Script entry point: propagate main()'s integer status to the shell.
if __name__ == "__main__":
  exit(main())
diff --git a/cwp/interpreter/symbolizer.py b/cwp/interpreter/symbolizer.py
new file mode 100644
index 00000000..3e589538
--- /dev/null
+++ b/cwp/interpreter/symbolizer.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+# Copyright 2012 Google Inc. All Rights Reserved.
+"""A script that symbolizes perf.data files."""
+import optparse
+import os
+import shutil
+from subprocess import call
+from subprocess import PIPE
+from subprocess import Popen
+from utils import misc
+
+
# gsutil command fetching the debug-symbol tarball from the ChromeOS image
# archive; %s slots are (board, version, destination path).
GSUTIL_CMD = "gsutil cp gs://chromeos-image-archive/%s-release/%s/debug.tgz %s"
# %s slots are (tarball path, extraction directory).
TAR_CMD = "tar -zxvf %s -C %s"
# Absolute path of the perf binary used for reporting.
PERF_BINARY = "/google/data/ro/projects/perf/perf"
VMLINUX_FLAG = " --vmlinux=/usr/lib/debug/boot/vmlinux"
# Full report command; %s slots are (perf.data input file, symfs directory).
PERF_CMD = PERF_BINARY +" report -i %s -n --symfs=%s" + VMLINUX_FLAG
+
+
def main():
  """Symbolizes every perf.data file in --in, writing reports to --out.

  Returns:
    0 on success, 1 if the input or cache directory is missing.
  """
  parser = optparse.OptionParser()
  parser.add_option("--in", dest="in_dir")
  parser.add_option("--out", dest="out_dir")
  parser.add_option("--cache", dest="cache")
  (opts, _) = parser.parse_args()
  if not _ValidateOpts(opts):
    return 1
  for filename in os.listdir(opts.in_dir):
    try:
      _DownloadSymbols(filename, opts.cache)
      _PerfReport(filename, opts.in_dir, opts.out_dir, opts.cache)
    # Catch Exception, not a bare "except:": a bare except also swallows
    # KeyboardInterrupt/SystemExit (making the loop unkillable) and hid
    # the actual error; report it before moving on to the next sample.
    except Exception as e:
      print("Exception caught: %s. Continuing..." % e)
  return 0
+
+
+def _ValidateOpts(opts):
+ """Ensures all directories exist, before attempting to populate."""
+ if not os.path.exists(opts.in_dir):
+ print "Input directory doesn't exist."
+ return False
+ if not os.path.exists(opts.out_dir):
+ print "Output directory doesn't exist. Creating it..."
+ os.makedirs(opts.out_dir)
+ if not os.path.exists(opts.cache):
+ print "Cache directory doesn't exist."
+ return False
+ return True
+
+
+def _ParseFilename(filename, canonical=False):
+ """Returns a tuple (key, time, board, lsb_version).
+ If canonical is True, instead returns (database_key, board, canonical_vers)
+ canonical_vers includes the revision string.
+ """
+ key, time, board, vers = filename.split("~")
+ if canonical:
+ vers = misc.GetChromeOSVersionFromLSBVersion(vers)
+ return (key, time, board, vers)
+
+
+def _FormReleaseDir(board, version):
+ return "%s-release~%s" % (board, version)
+
+
def _DownloadSymbols(filename, cache):
  """Incrementally downloads appropriate symbols.

  We store the downloads in cache, with each set of symbols in a TLD
  named like cache/$board-release~$canonical_vers/usr/lib.

  Args:
    filename: (string) Sample filename to fetch symbols for.
    cache: (string) Top-level symbol cache directory.
  Raises:
    IOError: if gsutil or tar exits with a non-zero status.
  """
  _, _, board, vers = _ParseFilename(filename, canonical=True)
  tmp_suffix = ".tmp"

  tarball_subdir = _FormReleaseDir(board, vers)
  tarball_dir = os.path.join(cache, tarball_subdir)
  tarball_path = os.path.join(tarball_dir, "debug.tgz")

  symbol_subdir = os.path.join("usr", "lib")
  symbol_dir = os.path.join(tarball_dir, symbol_subdir)

  if os.path.isdir(symbol_dir):
    print("Symbol directory %s exists, skipping download." % symbol_dir)
    return

  # First download using gsutil, staged through a .tmp name so a partial
  # download is never mistaken for a complete tarball.
  if not os.path.isfile(tarball_path):
    download_cmd = GSUTIL_CMD % (board, vers, tarball_path + tmp_suffix)
    print("Downloading symbols for %s" % filename)
    print(download_cmd)
    ret = call(download_cmd.split())
    if ret != 0:
      print("gsutil returned non-zero error code: %s." % ret)
      # Clean up the partial download, if one was produced. The existence
      # check prevents os.remove from raising OSError (and masking the
      # IOError below) when gsutil failed before creating the file.
      if os.path.exists(tarball_path + tmp_suffix):
        os.remove(tarball_path + tmp_suffix)
      raise IOError

    shutil.move(tarball_path + tmp_suffix, tarball_path)

  # Next, untar the tarball — also staged through a .tmp directory and
  # renamed into place only after extraction fully succeeds.
  os.makedirs(symbol_dir + tmp_suffix)
  extract_cmd = TAR_CMD % (tarball_path, symbol_dir + tmp_suffix)
  print("Extracting symbols for %s" % filename)
  print(extract_cmd)
  ret = call(extract_cmd.split())
  if ret != 0:
    print("tar returned non-zero code: %s." % ret)
    raise IOError
  shutil.move(symbol_dir + tmp_suffix, symbol_dir)
  os.remove(tarball_path)
+
+
def _PerfReport(filename, in_dir, out_dir, cache):
  """Calls perf report on the file, storing output to the output dir.

  The output is currently stored as $out_dir/$filename.

  Args:
    filename: (string) Sample filename to report on.
    in_dir: (string) Directory holding the perf.data files.
    out_dir: (string) Directory to write the report into.
    cache: (string) Symbol cache directory (see _DownloadSymbols).
  """
  _, _, board, vers = _ParseFilename(filename, canonical=True)
  symbol_cache_tld = _FormReleaseDir(board, vers)
  input_file = os.path.join(in_dir, filename)
  symfs = os.path.join(cache, symbol_cache_tld)
  report_cmd = PERF_CMD % (input_file, symfs)
  print("Reporting.")
  print(report_cmd)
  report_proc = Popen(report_cmd.split(), stdout=PIPE)
  # The context manager closes the output file even if the read raises.
  with open(os.path.join(out_dir, filename), "w") as outfile:
    outfile.write(report_proc.stdout.read())
  # Reap the child so it does not linger as a zombie after EOF.
  report_proc.wait()
+
+
# Script entry point: propagate main()'s integer status to the shell.
if __name__ == "__main__":
  exit(main())